From 44139087f41c710a837b6ddef7b4b48814f15939 Mon Sep 17 00:00:00 2001 From: Andres Garcia Date: Thu, 7 May 2026 02:49:57 -0400 Subject: [PATCH 1/7] feat: ConnectUC Piece (#12709) Co-authored-by: samehelhawary --- packages/pieces/community/connectuc/README.md | 7 + .../pieces/community/connectuc/package.json | 16 + .../pieces/community/connectuc/project.json | 66 ++++ .../pieces/community/connectuc/src/index.ts | 67 ++++ .../src/lib/actions/create-contact.ts | 75 ++++ .../src/lib/actions/do-not-disturb.ts | 42 ++ .../connectuc/src/lib/actions/find-cdr.ts | 36 ++ .../src/lib/actions/initiate-call.ts | 44 +++ .../connectuc/src/lib/actions/send-sms.ts | 65 ++++ .../connectuc/src/lib/actions/update-cdr.ts | 63 +++ .../connectuc/src/lib/common/api-helpers.ts | 41 ++ .../connectuc/src/lib/common/props.ts | 358 ++++++++++++++++++ .../src/lib/common/webhook-helpers.ts | 112 ++++++ .../src/lib/triggers/new-call-summary.ts | 47 +++ .../lib/triggers/new-call-transcription.ts | 104 +++++ .../connectuc/src/lib/triggers/new-cdr.ts | 63 +++ .../src/lib/triggers/new-incoming-call.ts | 67 ++++ .../src/lib/triggers/new-outgoing-call.ts | 54 +++ .../src/lib/triggers/new-recording.ts | 54 +++ .../connectuc/src/lib/triggers/new-sms.ts | 62 +++ .../src/lib/triggers/new-voicemail.ts | 57 +++ .../pieces/community/connectuc/tsconfig.json | 20 + .../community/connectuc/tsconfig.lib.json | 15 + tsconfig.base.json | 3 + 24 files changed, 1538 insertions(+) create mode 100644 packages/pieces/community/connectuc/README.md create mode 100644 packages/pieces/community/connectuc/package.json create mode 100644 packages/pieces/community/connectuc/project.json create mode 100644 packages/pieces/community/connectuc/src/index.ts create mode 100644 packages/pieces/community/connectuc/src/lib/actions/create-contact.ts create mode 100644 packages/pieces/community/connectuc/src/lib/actions/do-not-disturb.ts create mode 100644 packages/pieces/community/connectuc/src/lib/actions/find-cdr.ts create mode 
100644 packages/pieces/community/connectuc/src/lib/actions/initiate-call.ts create mode 100644 packages/pieces/community/connectuc/src/lib/actions/send-sms.ts create mode 100644 packages/pieces/community/connectuc/src/lib/actions/update-cdr.ts create mode 100644 packages/pieces/community/connectuc/src/lib/common/api-helpers.ts create mode 100644 packages/pieces/community/connectuc/src/lib/common/props.ts create mode 100644 packages/pieces/community/connectuc/src/lib/common/webhook-helpers.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-call-summary.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-call-transcription.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-cdr.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-incoming-call.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-outgoing-call.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-recording.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-sms.ts create mode 100644 packages/pieces/community/connectuc/src/lib/triggers/new-voicemail.ts create mode 100644 packages/pieces/community/connectuc/tsconfig.json create mode 100644 packages/pieces/community/connectuc/tsconfig.lib.json diff --git a/packages/pieces/community/connectuc/README.md b/packages/pieces/community/connectuc/README.md new file mode 100644 index 00000000000..844aae06a03 --- /dev/null +++ b/packages/pieces/community/connectuc/README.md @@ -0,0 +1,7 @@ +# pieces-connectuc + +This library was generated with [Nx](https://nx.dev). + +## Building + +Run `nx build pieces-connectuc` to build the library. 
diff --git a/packages/pieces/community/connectuc/package.json b/packages/pieces/community/connectuc/package.json new file mode 100644 index 00000000000..afa46669b9d --- /dev/null +++ b/packages/pieces/community/connectuc/package.json @@ -0,0 +1,16 @@ +{ + "name": "@activepieces/piece-connectuc", + "version": "0.0.1", + "main": "./dist/src/index.js", + "types": "./dist/src/index.d.ts", + "scripts": { + "build": "tsc -p tsconfig.lib.json && cp package.json dist/", + "lint": "eslint 'src/**/*.ts'" + }, + "dependencies": { + "@activepieces/pieces-common": "workspace:*", + "@activepieces/pieces-framework": "workspace:*", + "@activepieces/shared": "workspace:*", + "tslib": "2.6.2" + } +} diff --git a/packages/pieces/community/connectuc/project.json b/packages/pieces/community/connectuc/project.json new file mode 100644 index 00000000000..a3590c9605b --- /dev/null +++ b/packages/pieces/community/connectuc/project.json @@ -0,0 +1,66 @@ +{ + "name": "pieces-connectuc", + "$schema": "../../../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "packages/pieces/community/connectuc/src", + "projectType": "library", + "release": { + "version": { + "manifestRootsToUpdate": [ + "dist/{projectRoot}" + ], + "currentVersionResolver": "git-tag", + "fallbackCurrentVersionResolver": "disk" + } + }, + "tags": [], + "targets": { + "build": { + "executor": "@nx/js:tsc", + "outputs": [ + "{options.outputPath}" + ], + "options": { + "outputPath": "dist/packages/pieces/community/connectuc", + "tsConfig": "packages/pieces/community/connectuc/tsconfig.lib.json", + "packageJson": "packages/pieces/community/connectuc/package.json", + "main": "packages/pieces/community/connectuc/src/index.ts", + "assets": [ + "packages/pieces/community/connectuc/*.md", + { + "input": "packages/pieces/community/connectuc/src/i18n", + "output": "./src/i18n", + "glob": "**/!(i18n.json)" + } + ], + "buildableProjectDepsInPackageJsonType": "dependencies", + "updateBuildableProjectDepsInPackageJson": true, 
+ "clean": false + }, + "dependsOn": [ + "prebuild", + "^build" + ] + }, + "nx-release-publish": { + "options": { + "packageRoot": "dist/{projectRoot}" + } + }, + "prebuild": { + "dependsOn": [ + "^build" + ], + "executor": "nx:run-commands", + "options": { + "cwd": "packages/pieces/community/connectuc", + "command": "bun install --no-save --silent" + } + }, + "lint": { + "executor": "@nx/eslint:lint", + "outputs": [ + "{options.outputFile}" + ] + } + } +} diff --git a/packages/pieces/community/connectuc/src/index.ts b/packages/pieces/community/connectuc/src/index.ts new file mode 100644 index 00000000000..b37bb21206d --- /dev/null +++ b/packages/pieces/community/connectuc/src/index.ts @@ -0,0 +1,67 @@ +import { createPiece, PieceAuth, OAuth2PropertyValue } from "@activepieces/pieces-framework"; +import { httpClient, HttpMethod, AuthenticationType } from "@activepieces/pieces-common"; +import { newRecording } from "./lib/triggers/new-recording"; +import { newCallTranscription } from "./lib/triggers/new-call-transcription"; +import { newCallSummary } from "./lib/triggers/new-call-summary"; +import { newCdr } from "./lib/triggers/new-cdr"; +import { newIncomingCall } from "./lib/triggers/new-incoming-call"; +import { newOutgoingCall } from "./lib/triggers/new-outgoing-call"; +import { newVoicemail } from "./lib/triggers/new-voicemail"; +import { newSms } from "./lib/triggers/new-sms"; +import { createContactAction } from "./lib/actions/create-contact"; +import { doNotDisturbAction } from "./lib/actions/do-not-disturb"; +import { findCdrAction } from "./lib/actions/find-cdr"; +import { initiateCallAction } from "./lib/actions/initiate-call"; +import { sendSmsAction } from "./lib/actions/send-sms"; +import { updateCdrAction } from "./lib/actions/update-cdr"; + +export const connectucAuth = PieceAuth.OAuth2({ + authUrl: "https://auth.uc-technologies.com/oauth2/authorize", + tokenUrl: "https://auth.uc-technologies.com/oauth2/token", + required: true, + scope: 
['offline_access'], + validate: async ({ auth }) => { + try { + const response = await httpClient.sendRequest({ + method: HttpMethod.GET, + url: "https://auth.uc-technologies.com/oauth2/userinfo", + authentication: { + type: AuthenticationType.BEARER_TOKEN, + token: (auth as OAuth2PropertyValue).access_token, + }, + }); + + if (response.status === 200) { + return { valid: true }; + } + + return { + valid: false, + error: "Failed to validate ConnectUC credentials" + }; + } catch (error: unknown) { + const err = error as { response?: { status?: number }; message?: string }; + if (err.response?.status === 401) { + return { + valid: false, + error: "Invalid or expired access token. Please reconnect your ConnectUC account." + }; + } + + return { + valid: false, + error: `Connection validation failed: ${err.message ?? "Unknown error occurred"}` + }; + } + }, +}); + +export const connectuc = createPiece({ + displayName: "ConnectUC", + auth: connectucAuth, + minimumSupportedRelease: '0.36.1', + logoUrl: "https://cuc-media.s3.us-east-1.amazonaws.com/cuc_logo_120x120.png", + authors: ['dranes'], + actions: [createContactAction, doNotDisturbAction, findCdrAction, initiateCallAction, sendSmsAction, updateCdrAction], + triggers: [newRecording, newCallTranscription, newCallSummary, newCdr, newIncomingCall, newOutgoingCall, newVoicemail, newSms], +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/create-contact.ts b/packages/pieces/community/connectuc/src/lib/actions/create-contact.ts new file mode 100644 index 00000000000..68dbc55566e --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/create-contact.ts @@ -0,0 +1,75 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall } from '../common/api-helpers'; +import { domainProp, subscriberUuidProp } from '../common/props'; +import { HttpMethod } from '@activepieces/pieces-common'; + +export const 
createContactAction = createAction({ + auth: connectucAuth, + name: 'create-contact', + displayName: 'Create Contact', + description: 'Create a new contact in ConnectUC', + props: { + domain: domainProp(), + user: subscriberUuidProp(), + first_name: Property.ShortText({ + displayName: 'First Name', + description: 'The first name of the contact', + required: true, + }), + last_name: Property.ShortText({ + displayName: 'Last Name', + description: 'The last name of the contact', + required: true, + }), + email: Property.ShortText({ + displayName: 'Email', + description: 'The email address of the contact', + required: false, + }), + phones: Property.Array({ + displayName: 'Phones', + description: 'The phone numbers of the contact', + required: true, + }), + company: Property.ShortText({ + displayName: 'Company', + description: 'The company of the contact', + required: false, + }), + tags: Property.Array({ + displayName: 'Tags', + description: 'The tags of the contact', + required: false, + }), + }, + async run(context) { + const { user, first_name, last_name, email, phones, company, tags } = context.propsValue; + + const tels = phones.map((phone) => ({ number: String(phone), type: 'work' })); + const formattedTags = tags ? tags.map((tag) => ({ name: String(tag) })) : []; + + const body: Record = { + first_name: first_name, + last_name: last_name, + emails: email ? [{ value: email, type: 'work' }] : [], + tels: tels, + company: company || '', + tags: formattedTags, + }; + + try { + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: `/users/${user}/contacts`, + method: HttpMethod.POST, + body, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : 'Unknown error occurred'; + throw new Error(`Failed to create contact: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/do-not-disturb.ts b/packages/pieces/community/connectuc/src/lib/actions/do-not-disturb.ts new file mode 100644 index 00000000000..7b9b67f0fe9 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/do-not-disturb.ts @@ -0,0 +1,42 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall } from '../common/api-helpers'; +import { domainProp, subscriberUuidProp } from '../common/props'; +import { HttpMethod } from '@activepieces/pieces-common'; + +export const doNotDisturbAction = createAction({ + auth: connectucAuth, + name: 'do-not-disturb', + displayName: 'Set Do Not Disturb', + description: 'Enable or disable Do Not Disturb status for a user in ConnectUC', + props: { + domain: domainProp(), + user: subscriberUuidProp(), + dnd: Property.Checkbox({ + displayName: 'Do Not Disturb', + description: 'Enable or disable Do Not Disturb status', + required: true, + }), + }, + async run(context) { + const { user, dnd } = context.propsValue; + + const body: Record = { + dnd: dnd, + }; + + try { + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: `/users/${user}/dnd/update`, + method: HttpMethod.POST, + body, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : 'Unknown error occurred'; + throw new Error(`Failed to set Do Not Disturb status: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/find-cdr.ts b/packages/pieces/community/connectuc/src/lib/actions/find-cdr.ts new file mode 100644 index 00000000000..1ddab4b9986 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/find-cdr.ts @@ -0,0 +1,36 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall, getUserId } from '../common/api-helpers'; +import { HttpMethod } from '@activepieces/pieces-common'; + +export const findCdrAction = createAction({ + auth: connectucAuth, + name: 'find-cdr', + displayName: 'Find CDR', + description: 'Find a Call Detail Record (CDR) by original call ID', + props: { + origCallid: Property.ShortText({ + displayName: 'Original Call ID', + description: 'The original call ID (origCallid) to search for', + required: true, + }), + }, + async run(context) { + const { origCallid } = context.propsValue; + + try { + const userId = await getUserId(context.auth.access_token); + + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: `/users/${userId}/cdrs/${origCallid}`, + method: HttpMethod.GET, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : 'Unknown error occurred'; + throw new Error(`Failed to find CDR: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/initiate-call.ts b/packages/pieces/community/connectuc/src/lib/actions/initiate-call.ts new file mode 100644 index 00000000000..2e9b9293f44 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/initiate-call.ts @@ -0,0 +1,44 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall } from '../common/api-helpers'; +import { domainProp, subscriberUuidProp, deviceProp } from '../common/props'; +import { HttpMethod } from '@activepieces/pieces-common'; + +export const initiateCallAction = createAction({ + auth: connectucAuth, + name: 'initiate-call', + displayName: 'Initiate Call', + description: 'Initiate an outbound call from a ConnectUC extension', + props: { + domain: domainProp(), + user: subscriberUuidProp(), + device: deviceProp(), + toNumber: Property.ShortText({ + displayName: 'To Number', + description: 'The destination number of the call', + required: true, + }), + }, + async run(context) { + const { user, device, toNumber } = context.propsValue; + + const body: Record = { + fromUid: device, + toNumber: toNumber, + }; + + try { + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: `/users/${user}/activepieces/initiate-call`, + method: HttpMethod.POST, + body, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : 'Unknown error occurred'; + throw new Error(`Failed to initiate call: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/send-sms.ts b/packages/pieces/community/connectuc/src/lib/actions/send-sms.ts new file mode 100644 index 00000000000..100459d9835 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/send-sms.ts @@ -0,0 +1,65 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall } from '../common/api-helpers'; +import { smsNumberProp } from '../common/props'; +import { HttpMethod } from '@activepieces/pieces-common'; +import { randomUUID } from 'crypto'; + +export const sendSmsAction = createAction({ + auth: connectucAuth, + name: 'send-sms', + displayName: 'Send SMS', + description: 'Send an SMS message through ConnectUC', + props: { + recipients: Property.Array({ + displayName: 'SMS Destinations', + description: 'The phone number to send the SMS to', + required: true, + }), + content: Property.LongText({ + displayName: 'Message', + description: 'The SMS message content', + required: true, + }), + sender: smsNumberProp(), + attachment_urls: Property.Array({ + displayName: 'Media Attachments', + description: 'The media attachment urls for the SMS message', + required: false, + }), + }, + async run(context) { + const { recipients, content, sender, attachment_urls } = context.propsValue; + + const media = attachment_urls && attachment_urls.length > 0 ? attachment_urls.map((url) => { + const urlStr = String(url); + return { + url: urlStr, + filename: urlStr.split('/').pop() ?? 
'attachment', + }; + }) : []; + + const body: Record = { + application: 'connectuc', + content: content, + media: media, + recipients: recipients, + referenceId: randomUUID(), + sender: sender, + }; + + try { + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: '/sms/messages', + method: HttpMethod.POST, + body, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? error.message : 'Unknown error occurred'; + throw new Error(`Failed to send SMS: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/actions/update-cdr.ts b/packages/pieces/community/connectuc/src/lib/actions/update-cdr.ts new file mode 100644 index 00000000000..bae23d37021 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/actions/update-cdr.ts @@ -0,0 +1,63 @@ +import { createAction, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall, getUserId } from '../common/api-helpers'; +import { HttpMethod } from '@activepieces/pieces-common'; + +export const updateCdrAction = createAction({ + auth: connectucAuth, + name: 'update-cdr', + displayName: 'Update CDR', + description: 'Update a Call Detail Record (CDR) in ConnectUC', + props: { + cdrId: Property.ShortText({ + displayName: 'CDR ID', + description: 'The ID of the CDR to update', + required: true, + }), + note: Property.LongText({ + displayName: 'Note', + description: 'Note about the call', + required: true, + }), + disposition: Property.ShortText({ + displayName: 'Disposition', + description: 'The call disposition or outcome', + required: false, + }), + reason: Property.ShortText({ + displayName: 'Reason', + description: 'The reason for the call outcome', + required: false, + }), + }, + async run(context) { + const { cdrId, note, disposition, reason } = context.propsValue; + + try { + const userId = await getUserId(context.auth.access_token); + + 
const body: Record = { + note, + }; + + if (disposition !== undefined && disposition !== null && disposition !== '') { + body['disposition'] = disposition; + } + if (reason !== undefined && reason !== null && reason !== '') { + body['reason'] = reason; + } + + const response = await connectucApiCall({ + accessToken: context.auth.access_token, + endpoint: `/users/${userId}/call/${cdrId}/notes`, + method: HttpMethod.PUT, + body, + }); + + return response; + } catch (error: unknown) { + const message = error instanceof Error ? error.message : 'Unknown error occurred'; + throw new Error(`Failed to update CDR: ${message}`); + } + }, +}); diff --git a/packages/pieces/community/connectuc/src/lib/common/api-helpers.ts b/packages/pieces/community/connectuc/src/lib/common/api-helpers.ts new file mode 100644 index 00000000000..496250fa71c --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/common/api-helpers.ts @@ -0,0 +1,41 @@ +import { httpClient, HttpMethod, AuthenticationType } from '@activepieces/pieces-common'; + +export const CONNECTUC_BASE_URL = 'https://api.connectuc.io'; + +interface ApiCallParams { + accessToken: string; + endpoint: string; + method: HttpMethod; + body?: unknown; + queryParams?: Record; +} + +export async function connectucApiCall(params: ApiCallParams): Promise { + const { accessToken, endpoint, method, body, queryParams } = params; + + const response = await httpClient.sendRequest({ + method, + url: `${CONNECTUC_BASE_URL}${endpoint}`, + authentication: { + type: AuthenticationType.BEARER_TOKEN, + token: accessToken, + }, + body, + queryParams, + }); + + return response.body; +} + +export async function getUserId(accessToken: string): Promise { + const userInfoResponse = await httpClient.sendRequest<{ sub: string }>({ + method: HttpMethod.GET, + url: 'https://auth.uc-technologies.com/oauth2/userinfo', + authentication: { + type: AuthenticationType.BEARER_TOKEN, + token: accessToken, + }, + }); + + return userInfoResponse.body.sub; +} 
diff --git a/packages/pieces/community/connectuc/src/lib/common/props.ts b/packages/pieces/community/connectuc/src/lib/common/props.ts new file mode 100644 index 00000000000..7f239a16ce6 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/common/props.ts @@ -0,0 +1,358 @@ +import { Property, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { connectucApiCall } from './api-helpers'; +import { HttpMethod } from '@activepieces/pieces-common'; + +/** + * Reusable domain dropdown property (single-select) + * Fetches domains from /activepieces/domains endpoint + */ +export const domainProp = () => + Property.Dropdown({ + displayName: 'Domain', + description: 'Select domain to which this trigger applies', + required: false, + auth: connectucAuth, + refreshers: [], + options: async ({ auth }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface DomainInfo { + domain: string; + reseller: string; + description: string; + } + + const domainsResponse = await connectucApiCall>({ + accessToken: authValue.access_token, + endpoint: '/activepieces/domains', + method: HttpMethod.GET, + }); + + const options = Object.values(domainsResponse).map(domainInfo => ({ + label: `${domainInfo.description} (${domainInfo.domain})`, + value: domainInfo.domain, + })); + + return { + disabled: false, + options, + }; + } catch (error) { + console.error('Error fetching domains:', error); + return { + disabled: true, + placeholder: 'Error loading domains', + options: [], + }; + } + }, + }); + +/** + * Reusable subscriber dropdown property (single-select) + * Fetches subscribers from /activepieces/subscribers endpoint filtered by domain + * Uses subscriber UUID as value + */ +export const subscriberUuidProp = () => + Property.Dropdown({ + displayName: 'User', + description: 'Select 
the user who owns this contact', + required: true, + auth: connectucAuth, + refreshers: ['domain'], + options: async ({ auth, domain }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + if (!domain) { + return { + disabled: true, + placeholder: 'Please select a domain first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface Subscriber { + first_name: string; + last_name: string; + user: string; + uuid: string; + } + + const subscribers = await connectucApiCall({ + accessToken: authValue.access_token, + endpoint: '/activepieces/subscribers', + method: HttpMethod.GET, + queryParams: { domain: domain as string }, + }); + + const options = subscribers.map(subscriber => { + const fullName = `${subscriber.first_name} ${subscriber.last_name}`.trim(); + return { + label: `${fullName} (${subscriber.user})`, + value: subscriber.uuid, + }; + }); + + return { disabled: false, options }; + } catch (error) { + console.error('Error fetching subscribers:', error); + return { + disabled: true, + placeholder: 'Error loading subscribers', + options: [], + }; + } + }, + }); + +/** + * Reusable device dropdown property (single-select) + * Fetches registrations from /users/{user}/registration endpoint filtered by selected user + * Uses AOR (without "sip:" prefix) as both label and value + */ +export const deviceProp = () => + Property.Dropdown({ + displayName: 'Device', + description: 'Select the device to use for the call', + required: true, + auth: connectucAuth, + refreshers: ['user'], + options: async ({ auth, user }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + if (!user) { + return { + disabled: true, + placeholder: 'Please select a user first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface Registration { + aor: string; + } 
+ + const registrations = await connectucApiCall({ + accessToken: authValue.access_token, + endpoint: `/users/${user}/registration`, + method: HttpMethod.GET, + }); + + const options = registrations.map(reg => { + const aor = reg.aor.replace('sip:', ''); + return { label: aor, value: aor }; + }); + + return { disabled: false, options }; + } catch (error) { + console.error('Error fetching registrations:', error); + return { + disabled: true, + placeholder: 'Error loading devices', + options: [], + }; + } + }, + }); + +/** + * Reusable SMS number dropdown property (single-select) + * Fetches SMS numbers from /sms/numbers endpoint + * Uses number as both label and value + */ +export const smsNumberProp = () => + Property.Dropdown({ + displayName: 'SMS Sender', + description: 'Select the SMS sender number', + required: true, + auth: connectucAuth, + refreshers: [], + options: async ({ auth }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface SmsNumbersResponse { + numbers: { number: string }[]; + } + + const response = await connectucApiCall({ + accessToken: authValue.access_token, + endpoint: '/sms/numbers', + method: HttpMethod.GET, + }); + + const options = response.numbers.map(n => ({ + label: n.number, + value: n.number, + })); + + return { disabled: false, options }; + } catch (error) { + console.error('Error fetching SMS numbers:', error); + return { + disabled: true, + placeholder: 'Error loading SMS numbers', + options: [], + }; + } + }, + }); + +/** + * Reusable SMS recipients dropdown property (multi-select) + * Fetches SMS numbers from /sms/numbers endpoint + * Uses number as both label and value + */ +export const smsRecipientsProp = () => + Property.MultiSelectDropdown({ + displayName: 'Select Recipients', + description: 'Select the SMS recipient numbers to which this trigger applies', + required: false, + 
auth: connectucAuth, + refreshers: [], + options: async ({ auth }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface SmsNumbersResponse { + numbers: { number: string }[]; + } + + const response = await connectucApiCall({ + accessToken: authValue.access_token, + endpoint: '/sms/numbers', + method: HttpMethod.GET, + }); + + const options = response.numbers.map(n => ({ + label: n.number, + value: n.number, + })); + + return { disabled: false, options }; + } catch (error) { + console.error('Error fetching SMS numbers:', error); + return { + disabled: true, + placeholder: 'Error loading SMS numbers', + options: [], + }; + } + }, + }); + +/** + * Reusable users dropdown property (multi-select) + * Fetches subscribers from /activepieces/subscribers endpoint + * Includes "All Always" option with value "*" + */ +export const usersProp = () => + Property.MultiSelectDropdown({ + displayName: 'Users', + description: 'Select users to which this trigger applies', + required: false, + auth: connectucAuth, + refreshers: ['domain'], + options: async ({ auth, domain }) => { + if (!auth) { + return { + disabled: true, + placeholder: 'Please connect your account first', + options: [], + }; + } + + if (!domain) { + return { + disabled: true, + placeholder: 'Please select a domain first', + options: [], + }; + } + + try { + const authValue = auth as OAuth2PropertyValue; + + interface Subscriber { + first_name: string; + last_name: string; + user: string; + } + + const subscribers = await connectucApiCall({ + accessToken: authValue.access_token, + endpoint: '/activepieces/subscribers', + method: HttpMethod.GET, + queryParams: { domain: domain as string }, + }); + + const subscriberOptions = subscribers.map(subscriber => { + const fullName = `${subscriber.first_name} ${subscriber.last_name}`.trim(); + return { + label: `${fullName} 
(${subscriber.user})`, + value: subscriber.user, + }; + }); + + const options = subscribers.length > 1 + ? [{ label: 'All Always', value: '*' }, ...subscriberOptions] + : subscriberOptions; + + return { + disabled: false, + options, + }; + } catch (error) { + console.error('Error fetching subscribers:', error); + return { + disabled: true, + placeholder: 'Error loading subscribers', + options: [], + }; + } + }, + }); diff --git a/packages/pieces/community/connectuc/src/lib/common/webhook-helpers.ts b/packages/pieces/community/connectuc/src/lib/common/webhook-helpers.ts new file mode 100644 index 00000000000..9dc34810531 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/common/webhook-helpers.ts @@ -0,0 +1,112 @@ +import { httpClient, HttpMethod, AuthenticationType } from '@activepieces/pieces-common'; +import { BaseContext, InputPropertyMap, PieceAuthProperty } from '@activepieces/pieces-framework'; +import { isNil } from '@activepieces/shared'; + +export const CONNECTUC_BASE_URL = 'https://api.connectuc.io/activepieces'; +export const CONNECTUC_WEBHOOK_TRIGGER_KEY = 'connectuc_webhook'; + +interface WebhookResponse { + id: string; +} + +interface RegisterWebhookParams< + PieceAuth extends PieceAuthProperty | PieceAuthProperty[] | undefined, + Props extends InputPropertyMap +> { + auth: { + access_token: string; + }; + webhookUrl: string; + event?: string; + events?: string[]; + context: BaseContext; +} + +interface UnregisterWebhookParams< + PieceAuth extends PieceAuthProperty | PieceAuthProperty[] | undefined, + Props extends InputPropertyMap +> { + auth: { + access_token: string; + }; + webhookUrl: string; + context: BaseContext; +} + +export async function registerConnectUCWebhook< + PieceAuth extends PieceAuthProperty | PieceAuthProperty[] | undefined, + Props extends InputPropertyMap +>(params: RegisterWebhookParams): Promise { + const { auth, webhookUrl, event, events, context } = params; + + try { + let projectExternalId; + try { + 
projectExternalId = await context.project.externalId(); + } catch { + projectExternalId = undefined; + } + + const webhookBody: Record = { + url: webhookUrl, + flowId: context.flows.current.id, + flowVersionId: context.flows.current.version.id, + stepName: context.step.name, + projectId: context.project.id, + projectExternalId, + data: context.propsValue, + }; + + if (event) { + webhookBody['event'] = event; + } + if (events) { + webhookBody['events'] = events; + } + + const response = await httpClient.sendRequest({ + method: HttpMethod.POST, + url: `${CONNECTUC_BASE_URL}/webhook`, + authentication: { + type: AuthenticationType.BEARER_TOKEN, + token: auth.access_token, + }, + body: webhookBody, + }); + + await context.store.put(CONNECTUC_WEBHOOK_TRIGGER_KEY, { + webhookId: response.body.id, + }); + + return response.body; + } catch (error) { + throw error; + } +} + +export async function unregisterConnectUCWebhook< + PieceAuth extends PieceAuthProperty | PieceAuthProperty[] | undefined, + Props extends InputPropertyMap +>(params: UnregisterWebhookParams): Promise { + const { auth, webhookUrl, context } = params; + const webhookData = await context.store.get<{ webhookId: string }>(CONNECTUC_WEBHOOK_TRIGGER_KEY); + + if (!isNil(webhookData) && !isNil(webhookData.webhookId)) { + try { + await httpClient.sendRequest({ + method: HttpMethod.DELETE, + url: `${CONNECTUC_BASE_URL}/webhook`, + authentication: { + type: AuthenticationType.BEARER_TOKEN, + token: auth.access_token, + }, + body: { + url: webhookUrl, + webhookId: webhookData.webhookId, + }, + }); + } catch { + // webhook may already be deleted + } + } +} diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-call-summary.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-call-summary.ts new file mode 100644 index 00000000000..4b0d5a646a8 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-call-summary.ts @@ -0,0 +1,47 @@ +import { createTrigger, TriggerStrategy, 
OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newCallSummary = createTrigger({ + auth: connectucAuth, + name: 'newCallSummary', + displayName: 'New Call Summary', + description: 'Triggers when a new call summary is created', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + "callId":"233dbsj3mssskjkk22", + "summary":"test summary", + "date": "2026-04-01" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'NewCallSummary', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-call-transcription.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-call-transcription.ts new file mode 100644 index 00000000000..6848e208480 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-call-transcription.ts @@ -0,0 +1,104 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newCallTranscription = createTrigger({ + auth: connectucAuth, + name: 'newCallTranscription', + displayName: 'New Call Transcription', + 
description: 'Triggers when a new call transcription is created', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + id: "202602-760134328", + cdrId: "1771251913ef8e5c02029348132eb7853dd1b154aa", + callId: "l2e57o5cj0tlgpbrafjc", + orig_sub: "1004", + orig_domain: "example.11111.service", + term_sub: "1000", + term_domain: "example.11111.service", + comments: [ + { + speaker: "Karina Taylor", + comment: "1", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:00,39", + endTime: "00:00:01,44" + }, + { + speaker: "Sarojini Holmes", + comment: "Call is being recorded.", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:00,79", + endTime: "00:00:02,32" + }, + { + speaker: "Karina Taylor", + comment: "0", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:01,68", + endTime: "00:00:02,47" + }, + { + speaker: "Karina Taylor", + comment: "0 0", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:02,48", + endTime: "00:00:04,08" + }, + { + speaker: "Karina Taylor", + comment: "is unavailable.", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:04,08", + endTime: "00:00:05,20" + }, + { + speaker: "Karina Taylor", + comment: "To dial another extension,", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:06,08", + endTime: "00:00:08,16" + }, + { + speaker: "Karina Taylor", + comment: "press 1.", + created: "2026-02-17T00:44:17.000000Z", + startTime: "00:00:08,24", + endTime: "00:00:09,03" + } + ], + summary: null, + status: "finished" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'NewCallTranscription', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, 
+ }, + webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-cdr.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-cdr.ts new file mode 100644 index 00000000000..543fbe5be55 --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-cdr.ts @@ -0,0 +1,63 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newCdr = createTrigger({ + auth: connectucAuth, + name: 'newCdr', + displayName: 'New CDR', + description: 'Triggers when a new Call Detail Record (CDR) is created', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + id: "17624380791dba1cd5a13fcb8b4c6b66094f564b0fa", + contactId: null, + dateTime: "2025-11-06T14:07:59.000Z", + duration: 8, + toLabel: "7862881234", + toNumber: "7862881235", + recordingId: null, + origCallid: "8c0spnvbfr8gltgqfseo", + termCallid: "20251106140759036333-7e6c15a2661d06d7d0281ecd502b4679", + missed: false, + direction: "outgoing", + fromLabel: "Example", + fromNumber: "17869811611", + voicemailId: null, + recordingType: null, + disposition: null, + reason: null, + onnet: 0, + uid: "3462@example.11111.service" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'CdrCreated', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: 
context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-incoming-call.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-incoming-call.ts new file mode 100644 index 00000000000..828fe78ea3c --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-incoming-call.ts @@ -0,0 +1,67 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue, Property } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newIncomingCall = createTrigger({ + auth: connectucAuth, + name: 'newIncomingCall', + displayName: 'New Incoming Call', + description: 'Triggers when a new incoming call is received', + props: { + domain: domainProp(), + users: usersProp(), + status: Property.StaticDropdown({ + displayName: 'Status', + description: 'Filter by call status', + required: false, + options: { + options: [ + { label: 'Ringing', value: 'ringing' }, + { label: 'Answered', value: 'answered' }, + { label: 'Both', value: 'both' }, + ], + }, + defaultValue: 'answered', + }), + }, + sampleData: { + orig_callid: "cb4542b9-34ff-123f-9192-005056842248", + term_callid: "20251105153444016549-7e6c15a2661d06d7d0281ecd502b4679", + caller_id: "17862881234", + caller_name: "+17862881234", + time_start: "2025-11-05T15:34:44.000Z", + to: "3462", + status: "ringing", + term_to_uri: "sip:3462w@example.11111.service", + from: "17862911234", + device: "3462w" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'NewIncomingCall', + context, + }); + }, + async 
onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-outgoing-call.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-outgoing-call.ts new file mode 100644 index 00000000000..0751608756c --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-outgoing-call.ts @@ -0,0 +1,54 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newOutgoingCall = createTrigger({ + auth: connectucAuth, + name: 'newOutgoingCall', + displayName: 'New Outgoing Call', + description: 'Triggers when a new outgoing call is initiated', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + orig_callid: "cb4542b9-34ff-123f-9192-005056842248", + term_callid: "20251105153444016549-7e6c15a2661d06d7d0281ecd502b4679", + caller_id: "3462", + caller_name: "John Doe", + time_start: "2025-11-05T15:34:44.000Z", + to: "17862881234", + direction: "outgoing", + term_to_uri: "sip:17862811234@example.11111.service", + from: "3462", + device: "3462w" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'NewOutgoingCall', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, 
+ webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-recording.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-recording.ts new file mode 100644 index 00000000000..23f8943ea7b --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-recording.ts @@ -0,0 +1,54 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newRecording = createTrigger({ + auth: connectucAuth, + name: 'newRecording', + displayName: 'New Recording', + description: 'Triggers when a new call recording is available', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + dateTime: '2025-11-06T14:07:59.000Z', + duration: '20', + unread: true, + mediaUrl: 'https://api.example.com/users/36090f86-ea3d-566e-b97d-6a68999d416a/recordings/eyJ0ZXJtQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8iLCJvcmlnQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8ifQ==/url', + origCallid: '8c0spnvbfr8gltgqfseo', + recordingId: 'eyJ0ZXJtQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8iLCJvcmlnQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8ifQ==', + recordingType: 'audio', + download_url: 'https://api.example.com/users/36090f86-ea3d-566e-b97d-6a68999d416a/recordings/eyJ0ZXJtQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8iLCJvcmlnQ2FsbGlkIjoiOGMwc3BudmJmcjhnbHRncWZzZW8ifQ==/url', + domain: 'test.11111.service', + user: '101', + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'RecordingCreated', + context, + }); + 
}, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-sms.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-sms.ts new file mode 100644 index 00000000000..b60e0b984be --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-sms.ts @@ -0,0 +1,62 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp, smsRecipientsProp } from '../common/props'; + +export const newSms = createTrigger({ + auth: connectucAuth, + name: 'newSms', + displayName: 'New SMS', + description: 'Triggers when a new SMS message is received', + props: { + domain: domainProp(), + users: usersProp(), + recipients: smsRecipientsProp(), + }, + sampleData: { + conversationId: 1234567, + messageId: "i123456789", + referenceId: "01ABC123DEF456GHI789JKL012", + type: "message", + direction: "incoming", + sender: "11234567890", + recipients: ["11234567890"], + content: "Test message from webhook", + contentType: "text/plain", + disposition: "", + options: "", + createdTimestamp: "2025-12-04T14:48:55.000Z", + updatedTimestamp: "", + sentTimestamp: "", + deliveredTimestamp: "", + readTimestamp: "", + media: [] + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'SMSMessageReceived', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as 
OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/src/lib/triggers/new-voicemail.ts b/packages/pieces/community/connectuc/src/lib/triggers/new-voicemail.ts new file mode 100644 index 00000000000..de0b2ce470c --- /dev/null +++ b/packages/pieces/community/connectuc/src/lib/triggers/new-voicemail.ts @@ -0,0 +1,57 @@ +import { createTrigger, TriggerStrategy, OAuth2PropertyValue } from '@activepieces/pieces-framework'; +import { connectucAuth } from '../../index'; +import { registerConnectUCWebhook, unregisterConnectUCWebhook } from '../common/webhook-helpers'; +import { domainProp, usersProp } from '../common/props'; + +export const newVoicemail = createTrigger({ + auth: connectucAuth, + name: 'newVoicemail', + displayName: 'New Voicemail', + description: 'Triggers when a new voicemail is received', + props: { + domain: domainProp(), + users: usersProp(), + }, + sampleData: { + id: "vm-20251106141508017931-7e6c15a2661d06d7d0281ecd502b4679", + contactId: null, + dateTime: "2025-11-06T14:15:37.000000Z", + duration: 6, + label: "(786) 288-1234", + tel: "17862881234", + read: false, + transcription: null, + filename: "vm-20251106141508017931-7e6c15a2661d06d7d0281ecd502b4679.wav", + forwarded: null, + type: "vmail/new", + shared_uuid: null, + remotepath: "https://www.test.com" + }, + type: TriggerStrategy.WEBHOOK, + async onEnable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await registerConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: context.webhookUrl, + event: 'NewVoicemail', + context, + }); + }, + async onDisable(context){ + const auth = context.auth as OAuth2PropertyValue; + + await unregisterConnectUCWebhook({ + auth: { + access_token: auth.access_token, + }, + webhookUrl: 
context.webhookUrl, + context, + }); + }, + async run(context){ + return [context.payload.body] + } +}) diff --git a/packages/pieces/community/connectuc/tsconfig.json b/packages/pieces/community/connectuc/tsconfig.json new file mode 100644 index 00000000000..eff240ac143 --- /dev/null +++ b/packages/pieces/community/connectuc/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../../../tsconfig.base.json", + "compilerOptions": { + "module": "commonjs", + "forceConsistentCasingInFileNames": true, + "strict": true, + "importHelpers": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "noPropertyAccessFromIndexSignature": true + }, + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.lib.json" + } + ] +} diff --git a/packages/pieces/community/connectuc/tsconfig.lib.json b/packages/pieces/community/connectuc/tsconfig.lib.json new file mode 100644 index 00000000000..0ba4caeb858 --- /dev/null +++ b/packages/pieces/community/connectuc/tsconfig.lib.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "commonjs", + "rootDir": ".", + "baseUrl": ".", + "paths": {}, + "outDir": "./dist", + "declaration": true, + "declarationMap": true, + "types": ["node"] + }, + "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"], + "include": ["src/**/*.ts"] +} diff --git a/tsconfig.base.json b/tsconfig.base.json index 36f68711ac0..f99b7a642e3 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -1979,6 +1979,9 @@ ], "@activepieces/piece-proxycurl": [ "packages/pieces/community/proxycurl/src/index.ts" + ], + "@activepieces/piece-connectuc": [ + "packages/pieces/community/connectuc/src/index.ts" ] }, "resolveJsonModule": true From 3d4980c120146f955635f6d70ba2e82349b20a20 Mon Sep 17 00:00:00 2001 From: Bertrand <99965015+bertrandong@users.noreply.github.com> Date: Thu, 7 May 2026 15:23:28 +0800 Subject: [PATCH 2/7] fix(google-drive): fix file extension 
handling in Google Drive `list-files` action (#13084) Co-authored-by: sanket@activepieces.com --- .../community/google-drive/package.json | 2 +- .../src/lib/action/list-files.action.ts | 22 ++++++++++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/packages/pieces/community/google-drive/package.json b/packages/pieces/community/google-drive/package.json index 960af98600f..a107a1dd153 100644 --- a/packages/pieces/community/google-drive/package.json +++ b/packages/pieces/community/google-drive/package.json @@ -1,6 +1,6 @@ { "name": "@activepieces/piece-google-drive", - "version": "0.7.4", + "version": "0.7.5", "main": "./dist/src/index.js", "types": "./dist/src/index.d.ts", "dependencies": { diff --git a/packages/pieces/community/google-drive/src/lib/action/list-files.action.ts b/packages/pieces/community/google-drive/src/lib/action/list-files.action.ts index 58f21a426e1..0bf68f33f83 100644 --- a/packages/pieces/community/google-drive/src/lib/action/list-files.action.ts +++ b/packages/pieces/community/google-drive/src/lib/action/list-files.action.ts @@ -162,19 +162,39 @@ export const googleDriveListFiles = createAction({ // If downloadFiles is enabled, download each file and add URLs to array if (context.propsValue.downloadFiles) { const downloadedFiles: string[] = []; + const extensionMap: Record<string, string> = { + 'application/pdf': '.pdf', + 'image/jpeg': '.jpg', + 'image/png': '.png', + 'image/tiff': '.tiff', + 'text/plain': '.txt', + 'text/csv': '.csv', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx' + }; + for (const fileWithLevel of filesWithLevel) { const file = fileWithLevel.file; // Skip folders when downloading if (file.mimeType === 'application/vnd.google-apps.folder') { continue; } + + let safeName = file.name; + const correctExtension = extensionMap[file.mimeType]; + if (correctExtension && 
!safeName.toLowerCase().endsWith(correctExtension)) { + // Check for the .jpeg edge case before appending .jpg + if (!(file.mimeType === 'image/jpeg' && safeName.toLowerCase().endsWith('.jpeg'))) { + safeName = safeName + correctExtension; + } + } try { const fileUrl = await downloadFileFromDrive( context.auth, context.files, file.id, - file.name + safeName ); downloadedFiles.push(fileUrl); } catch (error) { From f58c816d26594f155d6a7568997de79214c12d9e Mon Sep 17 00:00:00 2001 From: Mo AbuAboud Date: Thu, 7 May 2026 09:44:21 +0200 Subject: [PATCH 3/7] fix(worker): union sandbox resolv.conf nameservers into iptables egress allowlist (#13132) --- .../server/worker/src/lib/egress/lifecycle.ts | 50 +++- packages/server/worker/test/e2e/README.md | 10 + .../worker/test/e2e/fixtures/egress-probe.js | 117 +++++++- .../e2e/helpers/outbound-internet-guard.ts | 25 ++ .../e2e/sandbox-dns-eai-again.e2e.test.ts | 222 ++++++++++++++++ .../e2e/sandbox-real-third-party.e2e.test.ts | 251 ++++++++++++++++++ 6 files changed, 668 insertions(+), 7 deletions(-) create mode 100644 packages/server/worker/test/e2e/helpers/outbound-internet-guard.ts create mode 100644 packages/server/worker/test/e2e/sandbox-dns-eai-again.e2e.test.ts create mode 100644 packages/server/worker/test/e2e/sandbox-real-third-party.e2e.test.ts diff --git a/packages/server/worker/src/lib/egress/lifecycle.ts b/packages/server/worker/src/lib/egress/lifecycle.ts index 4dca9eed1a1..7386d6ee5cd 100644 --- a/packages/server/worker/src/lib/egress/lifecycle.ts +++ b/packages/server/worker/src/lib/egress/lifecycle.ts @@ -1,6 +1,8 @@ import dnsSync from 'node:dns' import dns from 'node:dns/promises' +import { readFile } from 'node:fs/promises' import net from 'node:net' +import path from 'node:path' import { ActivepiecesError, ErrorCode, ExecutionMode, NetworkMode, tryCatch, WorkerSettingsResponse } from '@activepieces/shared' import { Logger } from 'pino' import { workerSettings } from '../config/worker-settings' @@ -55,9 
+57,23 @@ async function maybeApplyIptablesLockdown({ log, proxy, settings }: ApplyLockdow }, }) } - const nameservers = listDnsNameservers() + const hostNameservers = listDnsNameservers() + const { nameservers: sandboxNameservers, readError: sandboxResolvConfReadError } = await readSandboxResolvConfNameservers() + if (sandboxResolvConfReadError !== undefined) { + log.warn( + { sandboxResolvConf: SANDBOX_RESOLV_CONF_PATH, err: sandboxResolvConfReadError }, + 'Could not read sandbox resolv.conf — DNS allowlist will only include host nameservers, which may not match what the sandbox queries. This is the exact condition that caused the 2026-05-06 EAI_AGAIN outage.', + ) + } + else if (sandboxNameservers.length === 0) { + log.warn( + { sandboxResolvConf: SANDBOX_RESOLV_CONF_PATH }, + 'Sandbox resolv.conf was readable but contained no nameserver lines — DNS allowlist will only include host nameservers.', + ) + } + const nameservers = [...new Set([...hostNameservers, ...sandboxNameservers])] if (nameservers.length === 0) { - const message = 'No DNS nameservers configured on the worker host — refusing to apply kernel lockdown that would starve the sandbox of name resolution. ' + + const message = 'No DNS nameservers configured on the worker host or sandbox resolv.conf — refusing to apply kernel lockdown that would starve the sandbox of name resolution. ' + 'Ensure /etc/resolv.conf has at least one valid nameserver, or inspect dns.getServers() output.' 
throw new ActivepiecesError( { code: ErrorCode.ENGINE_OPERATION_FAILURE, params: { message } }, @@ -103,6 +119,29 @@ function listDnsNameservers(): string[] { return dnsSync.getServers().map(extractNameserverIp).filter((ip): ip is string => ip !== null) } +async function listSandboxResolvConfNameservers(): Promise<string[]> { + const { nameservers } = await readSandboxResolvConfNameservers() + return nameservers +} + +async function readSandboxResolvConfNameservers(): Promise<{ nameservers: string[], readError?: Error }> { + const { data, error } = await tryCatch(() => readFile(SANDBOX_RESOLV_CONF_PATH, 'utf8')) + if (error !== null) return { nameservers: [], readError: error } + return { nameservers: parseResolvConfNameservers(data) } +} + +function parseResolvConfNameservers(body: string): string[] { + const ips: string[] = [] + for (const rawLine of body.split('\n')) { + const line = rawLine.replace(/#.*$/, '').trim() + const match = line.match(/^nameserver\s+(\S+)$/i) + if (!match) continue + const ip = extractNameserverIp(match[1]) + if (ip) ips.push(ip) + } + return ips +} + function extractNameserverIp(server: string): string | null { if (net.isIP(server) > 0) return server const bracketed = server.match(/^\[([^\]]+)\](?::\d+)?$/) @@ -147,3 +186,10 @@ export type EgressStack = { proxyPort: number | null shutdown: () => Promise<void> } + +export const egressInternals = { + listSandboxResolvConfNameservers, + parseResolvConfNameservers, +} + +const SANDBOX_RESOLV_CONF_PATH = path.resolve(process.cwd(), 'packages/server/api/src/assets/etc/resolv.conf') diff --git a/packages/server/worker/test/e2e/README.md b/packages/server/worker/test/e2e/README.md index f0f544383e0..d2b43b7e9a8 100644 --- a/packages/server/worker/test/e2e/README.md +++ b/packages/server/worker/test/e2e/README.md @@ -20,6 +20,16 @@ This builds a privileged container and runs the vitest suite inside it. 
See `Doc If the suite is invoked directly on a host that lacks the required primitives it will skip with a clear message — it does not silently pass. +## Real third-party connectivity smoke + +`sandbox-real-third-party.e2e.test.ts` brings up the same SANDBOX_PROCESS + STRICT stack used in production and reaches out to a curated list of public APIs (~30 hosts: OpenAI, Anthropic, Stripe, GitHub, Notion, etc.). It asserts: + +- DNS resolves (no `EAI_AGAIN` — the production-outage signature). +- HTTPS via CONNECT through the egress proxy reaches the real origin (any HTTP status is accepted; we only assert the connection landed). +- AWS/GCP metadata endpoints and RFC1918 / loopback IPs remain blocked. + +Tolerance: requires ≥80% of Group A hosts to succeed (one transient vendor outage shouldn't fail CI; a systemic break in DNS/proxy/iptables takes the whole list down). Auto-skips when no outbound internet (TCP/443 to `1.1.1.1` is unreachable). + ## Why these exist alongside the unit tests The unit tests under `packages/server/worker/test/lib/egress/` and `packages/server/engine/test/ssrf/` mock `execFile`, `spawn`, and kernel state. They prove the logic branches, not that the kernel is actually enforcing anything. These e2e tests close that gap. diff --git a/packages/server/worker/test/e2e/fixtures/egress-probe.js b/packages/server/worker/test/e2e/fixtures/egress-probe.js index 262252a23a8..1309ea5440b 100644 --- a/packages/server/worker/test/e2e/fixtures/egress-probe.js +++ b/packages/server/worker/test/e2e/fixtures/egress-probe.js @@ -1,10 +1,13 @@ -/* eslint-disable @typescript-eslint/no-require-imports */ +/* eslint-disable @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires */ // Minimal probe that runs inside an isolate sandbox. // Reads AP_PROBE_PLAN from env (JSON array of actions) and writes a JSON summary to stdout. // Uses only Node core modules so it does not need any mounted node_modules. 
+const dns = require('node:dns') const http = require('node:http') +const https = require('node:https') const net = require('node:net') +const tls = require('node:tls') const { URL } = require('node:url') async function main() { @@ -28,6 +31,15 @@ async function main() { else if (action.type === 'direct-http') { results.push(await directHttpGet(action)) } + else if (action.type === 'dns-lookup') { + results.push(await dnsLookup(action)) + } + else if (action.type === 'dns-lookup-v6') { + results.push(await dnsLookupV6(action)) + } + else if (action.type === 'https-head-via-proxy') { + results.push(await httpsHeadViaProxy(action)) + } else { results.push({ action, error: 'unknown action type' }) } @@ -58,7 +70,9 @@ function httpGetViaProxy(action) { })) }) req.on('error', (err) => resolve({ action, error: err.code || err.message })) - req.setTimeout(5000, () => { req.destroy(new Error('timeout')) }) + req.setTimeout(5000, () => { + req.destroy(new Error('timeout')) + }) req.end() }) } @@ -76,7 +90,9 @@ function directHttpGet(action) { res.on('end', () => resolve({ action, statusCode: res.statusCode })) }) req.on('error', (err) => resolve({ action, error: err.code || err.message })) - req.setTimeout(5000, () => { req.destroy(new Error('timeout')) }) + req.setTimeout(5000, () => { + req.destroy(new Error('timeout')) + }) req.end() }) } @@ -85,12 +101,103 @@ function directTcpConnect(action) { return new Promise((resolve) => { const socket = net.createConnection({ host: action.host, port: action.port }) socket.setTimeout(5000) - socket.on('connect', () => { socket.destroy(); resolve({ action, status: 'OK' }) }) - socket.on('timeout', () => { socket.destroy(); resolve({ action, status: 'TIMEOUT' }) }) + socket.on('connect', () => { + socket.destroy(); resolve({ action, status: 'OK' }) + }) + socket.on('timeout', () => { + socket.destroy(); resolve({ action, status: 'TIMEOUT' }) + }) socket.on('error', (err) => resolve({ action, status: 'ERR', code: err.code || 
err.message })) }) } +function dnsLookup(action) { + return new Promise((resolve) => { + const start = Date.now() + dns.lookup(action.hostname, { family: 4 }, (err, address) => { + const elapsedMs = Date.now() - start + if (err) { + resolve({ action, status: 'ERR', code: err.code, syscall: err.syscall, errno: err.errno, elapsedMs, message: err.message }) + return + } + resolve({ action, status: 'OK', address, elapsedMs }) + }) + }) +} + +function dnsLookupV6(action) { + return new Promise((resolve) => { + const start = Date.now() + dns.lookup(action.hostname, { family: 6 }, (err, address) => { + const elapsedMs = Date.now() - start + if (err) { + resolve({ action, status: 'ERR', code: err.code, syscall: err.syscall, errno: err.errno, elapsedMs, message: err.message }) + return + } + resolve({ action, status: 'OK', address, elapsedMs }) + }) + }) +} + +function httpsHeadViaProxy(action) { + return new Promise((resolve) => { + const start = Date.now() + const proxyUrl = new URL(process.env.AP_EGRESS_PROXY_URL || '') + const target = new URL(action.url) + const targetHost = target.hostname + const targetPort = target.port ? 
parseInt(target.port, 10) : 443 + const settle = (payload) => resolve({ ...payload, action, elapsedMs: Date.now() - start }) + const connectReq = http.request({ + host: proxyUrl.hostname, + port: parseInt(proxyUrl.port, 10), + method: 'CONNECT', + path: `${targetHost}:${targetPort}`, + headers: { host: `${targetHost}:${targetPort}` }, + }) + connectReq.setTimeout(action.timeoutMs || 10000, () => { + connectReq.destroy(new Error('connect-timeout')) + settle({ status: 'ERR', code: 'CONNECT_TIMEOUT' }) + }) + connectReq.on('error', (err) => settle({ status: 'ERR', code: err.code || 'ERR', message: err.message })) + connectReq.on('connect', (res, socket) => { + if (res.statusCode !== 200) { + socket.destroy() + settle({ status: 'ERR', code: 'PROXY_REJECT', proxyStatus: res.statusCode }) + return + } + const tlsSocket = tls.connect({ + socket, + servername: targetHost, + ALPNProtocols: ['http/1.1'], + }) + tlsSocket.setTimeout(action.timeoutMs || 10000, () => { + tlsSocket.destroy(new Error('tls-timeout')) + settle({ status: 'ERR', code: 'TLS_TIMEOUT' }) + }) + tlsSocket.on('error', (err) => settle({ status: 'ERR', code: err.code || 'TLS_ERR', message: err.message })) + tlsSocket.on('secureConnect', () => { + const httpReq = https.request({ + createConnection: () => tlsSocket, + method: 'HEAD', + host: targetHost, + port: targetPort, + path: target.pathname || '/', + headers: { host: targetHost, 'user-agent': 'activepieces-sandbox-smoke/1.0' }, + }, (httpRes) => { + httpRes.resume() + httpRes.on('end', () => { + tlsSocket.destroy() + settle({ status: 'OK', statusCode: httpRes.statusCode }) + }) + }) + httpReq.on('error', (err) => settle({ status: 'ERR', code: err.code || 'HTTP_ERR', message: err.message })) + httpReq.end() + }) + }) + connectReq.end() + }) +} + main().catch((err) => { process.stdout.write(JSON.stringify({ fatal: String(err && err.message || err) })) process.exit(1) diff --git a/packages/server/worker/test/e2e/helpers/outbound-internet-guard.ts 
b/packages/server/worker/test/e2e/helpers/outbound-internet-guard.ts new file mode 100644 index 00000000000..f8df5a1a652 --- /dev/null +++ b/packages/server/worker/test/e2e/helpers/outbound-internet-guard.ts @@ -0,0 +1,25 @@ +import net from 'node:net' +import { SkipReason } from './privilege-guard' + +export async function requireOutboundInternet(): Promise { + const reachable = await canConnect({ host: '1.1.1.1', port: 443, timeoutMs: 3_000 }) + if (!reachable) { + return { skip: true, reason: 'requires outbound internet (TCP/443 to 1.1.1.1) — skipping real third-party connectivity suite' } + } + return undefined +} + +function canConnect({ host, port, timeoutMs }: { host: string, port: number, timeoutMs: number }): Promise { + return new Promise((resolve) => { + const socket = net.createConnection({ host, port }) + const finish = (ok: boolean): void => { + socket.removeAllListeners() + socket.destroy() + resolve(ok) + } + socket.setTimeout(timeoutMs) + socket.once('connect', () => finish(true)) + socket.once('timeout', () => finish(false)) + socket.once('error', () => finish(false)) + }) +} diff --git a/packages/server/worker/test/e2e/sandbox-dns-eai-again.e2e.test.ts b/packages/server/worker/test/e2e/sandbox-dns-eai-again.e2e.test.ts new file mode 100644 index 00000000000..9f7e9e7ab7e --- /dev/null +++ b/packages/server/worker/test/e2e/sandbox-dns-eai-again.e2e.test.ts @@ -0,0 +1,222 @@ +import { chmod, copyFile, mkdtemp, readFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest' +import { egressInternals } from '../../src/lib/egress/lifecycle' +import { iptablesLockdown, IptablesLockdown } from '../../src/lib/egress/iptables-lockdown' +import { EgressProxy, startEgressProxy } from '../../src/lib/egress/proxy' +import { sandboxCapacity } from '../../src/lib/sandbox/capacity' +import { getIsolateExecutableName, isolateProcess } from 
'../../src/lib/sandbox/isolate' +import { requireIsolateBinary, requireLinuxPrivileged } from './helpers/privilege-guard' +import { silentLogger } from './helpers/silent-logger' + +/** + * Reproduces the production failure: + * APIConnectionError: Connection error. + * cause: FetchError ... getaddrinfo EAI_AGAIN api.openai.com + * + * Root cause: + * 1. The sandbox /etc/resolv.conf shipped with Activepieces is hardcoded to + * Google DNS (8.8.8.8, 8.8.4.4) — see packages/server/api/src/assets/etc/resolv.conf. + * 2. The kernel egress lockdown (lifecycle.ts -> iptables-lockdown.ts) builds + * its DNS allowlist from the *worker host's* dns.getServers() — typically + * 127.0.0.53 (systemd-resolved) or a VPC resolver, NEVER 8.8.8.8. + * 3. With AP_EXECUTION_MODE=SANDBOX_PROCESS + AP_NETWORK_MODE=STRICT, iptables + * REJECTs all egress except DNS to that host allowlist + the proxy/WS RPC ports. + * 4. So when a piece calls dns.lookup('api.openai.com') from inside the sandbox, + * the libc resolver sends UDP/53 to 8.8.8.8 -> kernel REJECTs -> + * glibc retries -> getaddrinfo returns EAI_AGAIN. + */ + +const BOX_ID = 0 +const SANDBOX_UID = sandboxCapacity.firstBoxUid + BOX_ID +const ISOLATE_BINARY_PATH = path.resolve(process.cwd(), 'packages/server/api/src/assets', getIsolateExecutableName()) +const SANDBOX_RESOLV_CONF = path.resolve(process.cwd(), 'packages/server/api/src/assets/etc/resolv.conf') + +const skip = requireLinuxPrivileged() ?? 
requireIsolateBinary(ISOLATE_BINARY_PATH) + +describe.skipIf(skip)('sandbox DNS — reproduces EAI_AGAIN under SANDBOX_PROCESS + STRICT', () => { + let proxy: EgressProxy + let lockdown: IptablesLockdown | null = null + let commonDir: string + + beforeAll(async () => { + const resolvBody = await readFile(SANDBOX_RESOLV_CONF, 'utf8') + if (!/8\.8\.8\.8/.test(resolvBody)) { + throw new Error( + `precondition failed: ${SANDBOX_RESOLV_CONF} no longer pins 8.8.8.8 — ` + + 'this test reproduces the mismatch between sandbox resolv.conf and the iptables DNS allowlist; ' + + 'update the test (or remove it) if the resolv.conf shipped to the sandbox has changed.', + ) + } + + proxy = await startEgressProxy({ + log: silentLogger(), + allowList: ['127.0.0.1'], + }) + + commonDir = await mkdtemp(path.join(tmpdir(), 'ap-sandbox-dns-eai-')) + const probeDst = path.join(commonDir, 'egress-probe.js') + await copyFile(path.resolve(__dirname, 'fixtures/egress-probe.js'), probeDst) + await chmod(commonDir, 0o755) + await chmod(probeDst, 0o644) + }) + + afterAll(async () => { + await proxy?.close() + }) + + afterEach(async () => { + if (lockdown) { + await lockdown.remove() + lockdown = null + } + }) + + it('repro: sandbox resolv.conf points at 8.8.8.8 but allowlist only includes a different host nameserver — DNS to public domain returns EAI_AGAIN', async () => { + // Mirrors the production case where the worker host runs systemd-resolved (127.0.0.53) + // or a VPC resolver — anything that is NOT 8.8.8.8. We pick a routable-but-fake IP + // so iptables installs a DNS allow rule (lifecycle.ts refuses to start with []), but + // the rule never matches the sandbox's actual DNS traffic to 8.8.8.8. 
+ lockdown = await iptablesLockdown.apply({ + log: silentLogger(), + proxyPort: proxy.port, + firstBoxUid: SANDBOX_UID, + numBoxes: 1, + nameservers: ['10.123.123.123'], + }) + + const result = await runProbeInSandbox({ + commonDir, + plan: [ + { type: 'dns-lookup', hostname: 'api.openai.com' }, + ], + proxyPort: proxy.port, + }) + + expect(result.results).toHaveLength(1) + const lookup = result.results[0] + expect(lookup, 'expected dns.lookup to fail').toMatchObject({ status: 'ERR', syscall: 'getaddrinfo' }) + // EAI_AGAIN is the canonical "DNS temporary failure" libc returns when the + // resolver cannot reach its nameservers (REJECT/DROP at the kernel layer). + // Some glibc versions surface ENOTFOUND when REJECT replies fast — accept either, + // but assert it is one of the resolver-side failure codes (NOT a successful lookup). + expect(['EAI_AGAIN', 'ENOTFOUND']).toContain(lookup.code) + }, 30_000) + + it('fix: lifecycle unions sandbox resolv.conf nameservers into the iptables allowlist, so the sandbox can resolve', async () => { + // This test reproduces the production wiring: the host has its own DNS servers + // (here we simulate that with a fake IP) and the sandbox resolv.conf has its + // own (8.8.8.8 / 8.8.4.4). The fix in `lifecycle.ts` unions the two before + // applying iptables, so DNS packets the sandbox sends to 8.8.8.8 are allowed. 
+ const sandboxNameservers = await egressInternals.listSandboxResolvConfNameservers() + expect(sandboxNameservers).toContain('8.8.8.8') + + const fakeHostNameservers = ['10.123.123.123'] + const unionedAllowList = [...new Set([...fakeHostNameservers, ...sandboxNameservers])] + + lockdown = await iptablesLockdown.apply({ + log: silentLogger(), + proxyPort: proxy.port, + firstBoxUid: SANDBOX_UID, + numBoxes: 1, + nameservers: unionedAllowList, + }) + + const result = await runProbeInSandbox({ + commonDir, + plan: [ + { type: 'dns-lookup', hostname: 'api.openai.com' }, + ], + proxyPort: proxy.port, + }) + + const lookup = result.results[0] + // We accept "OK" (best case) or any error that is NOT EAI_AGAIN. The point + // of the assertion is that the resolver can now reach 8.8.8.8 — DNS no + // longer fails with the production EAI_AGAIN signature. + if (lookup.status === 'ERR') { + expect(lookup.code).not.toBe('EAI_AGAIN') + } + else { + expect(lookup).toMatchObject({ status: 'OK' }) + } + }, 30_000) +}) + +describe('lifecycle resolv.conf parsing (unit)', () => { + it('parses RFC-style resolv.conf nameserver lines, ignoring comments and non-IP entries', () => { + const body = [ + '# managed by foo', + 'nameserver 8.8.8.8', + 'nameserver 8.8.4.4 # primary', + 'nameserver not-an-ip', + 'options edns0', + '', + 'NAMESERVER 1.1.1.1', + ].join('\n') + const ips = egressInternals.parseResolvConfNameservers(body) + expect(ips).toEqual(['8.8.8.8', '8.8.4.4', '1.1.1.1']) + }) + + it('reads the shipped sandbox resolv.conf and returns its nameservers', async () => { + const ips = await egressInternals.listSandboxResolvConfNameservers() + expect(ips).toContain('8.8.8.8') + }) +}) + +async function runProbeInSandbox({ commonDir, plan, proxyPort }: { + commonDir: string + plan: unknown[] + proxyPort: number +}): Promise<{ results: Array }> { + const logger = silentLogger() + const maker = isolateProcess(logger, path.join(commonDir, 'egress-probe.js'), commonDir, BOX_ID) + + const 
proxyUrl = `http://127.0.0.1:${proxyPort}` + const child = await maker.create({ + sandboxId: 'e2e-dns-eai', + command: [], + mounts: [ + { hostPath: commonDir, sandboxPath: '/root/common' }, + ], + env: { + HOME: '/tmp/', + NODE_PATH: '/usr/src/node_modules', + AP_EXECUTION_MODE: 'SANDBOX_PROCESS', + AP_SANDBOX_WS_PORT: '0', + AP_BASE_CODE_DIRECTORY: '/root/codes', + SANDBOX_ID: 'e2e-dns-eai', + AP_NETWORK_MODE: 'STRICT', + AP_EGRESS_PROXY_URL: proxyUrl, + AP_PROBE_PLAN: JSON.stringify(plan), + }, + resourceLimits: { memoryLimitMb: 256, cpuMsPerSec: 1000, timeLimitSeconds: 30 }, + }) + + const stdoutChunks: Buffer[] = [] + const stderrChunks: Buffer[] = [] + child.stdout?.removeAllListeners('data') + child.stderr?.removeAllListeners('data') + child.stdout?.on('data', (d: Buffer) => stdoutChunks.push(d)) + child.stderr?.on('data', (d: Buffer) => stderrChunks.push(d)) + + const exitCode = await new Promise((resolve) => child.on('close', (code) => resolve(code))) + + const out = Buffer.concat(stdoutChunks).toString().trim() + const err = Buffer.concat(stderrChunks).toString().trim() + const jsonLine = out.split('\n').reverse().find((line) => line.trim().startsWith('{')) + if (!jsonLine) throw new Error(`No JSON on probe stdout (exit=${exitCode}). 
stdout="${out}" stderr="${err}"`) + return JSON.parse(jsonLine) +} + +type ProbeResult = { + action: unknown + status?: 'OK' | 'ERR' | 'TIMEOUT' + code?: string + syscall?: string + errno?: number + address?: string + elapsedMs?: number + message?: string +} diff --git a/packages/server/worker/test/e2e/sandbox-real-third-party.e2e.test.ts b/packages/server/worker/test/e2e/sandbox-real-third-party.e2e.test.ts new file mode 100644 index 00000000000..8306312c804 --- /dev/null +++ b/packages/server/worker/test/e2e/sandbox-real-third-party.e2e.test.ts @@ -0,0 +1,251 @@ +import { chmod, copyFile, mkdtemp } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { iptablesLockdown, IptablesLockdown } from '../../src/lib/egress/iptables-lockdown' +import { egressInternals } from '../../src/lib/egress/lifecycle' +import { EgressProxy, startEgressProxy } from '../../src/lib/egress/proxy' +import { sandboxCapacity } from '../../src/lib/sandbox/capacity' +import { getIsolateExecutableName, isolateProcess } from '../../src/lib/sandbox/isolate' +import { requireOutboundInternet } from './helpers/outbound-internet-guard' +import { requireIsolateBinary, requireLinuxPrivileged } from './helpers/privilege-guard' +import { silentLogger } from './helpers/silent-logger' + +/** + * Reaches out to a curated list of real, public third-party API hosts from inside + * the production-shaped stack: SANDBOX_PROCESS + STRICT + iptables lockdown + + * egress proxy + sandbox-resolv-conf-aware DNS allowlist. + * + * The purpose is regression coverage for the 2026-05-06 outage class: any future + * change that breaks DNS / proxy CONNECT / TLS / iptables for the sandbox will + * fail this suite *before* it ships. 
+ */ + +const BOX_ID = 0 +const SANDBOX_UID = sandboxCapacity.firstBoxUid + BOX_ID +const ISOLATE_BINARY_PATH = path.resolve(process.cwd(), 'packages/server/api/src/assets', getIsolateExecutableName()) + +const PRIVILEGE_SKIP = requireLinuxPrivileged() ?? requireIsolateBinary(ISOLATE_BINARY_PATH) + +describe.skipIf(PRIVILEGE_SKIP)('sandbox real third-party connectivity (SANDBOX_PROCESS + STRICT)', () => { + let proxy: EgressProxy + let lockdown: IptablesLockdown | null = null + let commonDir: string + let internetSkip: { skip: true, reason: string } | undefined + + beforeAll(async () => { + internetSkip = await requireOutboundInternet() + if (internetSkip) return + + proxy = await startEgressProxy({ + log: silentLogger(), + allowList: [], + }) + + const sandboxNameservers = await egressInternals.listSandboxResolvConfNameservers() + const unionedAllowList = [...new Set(['1.1.1.1', '8.8.8.8', ...sandboxNameservers])] + + lockdown = await iptablesLockdown.apply({ + log: silentLogger(), + proxyPort: proxy.port, + firstBoxUid: SANDBOX_UID, + numBoxes: 1, + nameservers: unionedAllowList, + }) + + commonDir = await mkdtemp(path.join(tmpdir(), 'ap-real-3p-')) + const probeDst = path.join(commonDir, 'egress-probe.js') + await copyFile(path.resolve(__dirname, 'fixtures/egress-probe.js'), probeDst) + await chmod(commonDir, 0o755) + await chmod(probeDst, 0o644) + }, 60_000) + + afterAll(async () => { + if (lockdown) await lockdown.remove() + if (proxy) await proxy.close() + }) + + it.skipIf(PRIVILEGE_SKIP)('Group A — at least 80% of curated third-party APIs reach DNS+TLS+HTTP through the proxy', async (ctx) => { + if (internetSkip) ctx.skip() + const plan = GROUP_A_HOSTS.flatMap((host) => [ + { type: 'dns-lookup', hostname: host, tag: `${host}:dns` }, + { type: 'https-head-via-proxy', url: `https://${host}/`, timeoutMs: 8000, tag: `${host}:https` }, + ]) + const result = await runProbeInSandbox({ commonDir, plan, proxyPort: proxy.port }) + + const successes = 
countConnectivitySuccesses({ results: result.results, hosts: GROUP_A_HOSTS }) + const summary = summarizeFailures({ results: result.results, hosts: GROUP_A_HOSTS }) + + expect( + successes >= MIN_GROUP_A_SUCCESSES, + `expected >= ${MIN_GROUP_A_SUCCESSES}/${GROUP_A_HOSTS.length} hosts reachable through the proxy; got ${successes}.\n${summary}`, + ).toBe(true) + + const eaiAgain = result.results.filter((r) => r.code === 'EAI_AGAIN') + expect( + eaiAgain.length, + `EAI_AGAIN must never appear — that is the production outage signature. Hosts: ${eaiAgain.map((r) => r.action.tag).join(', ')}`, + ).toBe(0) + }, 120_000) + + it.skipIf(PRIVILEGE_SKIP)('Group B — multi-record / Cloudflare-fronted hosts resolve and reach origin', async (ctx) => { + if (internetSkip) ctx.skip() + const plan = GROUP_B_HOSTS.flatMap((host) => [ + { type: 'dns-lookup', hostname: host, tag: `${host}:dns` }, + { type: 'https-head-via-proxy', url: `https://${host}/`, timeoutMs: 8000, tag: `${host}:https` }, + ]) + const result = await runProbeInSandbox({ commonDir, plan, proxyPort: proxy.port }) + const successes = countConnectivitySuccesses({ results: result.results, hosts: GROUP_B_HOSTS }) + expect(successes, summarizeFailures({ results: result.results, hosts: GROUP_B_HOSTS })).toBeGreaterThanOrEqual(GROUP_B_HOSTS.length - 1) + }, 60_000) + + it.skipIf(PRIVILEGE_SKIP)('Group D — SSRF defense-in-depth: cloud-metadata and loopback are blocked', async (ctx) => { + if (internetSkip) ctx.skip() + const plan: ProbeAction[] = [ + { type: 'https-head-via-proxy', url: 'https://169.254.169.254/', timeoutMs: 5000, tag: 'aws-imds:https' }, + { type: 'https-head-via-proxy', url: 'https://metadata.google.internal/', timeoutMs: 5000, tag: 'gcp-metadata:https' }, + { type: 'direct-tcp-connect', host: '127.0.0.1', port: 22, tag: 'loopback-ssh' }, + { type: 'direct-tcp-connect', host: '10.0.0.1', port: 80, tag: 'rfc1918' }, + ] + const result = await runProbeInSandbox({ commonDir, plan, proxyPort: proxy.port }) + + 
for (const r of result.results) { + if (r.action.type === 'https-head-via-proxy') { + expect( + r.status === 'ERR', + `${r.action.tag} must be rejected by the proxy/SSRF guard, got: ${JSON.stringify(r)}`, + ).toBe(true) + } + else if (r.action.type === 'direct-tcp-connect') { + expect( + r.status, + `${r.action.tag} must NOT be reachable; iptables should REJECT direct connect`, + ).not.toBe('OK') + } + } + }, 60_000) +}) + +function countConnectivitySuccesses({ results, hosts }: { results: ProbeResult[], hosts: readonly string[] }): number { + let n = 0 + for (const host of hosts) { + const dnsOk = results.some((r) => r.action.tag === `${host}:dns` && r.status === 'OK') + const httpsOk = results.some((r) => r.action.tag === `${host}:https` && r.status === 'OK') + if (dnsOk && httpsOk) n += 1 + } + return n +} + +function summarizeFailures({ results, hosts }: { results: ProbeResult[], hosts: readonly string[] }): string { + const lines: string[] = [] + for (const host of hosts) { + const dns = results.find((r) => r.action.tag === `${host}:dns`) + const https = results.find((r) => r.action.tag === `${host}:https`) + const dnsLabel = dns?.status === 'OK' ? 'dns:OK' : `dns:${dns?.code ?? dns?.status ?? 'missing'}` + const httpsLabel = https?.status === 'OK' ? `https:${https.statusCode ?? 'OK'}` : `https:${https?.code ?? https?.status ?? 'missing'}` + if (dns?.status !== 'OK' || https?.status !== 'OK') { + lines.push(` ${host} → ${dnsLabel} ${httpsLabel}`) + } + } + return lines.length > 0 ? 
`failures:\n${lines.join('\n')}` : 'all hosts succeeded' +} + +async function runProbeInSandbox({ commonDir, plan, proxyPort }: { + commonDir: string + plan: ProbeAction[] + proxyPort: number +}): Promise<{ results: ProbeResult[] }> { + const logger = silentLogger() + const maker = isolateProcess(logger, path.join(commonDir, 'egress-probe.js'), commonDir, BOX_ID) + const proxyUrl = `http://127.0.0.1:${proxyPort}` + const child = await maker.create({ + sandboxId: 'e2e-real-3p', + command: [], + mounts: [ + { hostPath: commonDir, sandboxPath: '/root/common' }, + ], + env: { + HOME: '/tmp/', + NODE_PATH: '/usr/src/node_modules', + AP_EXECUTION_MODE: 'SANDBOX_PROCESS', + AP_SANDBOX_WS_PORT: '0', + AP_BASE_CODE_DIRECTORY: '/root/codes', + SANDBOX_ID: 'e2e-real-3p', + AP_NETWORK_MODE: 'STRICT', + AP_EGRESS_PROXY_URL: proxyUrl, + AP_PROBE_PLAN: JSON.stringify(plan), + }, + resourceLimits: { memoryLimitMb: 256, cpuMsPerSec: 4000, timeLimitSeconds: 90 }, + }) + + const stdoutChunks: Buffer[] = [] + const stderrChunks: Buffer[] = [] + child.stdout?.removeAllListeners('data') + child.stderr?.removeAllListeners('data') + child.stdout?.on('data', (d: Buffer) => stdoutChunks.push(d)) + child.stderr?.on('data', (d: Buffer) => stderrChunks.push(d)) + + const exitCode = await new Promise((resolve) => child.on('close', (code) => resolve(code))) + const out = Buffer.concat(stdoutChunks).toString().trim() + const err = Buffer.concat(stderrChunks).toString().trim() + const jsonLine = out.split('\n').reverse().find((line) => line.trim().startsWith('{')) + if (!jsonLine) throw new Error(`probe emitted no JSON (exit=${exitCode}). 
stdout="${out}" stderr="${err}"`) + return JSON.parse(jsonLine) +} + +const GROUP_A_HOSTS = [ + 'api.openai.com', + 'api.anthropic.com', + 'api.github.com', + 'api.stripe.com', + 'api.notion.com', + 'api.hubapi.com', + 'slack.com', + 'api.airtable.com', + 'api.dropboxapi.com', + 'api.box.com', + 'graph.microsoft.com', + 'sheets.googleapis.com', + 'api.twilio.com', + 'api.sendgrid.com', + 'discord.com', + 'api.figma.com', + 'api.zoom.us', + 'api.pagerduty.com', + 'api.pipedrive.com', + 'api.xero.com', + 'a.klaviyo.com', + 'api.cohere.com', + 'api.mistral.ai', + 'api.groq.com', + 'api.assemblyai.com', + 'api.webflow.com', + 'api.line.me', + 'api.telnyx.com', + 'gateway.ai.cloudflare.com', + 'api.pandadoc.com', +] as const + +const GROUP_B_HOSTS = [ + 'cloudflare.com', + 'notion.so', + 'www.figma.com', +] as const + +const MIN_GROUP_A_SUCCESSES = Math.ceil(GROUP_A_HOSTS.length * 0.8) + +type ProbeAction = + | { type: 'dns-lookup', hostname: string, tag: string } + | { type: 'dns-lookup-v6', hostname: string, tag: string } + | { type: 'https-head-via-proxy', url: string, timeoutMs: number, tag: string } + | { type: 'direct-tcp-connect', host: string, port: number, tag: string } + +type ProbeResult = { + action: ProbeAction + status?: 'OK' | 'ERR' | 'TIMEOUT' + statusCode?: number + code?: string + message?: string + address?: string + elapsedMs?: number +} From 7103e33ac3c8fb372cc2079bc7c2c23ffd311231 Mon Sep 17 00:00:00 2001 From: Bastien <57838962+bst1n@users.noreply.github.com> Date: Thu, 7 May 2026 10:21:02 +0200 Subject: [PATCH 4/7] fix(baserow): clean French i18n, sync translation keys, reorder actions (#13078) Co-authored-by: Claude Opus 4.7 (1M context) Co-authored-by: David Anyatonwu <51977119+onyedikachi-david@users.noreply.github.com> Co-authored-by: David Anyatonwu --- bun.lock | 2 +- .../pieces/community/baserow/package.json | 2 +- .../pieces/community/baserow/src/i18n/fr.json | 242 ++++++++++-------- .../baserow/src/i18n/translation.json | 201 
++++++++------- .../pieces/community/baserow/src/index.ts | 8 +- .../baserow/src/lib/triggers/row-event.ts | 2 +- 6 files changed, 260 insertions(+), 197 deletions(-) diff --git a/bun.lock b/bun.lock index 5d8e9b18075..00c0f30fc17 100644 --- a/bun.lock +++ b/bun.lock @@ -1042,7 +1042,7 @@ }, "packages/pieces/community/baserow": { "name": "@activepieces/piece-baserow", - "version": "0.9.0", + "version": "0.9.1", "dependencies": { "@activepieces/pieces-common": "workspace:*", "@activepieces/pieces-framework": "workspace:*", diff --git a/packages/pieces/community/baserow/package.json b/packages/pieces/community/baserow/package.json index a96cb654bc4..8fe63b22f66 100644 --- a/packages/pieces/community/baserow/package.json +++ b/packages/pieces/community/baserow/package.json @@ -1,6 +1,6 @@ { "name": "@activepieces/piece-baserow", - "version": "0.9.0", + "version": "0.9.1", "main": "./dist/src/index.js", "types": "./dist/src/index.d.ts", "scripts": { diff --git a/packages/pieces/community/baserow/src/i18n/fr.json b/packages/pieces/community/baserow/src/i18n/fr.json index 581ff60be16..975485d9083 100644 --- a/packages/pieces/community/baserow/src/i18n/fr.json +++ b/packages/pieces/community/baserow/src/i18n/fr.json @@ -1,129 +1,163 @@ { "Open-source online database tool, alternative to Airtable": "Outil de base de données en ligne open source, alternative à Airtable", - "API URL": "API URL", - "Database Token": "Jeton de la base de données", + + "Authentication": "Authentification", + "Connect to Baserow using either a Database Token (recommended) or your Email & Password. Fill in only the fields for your chosen method — leave the other section blank.": "Connectez-vous à Baserow soit avec un Jeton de base de données (recommandé), soit avec votre E-mail et mot de passe. 
Remplissez uniquement les champs de la méthode choisie — laissez l'autre section vide.", + "Authentication Method": "Méthode d'authentification", + "Choose **Database Token** (recommended) for scoped, per-table access compatible with 2FA. Choose **Email & Password** only if you need automatic webhook registration on triggers — 2FA accounts are not supported.": "Choisissez **Jeton de base de données** (recommandé) pour un accès par table compatible avec la 2FA. Choisissez **E-mail et mot de passe** uniquement si vous avez besoin de l'enregistrement automatique des webhooks pour les déclencheurs — les comptes 2FA ne sont pas pris en charge.", + "Database Token (recommended)": "Jeton de base de données (recommandé)", "Email & Password (JWT)": "E-mail et mot de passe (JWT)", - "Email": "Courriel", + "API URL": "URL de l'API", + "Your Baserow instance URL. Leave the default for Baserow Cloud.": "URL de votre instance Baserow. Laissez la valeur par défaut pour Baserow Cloud.", + "---\n#### 🔑 Database Token\nGo to **Settings → Database tokens** in Baserow, create a token, copy it, and paste it below. Leave Email and Password blank.": "---\n#### 🔑 Jeton de base de données\nDans Baserow, allez dans **Paramètres → Jetons de base de données**, créez un jeton, copiez-le et collez-le ci-dessous. Laissez E-mail et Mot de passe vides.", + "Database Token": "Jeton de base de données", + "Your Baserow database token. Leave blank when using Email & Password.": "Votre jeton de base de données Baserow. Laissez vide si vous utilisez E-mail et mot de passe.", + "---\n#### 👤 Email & Password (JWT)\nEnter your Baserow login credentials below. Leave Database Token blank. Accounts with 2FA enabled are not supported.": "---\n#### 👤 E-mail et mot de passe (JWT)\nSaisissez vos identifiants Baserow ci-dessous. Laissez le Jeton de base de données vide. Les comptes avec 2FA activée ne sont pas pris en charge.", + "Email": "E-mail", + "Your Baserow account email. 
Leave blank when using Database Token.": "L'e-mail de votre compte Baserow. Laissez vide si vous utilisez un Jeton de base de données.", "Password": "Mot de passe", + "Your Baserow account password. Leave blank when using Database Token.": "Le mot de passe de votre compte Baserow. Laissez vide si vous utilisez un Jeton de base de données.", + + "Table": "Table", + "Select the table.": "Sélectionnez la table.", + "Connect your account first.": "Connectez d'abord votre compte.", + "Row": "Ligne", + "Select the row.": "Sélectionnez la ligne.", + "Select a table first.": "Sélectionnez d'abord une table.", + "Table Fields": "Champs de la table", + "Create Row": "Créer une ligne", - "Delete Row": "Supprimer la ligne", - "Get Row": "Obtenir la ligne", - "List Rows": "Lister les lignes", - "Update Row": "Mettre à jour la ligne", - "Find Row": "Trouver une ligne", - "Clean Row": "Nettoyer la ligne", - "Aggregate Field": "Champ agrégé", - "Custom API Call": "Appel API personnalisé", - "Creates a new row.": "Crée une nouvelle ligne.", - "Deletes an existing row.": "Supprime une ligne existante.", - "Fetches a single table row.": "Récupère une seule ligne de table.", - "Finds a page of rows in given table.": "Trouve une page de lignes dans une table donnée.", - "Updates fields in an existing row. Empty values are skipped. To clear a field, use Clean Row.": "Met à jour les champs dans une ligne existante. Les valeurs vides sont ignorées. Pour effacer un champ, utilisez Nettoyer la ligne.", - "Finds a row by matching a field value. Returns the first match.": "Trouve une ligne en correspondant à la valeur du champ. Renvoie la première correspondance.", - "Clears fields in a row. Empty values will clear the corresponding fields.": "Efface les champs dans une ligne. Les valeurs vides effaceront les champs correspondants.", - "Calculates an aggregation (sum, average, min, max, count, etc.) 
over all values of a field in a table.": "Calcule une agrégation (somme, moyenne, min, max, compteur, etc.) sur toutes les valeurs d'un champ dans un tableau.", - "Make a custom API call to a specific endpoint": "Passez un appel API personnalisé à un point de terminaison spécifique", + "Creates a new row in a table.": "Crée une nouvelle ligne dans une table.", "Create missing select options": "Créer les options de sélection manquantes", "When enabled, single/multi-select values that do not yet exist in the field will be added before creating the row. Existing options are preserved.": "Lorsque activé, les valeurs de sélection unique/multiple qui n'existent pas encore dans le champ seront ajoutées avant la création de la ligne. Les options existantes sont préservées.", "When enabled, single/multi-select values that do not yet exist in the field will be added before updating the row. Existing options are preserved.": "Lorsque activé, les valeurs de sélection unique/multiple qui n'existent pas encore dans le champ seront ajoutées avant la mise à jour de la ligne. Les options existantes sont préservées.", - "Table": "Tableau", - "Table Fields": "Champs de la table", - "Row": "Ligne", + "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.": "Lorsque activé, les valeurs de sélection unique/multiple qui n'existent pas encore dans le champ seront ajoutées avant la création ou la mise à jour de la ligne. Les options existantes sont préservées.", + + "Get Row": "Obtenir la ligne", + "Gets a single row by its ID from a table.": "Récupère une ligne unique d'une table à partir de son identifiant.", + + "List Rows": "Lister les lignes", + "Lists rows from a table with optional search, sorting, and filtering.": "Liste les lignes d'une table, avec recherche, tri et filtres optionnels.", "Page": "Page", - "Page Size": "Nombre d'élément", + "Page number to return. 
Defaults to 1.": "Numéro de page à retourner. Par défaut, 1.", + "Page Size": "Lignes par page", + "Number of rows to return per page. Maximum 200. Defaults to 100.": "Nombre de lignes à retourner par page. Maximum 200. Par défaut, 100.", "Search": "Rechercher", + "Return only rows whose cell data matches this search term.": "Retourne uniquement les lignes dont les données de cellule correspondent à ce terme de recherche.", "Order By": "Trier par", + "Field name to sort by. Prefix with **-** for descending or **+** for ascending. Example: `-Name` sorts by Name Z→A.": "Nom du champ utilisé pour le tri. Préfixez par **-** pour ordre décroissant, **+** pour ordre croissant. Exemple : `-Nom` trie par Nom de Z à A.", + "Filter Combination": "Combinaison des filtres", + "How to combine multiple filters. **AND** requires all filters to match; **OR** requires any one filter to match.": "Comment combiner plusieurs filtres. **ET** exige que tous les filtres correspondent ; **OU** exige qu'au moins un filtre corresponde.", + "AND — all filters must match": "ET — tous les filtres doivent correspondre", + "OR — any filter can match": "OU — un seul filtre suffit", + "**How to add filters** (optional):\n\nEach filter is a JSON object with three keys:\n- `field` — numeric field ID (in Baserow, click the field header; the ID appears in the page URL)\n- `type` — operator: `equal`, `not_equal`, `contains`, `contains_not`, `higher_than`, `lower_than`, `is_empty`, `is_not_empty`\n- `value` — the value to compare against\n\nExample: `{\"field\": 123, \"type\": \"equal\", \"value\": \"Active\"}`": "**Comment ajouter des filtres** (optionnel) :\n\nChaque filtre est un objet JSON à trois clés :\n- `field` — identifiant numérique du champ (dans Baserow, cliquez sur l'en-tête du champ ; l'identifiant apparaît dans l'URL de la page)\n- `type` — opérateur : `equal`, `not_equal`, `contains`, `contains_not`, `higher_than`, `lower_than`, `is_empty`, `is_not_empty`\n- `value` — la valeur de 
comparaison\n\nExemple : `{\"field\": 123, \"type\": \"equal\", \"value\": \"Actif\"}`", + "Filters": "Filtres", + "Each entry is a JSON object with \"field\" (numeric ID), \"type\" (operator), and \"value\". Leave empty to return all rows.": "Chaque entrée est un objet JSON avec \"field\" (identifiant numérique), \"type\" (opérateur) et \"value\". Laissez vide pour retourner toutes les lignes.", + + "Find Row": "Trouver une ligne", + "Finds a row by matching a field value. Returns the first match.": "Trouve une ligne en cherchant une valeur dans un champ. Renvoie la première correspondance.", "Field": "Champ", + "Select the field to search by.": "Sélectionnez le champ dans lequel rechercher.", "Field Value": "Valeur du champ", + "The value to search for (exact match).": "La valeur à rechercher (correspondance exacte).", + + "Update Row": "Mettre à jour la ligne", + "Updates fields in an existing row. Empty values are skipped. To clear a field, use Clean Row.": "Met à jour les champs d'une ligne existante. Les valeurs vides sont ignorées. Pour effacer un champ, utilisez Nettoyer la ligne.", + + "Upsert Row": "Créer ou mettre à jour une ligne", + "Creates a new row or updates an existing one by matching a field value.": "Crée une nouvelle ligne ou met à jour une ligne existante en recherchant une valeur dans un champ.", + "Match Field": "Champ de correspondance", + "Select the field to search for an existing row.": "Sélectionnez le champ utilisé pour retrouver une ligne existante.", + "Match Value": "Valeur de correspondance", + "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.": "La valeur à rechercher (correspondance exacte). 
Si une ligne avec cette valeur est trouvée, elle sera mise à jour ; sinon une nouvelle ligne sera créée.", + + "Delete Row": "Supprimer la ligne", + "Deletes an existing row.": "Supprime une ligne existante.", + + "Clean Row": "Nettoyer la ligne", + "Sets all fields in a row to empty/null. To update only specific fields, use Update Row instead.": "Vide tous les champs d'une ligne (valeurs nulles). Pour ne mettre à jour que certains champs, utilisez plutôt Mettre à jour la ligne.", + + "Batch Create Rows": "Créer des lignes par lot", + "Creates multiple rows in a single request. Accepts up to 200 rows.": "Crée plusieurs lignes en une seule requête. Accepte jusqu'à 200 lignes.", + "Rows": "Lignes", + "A JSON array of objects. Each object represents a row with field names as keys.": "Un tableau JSON d'objets. Chaque objet représente une ligne, avec les noms de champs en clés.", + + "Batch Update Rows": "Mettre à jour des lignes par lot", + "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.": "Met à jour plusieurs lignes en une seule requête. Chaque ligne doit inclure un champ \"id\". Accepte jusqu'à 200 lignes.", + "A JSON array of objects. Each object must include an \"id\" field and the fields to update.": "Un tableau JSON d'objets. Chaque objet doit inclure un champ \"id\" ainsi que les champs à mettre à jour.", + + "Batch Delete Rows": "Supprimer des lignes par lot", + "Deletes multiple rows in a single request. Accepts up to 200 row IDs.": "Supprime plusieurs lignes en une seule requête. Accepte jusqu'à 200 identifiants de ligne.", + "Row IDs": "Identifiants de ligne", + "Numeric IDs of the rows to delete. You can get row IDs from the List Rows or Find Row actions.": "Identifiants numériques des lignes à supprimer. Vous pouvez les récupérer depuis les actions Lister les lignes ou Trouver une ligne.", + + "Aggregate Field": "Agréger un champ", + "Calculates an aggregation (sum, average, min, max, count, etc.) 
over all values of a field in a grid view.": "Calcule une agrégation (somme, moyenne, min, max, nombre, etc.) sur toutes les valeurs d'un champ dans une vue tableau.", + "View": "Vue", + "Select the grid view to aggregate.": "Sélectionnez la vue tableau à agréger.", + "Select the field to aggregate.": "Sélectionnez le champ à agréger.", + "Aggregation Type": "Type d'agrégation", + "The calculation to run over the field. **Sum, Average, Min, Max, Median, Std Dev, and Variance** only work with number fields.": "Le calcul à effectuer sur le champ. **Somme, Moyenne, Min, Max, Médiane, Écart-type et Variance** ne fonctionnent que sur les champs numériques.", + "Sum": "Somme", + "Average": "Moyenne", + "Min": "Min", + "Max": "Max", + "Count (non-empty)": "Nombre (non vides)", + "Count (empty)": "Nombre (vides)", + "Count (unique values)": "Nombre (valeurs uniques)", + "Median": "Médiane", + "Standard deviation": "Écart-type", + "Variance": "Variance", + + "Upload File": "Téléverser un fichier", + "Uploads a file to Baserow from a URL. Returns the uploaded file object that can be used in file fields. Requires Email & Password (JWT) authentication — Database Tokens do not have access to the user-files endpoint.": "Téléverse un fichier dans Baserow depuis une URL. Renvoie l'objet fichier téléversé, utilisable dans les champs de type fichier. 
Requiert l'authentification E-mail et mot de passe (JWT) — les Jetons de base de données n'ont pas accès au point de terminaison user-files.", + "File URL": "URL du fichier", + "The public URL of the file to upload to Baserow.": "L'URL publique du fichier à téléverser dans Baserow.", + + "Custom API Call": "Appel API personnalisé", + "Make a custom API call to a specific endpoint": "Effectuer un appel API personnalisé vers un point de terminaison spécifique", "Method": "Méthode", "Headers": "En-têtes", "Query Parameters": "Paramètres de requête", - "Body Type": "Body Type", + "Body Type": "Type de corps", "Body": "Corps", - "Response is Binary ?": "La réponse est Binaire ?", - "No Error on Failure": "Aucune erreur en cas d'échec", + "Response is Binary ?": "Réponse binaire ?", + "No Error on Failure": "Pas d'erreur en cas d'échec", "Timeout (in seconds)": "Délai d'attente (en secondes)", "Follow redirects": "Suivre les redirections", - "Select the table.": "Sélectionnez la table.", - "Select the row.": "Sélectionnez la ligne.", - "Select the field to aggregate.": "Sélectionnez le champ à agréger.", - "Aggregation Type": "Type d'agrégation", - "Sum, average, min, max, std_dev and variance only work on number fields.": "Somme, moyenne, min, max, std_dev et variance ne fonctionnent que sur les champs numéros.", - "Sum": "Sum", - "Average": "Moyenne", - "Min": "Min", - "Max": "Max.", - "Count (non-empty)": "Nombre (non-vide)", - "Count (empty)": "Nombre (vide)", - "Count (unique values)": "Nombre (valeurs uniques)", - "Median": "Moyenne", - "Standard deviation": "Écart type", - "Variance": "Variante", - "Page number to return. Defaults to 1.": "Numéro de page à retourner. Par défaut, 1.", - "Number of rows to return per page. Defaults to 100.": "Nombre de lignes à retourner par page. 
Par défaut, 100.", - "If provided only rows with cell data that matches the search query are going to be returned.": "Si fourni, seules les lignes avec des données cellulaires qui correspondent à la requête de recherche seront retournées.", - "If provided rows will be order by specific field.Use **-** sign for descending / **+** sing for ascending ordering.\n Example. \"-My Field\" will return rows in descending order based on \"My Field\" field.": "Si les lignes fournies seront ordonnées par champ spécifique.Utilisez le signe **-** pour décroissant / **+** pour chanter pour ordre croissant.\n Exemple. \"-Mon Champ\" retournera des lignes dans l'ordre décroissant basé sur le champ \"Mon Champ\".", - "Select the field to search by.": "Sélectionnez le champ à rechercher.", - "The value to search for (exact match).": "La valeur à rechercher (correspondance exacte).", - "Authorization headers are injected automatically from your connection.": "Les en-têtes d'autorisation sont injectés automatiquement à partir de votre connexion.", - "Enable for files like PDFs, images, etc.": "Activer pour les fichiers comme les PDFs, les images, etc.", - "GET": "OBTENIR", - "POST": "POSTER", + "GET": "GET", + "POST": "POST", "PATCH": "PATCH", - "PUT": "EFFACER", - "DELETE": "SUPPRIMER", - "HEAD": "TÊTE", + "PUT": "PUT", + "DELETE": "DELETE", + "HEAD": "HEAD", "None": "Aucun", "JSON": "JSON", - "Form Data": "Données du formulaire", + "Form Data": "Données de formulaire", "Raw": "Brut", - "Row Created": "Ligne créée", - "Row Updated": "Ligne mise à jour", - "Row Deleted": "Ligne supprimée", - "Row Event": "Événement sur une ligne", - "Triggers when a new row is created in a Baserow table.": "Déclenche quand une nouvelle ligne est créée dans une table Baserow.", - "Triggers when an existing row is updated in a Baserow table.": "Déclenche lorsqu'une ligne existante est mise à jour dans une table Baserow.", - "Triggers when a row is deleted from a Baserow table.": "Déclenche lorsqu'une 
ligne est supprimée d'une table Baserow.", - "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated Row Created, Row Updated, or Row Deleted triggers.": "Déclenche lorsqu'une ligne est créée, mise à jour ou supprimée dans une table Baserow. Pour ne réagir qu'à un seul type d'événement, utilisez les déclencheurs dédiés Ligne créée, Ligne mise à jour ou Ligne supprimée.", - "Rows Created (Batch)": "Lignes créées par lot", - "Rows Updated (Batch)": "Lignes mises à jour par lot", - "Rows Deleted (Batch)": "Lignes supprimées par lot", - "Triggers when new rows are created in a Baserow table. Returns all rows from the event as a single batch.": "Déclenche lorsque de nouvelles lignes sont créées dans une table Baserow. Renvoie toutes les lignes de l'événement en un seul lot.", - "Triggers when existing rows are updated in a Baserow table. Returns all rows from the event as a single batch.": "Déclenche lorsque les lignes existantes sont mises à jour dans une table Baserow. Renvoie toutes les lignes de l'événement en un seul lot.", - "Triggers when rows are deleted from a Baserow table. Returns all deleted row IDs from the event as a single batch.": "Déclenche lorsque les lignes sont supprimées d'une table Baserow. Renvoie tous les identifiants de ligne supprimés de l'événement en un seul lot.", - "Batch Create Rows": "Créer des lignes par lot", - "Batch Update Rows": "Mettre à jour des lignes par lot", - "Batch Delete Rows": "Supprimer des lignes par lot", - "Creates multiple rows in a single request. Accepts up to 200 rows.": "Crée plusieurs lignes dans une seule requête. Accepte jusqu'à 200 lignes.", - "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.": "Met à jour plusieurs lignes dans une seule requête. Chaque ligne doit inclure un champ \"id\". Accepte jusqu'à 200 lignes.", - "Deletes multiple rows in a single request. 
Accepts up to 200 row IDs.": "Supprime plusieurs lignes dans une seule requête. Accepte jusqu'à 200 identifiants de ligne.", - "Rows": "Lignes", - "Row IDs": "ID de ligne", - "A JSON array of objects. Each object represents a row with field names as keys.": "Un tableau JSON d'objets. Chaque objet représente une ligne avec les noms de champs comme clés.", - "A JSON array of objects. Each object must include an \"id\" field and the fields to update.": "Un tableau JSON d'objets. Chaque objet doit inclure un champ \"id\" et les champs à mettre à jour.", - "List of row IDs to delete.": "Liste des identifiants de ligne à supprimer.", - "Filter Type": "Type de filtre", - "When AND is selected, all filters must match. When OR is selected, any filter can match.": "Lorsque AND est sélectionné, tous les filtres doivent correspondre. Lorsque OU est sélectionné, tout filtre peut correspondre.", - "AND": "ET", - "OR": "OU", - "Filters": "Filtres", - "List of filters. Each filter is an object with \"field\" (field ID as number), \"type\" (operator), and \"value\" (filter value).": "Liste des filtres. Chaque filtre est un objet avec \"field\" (ID du champ comme nombre), \"type\" (opérateur) et \"value\" (valeur du filtre).", + "Authorization headers are injected automatically from your connection.": "Les en-têtes d'autorisation sont injectés automatiquement depuis votre connexion.", + "Enable for files like PDFs, images, etc.": "Activez pour les fichiers binaires comme PDF, images, etc.", "Markdown": "Markdown", - "Authentication": "Authentification", - "Authentication Method": "Méthode d'authentification", - "Choose how you want to authenticate with Baserow:\n\n**Database Token** — recommended. Per-table CRUD scoping, compatible with 2FA accounts. Triggers require manual webhook setup.\n 1. Log in to your Baserow account.\n 2. Click on your profile picture (top-left) and go to **Settings → Database tokens**.\n 3. 
Create a new token, then click **:** beside the token name to copy it.\n 4. Paste it into **Database Token** below. Leave **Email** and **Password** empty.\n\n**Email & Password (JWT)** — workspace-wide access, enables automatic webhook registration for triggers. Not compatible with accounts that have 2FA enabled.\n 1. Fill in **Email** and **Password** with your Baserow login credentials. Leave **Database Token** empty.\n\nIn both modes, set **API URL** to your Baserow instance (default: `https://api.baserow.io`).": "Choisissez comment vous authentifier auprès de Baserow :\n\n**Jeton de base de données** — recommandé. Permissions CRUD par table, compatible avec les comptes 2FA. Les déclencheurs nécessitent une configuration manuelle des webhooks.\n 1. Connectez-vous à votre compte Baserow.\n 2. Cliquez sur votre photo de profil (en haut à gauche) et allez dans **Paramètres → Jetons de base de données**.\n 3. Créez un nouveau jeton, puis cliquez sur **:** à côté de son nom pour le copier.\n 4. Collez-le dans **Jeton de base de données** ci-dessous. Laissez **Courriel** et **Mot de passe** vides.\n\n**E-mail et mot de passe (JWT)** — accès au workspace entier, active l'enregistrement automatique des webhooks pour les déclencheurs. Incompatible avec les comptes ayant la 2FA activée.\n 1. Remplissez **Courriel** et **Mot de passe** avec vos identifiants Baserow. Laissez **Jeton de base de données** vide.\n\nDans les deux modes, renseignez **API URL** avec l'URL de votre instance Baserow (par défaut : `https://api.baserow.io`).", - "Database Token is recommended. Use Email & Password (JWT) only if you need automatic webhook registration on triggers.": "Le jeton de base de données est recommandé. Utilisez E-mail et mot de passe (JWT) uniquement si vous avez besoin de l'enregistrement automatique des webhooks pour les déclencheurs.", - "Required if Authentication Method is **Database Token**. 
Leave empty for JWT.": "Requis si la méthode d'authentification est **Jeton de base de données**. Laissez vide pour JWT.", - "Required if Authentication Method is **Email & Password (JWT)**. Leave empty for Database Token.": "Requis si la méthode d'authentification est **E-mail et mot de passe (JWT)**. Laissez vide pour Jeton de base de données.", - "Upsert Row": "Créer ou mettre à jour une ligne", - "Creates a new row or updates an existing one by matching a field value.": "Crée une nouvelle ligne ou met à jour une ligne existante en recherchant une valeur dans un champ.", - "Match Field": "Champ de correspondance", - "Select the field to search for an existing row.": "Sélectionnez le champ pour rechercher une ligne existante.", - "Match Value": "Valeur de correspondance", - "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.": "La valeur à rechercher (correspondance exacte). Si une ligne avec cette valeur est trouvée, elle sera mise à jour ; sinon une nouvelle ligne sera créée.", - "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.": "Lorsque activé, les valeurs de sélection unique/multiple qui n'existent pas encore dans le champ seront ajoutées avant la création ou la mise à jour de la ligne. Les options existantes sont préservées.", - "Upload File": "Téléverser un fichier", - "Uploads a file to Baserow from a URL. Returns the uploaded file object that can be used in file fields.": "Téléverse un fichier dans Baserow depuis une URL. Renvoie l'objet fichier qui peut être utilisé dans les champs de type fichier.", - "File URL": "URL du fichier", - "The public URL of the file to upload to Baserow.": "L'URL publique du fichier à téléverser dans Baserow." 
+ + "New Row": "Nouvelle ligne", + "Triggers when a new row is created in a Baserow table.": "Se déclenche lorsqu'une nouvelle ligne est créée dans une table Baserow.", + "Updated Row": "Ligne mise à jour", + "Triggers when an existing row is updated in a Baserow table.": "Se déclenche lorsqu'une ligne existante est mise à jour dans une table Baserow.", + "Deleted Row": "Ligne supprimée", + "Triggers when a row is deleted from a Baserow table.": "Se déclenche lorsqu'une ligne est supprimée d'une table Baserow.", + "Any Row Change": "Tout changement de ligne", + "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated New Row, Updated Row, or Deleted Row triggers.": "Se déclenche lorsqu'une ligne est créée, mise à jour ou supprimée dans une table Baserow. Pour ne réagir qu'à un seul type d'événement, utilisez les déclencheurs dédiés Nouvelle ligne, Ligne mise à jour ou Ligne supprimée.", + "New Rows (Batch)": "Nouvelles lignes (lot)", + "Triggers when new rows are created in a Baserow table. Returns all rows from the event as a single batch.": "Se déclenche lorsque de nouvelles lignes sont créées dans une table Baserow. Renvoie toutes les lignes de l'événement en un seul lot.", + "Updated Rows (Batch)": "Lignes mises à jour (lot)", + "Triggers when existing rows are updated in a Baserow table. Returns all rows from the event as a single batch.": "Se déclenche lorsque des lignes existantes sont mises à jour dans une table Baserow. Renvoie toutes les lignes de l'événement en un seul lot.", + "Deleted Rows (Batch)": "Lignes supprimées (lot)", + "Triggers when rows are deleted from a Baserow table. Returns all deleted row IDs from the event as a single batch.": "Se déclenche lorsque des lignes sont supprimées d'une table Baserow. Renvoie tous les identifiants de ligne supprimés de l'événement en un seul lot.", + + "Webhook Setup": "Configuration du webhook", + "✅ **Webhook auto-registered** — no manual setup needed.
The webhook is created and removed automatically when you enable or disable this trigger.": "✅ **Webhook enregistré automatiquement** — aucune configuration manuelle requise. Le webhook est créé et supprimé automatiquement lorsque vous activez ou désactivez ce déclencheur.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created**.\n6. Click **Save**.": "**Configuration manuelle du webhook requise** (auth. par Jeton de base de données) :\n\n1. Dans Baserow, cliquez sur le menu **···** à côté de votre table et choisissez **Webhooks**.\n2. Cliquez sur **Create webhook +**.\n3. Définissez la méthode HTTP sur **POST**.\n4. Collez cette URL dans le champ endpoint :\n```\n{{webhookUrl}}\n```\n5. Dans **Events**, sélectionnez **Rows created**.\n6. Cliquez sur **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows updated**.\n6. Click **Save**.": "**Configuration manuelle du webhook requise** (auth. par Jeton de base de données) :\n\n1. Dans Baserow, cliquez sur le menu **···** à côté de votre table et choisissez **Webhooks**.\n2. Cliquez sur **Create webhook +**.\n3. Définissez la méthode HTTP sur **POST**.\n4. Collez cette URL dans le champ endpoint :\n```\n{{webhookUrl}}\n```\n5. Dans **Events**, sélectionnez **Rows updated**.\n6. Cliquez sur **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. 
Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows deleted**.\n6. Click **Save**.": "**Configuration manuelle du webhook requise** (auth. par Jeton de base de données) :\n\n1. Dans Baserow, cliquez sur le menu **···** à côté de votre table et choisissez **Webhooks**.\n2. Cliquez sur **Create webhook +**.\n3. Définissez la méthode HTTP sur **POST**.\n4. Collez cette URL dans le champ endpoint :\n```\n{{webhookUrl}}\n```\n5. Dans **Events**, sélectionnez **Rows deleted**.\n6. Cliquez sur **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created, Rows updated, and Rows deleted**.\n6. Click **Save**.": "**Configuration manuelle du webhook requise** (auth. par Jeton de base de données) :\n\n1. Dans Baserow, cliquez sur le menu **···** à côté de votre table et choisissez **Webhooks**.\n2. Cliquez sur **Create webhook +**.\n3. Définissez la méthode HTTP sur **POST**.\n4. Collez cette URL dans le champ endpoint :\n```\n{{webhookUrl}}\n```\n5. Dans **Events**, sélectionnez **Rows created, Rows updated, and Rows deleted**.\n6. Cliquez sur **Save**." 
} diff --git a/packages/pieces/community/baserow/src/i18n/translation.json b/packages/pieces/community/baserow/src/i18n/translation.json index 623d706a402..23fc114361d 100644 --- a/packages/pieces/community/baserow/src/i18n/translation.json +++ b/packages/pieces/community/baserow/src/i18n/translation.json @@ -1,81 +1,103 @@ { "Open-source online database tool, alternative to Airtable": "Open-source online database tool, alternative to Airtable", + + "Authentication": "Authentication", + "Connect to Baserow using either a Database Token (recommended) or your Email & Password. Fill in only the fields for your chosen method — leave the other section blank.": "Connect to Baserow using either a Database Token (recommended) or your Email & Password. Fill in only the fields for your chosen method — leave the other section blank.", + "Authentication Method": "Authentication Method", + "Choose **Database Token** (recommended) for scoped, per-table access compatible with 2FA. Choose **Email & Password** only if you need automatic webhook registration on triggers — 2FA accounts are not supported.": "Choose **Database Token** (recommended) for scoped, per-table access compatible with 2FA. Choose **Email & Password** only if you need automatic webhook registration on triggers — 2FA accounts are not supported.", + "Database Token (recommended)": "Database Token (recommended)", + "Email & Password (JWT)": "Email & Password (JWT)", "API URL": "API URL", + "Your Baserow instance URL. Leave the default for Baserow Cloud.": "Your Baserow instance URL. Leave the default for Baserow Cloud.", + "---\n#### 🔑 Database Token\nGo to **Settings → Database tokens** in Baserow, create a token, copy it, and paste it below. Leave Email and Password blank.": "---\n#### 🔑 Database Token\nGo to **Settings → Database tokens** in Baserow, create a token, copy it, and paste it below. 
Leave Email and Password blank.", "Database Token": "Database Token", - "Email & Password (JWT)": "Email & Password (JWT)", + "Your Baserow database token. Leave blank when using Email & Password.": "Your Baserow database token. Leave blank when using Email & Password.", + "---\n#### 👤 Email & Password (JWT)\nEnter your Baserow login credentials below. Leave Database Token blank. Accounts with 2FA enabled are not supported.": "---\n#### 👤 Email & Password (JWT)\nEnter your Baserow login credentials below. Leave Database Token blank. Accounts with 2FA enabled are not supported.", "Email": "Email", + "Your Baserow account email. Leave blank when using Database Token.": "Your Baserow account email. Leave blank when using Database Token.", "Password": "Password", + "Your Baserow account password. Leave blank when using Database Token.": "Your Baserow account password. Leave blank when using Database Token.", + + "Table": "Table", + "Select the table.": "Select the table.", + "Connect your account first.": "Connect your account first.", + "Row": "Row", + "Select the row.": "Select the row.", + "Select a table first.": "Select a table first.", + "Table Fields": "Table Fields", + "Create Row": "Create Row", - "Delete Row": "Delete Row", - "Get Row": "Get Row", - "List Rows": "List Rows", - "Update Row": "Update Row", - "Find Row": "Find Row", - "Clean Row": "Clean Row", - "Aggregate Field": "Aggregate Field", - "Batch Create Rows": "Batch Create Rows", - "Batch Update Rows": "Batch Update Rows", - "Batch Delete Rows": "Batch Delete Rows", - "Custom API Call": "Custom API Call", - "Creates a new row.": "Creates a new row.", - "Deletes an existing row.": "Deletes an existing row.", - "Fetches a single table row.": "Fetches a single table row.", - "Finds a page of rows in given table.": "Finds a page of rows in given table.", - "Updates fields in an existing row. Empty values are skipped. To clear a field, use Clean Row.": "Updates fields in an existing row. 
Empty values are skipped. To clear a field, use Clean Row.", - "Finds a row by matching a field value. Returns the first match.": "Finds a row by matching a field value. Returns the first match.", - "Clears fields in a row. Empty values will clear the corresponding fields.": "Clears fields in a row. Empty values will clear the corresponding fields.", - "Calculates an aggregation (sum, average, min, max, count, etc.) over all values of a field in a grid view.": "Calculates an aggregation (sum, average, min, max, count, etc.) over all values of a field in a grid view.", - "Creates multiple rows in a single request. Accepts up to 200 rows.": "Creates multiple rows in a single request. Accepts up to 200 rows.", - "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.": "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.", - "Deletes multiple rows in a single request. Accepts up to 200 row IDs.": "Deletes multiple rows in a single request. Accepts up to 200 row IDs.", - "Make a custom API call to a specific endpoint": "Make a custom API call to a specific endpoint", + "Creates a new row in a table.": "Creates a new row in a table.", "Create missing select options": "Create missing select options", "When enabled, single/multi-select values that do not yet exist in the field will be added before creating the row. Existing options are preserved.": "When enabled, single/multi-select values that do not yet exist in the field will be added before creating the row. Existing options are preserved.", "When enabled, single/multi-select values that do not yet exist in the field will be added before updating the row. Existing options are preserved.": "When enabled, single/multi-select values that do not yet exist in the field will be added before updating the row. 
Existing options are preserved.", - "Table": "Table", - "Table Fields": "Table Fields", - "Row": "Row", + "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.": "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.", + + "Get Row": "Get Row", + "Gets a single row by its ID from a table.": "Gets a single row by its ID from a table.", + + "List Rows": "List Rows", + "Lists rows from a table with optional search, sorting, and filtering.": "Lists rows from a table with optional search, sorting, and filtering.", "Page": "Page", + "Page number to return. Defaults to 1.": "Page number to return. Defaults to 1.", "Page Size": "Page Size", + "Number of rows to return per page. Maximum 200. Defaults to 100.": "Number of rows to return per page. Maximum 200. Defaults to 100.", "Search": "Search", + "Return only rows whose cell data matches this search term.": "Return only rows whose cell data matches this search term.", "Order By": "Order By", - "Filter Type": "Filter Type", + "Field name to sort by. Prefix with **-** for descending or **+** for ascending. Example: `-Name` sorts by Name Z→A.": "Field name to sort by. Prefix with **-** for descending or **+** for ascending. Example: `-Name` sorts by Name Z→A.", + "Filter Combination": "Filter Combination", + "How to combine multiple filters. **AND** requires all filters to match; **OR** requires any one filter to match.": "How to combine multiple filters. 
**AND** requires all filters to match; **OR** requires any one filter to match.", + "AND — all filters must match": "AND — all filters must match", + "OR — any filter can match": "OR — any filter can match", + "**How to add filters** (optional):\n\nEach filter is a JSON object with three keys:\n- `field` — numeric field ID (in Baserow, click the field header; the ID appears in the page URL)\n- `type` — operator: `equal`, `not_equal`, `contains`, `contains_not`, `higher_than`, `lower_than`, `is_empty`, `is_not_empty`\n- `value` — the value to compare against\n\nExample: `{\"field\": 123, \"type\": \"equal\", \"value\": \"Active\"}`": "**How to add filters** (optional):\n\nEach filter is a JSON object with three keys:\n- `field` — numeric field ID (in Baserow, click the field header; the ID appears in the page URL)\n- `type` — operator: `equal`, `not_equal`, `contains`, `contains_not`, `higher_than`, `lower_than`, `is_empty`, `is_not_empty`\n- `value` — the value to compare against\n\nExample: `{\"field\": 123, \"type\": \"equal\", \"value\": \"Active\"}`", "Filters": "Filters", + "Each entry is a JSON object with \"field\" (numeric ID), \"type\" (operator), and \"value\". Leave empty to return all rows.": "Each entry is a JSON object with \"field\" (numeric ID), \"type\" (operator), and \"value\". Leave empty to return all rows.", + + "Find Row": "Find Row", + "Finds a row by matching a field value. Returns the first match.": "Finds a row by matching a field value. Returns the first match.", "Field": "Field", + "Select the field to search by.": "Select the field to search by.", "Field Value": "Field Value", - "View": "View", - "Aggregation Type": "Aggregation Type", + "The value to search for (exact match).": "The value to search for (exact match).", + + "Update Row": "Update Row", + "Updates fields in an existing row. Empty values are skipped. To clear a field, use Clean Row.": "Updates fields in an existing row. Empty values are skipped. 
To clear a field, use Clean Row.", + + "Upsert Row": "Upsert Row", + "Creates a new row or updates an existing one by matching a field value.": "Creates a new row or updates an existing one by matching a field value.", + "Match Field": "Match Field", + "Select the field to search for an existing row.": "Select the field to search for an existing row.", + "Match Value": "Match Value", + "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.": "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.", + + "Delete Row": "Delete Row", + "Deletes an existing row.": "Deletes an existing row.", + + "Clean Row": "Clean Row", + "Sets all fields in a row to empty/null. To update only specific fields, use Update Row instead.": "Sets all fields in a row to empty/null. To update only specific fields, use Update Row instead.", + + "Batch Create Rows": "Batch Create Rows", + "Creates multiple rows in a single request. Accepts up to 200 rows.": "Creates multiple rows in a single request. Accepts up to 200 rows.", "Rows": "Rows", + "A JSON array of objects. Each object represents a row with field names as keys.": "A JSON array of objects. Each object represents a row with field names as keys.", + + "Batch Update Rows": "Batch Update Rows", + "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.": "Updates multiple rows in a single request. Each row must include an \"id\" field. Accepts up to 200 rows.", + "A JSON array of objects. Each object must include an \"id\" field and the fields to update.": "A JSON array of objects. Each object must include an \"id\" field and the fields to update.", + + "Batch Delete Rows": "Batch Delete Rows", + "Deletes multiple rows in a single request. Accepts up to 200 row IDs.": "Deletes multiple rows in a single request. 
Accepts up to 200 row IDs.", "Row IDs": "Row IDs", - "Method": "Method", - "Headers": "Headers", - "Query Parameters": "Query Parameters", - "Body Type": "Body Type", - "Body": "Body", - "Response is Binary ?": "Response is Binary ?", - "No Error on Failure": "No Error on Failure", - "Timeout (in seconds)": "Timeout (in seconds)", - "Follow redirects": "Follow redirects", - "Select the table.": "Select the table.", - "Select the row.": "Select the row.", - "Page number to return. Defaults to 1.": "Page number to return. Defaults to 1.", - "Number of rows to return per page. Defaults to 100.": "Number of rows to return per page. Defaults to 100.", - "If provided only rows with cell data that matches the search query are going to be returned.": "If provided only rows with cell data that matches the search query are going to be returned.", - "If provided rows will be order by specific field.Use **-** sign for descending / **+** sing for ascending ordering.\n Example. \"-My Field\" will return rows in descending order based on \"My Field\" field.": "If provided rows will be order by specific field.Use **-** sign for descending / **+** sing for ascending ordering.\n Example. \"-My Field\" will return rows in descending order based on \"My Field\" field.", - "When AND is selected, all filters must match. When OR is selected, any filter can match.": "When AND is selected, all filters must match. When OR is selected, any filter can match.", - "List of filters. Each filter is an object with \"field\" (field ID as number), \"type\" (operator), and \"value\" (filter value).": "List of filters. Each filter is an object with \"field\" (field ID as number), \"type\" (operator), and \"value\" (filter value).", - "Select the field to search by.": "Select the field to search by.", - "The value to search for (exact match).": "The value to search for (exact match).", + "Numeric IDs of the rows to delete. 
You can get row IDs from the List Rows or Find Row actions.": "Numeric IDs of the rows to delete. You can get row IDs from the List Rows or Find Row actions.", + + "Aggregate Field": "Aggregate Field", + "Calculates an aggregation (sum, average, min, max, count, etc.) over all values of a field in a grid view.": "Calculates an aggregation (sum, average, min, max, count, etc.) over all values of a field in a grid view.", + "View": "View", "Select the grid view to aggregate.": "Select the grid view to aggregate.", "Select the field to aggregate.": "Select the field to aggregate.", - "Sum, average, min, max, std_dev and variance only work on number fields.": "Sum, average, min, max, std_dev and variance only work on number fields.", - "A JSON array of objects. Each object represents a row with field names as keys.": "A JSON array of objects. Each object represents a row with field names as keys.", - "A JSON array of objects. Each object must include an \"id\" field and the fields to update.": "A JSON array of objects. Each object must include an \"id\" field and the fields to update.", - "List of row IDs to delete.": "List of row IDs to delete.", - "Authorization headers are injected automatically from your connection.": "Authorization headers are injected automatically from your connection.", - "Enable for files like PDFs, images, etc.": "Enable for files like PDFs, images, etc.", - "AND": "AND", - "OR": "OR", + "Aggregation Type": "Aggregation Type", + "The calculation to run over the field. **Sum, Average, Min, Max, Median, Std Dev, and Variance** only work with number fields.": "The calculation to run over the field. **Sum, Average, Min, Max, Median, Std Dev, and Variance** only work with number fields.", "Sum": "Sum", "Average": "Average", "Min": "Min", @@ -86,6 +108,23 @@ "Median": "Median", "Standard deviation": "Standard deviation", "Variance": "Variance", + + "Upload File": "Upload File", + "Uploads a file to Baserow from a URL. 
Returns the uploaded file object that can be used in file fields. Requires Email & Password (JWT) authentication — Database Tokens do not have access to the user-files endpoint.": "Uploads a file to Baserow from a URL. Returns the uploaded file object that can be used in file fields. Requires Email & Password (JWT) authentication — Database Tokens do not have access to the user-files endpoint.", + "File URL": "File URL", + "The public URL of the file to upload to Baserow.": "The public URL of the file to upload to Baserow.", + + "Custom API Call": "Custom API Call", + "Make a custom API call to a specific endpoint": "Make a custom API call to a specific endpoint", + "Method": "Method", + "Headers": "Headers", + "Query Parameters": "Query Parameters", + "Body Type": "Body Type", + "Body": "Body", + "Response is Binary ?": "Response is Binary ?", + "No Error on Failure": "No Error on Failure", + "Timeout (in seconds)": "Timeout (in seconds)", + "Follow redirects": "Follow redirects", "GET": "GET", "POST": "POST", "PATCH": "PATCH", @@ -96,39 +135,29 @@ "JSON": "JSON", "Form Data": "Form Data", "Raw": "Raw", - "Row Created": "Row Created", - "Row Updated": "Row Updated", - "Row Deleted": "Row Deleted", - "Row Event": "Row Event", - "Rows Created (Batch)": "Rows Created (Batch)", - "Rows Updated (Batch)": "Rows Updated (Batch)", - "Rows Deleted (Batch)": "Rows Deleted (Batch)", + "Authorization headers are injected automatically from your connection.": "Authorization headers are injected automatically from your connection.", + "Enable for files like PDFs, images, etc.": "Enable for files like PDFs, images, etc.", + "Markdown": "Markdown", + + "New Row": "New Row", "Triggers when a new row is created in a Baserow table.": "Triggers when a new row is created in a Baserow table.", + "Updated Row": "Updated Row", "Triggers when an existing row is updated in a Baserow table.": "Triggers when an existing row is updated in a Baserow table.", + "Deleted Row": "Deleted Row", 
"Triggers when a row is deleted from a Baserow table.": "Triggers when a row is deleted from a Baserow table.", - "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated Row Created, Row Updated, or Row Deleted triggers.": "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated Row Created, Row Updated, or Row Deleted triggers.", + "Any Row Change": "Any Row Change", + "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated New Row, Updated Row, or Deleted Row triggers.": "Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated New Row, Updated Row, or Deleted Row triggers.", + "New Rows (Batch)": "New Rows (Batch)", "Triggers when new rows are created in a Baserow table. Returns all rows from the event as a single batch.": "Triggers when new rows are created in a Baserow table. Returns all rows from the event as a single batch.", + "Updated Rows (Batch)": "Updated Rows (Batch)", "Triggers when existing rows are updated in a Baserow table. Returns all rows from the event as a single batch.": "Triggers when existing rows are updated in a Baserow table. Returns all rows from the event as a single batch.", + "Deleted Rows (Batch)": "Deleted Rows (Batch)", "Triggers when rows are deleted from a Baserow table. Returns all deleted row IDs from the event as a single batch.": "Triggers when rows are deleted from a Baserow table. Returns all deleted row IDs from the event as a single batch.", - "Authentication": "Authentication", - "Authentication Method": "Authentication Method", - "Choose how you want to authenticate with Baserow:\n\n**Database Token** — recommended. Per-table CRUD scoping, compatible with 2FA accounts. Triggers require manual webhook setup.\n 1. Log in to your Baserow account.\n 2. 
Click on your profile picture (top-left) and go to **Settings → Database tokens**.\n 3. Create a new token, then click **:** beside the token name to copy it.\n 4. Paste it into **Database Token** below. Leave **Email** and **Password** empty.\n\n**Email & Password (JWT)** — workspace-wide access, enables automatic webhook registration for triggers. Not compatible with accounts that have 2FA enabled.\n 1. Fill in **Email** and **Password** with your Baserow login credentials. Leave **Database Token** empty.\n\nIn both modes, set **API URL** to your Baserow instance (default: `https://api.baserow.io`).": "Choose how you want to authenticate with Baserow:\n\n**Database Token** — recommended. Per-table CRUD scoping, compatible with 2FA accounts. Triggers require manual webhook setup.\n 1. Log in to your Baserow account.\n 2. Click on your profile picture (top-left) and go to **Settings → Database tokens**.\n 3. Create a new token, then click **:** beside the token name to copy it.\n 4. Paste it into **Database Token** below. Leave **Email** and **Password** empty.\n\n**Email & Password (JWT)** — workspace-wide access, enables automatic webhook registration for triggers. Not compatible with accounts that have 2FA enabled.\n 1. Fill in **Email** and **Password** with your Baserow login credentials. Leave **Database Token** empty.\n\nIn both modes, set **API URL** to your Baserow instance (default: `https://api.baserow.io`).", - "Database Token is recommended. Use Email & Password (JWT) only if you need automatic webhook registration on triggers.": "Database Token is recommended. Use Email & Password (JWT) only if you need automatic webhook registration on triggers.", - "Required if Authentication Method is **Database Token**. Leave empty for JWT.": "Required if Authentication Method is **Database Token**. Leave empty for JWT.", - "Required if Authentication Method is **Email & Password (JWT)**. 
Leave empty for Database Token.": "Required if Authentication Method is **Email & Password (JWT)**. Leave empty for Database Token.", - "Markdown": "Markdown", - "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows created**.\n6. Click **Save**.\n": "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows created**.\n6. Click **Save**.\n", - "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows updated**.\n6. Click **Save**.\n": "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows updated**.\n6. Click **Save**.\n", - "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows deleted**.\n6. Click **Save**.\n": "\n## Setup Instructions\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. 
Paste the following URL into the endpoint field:\n```text\n{{webhookUrl}}\n```\n5. Under events, select **Rows deleted**.\n6. Click **Save**.\n", - "Upsert Row": "Upsert Row", - "Creates a new row or updates an existing one by matching a field value.": "Creates a new row or updates an existing one by matching a field value.", - "Match Field": "Match Field", - "Select the field to search for an existing row.": "Select the field to search for an existing row.", - "Match Value": "Match Value", - "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.": "The value to search for (exact match). If a row with this value is found it will be updated; otherwise a new row is created.", - "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.": "When enabled, single/multi-select values that do not yet exist in the field will be added before creating or updating the row. Existing options are preserved.", - "Upload File": "Upload File", - "Uploads a file to Baserow from a URL. Returns the uploaded file object that can be used in file fields.": "Uploads a file to Baserow from a URL. Returns the uploaded file object that can be used in file fields.", - "File URL": "File URL", - "The public URL of the file to upload to Baserow.": "The public URL of the file to upload to Baserow." + + "Webhook Setup": "Webhook Setup", + "✅ **Webhook auto-registered** — no manual setup needed. The webhook is created and removed automatically when you enable or disable this trigger.": "✅ **Webhook auto-registered** — no manual setup needed. The webhook is created and removed automatically when you enable or disable this trigger.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. 
Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created**.\n6. Click **Save**.": "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created**.\n6. Click **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows updated**.\n6. Click **Save**.": "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows updated**.\n6. Click **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows deleted**.\n6. Click **Save**.": "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows deleted**.\n6. Click **Save**.", + "**Manual webhook setup required** (Database Token auth):\n\n1. 
In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created, Rows updated, and Rows deleted**.\n6. Click **Save**.": "**Manual webhook setup required** (Database Token auth):\n\n1. In Baserow, click the **···** menu beside your table and select **Webhooks**.\n2. Click **Create webhook +**.\n3. Set the HTTP method to **POST**.\n4. Paste this URL into the endpoint field:\n```\n{{webhookUrl}}\n```\n5. Under **Events**, select **Rows created, Rows updated, and Rows deleted**.\n6. Click **Save**." } diff --git a/packages/pieces/community/baserow/src/index.ts b/packages/pieces/community/baserow/src/index.ts index 2f51233c3d2..d0972df53ca 100644 --- a/packages/pieces/community/baserow/src/index.ts +++ b/packages/pieces/community/baserow/src/index.ts @@ -51,17 +51,17 @@ export const baserow = createPiece({ authors: ["kishanprmr", "MoShizzle", "abuaboud", 'bst1n', 'sanket-a11y', 'onyedikachi-david'], actions: [ createRowAction, - deleteRowAction, getRowAction, listRowsAction, - updateRowAction, findRowAction, + updateRowAction, + upsertRowAction, + deleteRowAction, cleanRowAction, - aggregateFieldAction, batchCreateRowsAction, batchUpdateRowsAction, batchDeleteRowsAction, - upsertRowAction, + aggregateFieldAction, uploadFileAction, createCustomApiCallAction({ baseUrl: (auth) => { diff --git a/packages/pieces/community/baserow/src/lib/triggers/row-event.ts b/packages/pieces/community/baserow/src/lib/triggers/row-event.ts index b0fe30e05c2..9021bac535f 100644 --- a/packages/pieces/community/baserow/src/lib/triggers/row-event.ts +++ b/packages/pieces/community/baserow/src/lib/triggers/row-event.ts @@ -13,7 +13,7 @@ export const rowEventTrigger = createTrigger({ auth: baserowAuth, displayName: 'Any Row Change', description: - 'Triggers when a row is created, updated, or deleted in 
a Baserow table. To react to only one event type, use the dedicated Row Created, Row Updated, or Row Deleted triggers.', + 'Triggers when a row is created, updated, or deleted in a Baserow table. To react to only one event type, use the dedicated New Row, Updated Row, or Deleted Row triggers.', type: TriggerStrategy.WEBHOOK, props: { table_id: baserowCommon.tableId(), From 23b110864748d7103984bc12019df3daa8f6a49c Mon Sep 17 00:00:00 2001 From: David Anyatonwu <51977119+onyedikachi-david@users.noreply.github.com> Date: Thu, 7 May 2026 09:23:37 +0100 Subject: [PATCH 5/7] fix(pieces): connectuc logo (#13140) --- bun.lock | 12 ++++++++++++ packages/pieces/community/connectuc/package.json | 2 +- packages/pieces/community/connectuc/src/index.ts | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/bun.lock b/bun.lock index 00c0f30fc17..64fa1f2bfc1 100644 --- a/bun.lock +++ b/bun.lock @@ -1856,6 +1856,16 @@ "@types/xml2js": "0.4.14", }, }, + "packages/pieces/community/connectuc": { + "name": "@activepieces/piece-connectuc", + "version": "0.0.2", + "dependencies": { + "@activepieces/pieces-common": "workspace:*", + "@activepieces/pieces-framework": "workspace:*", + "@activepieces/shared": "workspace:*", + "tslib": "2.6.2", + }, + }, "packages/pieces/community/constant-contact": { "name": "@activepieces/piece-constant-contact", "version": "0.2.4", @@ -8712,6 +8722,8 @@ "@activepieces/piece-connections": ["@activepieces/piece-connections@workspace:packages/pieces/core/connections"], + "@activepieces/piece-connectuc": ["@activepieces/piece-connectuc@workspace:packages/pieces/community/connectuc"], + "@activepieces/piece-constant-contact": ["@activepieces/piece-constant-contact@workspace:packages/pieces/community/constant-contact"], "@activepieces/piece-contentful": ["@activepieces/piece-contentful@workspace:packages/pieces/community/contentful"], diff --git a/packages/pieces/community/connectuc/package.json b/packages/pieces/community/connectuc/package.json 
index afa46669b9d..25de328efda 100644 --- a/packages/pieces/community/connectuc/package.json +++ b/packages/pieces/community/connectuc/package.json @@ -1,6 +1,6 @@ { "name": "@activepieces/piece-connectuc", - "version": "0.0.1", + "version": "0.0.2", "main": "./dist/src/index.js", "types": "./dist/src/index.d.ts", "scripts": { diff --git a/packages/pieces/community/connectuc/src/index.ts b/packages/pieces/community/connectuc/src/index.ts index b37bb21206d..d429bd2ab56 100644 --- a/packages/pieces/community/connectuc/src/index.ts +++ b/packages/pieces/community/connectuc/src/index.ts @@ -60,7 +60,7 @@ export const connectuc = createPiece({ displayName: "ConnectUC", auth: connectucAuth, minimumSupportedRelease: '0.36.1', - logoUrl: "https://cuc-media.s3.us-east-1.amazonaws.com/cuc_logo_120x120.png", + logoUrl: "https://cdn.activepieces.com/pieces/connectuc.png", authors: ['dranes'], actions: [createContactAction, doNotDisturbAction, findCdrAction, initiateCallAction, sendSmsAction, updateCdrAction], triggers: [newRecording, newCallTranscription, newCallSummary, newCdr, newIncomingCall, newOutgoingCall, newVoicemail, newSms], From 86e753c25b148b378fd21e3a68ef1eb37c77a9e5 Mon Sep 17 00:00:00 2001 From: Hazem Adel Date: Thu, 7 May 2026 12:42:04 +0300 Subject: [PATCH 6/7] feat(chat): redesign AI chat UX with thinking, tiers, and platform-scoped experience (#13138) --- .../api/src/app/ee/chat/chat-service.ts | 80 ++- .../prompts/chat-project-context-none.md | 2 +- .../prompts/chat-project-context-selected.md | 5 +- .../src/assets/prompts/chat-system-prompt.md | 33 +- packages/shared/package.json | 2 +- .../src/lib/management/ai-providers/index.ts | 10 + .../web/public/locales/en/translation.json | 43 ++ .../app/routes/chat-with-ai/ai-chat-box.tsx | 126 ++-- .../components/activity-accordion.tsx | 552 ++++++++++++++++++ .../components/build-progress-card.tsx | 73 ++- .../chat-with-ai/components/chat-input.tsx | 8 +- .../chat-with-ai/components/chat-message.tsx | 282 
++++----- .../components/chat-model-selector.tsx | 128 +++- .../components/chat-project-selector.tsx | 125 ---- .../components/chat-thinking-loader.tsx | 12 +- .../components/connected-apps-list.tsx | 161 ----- .../components/message-content.tsx | 8 +- .../components/tool-call-group.tsx | 206 ------- .../web/src/features/chat/lib/use-chat.ts | 17 +- 19 files changed, 1103 insertions(+), 770 deletions(-) create mode 100644 packages/web/src/app/routes/chat-with-ai/components/activity-accordion.tsx delete mode 100644 packages/web/src/app/routes/chat-with-ai/components/chat-project-selector.tsx delete mode 100644 packages/web/src/app/routes/chat-with-ai/components/connected-apps-list.tsx delete mode 100644 packages/web/src/app/routes/chat-with-ai/components/tool-call-group.tsx diff --git a/packages/server/api/src/app/ee/chat/chat-service.ts b/packages/server/api/src/app/ee/chat/chat-service.ts index 9ae4fd09d61..0777836ea06 100644 --- a/packages/server/api/src/app/ee/chat/chat-service.ts +++ b/packages/server/api/src/app/ee/chat/chat-service.ts @@ -1,4 +1,5 @@ import { + ACTIVEPIECES_CHAT_TIERS, ActivepiecesError, AIProviderModelType, AIProviderName, @@ -15,6 +16,7 @@ import { spreadIfDefined, UpdateChatConversationRequest, } from '@activepieces/shared' +import { SharedV3ProviderOptions } from '@ai-sdk/provider' import { createUIMessageStream, LanguageModel, ModelMessage, stepCountIs, streamText } from 'ai' import { FastifyBaseLogger } from 'fastify' import { aiProviderService } from '../../ai/ai-provider-service' @@ -214,11 +216,13 @@ export const chatService = (log: FastifyBaseLogger) => ({ const gatedTools = chatMcp.withApprovalGates({ mcpToolSet, writer, log }) const tools = { ...localTools, ...gatedTools } + const sanitizedMessages = stripThinkingBlocks(messagesForLlm) const textStream = streamText({ model, system: systemPrompt, - messages: messagesForLlm, + messages: sanitizedMessages, tools, + providerOptions: buildThinkingOptions({ provider: 
providerConfig.provider, modelId: modelName }), stopWhen: stepCountIs(MAX_STEPS), onStepFinish: ({ finishReason, usage }) => { log.debug({ conversationId, finishReason, usage }, 'Chat step finished') @@ -355,6 +359,80 @@ async function resolveCompactionState({ conversation, allMessages, systemPromptL return result } +function stripThinkingBlocks(messages: ModelMessage[]): ModelMessage[] { + const hasThinking = messages.some( + (msg) => msg.role === 'assistant' && Array.isArray(msg.content) + && (msg.content as Array>).some( + (part) => part.type === 'reasoning' || part.type === 'thinking', + ), + ) + if (!hasThinking) return messages + + return messages + .map((msg) => { + if (msg.role !== 'assistant' || !Array.isArray(msg.content)) { + return msg + } + const filtered = (msg.content as Array>).filter( + (part) => part.type !== 'reasoning' && part.type !== 'thinking', + ) + if (filtered.length === msg.content.length) { + return msg + } + if (filtered.length === 0) return null + return { ...msg, content: filtered } + }) + .filter((msg): msg is ModelMessage => msg !== null) +} + +const TIER_EFFORT: Record = { + fast: { anthropicBudget: 5_000, openrouterEffort: 'low' }, + smart: { anthropicBudget: 10_000, openrouterEffort: 'medium' }, + premium: { anthropicBudget: 20_000, openrouterEffort: 'high' }, +} + +const DEFAULT_EFFORT = TIER_EFFORT.smart + +function resolveEffort({ modelId }: { modelId: string }): { anthropicBudget: number, openrouterEffort: string } { + const tier = ACTIVEPIECES_CHAT_TIERS.find((t) => t.modelId === modelId) + if (tier) { + return TIER_EFFORT[tier.id] ?? 
DEFAULT_EFFORT + } + return DEFAULT_EFFORT +} + +const THINKING_CAPABLE_MODELS = new Set([ + 'claude-sonnet-4-6', 'claude-opus-4-7', 'claude-haiku-4-5', + 'claude-sonnet-4.6', 'claude-opus-4.7', 'claude-haiku-4.5', +]) + +function supportsThinking({ modelId }: { modelId: string }): boolean { + const bareModel = modelId.replace(/^[^/]+\//, '') + return THINKING_CAPABLE_MODELS.has(bareModel) +} + +function buildThinkingOptions({ provider, modelId }: { provider: AIProviderName, modelId: string }): SharedV3ProviderOptions { + if (!supportsThinking({ modelId })) return {} + const effort = resolveEffort({ modelId }) + switch (provider) { + case AIProviderName.ANTHROPIC: + return { + anthropic: { + thinking: { type: 'enabled', budgetTokens: effort.anthropicBudget }, + }, + } + case AIProviderName.ACTIVEPIECES: + case AIProviderName.OPENROUTER: + return { + openrouter: { + reasoning: { effort: effort.openrouterEffort }, + }, + } + default: + return {} + } +} + type CreateConversationParams = { platformId: string userId: string diff --git a/packages/server/api/src/assets/prompts/chat-project-context-none.md b/packages/server/api/src/assets/prompts/chat-project-context-none.md index b3cc6efcb9d..b91e999fa18 100644 --- a/packages/server/api/src/assets/prompts/chat-project-context-none.md +++ b/packages/server/api/src/assets/prompts/chat-project-context-none.md @@ -1 +1 @@ -No project is currently selected. Answer the user's question directly. If a tool call requires project context, select the user's project automatically with `ap_select_project` — do not ask. \ No newline at end of file +No project is currently selected. If a tool call requires project context, select the most relevant project silently with `ap_select_project` — do not ask unless you are building an automation (Step 3). 
\ No newline at end of file diff --git a/packages/server/api/src/assets/prompts/chat-project-context-selected.md b/packages/server/api/src/assets/prompts/chat-project-context-selected.md index 192f8cdb990..a63f42773cb 100644 --- a/packages/server/api/src/assets/prompts/chat-project-context-selected.md +++ b/packages/server/api/src/assets/prompts/chat-project-context-selected.md @@ -1,3 +1,2 @@ -You are currently working in project "{{PROJECT_NAME}}" (ID: {{PROJECT_ID}}). -All operations (flows, tables, connections) are scoped to this project. -The project URL is: {{FRONTEND_URL}}/projects/{{PROJECT_ID}} \ No newline at end of file +Active project: "{{PROJECT_NAME}}" (ID: {{PROJECT_ID}}). All tool operations are scoped to this project. +Project URL: {{FRONTEND_URL}}/projects/{{PROJECT_ID}} \ No newline at end of file diff --git a/packages/server/api/src/assets/prompts/chat-system-prompt.md b/packages/server/api/src/assets/prompts/chat-system-prompt.md index 125d6903571..b980ca2dd51 100644 --- a/packages/server/api/src/assets/prompts/chat-system-prompt.md +++ b/packages/server/api/src/assets/prompts/chat-system-prompt.md @@ -14,13 +14,11 @@ Your available projects: -A project is always active (shown in the dropdown below the chat input). All tools operate within it. +Projects exist behind the scenes. Do NOT mention projects unless building an automation or the user explicitly asks about them. All tool operations are scoped to whichever project is active — users don't need to know this. -- If the user mentions a different project by name, switch to it with `ap_select_project`. -- If the user's request clearly targets a different project than the one selected, ask which one using a multi-question block. -- Before building an automation, always confirm the target project using a `project-picker` block (see sequential build process and ui_blocks). - -When presenting project-scoped results, mention which project you are working in. 
+- If a tool call requires project context and none is set, silently select the most relevant project with `ap_select_project`. +- If the user mentions a specific project by name, switch to it silently with `ap_select_project`. +- During automation builds, project selection is handled in Step 3 — see the sequential build process. @@ -140,13 +138,28 @@ Follow these steps IN ORDER when the user wants to build an automation. If the request names specific apps and actions, skip to Step 2. Otherwise, ask ONE question at a time via a multi-question block. Stop and wait. **Step 2 — PROPOSE** -Show an `automation-proposal` block. Stop and wait for approval. +Show an `automation-proposal` block. STOP here — do NOT output anything else in this message. No project-picker, no connection checks, no questions. Wait for the user to click "Build this automation" before proceeding. **Step 3 — CONFIRM PROJECT** -Output a `project-picker` block with 3-5 relevant projects. Always show this — never skip it. Stop and wait. After the user picks, switch with `ap_select_project`. +Only after the user approves the proposal (clicks "Build this automation"), pick the most relevant project from the available list and ask for confirmation using a multi-question block: +```multi-question +title: Project +question: Build this flow inside [Project Name]? +type: choice +- Yes, build it here +- No, change project +``` +If the user picks "Yes, build it here", call `ap_select_project` with that project's ID and proceed to Step 4. +If the user picks "No, change project", output a `project-picker` block with 3-5 relevant projects. After the user picks, switch with `ap_select_project`. **Step 4 — CHECK CONNECTIONS** -Call ap_list_connections. Only show `connection-required` blocks for connections that are MISSING or ERRORED — skip active ones. If all are active, proceed silently. When a connection is created or reconnected via the UI card, it updates silently — no message is sent, do not wait for one. 
+Call ap_list_connections. For each piece needed by the automation: +- **No connection exists**: Show a `connection-required` block so the user can create one. +- **One active connection exists**: Use it silently — no need to ask. +- **Multiple active connections exist**: Show a `connection-picker` block so the user can choose which account to use. NEVER use multi-question for connection selection — always use the connection-picker block. +- **Connection exists but has an error**: Show a `connection-required` block with `status: error` so the user can reconnect. + +When a connection is created or reconnected via the UI card, it updates silently — no message is sent, do not wait for one. After the user resolves all connections and clicks Continue, re-call `ap_list_connections` to get the externalIds of the newly created connections before proceeding. **Step 5 — GATHER CONFIGURATION** @@ -194,7 +207,7 @@ User: "Send me a Slack message when I get a new Gmail email" Step 1: Clear enough. Skip. Step 2: Show automation-proposal. Wait for approval. -Step 3: Show project-picker. User picks "Team 1". +Step 3: Ask "Build this flow inside Team 1?" via multi-question. User picks "Yes, build it here". Call ap_select_project. Step 4: ap_list_connections → Gmail ✓, Slack ✓. Both active. Proceed. Step 5: ap_get_piece_props for Slack send_channel_message → sees "channel" is DROPDOWN. 
ap_resolve_property_options(piece=slack, action=send_channel_message, property=channel, auth=slack_conn_123) diff --git a/packages/shared/package.json b/packages/shared/package.json index 79e03620051..b85424462d4 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "name": "@activepieces/shared", - "version": "0.71.6", + "version": "0.71.7", "type": "commonjs", "sideEffects": false, "main": "./dist/src/index.js", diff --git a/packages/shared/src/lib/management/ai-providers/index.ts b/packages/shared/src/lib/management/ai-providers/index.ts index be1ed8edd54..706db1d834f 100644 --- a/packages/shared/src/lib/management/ai-providers/index.ts +++ b/packages/shared/src/lib/management/ai-providers/index.ts @@ -372,6 +372,16 @@ function getMaxContextTokens({ provider }: { provider: AIProviderName | undefine return PROVIDER_MAX_CONTEXT_TOKENS[provider] ?? DEFAULT_MAX_CONTEXT_TOKENS } +export const ACTIVEPIECES_CHAT_TIERS = [ + { id: 'fast', label: 'Fast', modelId: 'google/gemini-2.5-flash' }, + { id: 'smart', label: 'Smart', modelId: 'anthropic/claude-sonnet-4.6' }, + { id: 'premium', label: 'Premium', modelId: 'anthropic/claude-opus-4.7' }, +] as const + +export const DEFAULT_CHAT_TIER_ID = 'smart' as const + +export type ActivepiecesChatTier = typeof ACTIVEPIECES_CHAT_TIERS[number] + export const aiProviderUtils = { getMaxContextTokens, } diff --git a/packages/web/public/locales/en/translation.json b/packages/web/public/locales/en/translation.json index 6cdf4a801d8..cba83530d79 100644 --- a/packages/web/public/locales/en/translation.json +++ b/packages/web/public/locales/en/translation.json @@ -1549,6 +1549,49 @@ "Thought for a few seconds": "Thought for a few seconds", "Thinking for {duration}...": "Thinking for {duration}...", "Thought for {duration}": "Thought for {duration}", + "Fast": "Fast", + "Smart": "Smart", + "Premium": "Premium", + "stepsCompleted": "{count, plural, =1 {1 step completed} other {# steps completed}}", + 
"Finding the best tools": "Finding the best tools", + "Found the right tools for your task.": "Found the right tools for your task.", + "Searched available integrations.": "Searched available integrations.", + "foundIntegrations": "{count, plural, =1 {Found 1 integration} other {Found # integrations}}", + "Searching integrations": "Searching integrations", + "Calling your connections": "Calling your connections", + "Located your accounts.": "Located your accounts.", + "Checked available connections.": "Checked available connections.", + "foundAccounts": "{count, plural, =1 {Found 1 account} other {Found # accounts}}", + "Finding {name} accounts": "Finding {name} accounts", + "Checking accounts": "Checking accounts", + "Building the automation": "Building the automation", + "Built your automation steps.": "Built your automation steps.", + "Creating flow": "Creating flow", + "Validating the flow": "Validating the flow", + "Validated the flow configuration.": "Validated the flow configuration.", + "Running checks": "Running checks", + "Testing the flow": "Testing the flow", + "Ran tests on your flow.": "Ran tests on your flow.", + "Reviewed your existing flows.": "Reviewed your existing flows.", + "foundFlows": "{count, plural, =1 {Found 1 flow} other {Found # flows}}", + "Listing flows": "Listing flows", + "Queried your tables.": "Queried your tables.", + "foundRecords": "{count, plural, =1 {Found 1 record} other {Found # records}}", + "Searching records": "Searching records", + "Published your flow.": "Published your flow.", + "Publishing": "Publishing", + "Running the action": "Running the action", + "Executed the action.": "Executed the action.", + "Checked your recent runs.": "Checked your recent runs.", + "Reviewing runs": "Reviewing runs", + "Found setup instructions.": "Found setup instructions.", + "Getting setup guide": "Getting setup guide", + "Added": "Added", + "Configuring {name}": "Configuring {name}", + "Running {name}": "Running {name}", + 
"Executing": "Executing", + "Completed a step.": "Completed a step.", + "Processing": "Processing", "Search...": "Search...", "No chats found": "No chats found", "Start your first chat": "Start your first chat", diff --git a/packages/web/src/app/routes/chat-with-ai/ai-chat-box.tsx b/packages/web/src/app/routes/chat-with-ai/ai-chat-box.tsx index 8881aff12b3..6f1c1f07fa7 100644 --- a/packages/web/src/app/routes/chat-with-ai/ai-chat-box.tsx +++ b/packages/web/src/app/routes/chat-with-ai/ai-chat-box.tsx @@ -1,8 +1,13 @@ -import { AIProviderName, ProjectType } from '@activepieces/shared'; +import { + AIProviderName, + PROJECT_COLOR_PALETTE, + Project, + ProjectType, +} from '@activepieces/shared'; import { t } from 'i18next'; import { AlertTriangle, RefreshCw, Square } from 'lucide-react'; import { motion } from 'motion/react'; -import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { useCallback, useEffect, useMemo, useState } from 'react'; import { ChatContainerContent, @@ -25,7 +30,6 @@ import { import { ChatInput } from './components/chat-input'; import { ChatMessage } from './components/chat-message'; import { ChatModelSelector } from './components/chat-model-selector'; -import { ChatProjectSelector } from './components/chat-project-selector'; import { QuickReplies } from './components/message-content'; import { MultiQuestionForm } from './components/multi-question-form'; import { ToolApprovalForm } from './components/tool-approval-form'; @@ -73,6 +77,7 @@ function ChatBoxContent({ messages, modelName, selectedProjectId, + projectSetInSession, isStreaming, wasCancelled, isLoadingHistory, @@ -85,7 +90,7 @@ function ChatBoxContent({ pendingApprovalRequest, } = useAgentChat({ onTitleUpdate, onConversationCreated }); const { data: allProjects } = projectCollectionUtils.useAll(); - const projects = allProjects ?? []; + const projects = useMemo(() => allProjects ?? 
[], [allProjects]); const handleProjectChange = useCallback( (projectId: string | null) => { @@ -98,22 +103,15 @@ function ChatBoxContent({ new Set(), ); - const didAutoSelectProjectRef = useRef(false); - useEffect(() => { - if ( - didAutoSelectProjectRef.current || - selectedProjectId !== null || - initialConversationId - ) - return; - const personalProject = projects.find( - (p) => p.type === ProjectType.PERSONAL, - ); - if (personalProject) { - didAutoSelectProjectRef.current = true; - void setProjectContext(personalProject.id); - } - }, [projects, selectedProjectId, initialConversationId, setProjectContext]); + const activeProject = useMemo( + () => + resolveActiveProject({ + selectedProjectId, + projectSetInSession, + projects, + }), + [selectedProjectId, projectSetInSession, projects], + ); useEffect(() => { if (initialConversationId) { @@ -173,19 +171,13 @@ function ChatBoxContent({ isStreaming={isStreaming} onSend={handleSend} onStop={cancelStream} + activeProject={activeProject} leftActions={ - <> - - - + } /> @@ -269,8 +261,19 @@ function ChatBoxContent({ -
-
+
+
+ {activeProject && ( +
+ {activeProject.name} +
+ )} {hasActiveApproval ? ( - - - + } /> )} @@ -332,6 +329,43 @@ function ChatBoxContent({ ); } +function resolveActiveProject({ + selectedProjectId, + projectSetInSession, + projects, +}: { + selectedProjectId: string | null; + projectSetInSession: boolean; + projects: Project[]; +}): ActiveProjectInfo | undefined { + if (!selectedProjectId || !projectSetInSession) return undefined; + const project = projects.find((p) => p.id === selectedProjectId); + if (!project) return undefined; + + if (project.type === ProjectType.PERSONAL) { + return { + name: t('Personal Project'), + color: '#0a0a0a', + textColor: '#ffffff', + }; + } + + const palette = project.icon?.color + ? PROJECT_COLOR_PALETTE[project.icon.color] + : undefined; + return { + name: project.displayName, + color: palette?.color ?? '#0a0a0a', + textColor: palette?.textColor ?? '#ffffff', + }; +} + +type ActiveProjectInfo = { + name: string; + color: string; + textColor: string; +}; + type AIChatBoxProps = { incognito: boolean; conversationId?: string | null; diff --git a/packages/web/src/app/routes/chat-with-ai/components/activity-accordion.tsx b/packages/web/src/app/routes/chat-with-ai/components/activity-accordion.tsx new file mode 100644 index 00000000000..fb3a273d01e --- /dev/null +++ b/packages/web/src/app/routes/chat-with-ai/components/activity-accordion.tsx @@ -0,0 +1,552 @@ +import { isObject } from '@activepieces/shared'; +import { t } from 'i18next'; +import { ChevronDown } from 'lucide-react'; +import { AnimatePresence, motion } from 'motion/react'; +import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; + +import { DynamicToolPart } from '@/features/chat/lib/chat-types'; +import { PieceIconWithPieceName } from '@/features/pieces/components/piece-icon-from-name'; +import { cn } from '@/lib/utils'; + +import { normalizePieceName } from '../lib/message-parsers'; + +import { LottieLoader } from './chat-thinking-loader'; + +const HIDDEN_TOOL_NAMES = new Set([ + 
'ap_set_session_title', + 'ap_select_project', + 'ap_deselect_project', +]); + +const COLLAPSE_DELAY_MS = 600; + +export function ActivityAccordion({ + toolParts, + reasoningText, + isStreaming, + hasContent, +}: { + toolParts: DynamicToolPart[]; + reasoningText: string; + isStreaming: boolean; + hasContent: boolean; +}) { + const visibleParts = useMemo( + () => toolParts.filter((p) => !HIDDEN_TOOL_NAMES.has(p.toolName)), + [toolParts], + ); + + const steps = useMemo(() => groupIntoSteps(visibleParts), [visibleParts]); + + const [isOpen, setIsOpen] = useState(isStreaming); + const userToggledRef = useRef(false); + const collapseTimerRef = useRef | null>(null); + + const isThinkingOnly = isStreaming && visibleParts.length === 0; + const isActive = isStreaming && visibleParts.length > 0; + const isComplete = !isStreaming && steps.length > 0; + + const elapsedSeconds = useElapsedTimer(isThinkingOnly); + const thinkingSnippet = useReasoningSnippet(reasoningText, isThinkingOnly); + + useEffect(() => { + if (userToggledRef.current) return; + + if (isActive || isThinkingOnly) { + if (collapseTimerRef.current) { + clearTimeout(collapseTimerRef.current); + collapseTimerRef.current = null; + } + setIsOpen(true); + } else if (hasContent && isComplete) { + collapseTimerRef.current = setTimeout(() => { + if (!userToggledRef.current) { + setIsOpen(false); + } + collapseTimerRef.current = null; + }, COLLAPSE_DELAY_MS); + } + + return () => { + if (collapseTimerRef.current) { + clearTimeout(collapseTimerRef.current); + } + }; + }, [isActive, isThinkingOnly, hasContent, isComplete]); + + const handleToggle = useCallback(() => { + userToggledRef.current = true; + if (collapseTimerRef.current) { + clearTimeout(collapseTimerRef.current); + collapseTimerRef.current = null; + } + setIsOpen((prev) => !prev); + }, []); + + if (steps.length === 0 && !isStreaming) return null; + + const showLottie = isThinkingOnly || isActive; + const hasExpandableContent = steps.length > 0; + + const label 
= isThinkingOnly + ? t('Thinking...') + : isActive + ? steps[steps.length - 1]?.chipLabel ?? t('Working on it') + : t('stepsCompleted', { count: steps.length }); + + return ( + +
+ {showLottie && } + + + + {label} + + + + {isThinkingOnly && elapsedSeconds > 0 && ( + + {elapsedSeconds}s + + )} + + {hasExpandableContent && ( + + )} +
+ + {isThinkingOnly && thinkingSnippet && ( + + + {thinkingSnippet} + + + )} + + + {isOpen && hasExpandableContent && ( + +
+ {steps.map((step, i) => ( + + + + ))} +
+
+ )} +
+
+ ); +} + +function StepChip({ + step, + isLast, + isStreaming, +}: { + step: ActivityStep; + isLast: boolean; + isStreaming: boolean; +}) { + const isRunning = isLast && isStreaming; + + return ( +
+ {!isRunning && step.summary && ( +

{step.summary}

+ )} +
+ {step.chipLabel} + {step.pieceNames.length > 0 && ( +
+ {step.pieceNames.map((name) => ( + + ))} +
+ )} +
+
+ ); +} + +function groupIntoSteps(parts: DynamicToolPart[]): ActivityStep[] { + if (parts.length === 0) return []; + + const groups: Array<{ action: string; tools: DynamicToolPart[] }> = []; + let currentAction = ''; + let currentTools: DynamicToolPart[] = []; + + for (const part of parts) { + const action = classifyTool(part); + if (action !== currentAction && currentTools.length > 0) { + groups.push({ action: currentAction, tools: [...currentTools] }); + currentTools = []; + } + currentAction = action; + currentTools.push(part); + } + if (currentTools.length > 0) { + groups.push({ action: currentAction, tools: currentTools }); + } + + return groups.map((group) => buildStep(group)); +} + +function classifyTool(part: DynamicToolPart): string { + const name = (part.title ?? part.toolName).toLowerCase(); + if (name.includes('list_pieces') || name.includes('get_piece_props')) + return 'explore'; + if (name.includes('list_connections') || name.includes('resolve_property')) + return 'connections'; + if (name.includes('list_across_projects')) return 'connections'; + if ( + name.includes('create_flow') || + name.includes('build_flow') || + name.includes('add_step') || + name.includes('update_trigger') || + name.includes('update_step') + ) + return 'build'; + if (name.includes('validate')) return 'validate'; + if (name.includes('test')) return 'test'; + if (name.includes('list_flows') || name.includes('flow_structure')) + return 'flows'; + if (name.includes('list_runs') || name.includes('get_run')) return 'runs'; + if ( + name.includes('list_tables') || + name.includes('find_records') || + name.includes('create_table') || + name.includes('insert_records') || + name.includes('manage_fields') + ) + return 'data'; + if (name.includes('lock_and_publish') || name.includes('change_flow_status')) + return 'publish'; + if (name.includes('run_action') || name.includes('run_one_time')) + return 'execute'; + if (name.includes('setup_guide')) return 'setup'; + if 
(name.includes('rename_flow') || name.includes('duplicate_flow')) + return 'flows'; + if ( + name.includes('add_branch') || + name.includes('update_branch') || + name.includes('delete_branch') || + name.includes('delete_step') + ) + return 'build'; + return 'explore'; +} + +function extractAllPieceNames(tools: DynamicToolPart[]): string[] { + const names = new Set(); + for (const tool of tools) { + const input = isObject(tool.input) ? tool.input : undefined; + if (input && typeof input.pieceName === 'string') { + names.add(shortPieceName(input.pieceName)); + } + if ( + input && + isObject(input.settings) && + typeof input.settings.pieceName === 'string' + ) { + names.add(shortPieceName(input.settings.pieceName)); + } + if (tool.state === 'output-available' && isObject(tool.output)) { + const output = tool.output as Record; + if (Array.isArray(output.pieces)) { + for (const p of output.pieces.slice(0, 4)) { + if (isObject(p) && typeof p.name === 'string') { + names.add(shortPieceName(p.name)); + } + } + } + if (Array.isArray(output.data)) { + for (const item of output.data.slice(0, 5)) { + if (isObject(item) && typeof item.pieceName === 'string') { + names.add(shortPieceName(item.pieceName)); + } + } + } + } + } + return [...names].slice(0, 5); +} + +function shortPieceName(name: string): string { + return name.replace(/^@activepieces\/piece-/, ''); +} + +function countResults(tools: DynamicToolPart[]): number { + for (const tool of tools) { + if (tool.state !== 'output-available' || !isObject(tool.output)) continue; + const output = tool.output as Record; + if (Array.isArray(output.data)) return output.data.length; + if (Array.isArray(output.pieces)) return output.pieces.length; + if (Array.isArray(output.connections)) return output.connections.length; + } + return 0; +} + +function buildStep({ + action, + tools, +}: { + action: string; + tools: DynamicToolPart[]; +}): ActivityStep { + const pieceNames = extractAllPieceNames(tools); + const count = 
countResults(tools); + + switch (action) { + case 'explore': { + const chipLabel = + count > 0 + ? t('foundIntegrations', { count }) + : t('Searching integrations'); + return { + summary: + count > 0 + ? t('Found the right tools for your task.') + : t('Searched available integrations.'), + chipLabel, + pieceNames, + }; + } + case 'connections': { + const chipLabel = + count > 0 + ? t('foundAccounts', { count }) + : pieceNames.length > 0 + ? t('Finding {name} accounts', { name: pieceNames[0] }) + : t('Checking accounts'); + return { + summary: + count > 0 + ? t('Located your accounts.') + : t('Checked available connections.'), + chipLabel, + pieceNames, + }; + } + case 'build': + return { + summary: t('Built your automation steps.'), + chipLabel: + pieceNames.length > 0 + ? t('Configuring {name}', { name: pieceNames.join(', ') }) + : t('Creating flow'), + pieceNames, + }; + case 'validate': + return { + summary: t('Validated the flow configuration.'), + chipLabel: t('Running checks'), + pieceNames: [], + }; + case 'test': + return { + summary: t('Ran tests on your flow.'), + chipLabel: t('Running tests'), + pieceNames, + }; + case 'flows': + return { + summary: t('Reviewed your existing flows.'), + chipLabel: count > 0 ? t('foundFlows', { count }) : t('Listing flows'), + pieceNames: [], + }; + case 'data': + return { + summary: t('Queried your tables.'), + chipLabel: + count > 0 ? t('foundRecords', { count }) : t('Searching records'), + pieceNames: [], + }; + case 'publish': + return { + summary: t('Published your flow.'), + chipLabel: t('Publishing'), + pieceNames: [], + }; + case 'execute': + return { + summary: t('Executed the action.'), + chipLabel: + pieceNames.length > 0 + ? 
t('Running {name}', { name: pieceNames[0] }) + : t('Executing'), + pieceNames, + }; + case 'runs': + return { + summary: t('Checked your recent runs.'), + chipLabel: t('Reviewing runs'), + pieceNames: [], + }; + case 'setup': + return { + summary: t('Found setup instructions.'), + chipLabel: t('Getting setup guide'), + pieceNames: [], + }; + default: + return { + summary: t('Completed a step.'), + chipLabel: t('Searching integrations'), + pieceNames, + }; + } +} + +function useElapsedTimer(isActive: boolean): number { + const startRef = useRef(null); + const [seconds, setSeconds] = useState(0); + + useEffect(() => { + if (isActive) { + startRef.current = startRef.current ?? Date.now(); + const interval = setInterval(() => { + if (startRef.current) { + setSeconds( + Math.max(1, Math.floor((Date.now() - startRef.current) / 1000)), + ); + } + }, 1000); + return () => clearInterval(interval); + } + startRef.current = null; + setSeconds(0); + return undefined; + }, [isActive]); + + return seconds; +} + +const SNIPPET_PREFIXES = [ + /^the user\b/i, + /^i need to\b/i, + /^i should\b/i, + /^i('ll|'ve| will| have| can| want| am)\b/i, + /^let me\b/i, + /^(?:okay|alright|hmm|wait),?\s*/i, + /^(?:now|first|next|so),?\s*/i, +]; + +function useReasoningSnippet( + reasoningText: string, + isActive: boolean, +): string | null { + const [snippet, setSnippet] = useState(null); + const lastLengthRef = useRef(0); + + useEffect(() => { + if (!isActive || !reasoningText) { + setSnippet(null); + lastLengthRef.current = 0; + return; + } + if (reasoningText.length <= lastLengthRef.current + 20) return; + lastLengthRef.current = reasoningText.length; + + const sentences = reasoningText + .split(/[.\n]/) + .filter((s) => s.trim().length > 8); + const latest = sentences[sentences.length - 1]; + if (!latest) return; + + let cleaned = latest.trim(); + for (const prefix of SNIPPET_PREFIXES) { + cleaned = cleaned.replace(prefix, ''); + } + cleaned = cleaned.trim(); + if (cleaned.length < 6) 
return; + + const capitalized = cleaned.charAt(0).toUpperCase() + cleaned.slice(1); + const maxLen = 50; + if (capitalized.length <= maxLen) { + setSnippet(capitalized + '...'); + } else { + const lastSpace = capitalized.lastIndexOf(' ', maxLen); + setSnippet( + (lastSpace > 15 + ? capitalized.slice(0, lastSpace) + : capitalized.slice(0, maxLen)) + '...', + ); + } + }, [reasoningText, isActive]); + + return snippet; +} + +type ActivityStep = { + summary: string; + chipLabel: string; + pieceNames: string[]; +}; diff --git a/packages/web/src/app/routes/chat-with-ai/components/build-progress-card.tsx b/packages/web/src/app/routes/chat-with-ai/components/build-progress-card.tsx index ccc8bede15d..0818bd5a93f 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/build-progress-card.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/build-progress-card.tsx @@ -10,14 +10,21 @@ import { cn } from '@/lib/utils'; import { BuildProgressData, normalizePieceName } from '../lib/message-parsers'; -type StepStatus = 'queued' | 'configuring' | 'validating' | 'ready' | 'error'; +type StepStatus = + | 'queued' + | 'added' + | 'configuring' + | 'validating' + | 'ready' + | 'error'; const STEP_ORDER: Record = { queued: 0, - configuring: 1, - validating: 2, - ready: 3, - error: 3, + added: 1, + configuring: 2, + validating: 3, + ready: 4, + error: 4, }; const ANIMATION_DELAY_MS = 350; @@ -60,6 +67,8 @@ function computeTargetStatuses({ return statuses; } + let completedCount = 0; + for (const tool of buildTools) { const name = tool.toolName; const isCompleted = tool.state === 'output-available'; @@ -71,7 +80,7 @@ function computeTargetStatuses({ if (name === 'ap_build_flow') { if (isCompleted) { - statuses.fill('configuring'); + statuses.fill('added'); } else if (isRunning) { statuses[0] = 'configuring'; } @@ -79,23 +88,24 @@ function computeTargetStatuses({ } if (isError) { - const firstQueued = statuses.indexOf('queued'); - const idx = firstQueued >= 0 ? 
firstQueued : statuses.length - 1; + const idx = Math.min(completedCount, statuses.length - 1); statuses[idx] = 'error'; continue; } - if (isCompleted || isRunning) { - const firstQueued = statuses.indexOf('queued'); - if (firstQueued >= 0) { - statuses[firstQueued] = 'configuring'; - } + if (isCompleted) { + const idx = Math.min(completedCount, statuses.length - 1); + statuses[idx] = 'ready'; + completedCount++; + } else if (isRunning) { + const idx = Math.min(completedCount, statuses.length - 1); + statuses[idx] = 'configuring'; } } if (isValidating) { for (let i = 0; i < statuses.length; i++) { - if (statuses[i] === 'configuring') { + if (statuses[i] === 'added' || statuses[i] === 'configuring') { statuses[i] = 'validating'; } } @@ -122,6 +132,7 @@ function advanceOneStep({ const next = [...current]; const progression: StepStatus[] = [ 'queued', + 'added', 'configuring', 'validating', 'ready', @@ -250,9 +261,9 @@ export function BuildProgressCard({ animate={{ opacity: 1, y: 0 }} transition={{ duration: reduce ? 0 : 0.3 }} > -
-
-

+
+
+

{progress.title}

@@ -281,7 +292,7 @@ export function BuildProgressCard({
-
+
{progress.steps.map((step, index) => { const status = stepStatuses[index]; @@ -305,25 +316,28 @@ export function BuildProgressCard({ delay: reduce ? 0 : index * 0.06, }} className={cn( - 'rounded-lg border border-dashed p-3 transition-all duration-300', + 'rounded-lg border border-dashed px-3 py-2 transition-all duration-300', status === 'configuring' && 'border-primary/40 bg-primary/5', status === 'validating' && 'border-amber-500/40 bg-amber-500/5', status === 'error' && 'border-destructive/40 bg-destructive/5', + status === 'added' && 'border-green-500/30 bg-green-500/5', (status === 'ready' || status === 'queued') && 'border-muted-foreground/20 bg-muted/20', )} > -
+
{typeLabel}
-
+
- + {step.label}
@@ -360,14 +374,15 @@ export function BuildProgressCard({ initial={reduce ? false : { opacity: 0, y: 8 }} animate={{ opacity: 1, y: 0 }} transition={{ duration: reduce ? 0 : 0.25, delay: 0.1 }} - className="mt-4" + className="mt-3" > {flowUrl && ( )} @@ -382,6 +397,8 @@ function statusLabel(status: StepStatus): string { switch (status) { case 'ready': return t('Ready'); + case 'added': + return t('Added'); case 'configuring': return t('Configuring...'); case 'validating': @@ -403,7 +420,7 @@ function StepConnector({ index, reduce }: { index: number; reduce: boolean }) { duration: reduce ? 0 : 0.16, delay: reduce ? 0 : index * 0.06 - 0.02, }} - className="relative flex h-5 items-center justify-center" + className="relative flex h-3 items-center justify-center" > diff --git a/packages/web/src/app/routes/chat-with-ai/components/chat-input.tsx b/packages/web/src/app/routes/chat-with-ai/components/chat-input.tsx index 00387b1712e..80b58c15dee 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/chat-input.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/chat-input.tsx @@ -15,20 +15,20 @@ import { } from '@/components/prompt-kit/prompt-input'; import { Button } from '@/components/ui/button'; -import { ConnectedAppsList } from './connected-apps-list'; - export function ChatInput({ isStreaming, onSend, onStop, placeholder, leftActions, + activeProject, }: { isStreaming: boolean; onSend: (text: string, files?: File[]) => void; onStop?: () => void; placeholder?: string; leftActions?: React.ReactNode; + activeProject?: { name: string; color: string; textColor: string }; }) { const [value, setValue] = useState(''); const [attachedFiles, setAttachedFiles] = useState([]); @@ -57,7 +57,8 @@ export function ChatInput({ value={value} onValueChange={setValue} onSubmit={handleSubmit} - className="relative z-10 rounded-2xl border border-foreground/20 shadow-none transition-colors hover:border-foreground/40 focus-within:border-foreground/40" + 
className="relative z-10 rounded-2xl border shadow-none transition-colors border-foreground/20 hover:border-foreground/40 focus-within:border-foreground/40" + style={activeProject ? { borderColor: activeProject.color } : undefined} > {attachedFiles.length > 0 && (
@@ -127,7 +128,6 @@ export function ChatInput({ )} -
diff --git a/packages/web/src/app/routes/chat-with-ai/components/chat-message.tsx b/packages/web/src/app/routes/chat-with-ai/components/chat-message.tsx index 5fe0dd13a5d..f1915070bb3 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/chat-message.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/chat-message.tsx @@ -3,8 +3,7 @@ import { Check, Copy, Paperclip, RefreshCw } from 'lucide-react'; import { AnimatePresence, motion } from 'motion/react'; import { forwardRef, - useEffect, - useRef, + useMemo, useState, type ButtonHTMLAttributes, } from 'react'; @@ -15,22 +14,15 @@ import { MessageActions, MessageContent, } from '@/components/prompt-kit/message'; -import { - Reasoning, - ReasoningContent, - ReasoningTrigger, -} from '@/components/prompt-kit/reasoning'; -import { ChatUIMessage } from '@/features/chat/lib/chat-types'; +import { ChatUIMessage, DynamicToolPart } from '@/features/chat/lib/chat-types'; import { cn } from '@/lib/utils'; import { getTextFromParts, parseBuildProgress } from '../lib/message-parsers'; +import { ActivityAccordion } from './activity-accordion'; import { BuildProgressCard } from './build-progress-card'; import { ChatThinkingLoader } from './chat-thinking-loader'; import { MessageContentWithAuth } from './message-content'; -import { ToolCallGroup } from './tool-call-group'; - -const HIDDEN_TOOLS = new Set(['ap_set_session_title', 'ap_select_project']); export function ChatMessage({ message, @@ -150,36 +142,62 @@ function AssistantMessage({ selectedProjectId?: string | null; onSelectProject?: (projectId: string) => void; }) { - const reasoningParts = message.parts.filter( - (p): p is { type: 'reasoning'; text: string } => p.type === 'reasoning', + const allToolParts = useMemo( + () => + message.parts.filter( + (p): p is DynamicToolPart => p.type === 'dynamic-tool', + ), + [message.parts], ); - const thoughts = reasoningParts.map((p) => p.text).join(''); - const hasThoughts = thoughts.length > 0; - const 
dynamicToolParts = message.parts.filter( - (p) => p.type === 'dynamic-tool' && !HIDDEN_TOOLS.has(p.toolName), + const reasoningText = useMemo( + () => + message.parts + .filter( + (p): p is { type: 'reasoning'; text: string } => + p.type === 'reasoning', + ) + .map((p) => p.text) + .join(''), + [message.parts], ); + const hasReasoning = reasoningText.length > 0; const textParts = message.parts.filter( (p): p is { type: 'text'; text: string } => p.type === 'text' && p.text.length > 0, ); const hasContent = textParts.length > 0; - const hasAnyParts = hasThoughts || dynamicToolParts.length > 0 || hasContent; + const hasToolCalls = allToolParts.length > 0; + const hasAnyVisible = hasToolCalls || hasContent || hasReasoning; + + const isWaiting = isStreaming && !hasAnyVisible; + const activityActive = + isWaiting || hasToolCalls || (isStreaming && hasReasoning && !hasContent); + + const [activityEverShown, setActivityEverShown] = useState( + activityActive || hasToolCalls, + ); + if ((activityActive || hasToolCalls) && !activityEverShown) { + setActivityEverShown(true); + } - const isWaiting = isStreaming && !hasAnyParts; - const isThinkingOnly = - isStreaming && hasThoughts && !hasContent && dynamicToolParts.length === 0; - const isThinking = isWaiting || isThinkingOnly; - const thinkingSeconds = useThinkingTimer(isThinking); - const [isReasoningOpen, setIsReasoningOpen] = useState(false); const fullText = getTextFromParts(message.parts); + const hasBuildProgress = useMemo( + () => parseBuildProgress(fullText).progress !== null, + [fullText], + ); + + const showActivity = activityEverShown && !hasBuildProgress; const renderableParts = message.parts.filter( - (p) => - (p.type === 'text' && 'text' in p && p.text.length > 0) || - (p.type === 'dynamic-tool' && !HIDDEN_TOOLS.has(p.toolName)), + (p): p is { type: 'text'; text: string } => + p.type === 'text' && p.text.length > 0, ); + if (!isStreaming && !isLastMessage && !hasContent) { + return null; + } + return (
- - {isWaiting && ( + {showActivity && ( + + )} + + + {hasContent && ( - + {renderTextParts({ + parts: renderableParts, + isStreaming, + isLastMessage, + onSend, + selectedProjectId, + onSelectProject, + allParts: message.parts, + })} )} - {renderParts({ - parts: renderableParts, - isStreaming, - isLastMessage, - onSend, - selectedProjectId, - onSelectProject, - })} - - {isStreaming && !isWaiting && } + {isStreaming && !isWaiting && !showActivity && !hasContent && ( + + )} - {hasContent && !isStreaming && ( - - {hasThoughts && ( - - {thoughts} - - )} - + + {hasContent && !isStreaming && ( + <> @@ -249,141 +262,66 @@ function AssistantMessage({ - {hasThoughts && thinkingSeconds >= 0.1 && ( - - - {formatThinkingTime({ seconds: thinkingSeconds })} - - - )} - - - )} + + )} +
); } -function renderParts({ +function renderTextParts({ parts, isStreaming, onSend, selectedProjectId, onSelectProject, isLastMessage, + allParts, }: { - parts: ChatUIMessage['parts']; + parts: Array<{ type: 'text'; text: string }>; isStreaming: boolean; isLastMessage: boolean; onSend: (text: string, files?: File[]) => void; selectedProjectId?: string | null; onSelectProject?: (projectId: string) => void; + allParts: ChatUIMessage['parts']; }): React.ReactNode[] { - const fullText = parts - .filter((p): p is { type: 'text'; text: string } => p.type === 'text') - .map((p) => p.text) - .join(''); + const fullText = parts.map((p) => p.text).join(''); const { progress: buildProgress } = parseBuildProgress(fullText); - const allToolParts = buildProgress - ? parts.filter((p) => p.type === 'dynamic-tool') - : []; - let buildCardRendered = false; - const nodes: React.ReactNode[] = []; - const toolBuffer: ChatUIMessage['parts'] = []; - function flushTools(key: string) { - if (toolBuffer.length === 0) return; - const snapshot = [...toolBuffer]; - toolBuffer.length = 0; - - if (buildProgress) { - if (!buildCardRendered) { - buildCardRendered = true; - nodes.push( - , - ); - } - } else { - nodes.push( - , - ); - } + if (buildProgress) { + const toolParts = allParts.filter((p) => p.type === 'dynamic-tool'); + nodes.push( + , + ); } - parts.forEach((part, idx) => { - if (part.type === 'dynamic-tool') { - toolBuffer.push(part); - } else if (part.type === 'text') { - flushTools(`tools-before-${idx}`); - nodes.push( - , - ); - } - }); + for (let i = 0; i < parts.length; i++) { + nodes.push( + , + ); + } - flushTools('tools-end'); return nodes; } -function useThinkingTimer(isActive: boolean): number { - const startRef = useRef(null); - const finalRef = useRef(0); - const [seconds, setSeconds] = useState(0); - - useEffect(() => { - if (isActive) { - startRef.current = Date.now(); - } else { - if (startRef.current) { - finalRef.current = (Date.now() - startRef.current) / 1000; - } - 
startRef.current = null; - setSeconds(finalRef.current); - } - }, [isActive]); - - useEffect(() => { - if (!isActive) return; - const interval = setInterval(() => { - if (startRef.current) { - setSeconds((Date.now() - startRef.current) / 1000); - } - }, 100); - return () => clearInterval(interval); - }, [isActive]); - - return seconds; -} - -function formatThinkingTime({ seconds }: { seconds: number }): string { - const rounded = Math.round(seconds * 10) / 10; - return Number.isInteger(rounded) ? `${rounded}s` : `${rounded.toFixed(1)}s`; -} - const CopyIconButton = forwardRef< HTMLButtonElement, { diff --git a/packages/web/src/app/routes/chat-with-ai/components/chat-model-selector.tsx b/packages/web/src/app/routes/chat-with-ai/components/chat-model-selector.tsx index 7a8463beb80..137e78a2cb8 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/chat-model-selector.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/chat-model-selector.tsx @@ -1,7 +1,18 @@ -import { AIProviderName } from '@activepieces/shared'; +import { + ACTIVEPIECES_CHAT_TIERS, + AIProviderName, + DEFAULT_CHAT_TIER_ID, +} from '@activepieces/shared'; import { t } from 'i18next'; -import { Check, ChevronDown, Loader2 } from 'lucide-react'; -import { useEffect, useRef, useState } from 'react'; +import { + Check, + ChevronDown, + Crown, + Loader2, + Sparkles, + Zap, +} from 'lucide-react'; +import React, { useEffect, useRef, useState } from 'react'; import { Button } from '@/components/ui/button'; import { @@ -23,6 +34,117 @@ export function ChatModelSelector({ chatProviderName, selectedModel, onModelChange, +}: ChatModelSelectorProps) { + if (chatProviderName === AIProviderName.ACTIVEPIECES) { + return ( + + ); + } + + return ( + + ); +} + +const TIER_ICONS: Record< + string, + React.ComponentType<{ className?: string }> +> = { + fast: Zap, + smart: Sparkles, + premium: Crown, +}; + +function TierSelector({ + selectedModel, + onModelChange, +}: { + selectedModel: string | null; 
+ onModelChange: (modelId: string) => void; +}) { + const [open, setOpen] = useState(false); + + const autoSelectedRef = useRef(false); + useEffect(() => { + if (!selectedModel && !autoSelectedRef.current) { + autoSelectedRef.current = true; + const defaultTier = ACTIVEPIECES_CHAT_TIERS.find( + (tier) => tier.id === DEFAULT_CHAT_TIER_ID, + ); + if (defaultTier) { + onModelChange(defaultTier.modelId); + } + } + }, [selectedModel, onModelChange]); + + const selectedTier = ACTIVEPIECES_CHAT_TIERS.find( + (tier) => tier.modelId === selectedModel, + ); + const SelectedIcon = selectedTier + ? TIER_ICONS[selectedTier.id] + : TIER_ICONS.smart; + + return ( + + + + + + + + {ACTIVEPIECES_CHAT_TIERS.map((tier) => { + const Icon = TIER_ICONS[tier.id]; + return ( + { + onModelChange(tier.modelId); + setOpen(false); + }} + className="cursor-pointer" + > + {Icon && } + {t(tier.label)} + + + ); + })} + + + + + ); +} + +function ModelDropdown({ + chatProviderName, + selectedModel, + onModelChange, }: ChatModelSelectorProps) { const [open, setOpen] = useState(false); const { data: models = [], isLoading } = diff --git a/packages/web/src/app/routes/chat-with-ai/components/chat-project-selector.tsx b/packages/web/src/app/routes/chat-with-ai/components/chat-project-selector.tsx deleted file mode 100644 index 0355e1549e3..00000000000 --- a/packages/web/src/app/routes/chat-with-ai/components/chat-project-selector.tsx +++ /dev/null @@ -1,125 +0,0 @@ -import { Project, ProjectType } from '@activepieces/shared'; -import { t } from 'i18next'; -import { Check, ChevronDown, FolderOpen, X } from 'lucide-react'; -import { useState } from 'react'; - -import { Button } from '@/components/ui/button'; -import { - Command, - CommandEmpty, - CommandGroup, - CommandInput, - CommandItem, -} from '@/components/ui/command'; -import { - Popover, - PopoverContent, - PopoverTrigger, -} from '@/components/ui/popover'; -import { ApProjectDisplay } from '@/features/projects'; -import { cn } from '@/lib/utils'; 
- -function projectName(project: Project): string { - return project.type === ProjectType.PERSONAL - ? t('Personal Project') - : project.displayName; -} - -export function ChatProjectSelector({ - projects, - selectedProjectId, - onProjectChange, -}: ChatProjectSelectorProps) { - const [open, setOpen] = useState(false); - - const selectedProject = selectedProjectId - ? projects.find((p) => p.id === selectedProjectId) - : null; - - return ( - - - - - ) : ( - <> - - {t('Project')} - - )} - - - - - - {projects.length > 5 && ( - - )} - {t('No project found.')} - - {projects.map((project) => ( - { - onProjectChange(project.id); - setOpen(false); - }} - className="cursor-pointer gap-2" - > - - - - ))} - - - - - ); -} - -type ChatProjectSelectorProps = { - projects: Project[]; - selectedProjectId: string | null; - onProjectChange: (projectId: string | null) => void; -}; diff --git a/packages/web/src/app/routes/chat-with-ai/components/chat-thinking-loader.tsx b/packages/web/src/app/routes/chat-with-ai/components/chat-thinking-loader.tsx index 7a044d5a620..8a1fa813a7a 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/chat-thinking-loader.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/chat-thinking-loader.tsx @@ -8,13 +8,13 @@ import { cn } from '@/lib/utils'; import chatLoadingAnimation from './chat-loading.lottie.json'; -function pickRandomIndex(current: number, length: number): number { +export function pickRandomIndex(current: number, length: number): number { let next = Math.floor(Math.random() * (length - 1)); if (next >= current) next++; return next; } -const MESSAGES = [ +export const THINKING_MESSAGES = [ 'Hold on, thinking real hard', 'Hmm let me figure this out', 'Cooking something up for you', @@ -62,11 +62,11 @@ function ChatThinkingLoader({ showText?: boolean; }) { const [messageIndex, setMessageIndex] = useState(() => - Math.floor(Math.random() * MESSAGES.length), + Math.floor(Math.random() * THINKING_MESSAGES.length), ); const 
rotateMessage = useCallback(() => { - setMessageIndex((i) => pickRandomIndex(i, MESSAGES.length)); + setMessageIndex((i) => pickRandomIndex(i, THINKING_MESSAGES.length)); }, []); useEffect(() => { @@ -93,7 +93,7 @@ function ChatThinkingLoader({ transition={{ duration: 0.2 }} > - {t(MESSAGES[messageIndex])} + {t(THINKING_MESSAGES[messageIndex])} @@ -114,7 +114,7 @@ function ChatThinkingLoader({ ); } -function LottieLoader() { +export function LottieLoader() { const containerRef = useRef(null); useEffect(() => { diff --git a/packages/web/src/app/routes/chat-with-ai/components/connected-apps-list.tsx b/packages/web/src/app/routes/chat-with-ai/components/connected-apps-list.tsx deleted file mode 100644 index 5d536aad2d1..00000000000 --- a/packages/web/src/app/routes/chat-with-ai/components/connected-apps-list.tsx +++ /dev/null @@ -1,161 +0,0 @@ -import { AppConnectionStatus } from '@activepieces/shared'; -import { t } from 'i18next'; -import { motion } from 'motion/react'; -import { useNavigate } from 'react-router-dom'; - -import { Skeleton } from '@/components/ui/skeleton'; -import { - Tooltip, - TooltipContent, - TooltipTrigger, -} from '@/components/ui/tooltip'; -import { appConnectionsQueries } from '@/features/connections'; -import { piecesHooks } from '@/features/pieces'; -import { authenticationSession } from '@/lib/authentication-session'; - -export function ConnectedAppsList() { - const navigate = useNavigate(); - const projectId = authenticationSession.getProjectId() ?? ''; - - const goToConnections = () => { - navigate(authenticationSession.appendProjectRoutePrefix('/connections')); - }; - - const { data, isLoading } = appConnectionsQueries.useAppConnections({ - request: { projectId, limit: 100, cursor: undefined }, - extraKeys: [projectId], - enabled: Boolean(projectId), - }); - - const { pieces } = piecesHooks.usePieces({}); - const pieceMeta = new Map( - (pieces ?? 
[]).map((p) => [ - p.name, - { displayName: p.displayName, logoUrl: p.logoUrl }, - ]), - ); - - if (isLoading) { - return ( -
- -
- {Array.from({ length: 3 }).map((_, i) => ( - - ))} -
-
- ); - } - - const activeConnections = (data?.data ?? []) - .filter((c) => c.status === AppConnectionStatus.ACTIVE) - .slice() - .sort((a, b) => (a.created < b.created ? 1 : -1)); - const orderedPieceNames: string[] = []; - for (const connection of activeConnections) { - if (!orderedPieceNames.includes(connection.pieceName)) { - orderedPieceNames.push(connection.pieceName); - } - } - const uniquePieceNames = dedupePieceNames({ - pieceNames: orderedPieceNames, - pieceMeta, - }); - if (uniquePieceNames.length === 0) return null; - - const visiblePieceNames = uniquePieceNames.slice(0, MAX_VISIBLE); - const overflowCount = uniquePieceNames.length - visiblePieceNames.length; - - return ( - - ); -} - -function dedupePieceNames({ - pieceNames, - pieceMeta, -}: { - pieceNames: string[]; - pieceMeta: Map; -}): string[] { - const seenDisplayNames = new Set(); - const seenLogoUrls = new Set(); - const result: string[] = []; - for (const pieceName of pieceNames) { - const meta = pieceMeta.get(pieceName); - const displayName = meta?.displayName ?? pieceName; - const logoUrl = meta?.logoUrl ?? ''; - if (seenDisplayNames.has(displayName)) continue; - if (logoUrl && seenLogoUrls.has(logoUrl)) continue; - seenDisplayNames.add(displayName); - if (logoUrl) seenLogoUrls.add(logoUrl); - result.push(pieceName); - } - return result; -} - -function PieceLogo({ - displayName, - logoUrl, -}: { - displayName: string; - logoUrl?: string; -}) { - if (!logoUrl) { - return ; - } - return ( - - -
- {displayName} -
-
- {displayName} -
- ); -} - -const MAX_VISIBLE = 10; diff --git a/packages/web/src/app/routes/chat-with-ai/components/message-content.tsx b/packages/web/src/app/routes/chat-with-ai/components/message-content.tsx index f8b8df34ffd..02a75db4716 100644 --- a/packages/web/src/app/routes/chat-with-ai/components/message-content.tsx +++ b/packages/web/src/app/routes/chat-with-ai/components/message-content.tsx @@ -303,10 +303,16 @@ function ConnectionsRequiredCard({ if (cancelled) return; const map: Record = {}; const alreadyActive = new Set(); + const aiErrorPieces = new Set( + connections.filter((c) => c.status === 'error').map((c) => c.piece), + ); for (const { piece, connection } of results) { if (connection) { map[piece] = connection; - if (connection.status === AppConnectionStatus.ACTIVE) { + if ( + connection.status === AppConnectionStatus.ACTIVE && + !aiErrorPieces.has(piece) + ) { alreadyActive.add(piece); } } diff --git a/packages/web/src/app/routes/chat-with-ai/components/tool-call-group.tsx b/packages/web/src/app/routes/chat-with-ai/components/tool-call-group.tsx deleted file mode 100644 index 227d830b13a..00000000000 --- a/packages/web/src/app/routes/chat-with-ai/components/tool-call-group.tsx +++ /dev/null @@ -1,206 +0,0 @@ -import { isObject } from '@activepieces/shared'; -import { t } from 'i18next'; -import { Check, Loader2, Pause } from 'lucide-react'; - -import { - ChainOfThought, - ChainOfThoughtContent, - ChainOfThoughtStep, - ChainOfThoughtTrigger, -} from '@/components/prompt-kit/chain-of-thought'; -import { ToolCallCard } from '@/features/chat/components/tool-call-card'; -import { ChatUIMessage, DynamicToolPart } from '@/features/chat/lib/chat-types'; -import { chatUtils } from '@/features/chat/lib/chat-utils'; - -const PENDING_STATES = new Set([ - 'input-streaming', - 'input-available', - 'approval-requested', - 'approval-responded', -]); - -function isPending(part: DynamicToolPart): boolean { - return PENDING_STATES.has(part.state); -} - -function isStopped(part: 
DynamicToolPart): boolean { - return part.state === 'output-error' || part.state === 'output-denied'; -} - -export function ToolCallGroup({ - toolParts, - isStreaming = false, -}: { - toolParts: ChatUIMessage['parts']; - isStreaming?: boolean; -}) { - const dynamicParts = toolParts.filter( - (p): p is DynamicToolPart => p.type === 'dynamic-tool', - ); - - const groups = groupToolPartsByPhase(dynamicParts); - - if (groups.length === 0) return null; - - return ( - - {groups.map((group, i) => { - const hasPending = group.tools.some(isPending); - const hasError = group.tools.some(isStopped); - const hasRunning = isStreaming && hasPending; - const hasStopped = hasError || (!isStreaming && hasPending); - return ( - - - ) : hasStopped ? ( - - ) : ( - - ) - } - > - {group.label} - - -
- {group.tools.map((part) => ( - - ))} -
-
-
- ); - })} -
- ); -} - -function groupToolPartsByPhase( - parts: DynamicToolPart[], -): Array<{ label: string; tools: DynamicToolPart[] }> { - const visible = parts.filter((p) => !isUtilityTool(p.title ?? p.toolName)); - if (visible.length === 0) return []; - - if (visible.length <= 4) { - return [{ label: describeToolParts(visible), tools: visible }]; - } - - const groups: Array<{ label: string; tools: DynamicToolPart[] }> = []; - let current: DynamicToolPart[] = []; - - for (const part of visible) { - current.push(part); - if (current.length >= 4) { - groups.push({ label: describeToolParts(current), tools: [...current] }); - current = []; - } - } - if (current.length > 0) { - groups.push({ label: describeToolParts(current), tools: current }); - } - - return mergeConsecutiveGroups(groups); -} - -function mergeConsecutiveGroups( - groups: Array<{ label: string; tools: DynamicToolPart[] }>, -): Array<{ label: string; tools: DynamicToolPart[] }> { - if (groups.length <= 1) return groups; - const merged: Array<{ label: string; tools: DynamicToolPart[] }> = [ - groups[0], - ]; - for (let i = 1; i < groups.length; i++) { - const prev = merged[merged.length - 1]; - if (groups[i].label === prev.label) { - prev.tools.push(...groups[i].tools); - } else { - merged.push(groups[i]); - } - } - return merged; -} - -function describeToolParts(parts: DynamicToolPart[]): string { - const contexts: string[] = []; - let primaryAction = ''; - - for (const part of parts) { - const name = (part.title ?? part.toolName).toLowerCase(); - const ctx = chatUtils.extractToolContext({ - input: isObject(part.input) ? 
part.input : undefined, - }); - if (ctx && !contexts.includes(ctx)) contexts.push(ctx); - - if (!primaryAction) { - if (name.includes('build_flow') || name.includes('create_flow')) - primaryAction = 'build'; - else if (name.includes('update_trigger')) primaryAction = 'trigger'; - else if (name.includes('add_step')) primaryAction = 'add'; - else if (name.includes('update_step') || name.includes('test_step')) - primaryAction = 'configure'; - else if (name.includes('test_flow')) primaryAction = 'test'; - else if (name.includes('list_pieces') || name.includes('get_piece_props')) - primaryAction = 'explore'; - else if (name.includes('list_connections')) primaryAction = 'connections'; - else if (name.includes('list_flows') || name.includes('flow_structure')) - primaryAction = 'flows'; - else if (name.includes('list_tables') || name.includes('find_records')) - primaryAction = 'data'; - else if (name.includes('lock_and_publish')) primaryAction = 'publish'; - else if (name.includes('change_flow_status')) { - const status = - isObject(part.input) && typeof part.input.status === 'string' - ? part.input.status - : ''; - primaryAction = status === 'DISABLED' ? 'disable' : 'publish'; - } - } - } - - const subject = contexts.slice(0, 2).join(' & '); - - switch (primaryAction) { - case 'build': - return subject - ? t('Creating {subject} flow', { subject }) - : t('Creating the flow'); - case 'trigger': - return subject - ? t('Setting up {subject} trigger', { subject }) - : t('Setting up the trigger'); - case 'add': - return subject - ? t('Adding {subject} step', { subject }) - : t('Adding a new step'); - case 'configure': - return subject - ? t('Configuring {subject}', { subject }) - : t('Wiring up the steps'); - case 'test': - return subject ? t('Testing {subject}', { subject }) : t('Running tests'); - case 'explore': - return subject - ? 
t('Looking up {subject}', { subject }) - : t('Exploring integrations'); - case 'connections': - return t('Checking connections'); - case 'flows': - return t('Reviewing your flows'); - case 'data': - return t('Querying your data'); - case 'publish': - return t('Publishing the flow'); - case 'disable': - return t('Disabling the flow'); - default: - return t('Working on it'); - } -} - -function isUtilityTool(name: string): boolean { - const lower = name.toLowerCase(); - return lower.includes('toolsearch') || lower.includes('tool_search'); -} diff --git a/packages/web/src/features/chat/lib/use-chat.ts b/packages/web/src/features/chat/lib/use-chat.ts index 000bdc16713..5dcf64f2fc3 100644 --- a/packages/web/src/features/chat/lib/use-chat.ts +++ b/packages/web/src/features/chat/lib/use-chat.ts @@ -171,6 +171,7 @@ export function useAgentChat({ const [selectedProjectId, _setSelectedProjectId] = useState( null, ); + const [projectSetInSession, setProjectSetInSession] = useState(false); const [isLoadingHistory, setIsLoadingHistory] = useState(false); const [localError, setLocalError] = useState(null); const [wasCancelled, setWasCancelled] = useState(false); @@ -351,10 +352,11 @@ export function useAgentChat({ } if (newProjectId !== undefined) { updateSelectedProjectId(newProjectId); + setProjectSetInSession(true); } }, [uiMessages]); - // Sync project context from server after streaming completes (authoritative) + // Sync project context from server after streaming completes const prevStatusRef = useRef(status); useEffect(() => { const wasStreaming = @@ -366,7 +368,11 @@ export function useAgentChat({ void chatApi .getConversation(conversationIdRef.current) .then((conv) => { - updateSelectedProjectId(conv.projectId ?? null); + const projectId = conv.projectId ?? 
null; + updateSelectedProjectId(projectId); + if (projectId) { + setProjectSetInSession(true); + } }) .catch(() => undefined); } @@ -388,6 +394,7 @@ export function useAgentChat({ setConversationIdState(null); setModelNameState(null); updateSelectedProjectId(null); + setProjectSetInSession(false); setUiMessages([]); setLocalError(null); setWasCancelled(false); @@ -505,7 +512,9 @@ export function useAgentChat({ if (convResult.data) { modelNameRef.current = convResult.data.modelName ?? null; setModelNameState(convResult.data.modelName ?? null); + // Set project for backend tool scoping, but don't show it visually (projectSetInSession stays false) updateSelectedProjectId(convResult.data.projectId ?? null); + setProjectSetInSession(false); } setIsLoadingHistory(false); }, @@ -526,6 +535,9 @@ export function useAgentChat({ const setProjectContext = useCallback(async (projectId: string | null) => { const previousProjectId = selectedProjectIdRef.current; updateSelectedProjectId(projectId); + if (projectId) { + setProjectSetInSession(true); + } const convId = conversationIdRef.current; if (!convId) return; const { error: err } = await tryCatch(() => @@ -540,6 +552,7 @@ export function useAgentChat({ conversationId, modelName, selectedProjectId, + projectSetInSession, messages, isStreaming, wasCancelled, From 01c1d7182a0348d7042d3df69c902a64ee9e0d22 Mon Sep 17 00:00:00 2001 From: hugh-codes <166336705+hugh-codes@users.noreply.github.com> Date: Thu, 7 May 2026 21:44:49 +1200 Subject: [PATCH 7/7] fix(YouTube): new-video trigger returns only latest video (#13064) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: Copilot Co-authored-by: David Anyatonwu <51977119+onyedikachi-david@users.noreply.github.com> Co-authored-by: David Anyatonwu --- .../pieces/community/youtube/package.json | 2 +- .../pieces/community/youtube/src/index.ts | 2 +- .../src/lib/triggers/new-video.trigger.ts | 181 +++++++++++------- 3 files changed, 116 
insertions(+), 69 deletions(-) diff --git a/packages/pieces/community/youtube/package.json b/packages/pieces/community/youtube/package.json index 8c44225f7e4..dddd2ca1b63 100644 --- a/packages/pieces/community/youtube/package.json +++ b/packages/pieces/community/youtube/package.json @@ -1,6 +1,6 @@ { "name": "@activepieces/piece-youtube", - "version": "0.4.6", + "version": "0.4.7", "main": "./dist/src/index.js", "types": "./dist/src/index.d.ts", "dependencies": { diff --git a/packages/pieces/community/youtube/src/index.ts b/packages/pieces/community/youtube/src/index.ts index 5a4ebbf1a1d..dd4e16a0b44 100644 --- a/packages/pieces/community/youtube/src/index.ts +++ b/packages/pieces/community/youtube/src/index.ts @@ -16,7 +16,7 @@ export const youtube = createPiece({ logoUrl: 'https://cdn.activepieces.com/pieces/youtube.png', categories: [PieceCategory.CONTENT_AND_FILES], auth: youtubeAuth, - authors: ['abaza738', 'kishanprmr', 'khaledmashaly', 'abuaboud'], + authors: ['abaza738', 'kishanprmr', 'khaledmashaly', 'abuaboud', 'hugh-codes'], actions: [ createCustomApiCallAction({ baseUrl: () => 'https://www.googleapis.com/youtube/v3', diff --git a/packages/pieces/community/youtube/src/lib/triggers/new-video.trigger.ts b/packages/pieces/community/youtube/src/lib/triggers/new-video.trigger.ts index 96bef1ff56f..01ee37ed651 100644 --- a/packages/pieces/community/youtube/src/lib/triggers/new-video.trigger.ts +++ b/packages/pieces/community/youtube/src/lib/triggers/new-video.trigger.ts @@ -1,8 +1,22 @@ -import { createTrigger, TriggerStrategy } from '@activepieces/pieces-framework'; -import { httpClient, HttpMethod } from '@activepieces/pieces-common'; +import { + DedupeStrategy, + Polling, + pollingHelper, + httpClient, + HttpMethod, +} from '@activepieces/pieces-common'; +import { + AppConnectionValueForAuthProperty, + createTrigger, + DEDUPE_KEY_PROPERTY, + PieceAuth, + Store, + TriggerStrategy, +} from '@activepieces/pieces-framework'; import { channelIdentifier } from 
'../common/props'; +import { isNil } from '@activepieces/shared'; import dayjs from 'dayjs'; -import cheerio from 'cheerio'; +import { load as cheerioLoad } from 'cheerio'; import FeedParser from 'feedparser'; import axios from 'axios'; @@ -10,8 +24,9 @@ export const youtubeNewVideoTrigger = createTrigger({ name: 'new-video', displayName: 'New Video In Channel', description: 'Runs when a new video is added to a YouTube channel', - type: TriggerStrategy.POLLING, + auth: PieceAuth.None(), requireAuth: false, + type: TriggerStrategy.POLLING, props: { channel_identifier: channelIdentifier, }, @@ -210,82 +225,114 @@ export const youtubeNewVideoTrigger = createTrigger({ }, }, }, - async test({ propsValue }): Promise { - const channelId = await getChannelId(propsValue.channel_identifier); - if (!channelId) { - return []; - } - return (await getRssItems(channelId)) || []; + async test({ auth, propsValue, store, files }): Promise { + return pollingHelper.test(polling, { + auth, + store, + propsValue, + files, + }); }, async onEnable({ propsValue, store }): Promise { - const channelId = await getChannelId(propsValue.channel_identifier); - - if (!channelId) { - throw new Error('Unable to get channel ID.'); + let channelId = await store.get('channelId'); + if (isNil(channelId)) { + channelId = await getChannelId(propsValue.channel_identifier); + await store.put('channelId', channelId); + } + const items = await getRssItems(channelId); + if (items.length > 0) { + await store.put('_seenVideoIds', items.map((item) => getId(item))); } - - await store.put('channelId', channelId); - const items = (await getRssItems(channelId)) || []; - await store.put('lastFetchedYoutubeVideo', items?.[0]?.guid); - await store.put('lastUpdatedYoutubeVideo', getUpdateDate(items?.[0])); - return; }, - - async onDisable(): Promise { - return; + async onDisable({ store }): Promise { + await store.delete('_seenVideoIds'); + await store.delete('channelId'); }, - async run({ store }): Promise { - const 
channelId = await store.get('channelId'); - - if (!channelId) return []; - - const items = (await getRssItems(channelId)) || []; - if (items.length === 0) { - return []; + async run({ propsValue, store }): Promise { + let channelId = await store.get('channelId'); + if (isNil(channelId)) { + channelId = await getChannelId(propsValue.channel_identifier); + await store.put('channelId', channelId); } - const lastItemId = await store.get('lastFetchedYoutubeVideo'); - const storedLastUpdated = await store.get( - 'lastUpdatedYoutubeVideo' - ); + const rawItems = await getRssItems(channelId); - /** - * If the new latest item's date is before the last saved date - * it means something got deleted, nothing else to do - * this happens when a live stream ends, the live stream entry is deleted and later - * is replaced by the stream's video. - */ - if ( - storedLastUpdated && - dayjs(getUpdateDate(items?.[0])).isBefore(dayjs(storedLastUpdated)) - ) { - return []; - } + const seenIds = new Set(await store.get('_seenVideoIds') ?? []); + const newItems = rawItems.filter((item) => !seenIds.has(getId(item))); - const newItems = []; - for (const item of items) { - if (item.guid === lastItemId) break; - if ( - storedLastUpdated && - dayjs(getUpdateDate(item)).isBefore(dayjs(storedLastUpdated)) - ) { - continue; - } - newItems.push(item); + // Advance the seen-IDs set to the current feed so that if the anchor item + // (e.g. a live stream) is later deleted and pollingHelper would otherwise + // return all items, the previously-seen IDs are still excluded. + // Skip the update on an empty feed response to avoid wiping the set and + // causing false positives on the next successful poll. 
+ if (rawItems.length > 0) { + await store.put('_seenVideoIds', rawItems.map((item) => getId(item))); } - await store.put('lastFetchedYoutubeVideo', items?.[0]?.guid); - await store.put('lastUpdatedYoutubeVideo', getUpdateDate(items?.[0])); + return newItems + .sort((a, b) => { + const aDate = + (a as { pubdate?: string; pubDate?: string }).pubdate ?? + (a as { pubdate?: string; pubDate?: string }).pubDate; + const bDate = + (b as { pubdate?: string; pubDate?: string }).pubdate ?? + (b as { pubdate?: string; pubDate?: string }).pubDate; - return newItems; + if (aDate && bDate) { + const aUnix = dayjs(aDate).unix(); + const bUnix = dayjs(bDate).unix(); + if (aUnix === bUnix) { + return newItems.indexOf(a) - newItems.indexOf(b); + } + return bUnix - aUnix; + } + + return newItems.indexOf(a) - newItems.indexOf(b); + }) + .map((item) => withDedupeKey(item as Record)); }, }); -function getUpdateDate(item: any) { - const updated = item['atom:updated']; - if (updated == undefined) { - return undefined; +const polling: Polling< + AppConnectionValueForAuthProperty, + { channel_identifier: string } +> = { + strategy: DedupeStrategy.LAST_ITEM, + items: async ({ + propsValue, + store, + }: { + store: Store; + propsValue: { channel_identifier: string }; + }) => { + let channelId = await store.get('channelId'); + if (isNil(channelId)) { + channelId = await getChannelId(propsValue.channel_identifier); + await store.put('channelId', channelId); + } + const items = await getRssItems(channelId); + return items.map((item) => ({ + id: getId(item), + data: item, + })); + }, +}; + +function withDedupeKey(item: Record) { + const dedupeKey = typeof item.guid === 'string' ? 
item.guid : JSON.stringify(item); + return { + ...item, + [DEDUPE_KEY_PROPERTY]: dedupeKey, + }; +} + +function getId(item: { id?: string; guid?: string }) { + if (item.guid) { + return item.guid; + } + if (item.id) { + return item.id; } - return updated['#']; + return JSON.stringify(item); } async function getChannelId(urlOrId: string) { @@ -299,7 +346,7 @@ async function getChannelId(urlOrId: string) { method: HttpMethod.GET, url: urlOrId, }); - const $ = cheerio.load(response.body); + const $ = cheerioLoad(response.body); // Check if the URL is a channel ID itself const channelUrl = $('link[rel="canonical"]').attr('href'); @@ -333,7 +380,7 @@ function getRssItems(channelId: string): Promise { }); feedparser.on('end', () => { - resolve(items.reverse()); + resolve(items); }); feedparser.on('error', (error: any) => {