Add Activepieces integration for workflow automation

- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
poduck committed 2025-12-18 22:59:37 -05:00
parent 9848268d34
commit 3aa7199503
16292 changed files with 1284892 additions and 4708 deletions
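
The SmoothSchedule piece itself is not among the files shown below; this excerpt covers one of the community pieces bundled with the fork (RapidText AI). Going by the createPiece() registration pattern those pieces use (see src/index.ts further down), the custom piece's entry point could plausibly be wired up as sketched here. Every identifier, module path, and URL in the sketch (smoothSchedule, eventCreated, createEvent, and so on) is an assumed name for illustration, not code from this commit; the embed-token endpoint, Automations page, and Public API change listed above live on the SmoothSchedule application side and are likewise outside this excerpt.

// Hypothetical entry point for the custom SmoothSchedule piece (sketch only;
// all identifiers and module paths below are assumptions, not commit contents).
import { createPiece } from '@activepieces/pieces-framework';

// Triggers listed in the commit message: event created / updated / cancelled.
import { eventCreated } from './lib/triggers/event-created';
import { eventUpdated } from './lib/triggers/event-updated';
import { eventCancelled } from './lib/triggers/event-cancelled';

// Actions listed in the commit message.
import { createEvent } from './lib/actions/create-event';
import { updateEvent } from './lib/actions/update-event';
import { cancelEvent } from './lib/actions/cancel-event';
import { listResources } from './lib/actions/list-resources';
import { listServices } from './lib/actions/list-services';
import { listCustomers } from './lib/actions/list-customers';

import { smoothScheduleAuth } from './lib/common/auth';

export const smoothSchedule = createPiece({
  displayName: 'SmoothSchedule',
  auth: smoothScheduleAuth,
  minimumSupportedRelease: '0.36.1', // mirrors the value used by the bundled pieces
  logoUrl: 'https://example.com/smoothschedule-logo.png', // placeholder asset URL
  authors: [],
  actions: [createEvent, updateEvent, cancelEvent, listResources, listServices, listCustomers],
  triggers: [eventCreated, eventUpdated, eventCancelled],
});

Each of the imported trigger and action modules would in turn be defined with createTrigger()/createAction() from @activepieces/pieces-framework, in the same style as the RapidText AI actions shown later in this diff.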

packages/pieces/community/rapidtext-ai/.eslintrc.json

@@ -0,0 +1,33 @@
{
"extends": [
"../../../../.eslintrc.base.json"
],
"ignorePatterns": [
"!**/*"
],
"overrides": [
{
"files": [
"*.ts",
"*.tsx",
"*.js",
"*.jsx"
],
"rules": {}
},
{
"files": [
"*.ts",
"*.tsx"
],
"rules": {}
},
{
"files": [
"*.js",
"*.jsx"
],
"rules": {}
}
]
}

packages/pieces/community/rapidtext-ai/README.md

@@ -0,0 +1,7 @@
# pieces-rapidtext-ai

This library was generated with [Nx](https://nx.dev).

## Building

Run `nx build pieces-rapidtext-ai` to build the library.

packages/pieces/community/rapidtext-ai/package.json

@@ -0,0 +1,10 @@
{
"name": "@activepieces/piece-rapidtext-ai",
"version": "0.0.1",
"type": "commonjs",
"main": "./src/index.js",
"types": "./src/index.d.ts",
"dependencies": {
"tslib": "^2.3.0"
}
}

packages/pieces/community/rapidtext-ai/project.json

@@ -0,0 +1,65 @@
{
"name": "pieces-rapidtext-ai",
"$schema": "../../../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "packages/pieces/community/rapidtext-ai/src",
"projectType": "library",
"release": {
"version": {
"manifestRootsToUpdate": [
"dist/{projectRoot}"
],
"currentVersionResolver": "git-tag",
"fallbackCurrentVersionResolver": "disk"
}
},
"tags": [],
"targets": {
"build": {
"executor": "@nx/js:tsc",
"outputs": [
"{options.outputPath}"
],
"options": {
"outputPath": "dist/packages/pieces/community/rapidtext-ai",
"tsConfig": "packages/pieces/community/rapidtext-ai/tsconfig.lib.json",
"packageJson": "packages/pieces/community/rapidtext-ai/package.json",
"main": "packages/pieces/community/rapidtext-ai/src/index.ts",
"assets": [
"packages/pieces/community/rapidtext-ai/*.md",
{
"input": "packages/pieces/community/rapidtext-ai/src/i18n",
"output": "./src/i18n",
"glob": "**/!(i18n.json)"
}
],
"buildableProjectDepsInPackageJsonType": "dependencies",
"updateBuildableProjectDepsInPackageJson": true
},
"dependsOn": [
"prebuild",
"^build"
]
},
"nx-release-publish": {
"options": {
"packageRoot": "dist/{projectRoot}"
}
},
"prebuild": {
"dependsOn": [
"^build"
],
"executor": "nx:run-commands",
"options": {
"cwd": "packages/pieces/community/rapidtext-ai",
"command": "bun install --no-save --silent"
}
},
"lint": {
"executor": "@nx/eslint:lint",
"outputs": [
"{options.outputFile}"
]
}
}
}

packages/pieces/community/rapidtext-ai/src/i18n/

@@ -0,0 +1,27 @@
{
"You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).": "You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).",
"Generate Article": "Generate Article",
"Send Prompt": "Send Prompt",
"Generates an article.": "Generates an article.",
"Send prompt to RapidTextAI.": "Send prompt to RapidTextAI.",
"Model": "Model",
"Prompt": "Prompt",
"Max Tokens": "Max Tokens",
"Temperature": "Temperature",
"Top N": "Top N",
"Frequency Penalty": "Frequency Penalty",
"Presence Penalty": "Presence Penalty",
"Sampling temperature between 0 and 2.": "Sampling temperature between 0 and 2.",
"Nucleus sampling parameter": "Nucleus sampling parameter",
"Penalty for new tokens based on frequency": "Penalty for new tokens based on frequency",
"Penalty for new tokens based on presence": "Penalty for new tokens based on presence",
"Gemini 2.0 Flash": "Gemini 2.0 Flash",
"GPT-4o Mini": "GPT-4o Mini",
"GPT-4": "GPT-4",
"DeepSeek V3": "DeepSeek V3",
"GPT-4o": "GPT-4o",
"Grok-2": "Grok-2",
"Gemini 1.5 Pro": "Gemini 1.5 Pro",
"Gemini 2.0 Pro": "Gemini 2.0 Pro",
"DeepSeek R1": "DeepSeek R1"
}

packages/pieces/community/rapidtext-ai/src/index.ts

@@ -0,0 +1,16 @@
import { createPiece } from '@activepieces/pieces-framework';
import { PieceCategory } from '@activepieces/shared';
import { generateArticleAction } from './lib/actions/generate-article';
import { sendPromptAction } from './lib/actions/send-prompt';
import { rapidTextAiAuth } from './lib/common/auth';

export const rapidtextAi = createPiece({
displayName: 'RapidText AI',
auth: rapidTextAiAuth,
minimumSupportedRelease: '0.36.1',
categories: [PieceCategory.ARTIFICIAL_INTELLIGENCE],
logoUrl: 'https://cdn.activepieces.com/pieces/rapidtext-ai.png',
authors: ['kishanprmr'],
actions: [generateArticleAction, sendPromptAction],
triggers: [],
});

packages/pieces/community/rapidtext-ai/src/lib/actions/generate-article.ts

@@ -0,0 +1,41 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { rapidTextAiAuth } from '../common/auth';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { modelDropdown } from '../common/props';

export const generateArticleAction = createAction({
name: 'generate-article',
auth: rapidTextAiAuth,
displayName: 'Generate Article',
description: 'Generates an article.',
props: {
model: modelDropdown,
prompt: Property.LongText({
displayName: 'Prompt',
required: true,
}),
},
async run(context) {
const { model, prompt } = context.propsValue;
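// RapidText AI authenticates with the API key sent as the 'gigsixkey' query parameter (no Authorization header).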
const response = await httpClient.sendRequest<{
choices: { message: { content: string } }[];
}>({
method: HttpMethod.POST,
url: 'https://app.rapidtextai.com/openai/v1/chat/completionsarticle',
queryParams: {
gigsixkey: context.auth.secret_text,
},
body: {
model,
messages: [
{
role: 'user',
content: prompt,
},
],
},
});
return response.body.choices[0].message.content;
},
});

packages/pieces/community/rapidtext-ai/src/lib/actions/send-prompt.ts

@@ -0,0 +1,82 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { rapidTextAiAuth } from '../common/auth';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { modelDropdown } from '../common/props';

export const sendPromptAction = createAction({
name: 'create-prompt',
auth: rapidTextAiAuth,
displayName: 'Send Prompt',
description: 'Send prompt to RapidTextAI.',
props: {
model: modelDropdown,
prompt: Property.LongText({
displayName: 'Prompt',
required: true,
}),
max_tokens: Property.Number({
displayName: 'Max Tokens',
required: false,
defaultValue: 2048,
}),
temperature: Property.Number({
displayName: 'Temperature',
required: false,
description: 'Sampling temperature between 0 and 2.',
defaultValue: 1,
}),
top_p: Property.Number({
displayName: 'Top N',
description: 'Nucleus sampling parameter',
defaultValue: 1,
required: false,
}),
frequency_penalty: Property.Number({
displayName: 'Frequency Penalty',
required: false,
description: 'Penalty for new tokens based on frequency',
}),
presence_penalty: Property.Number({
displayName: 'Presence Penalty',
required: false,
description: 'Penalty for new tokens based on presence',
}),
},
async run(context) {
const {
model,
prompt,
max_tokens,
temperature,
top_p,
frequency_penalty,
presence_penalty,
} = context.propsValue;
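// Chat-completion style request: max_tokens, temperature and top_p fall back to the defaults declared above; the penalty values stay unset unless provided.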
const response = await httpClient.sendRequest<{
choices: { message: { content: string } }[];
}>({
method: HttpMethod.POST,
url: 'https://app.rapidtextai.com/openai/completion',
queryParams: {
gigsixkey: context.auth.secret_text,
},
body: {
model,
messages: [
{ role: 'system', content: 'You are a helpful assistant.' },
{
role: 'user',
content: prompt,
},
],
max_tokens,
temperature,
top_p,
frequency_penalty,
presence_penalty,
},
});
return response.body.choices[0].message.content;
},
});

packages/pieces/community/rapidtext-ai/src/lib/common/auth.ts

@@ -0,0 +1,7 @@
import { PieceAuth } from '@activepieces/pieces-framework';

export const rapidTextAiAuth = PieceAuth.SecretText({
displayName: 'API Key',
description: `You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).`,
required: true,
});

packages/pieces/community/rapidtext-ai/src/lib/common/props.ts

@@ -0,0 +1,20 @@
import { Property } from '@activepieces/pieces-framework';

export const modelDropdown = Property.StaticDropdown({
displayName: 'Model',
required: true,
options: {
disabled: false,
options: [
{ label: 'Gemini 2.0 Flash', value: 'gemini-2.0-flash' },
{ label: 'GPT-4o Mini', value: 'gpt-4o-mini' },
{ label: 'GPT-4', value: 'gpt-4' },
{ label: 'DeepSeek V3', value: 'deepseek-chat' },
{ label: 'GPT-4o', value: 'gpt-4o' },
{ label: 'Grok-2', value: 'grok-2' },
{ label: 'Gemini 1.5 Pro', value: 'gemini-1.5-pro' },
{ label: 'Gemini 2.0 Pro', value: 'gemini-2.0-pro' },
{ label: 'DeepSeek R1', value: 'deepseek-reasoner' },
],
},
});

packages/pieces/community/rapidtext-ai/tsconfig.json

@@ -0,0 +1,20 @@
{
"extends": "../../../../tsconfig.base.json",
"compilerOptions": {
"module": "commonjs",
"forceConsistentCasingInFileNames": true,
"strict": true,
"importHelpers": true,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"noPropertyAccessFromIndexSignature": true
},
"files": [],
"include": [],
"references": [
{
"path": "./tsconfig.lib.json"
}
]
}

packages/pieces/community/rapidtext-ai/tsconfig.lib.json

@@ -0,0 +1,9 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "../../../../dist/out-tsc",
"declaration": true,
"types": ["node"]
},
"include": ["src/**/*.ts"]
}