Add Activepieces integration for workflow automation
- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).": "You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).",
|
||||
"Generate Article": "Generate Article",
|
||||
"Send Prompt": "Send Prompt",
|
||||
"Generates an article.": "Generates an article.",
|
||||
"Send prompt to RapidTextAI.": "Send prompt to RapidTextAI.",
|
||||
"Model": "Model",
|
||||
"Prompt": "Prompt",
|
||||
"Max Tokens": "Max Tokens",
|
||||
"Temperature": "Temperature",
|
||||
"Top N": "Top N",
|
||||
"Frequency Penalty": "Frequency Penalty",
|
||||
"Presence Penalty": "Presence Penalty",
|
||||
"Sampling temperature between 0 and 2.": "Sampling temperature between 0 and 2.",
|
||||
"Nucleus sampling parameter": "Nucleus sampling parameter",
|
||||
"Penalty for new tokens based on frequency": "Penalty for new tokens based on frequency",
|
||||
"Penalty for new tokens based on presence": "Penalty for new tokens based on presence",
|
||||
"Gemini 2.0 Flash": "Gemini 2.0 Flash",
|
||||
"GPT-4o Mini": "GPT-4o Mini",
|
||||
"GPT-4": "GPT-4",
|
||||
"DeepSeek V3": "DeepSeek V3",
|
||||
"GPT-4o": "GPT-4o",
|
||||
"Grok-2": "Grok-2",
|
||||
"Gemini 1.5 Pro": "Gemini 1.5 Pro",
|
||||
"Gemini 2.0 Pro": "Gemini 2.0 Pro",
|
||||
"DeepSeek R1": "DeepSeek R1"
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
import { createPiece } from '@activepieces/pieces-framework';
|
||||
import { PieceCategory } from '@activepieces/shared';
|
||||
import { generateArticleAction } from './lib/actions/generate-article';
|
||||
import { sendPromptAction } from './lib/actions/send-prompt';
|
||||
import { rapidTextAiAuth } from './lib/common/auth';
|
||||
|
||||
/**
 * Piece definition for RapidText AI — AI text generation.
 * Exposes two actions (generate article, send prompt) and no triggers.
 */
export const rapidtextAi = createPiece({
  displayName: 'RapidText AI',
  // Shared SecretText API-key auth, defined in ./lib/common/auth.
  auth: rapidTextAiAuth,
  minimumSupportedRelease: '0.36.1',
  categories: [PieceCategory.ARTIFICIAL_INTELLIGENCE],
  logoUrl: 'https://cdn.activepieces.com/pieces/rapidtext-ai.png',
  authors: ['kishanprmr'],
  actions: [generateArticleAction, sendPromptAction],
  // This piece provides no triggers.
  triggers: [],
});
|
||||
@@ -0,0 +1,41 @@
|
||||
import { createAction, Property } from '@activepieces/pieces-framework';
|
||||
import { rapidTextAiAuth } from '../common/auth';
|
||||
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
|
||||
import { modelDropdown } from '../common/props';
|
||||
|
||||
export const generateArticleAction = createAction({
|
||||
name: 'generate-article',
|
||||
auth: rapidTextAiAuth,
|
||||
displayName: 'Generate Article',
|
||||
description: 'Generates an article.',
|
||||
props: {
|
||||
model: modelDropdown,
|
||||
prompt: Property.LongText({
|
||||
displayName: 'Prompt',
|
||||
required: true,
|
||||
}),
|
||||
},
|
||||
async run(context) {
|
||||
const { model, prompt } = context.propsValue;
|
||||
const response = await httpClient.sendRequest<{
|
||||
choices: { message: { content: string } }[];
|
||||
}>({
|
||||
method: HttpMethod.POST,
|
||||
url: 'https://app.rapidtextai.com/openai/v1/chat/completionsarticle',
|
||||
queryParams: {
|
||||
gigsixkey: context.auth.secret_text,
|
||||
},
|
||||
body: {
|
||||
model,
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: prompt,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
return response.body.choices[0].message.content;
|
||||
},
|
||||
});
|
||||
@@ -0,0 +1,82 @@
|
||||
import { createAction, Property } from '@activepieces/pieces-framework';
|
||||
import { rapidTextAiAuth } from '../common/auth';
|
||||
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
|
||||
import { modelDropdown } from '../common/props';
|
||||
|
||||
export const sendPromptAction = createAction({
|
||||
name: 'create-prompt',
|
||||
auth: rapidTextAiAuth,
|
||||
displayName: 'Send Prompt',
|
||||
description: 'Send prompt to RapidTextAI.',
|
||||
props: {
|
||||
model: modelDropdown,
|
||||
prompt: Property.LongText({
|
||||
displayName: 'Prompt',
|
||||
required: true,
|
||||
}),
|
||||
max_tokens: Property.Number({
|
||||
displayName: 'Max Tokens',
|
||||
required: false,
|
||||
defaultValue: 2048,
|
||||
}),
|
||||
temperature: Property.Number({
|
||||
displayName: 'Temperature',
|
||||
required: false,
|
||||
description: 'Sampling temperature between 0 and 2.',
|
||||
defaultValue: 1,
|
||||
}),
|
||||
top_p: Property.Number({
|
||||
displayName: 'Top N',
|
||||
description: 'Nucleus sampling parameter',
|
||||
defaultValue: 1,
|
||||
required: false,
|
||||
}),
|
||||
frequency_penalty: Property.Number({
|
||||
displayName: 'Frequency Penalty',
|
||||
required: false,
|
||||
description: 'Penalty for new tokens based on frequency',
|
||||
}),
|
||||
presence_penalty: Property.Number({
|
||||
displayName: 'Presence Penalty',
|
||||
required: false,
|
||||
description: 'Penalty for new tokens based on presence',
|
||||
}),
|
||||
},
|
||||
async run(context) {
|
||||
const {
|
||||
model,
|
||||
prompt,
|
||||
max_tokens,
|
||||
temperature,
|
||||
top_p,
|
||||
frequency_penalty,
|
||||
presence_penalty,
|
||||
} = context.propsValue;
|
||||
const response = await httpClient.sendRequest<{
|
||||
choices: { message: { content: string } }[];
|
||||
}>({
|
||||
method: HttpMethod.POST,
|
||||
url: 'https://app.rapidtextai.com/openai/completion',
|
||||
queryParams: {
|
||||
gigsixkey: context.auth.secret_text,
|
||||
},
|
||||
body: {
|
||||
model,
|
||||
messages: [
|
||||
{ role: 'system', content: 'You are a helpful assistant.' },
|
||||
{
|
||||
role: 'user',
|
||||
content: prompt,
|
||||
},
|
||||
],
|
||||
max_tokens,
|
||||
temperature,
|
||||
top_p,
|
||||
frequency_penalty,
|
||||
presence_penalty,
|
||||
},
|
||||
});
|
||||
|
||||
return response.body.choices[0].message.content;
|
||||
},
|
||||
});
|
||||
@@ -0,0 +1,7 @@
|
||||
import { PieceAuth } from '@activepieces/pieces-framework';
|
||||
|
||||
// SecretText auth: a single API key, sent to RapidTextAI as the
// 'gigsixkey' query parameter by this piece's actions.
// NOTE(review): the markdown link target 'app.rapidtextai.com' has no
// URL scheme, so it may not render as a clickable link. This exact string
// is also an i18n key (see the piece's en.json), so changing it would
// orphan existing translations — confirm before fixing.
export const rapidTextAiAuth = PieceAuth.SecretText({
  displayName: 'API Key',
  description: `You can obtain your API key from [Dashboard Settings](app.rapidtextai.com).`,
  required: true,
});
|
||||
@@ -0,0 +1,20 @@
|
||||
import { Property } from '@activepieces/pieces-framework';
|
||||
|
||||
export const modelDropdown = Property.StaticDropdown({
|
||||
displayName: 'Model',
|
||||
required: true,
|
||||
options: {
|
||||
disabled: false,
|
||||
options: [
|
||||
{ label: 'Gemini 2.0 Flash', value: 'gemini-2.0-flash' },
|
||||
{ label: 'GPT-4o Mini', value: 'gpt-4o-mini' },
|
||||
{ label: 'GPT-4', value: 'gpt-4' },
|
||||
{ label: 'DeepSeek V3', value: 'deepseek-chat' },
|
||||
{ label: 'GPT-4o', value: 'gpt-4o' },
|
||||
{ label: 'Grok-2', value: 'grok-2' },
|
||||
{ label: 'Gemini 1.5 Pro', value: 'gemini-1.5-pro' },
|
||||
{ label: 'Gemini 2.0 Pro', value: 'gemini-2.0-pro' },
|
||||
{ label: 'DeepSeek R1', value: 'deepseek-reasoner' },
|
||||
],
|
||||
},
|
||||
});
|
||||
Reference in New Issue
Block a user