Add Activepieces integration for workflow automation
- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
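The diff below only shows two RapidTextAI actions from the vendored Activepieces fork; the SmoothSchedule triggers listed above (event created/updated/cancelled) would follow the same pieces-framework pattern. As a rough, hypothetical sketch only: the snippet below outlines how an "event created" webhook trigger could be wired up with createTrigger. The smoothScheduleAuth import, the https://example.smoothschedule.app/api/v1/webhooks endpoint, and its request/response shape are assumptions for illustration, not part of this commit.

import { createTrigger, TriggerStrategy } from '@activepieces/pieces-framework';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { smoothScheduleAuth } from '../common/auth'; // assumed auth export, analogous to rapidTextAiAuth below

export const eventCreatedTrigger = createTrigger({
  name: 'event-created',
  auth: smoothScheduleAuth,
  displayName: 'Event Created',
  description: 'Fires when a new event is created in SmoothSchedule.',
  props: {},
  type: TriggerStrategy.WEBHOOK,
  sampleData: {},
  // Register the webhook with the (assumed) SmoothSchedule Public API when the flow is enabled.
  async onEnable(context) {
    const { body } = await httpClient.sendRequest<{ id: string }>({
      method: HttpMethod.POST,
      url: 'https://example.smoothschedule.app/api/v1/webhooks', // hypothetical endpoint
      headers: { Authorization: `Bearer ${context.auth.secret_text}` }, // mirrors the auth shape used below
      body: { event: 'event.created', target_url: context.webhookUrl },
    });
    // Remember the subscription id so it can be removed in onDisable.
    await context.store.put('webhookId', body.id);
  },
  // Remove the webhook subscription when the flow is disabled.
  async onDisable(context) {
    const webhookId = await context.store.get<string>('webhookId');
    if (webhookId) {
      await httpClient.sendRequest({
        method: HttpMethod.DELETE,
        url: `https://example.smoothschedule.app/api/v1/webhooks/${webhookId}`,
        headers: { Authorization: `Bearer ${context.auth.secret_text}` },
      });
    }
  },
  // Hand the webhook payload to the flow run.
  async run(context) {
    return [context.payload.body];
  },
});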
@@ -0,0 +1,41 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { rapidTextAiAuth } from '../common/auth';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { modelDropdown } from '../common/props';

export const generateArticleAction = createAction({
  name: 'generate-article',
  auth: rapidTextAiAuth,
  displayName: 'Generate Article',
  description: 'Generates an article.',
  props: {
    model: modelDropdown,
    prompt: Property.LongText({
      displayName: 'Prompt',
      required: true,
    }),
  },
  async run(context) {
    const { model, prompt } = context.propsValue;
    // Call RapidTextAI's article endpoint; the API key is passed as a query parameter.
    const response = await httpClient.sendRequest<{
      choices: { message: { content: string } }[];
    }>({
      method: HttpMethod.POST,
      url: 'https://app.rapidtextai.com/openai/v1/chat/completionsarticle',
      queryParams: {
        gigsixkey: context.auth.secret_text,
      },
      body: {
        model,
        messages: [
          {
            role: 'user',
            content: prompt,
          },
        ],
      },
    });

    // Return only the generated article text from the first choice.
    return response.body.choices[0].message.content;
  },
});
@@ -0,0 +1,82 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { rapidTextAiAuth } from '../common/auth';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { modelDropdown } from '../common/props';

export const sendPromptAction = createAction({
  name: 'create-prompt',
  auth: rapidTextAiAuth,
  displayName: 'Send Prompt',
  description: 'Send a prompt to RapidTextAI.',
  props: {
    model: modelDropdown,
    prompt: Property.LongText({
      displayName: 'Prompt',
      required: true,
    }),
    max_tokens: Property.Number({
      displayName: 'Max Tokens',
      required: false,
      defaultValue: 2048,
    }),
    temperature: Property.Number({
      displayName: 'Temperature',
      required: false,
      description: 'Sampling temperature between 0 and 2.',
      defaultValue: 1,
    }),
    top_p: Property.Number({
      displayName: 'Top P',
      description: 'Nucleus sampling parameter.',
      defaultValue: 1,
      required: false,
    }),
    frequency_penalty: Property.Number({
      displayName: 'Frequency Penalty',
      required: false,
      description: 'Penalty for new tokens based on their frequency so far.',
    }),
    presence_penalty: Property.Number({
      displayName: 'Presence Penalty',
      required: false,
      description: 'Penalty for new tokens based on whether they have already appeared.',
    }),
  },
  async run(context) {
    const {
      model,
      prompt,
      max_tokens,
      temperature,
      top_p,
      frequency_penalty,
      presence_penalty,
    } = context.propsValue;
    // Forward the prompt and sampling parameters to RapidTextAI's OpenAI-compatible completion endpoint.
    const response = await httpClient.sendRequest<{
      choices: { message: { content: string } }[];
    }>({
      method: HttpMethod.POST,
      url: 'https://app.rapidtextai.com/openai/completion',
      queryParams: {
        gigsixkey: context.auth.secret_text,
      },
      body: {
        model,
        messages: [
          { role: 'system', content: 'You are a helpful assistant.' },
          {
            role: 'user',
            content: prompt,
          },
        ],
        max_tokens,
        temperature,
        top_p,
        frequency_penalty,
        presence_penalty,
      },
    });

    // Return only the assistant's reply text from the first choice.
    return response.body.choices[0].message.content;
  },
});