Add Activepieces integration for workflow automation
- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
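The hunks below cover only the DataFuel community piece bundled with the fork; the SmoothSchedule piece, integrations app, and embed endpoint listed above live elsewhere in the commit. For orientation, here is a minimal sketch of how an Activepieces webhook trigger such as "event created" is typically declared with createTrigger from @activepieces/pieces-framework. The smoothScheduleAuth connection and the subscribe/unsubscribe steps are hypothetical stand-ins, not code from this commit.

import { createTrigger, PieceAuth, TriggerStrategy } from '@activepieces/pieces-framework';

// Hypothetical connection; the real SmoothSchedule piece defines its own auth.
const smoothScheduleAuth = PieceAuth.SecretText({
  displayName: 'API Key',
  required: true,
});

export const eventCreated = createTrigger({
  name: 'event-created',
  displayName: 'Event Created',
  description: 'Fires when an event is created in SmoothSchedule.',
  auth: smoothScheduleAuth,
  type: TriggerStrategy.WEBHOOK,
  props: {},
  sampleData: {},
  async onEnable(context) {
    // Register context.webhookUrl with the scheduling API here
    // (the actual endpoint is not shown in this commit).
  },
  async onDisable(context) {
    // Deregister the webhook here.
  },
  async run(context) {
    // Hand the incoming webhook payload to the flow.
    return [context.payload.body];
  },
});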
packages/pieces/community/datafuel/.eslintrc.json
@@ -0,0 +1,33 @@
{
  "extends": [
    "../../../../.eslintrc.base.json"
  ],
  "ignorePatterns": [
    "!**/*"
  ],
  "overrides": [
    {
      "files": [
        "*.ts",
        "*.tsx",
        "*.js",
        "*.jsx"
      ],
      "rules": {}
    },
    {
      "files": [
        "*.ts",
        "*.tsx"
      ],
      "rules": {}
    },
    {
      "files": [
        "*.js",
        "*.jsx"
      ],
      "rules": {}
    }
  ]
}
packages/pieces/community/datafuel/README.md
@@ -0,0 +1,7 @@
# pieces-datafuel

This library was generated with [Nx](https://nx.dev).

## Building

Run `nx build pieces-datafuel` to build the library.
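project.json (two hunks below) also defines `lint` and `prebuild` targets, so `nx lint pieces-datafuel` runs ESLint for the package in the same way.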
packages/pieces/community/datafuel/package.json
@@ -0,0 +1,10 @@
{
  "name": "@activepieces/piece-datafuel",
  "version": "0.0.1",
  "type": "commonjs",
  "main": "./src/index.js",
  "types": "./src/index.d.ts",
  "dependencies": {
    "tslib": "^2.3.0"
  }
}
packages/pieces/community/datafuel/project.json
@@ -0,0 +1,65 @@
{
  "name": "pieces-datafuel",
  "$schema": "../../../../node_modules/nx/schemas/project-schema.json",
  "sourceRoot": "packages/pieces/community/datafuel/src",
  "projectType": "library",
  "release": {
    "version": {
      "manifestRootsToUpdate": [
        "dist/{projectRoot}"
      ],
      "currentVersionResolver": "git-tag",
      "fallbackCurrentVersionResolver": "disk"
    }
  },
  "tags": [],
  "targets": {
    "build": {
      "executor": "@nx/js:tsc",
      "outputs": [
        "{options.outputPath}"
      ],
      "options": {
        "outputPath": "dist/packages/pieces/community/datafuel",
        "tsConfig": "packages/pieces/community/datafuel/tsconfig.lib.json",
        "packageJson": "packages/pieces/community/datafuel/package.json",
        "main": "packages/pieces/community/datafuel/src/index.ts",
        "assets": [
          "packages/pieces/community/datafuel/*.md",
          {
            "input": "packages/pieces/community/datafuel/src/i18n",
            "output": "./src/i18n",
            "glob": "**/!(i18n.json)"
          }
        ],
        "buildableProjectDepsInPackageJsonType": "dependencies",
        "updateBuildableProjectDepsInPackageJson": true
      },
      "dependsOn": [
        "prebuild",
        "^build"
      ]
    },
    "nx-release-publish": {
      "options": {
        "packageRoot": "dist/{projectRoot}"
      }
    },
    "prebuild": {
      "dependsOn": [
        "^build"
      ],
      "executor": "nx:run-commands",
      "options": {
        "cwd": "packages/pieces/community/datafuel",
        "command": "bun install --no-save --silent"
      }
    },
    "lint": {
      "executor": "@nx/eslint:lint",
      "outputs": [
        "{options.outputFile}"
      ]
    }
  }
}
packages/pieces/community/datafuel/src/i18n (translation source strings)
@@ -0,0 +1,36 @@
{
  "You can obtain an API key from [Settings](https://app.datafuel.dev/account/api_key).": "You can obtain an API key from [Settings](https://app.datafuel.dev/account/api_key).",
  "Crawl Website": "Crawl Website",
  "Scrape Website": "Scrape Website",
  "Get Scrape Result": "Get Scrape Result",
  "Custom API Call": "Custom API Call",
  "Crawl a website into a markdown format.": "Crawl a website into a markdown format.",
  "Scrape a website into a markdown format.": "Scrape a website into a markdown format.",
  "Retrieves the details about a scrape.": "Retrieves the details about a scrape.",
  "Make a custom API call to a specific endpoint": "Make a custom API call to a specific endpoint",
  "URL": "URL",
  "AI Prompt": "AI Prompt",
  "Depth": "Depth",
  "Limit": "Limit",
  "JSON Schema": "JSON Schema",
  "Job ID": "Job ID",
  "AI JSON Result": "AI JSON Result",
  "Markdown Result": "Markdown Result",
  "Method": "Method",
  "Headers": "Headers",
  "Query Parameters": "Query Parameters",
  "Body": "Body",
  "Response is Binary ?": "Response is Binary ?",
  "No Error on Failure": "No Error on Failure",
  "Timeout (in seconds)": "Timeout (in seconds)",
  "Prompt to crawl data": "Prompt to crawl data",
  "The depth of the crawl; a depth of 1 means only the first level of links will be scraped": "The depth of the crawl; a depth of 1 means only the first level of links will be scraped",
  "The maximum number of pages to scrape": "The maximum number of pages to scrape",
  "JSON schema definition for structured data extraction. Format should follow OpenAI's function calling schema format (https://platform.openai.com/docs/guides/structured-outputs)": "JSON schema definition for structured data extraction. Format should follow OpenAI's function calling schema format (https://platform.openai.com/docs/guides/structured-outputs)",
  "Authorization headers are injected automatically from your connection.": "Authorization headers are injected automatically from your connection.",
  "Enable for files like PDFs, images, etc..": "Enable for files like PDFs, images, etc..",
  "GET": "GET",
  "POST": "POST",
  "PATCH": "PATCH",
  "PUT": "PUT",
  "DELETE": "DELETE",
  "HEAD": "HEAD"
}
packages/pieces/community/datafuel/src/index.ts
@@ -0,0 +1,30 @@
import { createPiece } from '@activepieces/pieces-framework';
import { dataFuelAuth } from './lib/common/auth';
import { crawlWebsiteAction } from './lib/actions/crawl-website';
import { scrapeWebsiteAction } from './lib/actions/scrape-website';
import { getScrapeAction } from './lib/actions/get-scrape-result';
import { createCustomApiCallAction } from '@activepieces/pieces-common';
import { BASE_URL } from './lib/common/constants';

export const datafuel = createPiece({
  displayName: 'DataFuel',
  auth: dataFuelAuth,
  minimumSupportedRelease: '0.36.1',
  logoUrl: 'https://cdn.activepieces.com/pieces/datafuel.png',
  authors: ['kishanprmr'],
  actions: [
    crawlWebsiteAction,
    scrapeWebsiteAction,
    getScrapeAction,
    createCustomApiCallAction({
      auth: dataFuelAuth,
      baseUrl: () => BASE_URL,
      authMapping: async (auth) => {
        return {
          Authorization: `Bearer ${auth.secret_text}`,
        };
      },
    }),
  ],
  triggers: [],
});
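Besides the three typed actions, the piece registers a generic Custom API Call action as an escape hatch for any other DataFuel endpoint; its authMapping turns the stored connection secret into a Bearer header, which is what the i18n string "Authorization headers are injected automatically from your connection." refers to.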
packages/pieces/community/datafuel/src/lib/actions/crawl-website.ts
@@ -0,0 +1,84 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { dataFuelAuth } from '../common/auth';
import { AuthenticationType, httpClient, HttpMethod } from '@activepieces/pieces-common';
import { BASE_URL } from '../common/constants';
import { CrawlWebsiteResponse, ListScrapesResponse } from '../common/types';

export const crawlWebsiteAction = createAction({
  name: 'crawl-website',
  auth: dataFuelAuth,
  displayName: 'Crawl Website',
  description: 'Crawl a website into a markdown format.',
  props: {
    url: Property.ShortText({
      displayName: 'URL',
      required: true,
    }),
    prompt: Property.LongText({
      displayName: 'AI Prompt',
      description: 'Prompt to crawl data',
      required: false,
    }),
    depth: Property.Number({
      displayName: 'Depth',
      description:
        'The depth of the crawl; a depth of 1 means only the first level of links will be scraped',
      required: true,
    }),
    limit: Property.Number({
      displayName: 'Limit',
      description: 'The maximum number of pages to scrape',
      required: true,
    }),
    jsonSchema: Property.Json({
      displayName: 'JSON Schema',
      required: false,
      description: `JSON schema definition for structured data extraction. Format should follow OpenAI's function calling schema format (https://platform.openai.com/docs/guides/structured-outputs)`,
    }),
  },
  async run(context) {
    const { url, prompt, depth, limit, jsonSchema } = context.propsValue;

    // Kick off the crawl job.
    const response = await httpClient.sendRequest<CrawlWebsiteResponse>({
      method: HttpMethod.POST,
      url: BASE_URL + '/crawl',
      authentication: {
        type: AuthenticationType.BEARER_TOKEN,
        token: context.auth.secret_text,
      },
      body: {
        url,
        ai_prompt: prompt,
        json_schema: jsonSchema,
        depth,
        limit,
      },
    });

    const jobId = response.body.job_id;
    let status = 'pending';
    const timeoutAt = Date.now() + 5 * 60 * 1000; // give up after 5 minutes

    // Poll every 5 seconds until the job finishes or the deadline passes.
    while (status !== 'finished' && Date.now() < timeoutAt) {
      await new Promise((resolve) => setTimeout(resolve, 5000));

      const pollResponse = await httpClient.sendRequest<Array<ListScrapesResponse>>({
        method: HttpMethod.GET,
        url: BASE_URL + '/list_scrapes',
        authentication: {
          type: AuthenticationType.BEARER_TOKEN,
          token: context.auth.secret_text,
        },
        queryParams: {
          job_id: jobId,
          markdown: 'true',
        },
      });

      // Guard against an empty result set while the job is still queued.
      status = pollResponse.body[0]?.job_status ?? status;

      if (status === 'finished') return pollResponse.body;
    }
    throw new Error('Crawl job timed out or failed.');
  },
});
packages/pieces/community/datafuel/src/lib/actions/get-scrape-result.ts
@@ -0,0 +1,46 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { dataFuelAuth } from '../common/auth';
import { AuthenticationType, httpClient, HttpMethod } from '@activepieces/pieces-common';
import { BASE_URL } from '../common/constants';
import { ListScrapesResponse } from '../common/types';

export const getScrapeAction = createAction({
  name: 'get-scrape',
  auth: dataFuelAuth,
  displayName: 'Get Scrape Result',
  description: 'Retrieves the details about a scrape.',
  props: {
    jobId: Property.ShortText({
      displayName: 'Job ID',
      required: true,
    }),
    aiResponse: Property.Checkbox({
      displayName: 'AI JSON Result',
      required: true,
    }),
    markdownResponse: Property.Checkbox({
      displayName: 'Markdown Result',
      required: true,
    }),
  },
  async run(context) {
    const { jobId, aiResponse, markdownResponse } = context.propsValue;

    const response = await httpClient.sendRequest<Array<ListScrapesResponse>>({
      method: HttpMethod.GET,
      url: BASE_URL + '/list_scrapes',
      authentication: {
        type: AuthenticationType.BEARER_TOKEN,
        token: context.auth.secret_text,
      },
      queryParams: {
        job_id: jobId,
        markdown: markdownResponse ? 'true' : 'false',
        ai_response: aiResponse ? 'true' : 'false',
      },
    });

    return response.body;
  },
});
packages/pieces/community/datafuel/src/lib/actions/scrape-website.ts
@@ -0,0 +1,70 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { dataFuelAuth } from '../common/auth';
import { AuthenticationType, httpClient, HttpMethod } from '@activepieces/pieces-common';
import { BASE_URL } from '../common/constants';
import { CrawlWebsiteResponse, ListScrapesResponse } from '../common/types';

export const scrapeWebsiteAction = createAction({
  name: 'scrape-website',
  auth: dataFuelAuth,
  displayName: 'Scrape Website',
  description: 'Scrape a website into a markdown format.',
  props: {
    url: Property.ShortText({
      displayName: 'URL',
      required: true,
    }),
    prompt: Property.LongText({
      displayName: 'AI Prompt',
      description: 'Prompt to crawl data',
      required: false,
    }),
    jsonSchema: Property.Json({
      displayName: 'JSON Schema',
      required: false,
      description: `JSON schema definition for structured data extraction. Format should follow OpenAI's function calling schema format (https://platform.openai.com/docs/guides/structured-outputs)`,
    }),
  },
  async run(context) {
    const { url, prompt, jsonSchema } = context.propsValue;

    // Kick off the scrape job.
    const response = await httpClient.sendRequest<CrawlWebsiteResponse>({
      method: HttpMethod.POST,
      url: BASE_URL + '/scrape',
      authentication: {
        type: AuthenticationType.BEARER_TOKEN,
        token: context.auth.secret_text,
      },
      body: {
        url,
        ai_prompt: prompt,
        json_schema: jsonSchema,
      },
    });

    const jobId = response.body.job_id;
    let status = 'pending';
    const timeoutAt = Date.now() + 5 * 60 * 1000; // give up after 5 minutes

    // Poll every 5 seconds until the job finishes or the deadline passes.
    while (status !== 'finished' && Date.now() < timeoutAt) {
      await new Promise((resolve) => setTimeout(resolve, 5000));

      const pollResponse = await httpClient.sendRequest<Array<ListScrapesResponse>>({
        method: HttpMethod.GET,
        url: BASE_URL + '/list_scrapes',
        authentication: {
          type: AuthenticationType.BEARER_TOKEN,
          token: context.auth.secret_text,
        },
        queryParams: {
          job_id: jobId,
        },
      });

      // Guard against an empty result set while the job is still queued.
      status = pollResponse.body[0]?.job_status ?? status;

      if (status === 'finished') return pollResponse.body;
    }
    throw new Error('Scrape job timed out or failed.');
  },
});
packages/pieces/community/datafuel/src/lib/common/auth.ts
@@ -0,0 +1,7 @@
import { PieceAuth } from '@activepieces/pieces-framework';

export const dataFuelAuth = PieceAuth.SecretText({
  displayName: 'API Key',
  description: `You can obtain an API key from [Settings](https://app.datafuel.dev/account/api_key).`,
  required: true,
});
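The auth is a bare secret with no connection-time validation. PieceAuth.SecretText also accepts a validate hook; the sketch below shows one way to use it, assuming /list_scrapes serves as a cheap authenticated probe (this commit does not confirm that) and that the hook receives the same auth shape the actions read above.

import { PieceAuth } from '@activepieces/pieces-framework';
import { AuthenticationType, httpClient, HttpMethod } from '@activepieces/pieces-common';
import { BASE_URL } from './constants';

export const dataFuelAuthValidated = PieceAuth.SecretText({
  displayName: 'API Key',
  description: `You can obtain an API key from [Settings](https://app.datafuel.dev/account/api_key).`,
  required: true,
  validate: async ({ auth }) => {
    try {
      // Assumed probe: any authenticated endpoint would do.
      await httpClient.sendRequest({
        method: HttpMethod.GET,
        url: BASE_URL + '/list_scrapes',
        authentication: {
          type: AuthenticationType.BEARER_TOKEN,
          token: auth.secret_text, // same shape the actions use
        },
      });
      return { valid: true };
    } catch {
      return { valid: false, error: 'Invalid API key.' };
    }
  },
});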
packages/pieces/community/datafuel/src/lib/common/constants.ts
@@ -0,0 +1 @@
export const BASE_URL = 'https://api.datafuel.dev'; // no trailing slash; callers append '/crawl' etc.
packages/pieces/community/datafuel/src/lib/common/types.ts
@@ -0,0 +1,8 @@
export type CrawlWebsiteResponse = {
  job_id: string;
};

export type ListScrapesResponse = {
  job_id: string;
  job_status: string;
};
packages/pieces/community/datafuel/tsconfig.json
@@ -0,0 +1,20 @@
{
  "extends": "../../../../tsconfig.base.json",
  "compilerOptions": {
    "module": "commonjs",
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "importHelpers": true,
    "noImplicitOverride": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "noPropertyAccessFromIndexSignature": true
  },
  "files": [],
  "include": [],
  "references": [
    {
      "path": "./tsconfig.lib.json"
    }
  ]
}
packages/pieces/community/datafuel/tsconfig.lib.json
@@ -0,0 +1,9 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "outDir": "../../../../dist/out-tsc",
    "declaration": true,
    "types": ["node"]
  },
  "include": ["src/**/*.ts"]
}