Add Activepieces integration for workflow automation

- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
poduck
2025-12-18 22:59:37 -05:00
parent 9848268d34
commit 3aa7199503
16292 changed files with 1284892 additions and 4708 deletions

View File

@@ -0,0 +1,66 @@
const fs = require('fs');

/**
 * Parses a Checkly markdown report and prints a JSON-encoded summary
 * message (suitable for posting to chat) to stdout.
 *
 * Usage: node format-checkly-report.js <path-to-md> [environment]
 */
function main() {
  if (process.argv.length < 3) {
    console.error('Usage: node format-checkly-report.js <path-to-md> [environment]');
    process.exit(1);
  }
  const mdPath = process.argv[2];
  const environment = process.argv[3];
  const content = fs.readFileSync(mdPath, 'utf8');
  const lines = content.split('\n');
  const bulletPoints = [];
  const summaryLines = [];
  let inTable = false;
  for (const line of lines) {
    // The results table starts at its header row.
    if (line.startsWith('|Result|')) {
      inTable = true;
      continue;
    }
    // Skip the markdown separator row and blank lines inside the table.
    if (inTable && (line.startsWith('|:-') || !line.trim())) {
      continue;
    }
    if (inTable && line.includes('|')) {
      const parts = line.split('|').map(part => part.trim()).filter(Boolean);
      if (parts.length >= 5) {
        const status = parts[0];
        const name = parts[1];
        const checkType = parts[2];
        const filename = parts[3].replace(/`/g, '');
        const duration = parts[4];
        // Try to extract link if present
        const linkMatch = line.match(/\[Full test report\]\((.*?)\)/);
        const link = linkMatch ? linkMatch[1] : null;
        const emoji = status.includes('Fail') ? '❌' : '✅';
        // BUG FIX: the filename column was parsed but never rendered — the
        // message contained a literal "$(unknown)" placeholder instead of
        // the interpolated filename.
        let point = `${emoji} **${name}** (${checkType}) - ${duration}\n 📁 ${filename}`;
        if (link) {
          point += `\n 🔗 [Full report](${link})`;
        }
        bulletPoints.push(point);
      }
    } else if (!inTable) {
      // Everything before the table is free-form summary text.
      summaryLines.push(line);
    }
  }
  let message = '';
  if (environment) {
    message += `**Environment:** ${environment}\n\n`;
  }
  for (const l of summaryLines) {
    if (l.trim()) {
      message += l + '\n';
    }
  }
  if (bulletPoints.length) {
    message += '\n**Test Results:**\n';
    message += bulletPoints.join('\n');
  }
  // JSON-encode so the caller can embed the message safely (e.g. in a
  // workflow payload).
  console.log(JSON.stringify(message));
}
main();

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env node
// Ensures the Bun runtime is available, installing it globally via npm
// when it is missing. Exits non-zero only if installation fails.
const { execSync } = require("child_process")

function bunIsInstalled() {
  try {
    // Try to get bun version to check if installed
    execSync("bun --version", { stdio: "ignore" });
    return true;
  } catch {
    return false;
  }
}

if (bunIsInstalled()) {
  console.log("✅ Bun is already installed.");
} else {
  console.log("⚙️ Bun not found. Installing globally...");
  try {
    execSync("npm install -g bun", { stdio: "inherit" });
    console.log("✅ Bun installed successfully.");
  } catch (err) {
    console.error("❌ Failed to install Bun:", err.message);
    process.exit(1);
  }
}

View File

@@ -0,0 +1,46 @@
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');

// Root folder that contains all piece packages.
const rootFolderPath = '/workspace/packages/pieces';

// Reads <folderPath>/project.json and returns its "name" attribute
// (the nx project name).
function readProjectJson(folderPath) {
  const projectJsonPath = path.join(folderPath, 'project.json');
  const projectJson = JSON.parse(fs.readFileSync(projectJsonPath, 'utf8'));
  return projectJson.name;
}

// Finds every piece folder (except "community" and "custom") and moves each
// one into packages/pieces/community/<folder> via `nx g move`, keeping the
// project name and rewriting the import path.
function listPackagesInFolders(folderPath) {
  const pieceFolders = fs.readdirSync(folderPath, { withFileTypes: true })
    .filter(dirent => dirent.isDirectory() && dirent.name !== 'community' && dirent.name !== 'custom')
    .map(dirent => dirent.name);

  const packages = pieceFolders.map(folder => ({
    folder,
    packageName: readProjectJson(path.join(folderPath, folder)),
  }));

  // Execute one `nx g move` per package; a failure is logged but does not
  // stop the remaining moves.
  for (const { folder, packageName } of packages) {
    const destination = 'packages/pieces/community/' + folder;
    const command = `nx g move --projectName=${packageName} --newProjectName=${packageName} --destination=${destination} --importPath=@activepieces/piece-${folder}`;
    try {
      execSync(command, { stdio: 'inherit' });
    } catch (err) {
      console.log(err);
    }
  }
  return packages;
}

// Kick off the migration and report what was processed.
const packagesInFolders = listPackagesInFolders(rootFolderPath);
console.log(packagesInFolders);

View File

@@ -0,0 +1,218 @@
import fs from 'fs';
import path from 'path';
import { execSync } from 'child_process';
// Shape of the parts of package.json this script reads and writes; extra
// fields are preserved via the index signature when the file is rewritten.
interface PackageJson {
name: string;
version: string;
[key: string]: any;
}
// Outcome of one package.json version-bump attempt.
interface UpdateResult {
success: boolean;
oldVersion?: string;
newVersion?: string;
error?: string;
}
/**
 * Returns `version` with its patch component incremented,
 * e.g. "1.2.3" -> "1.2.4". A version with fewer than three dot-separated
 * parts is returned unchanged; any parts after the third are dropped.
 */
function bumpPatchVersion(version: string): string {
  const segments = version.split('.');
  if (segments.length < 3) {
    return version;
  }
  const [major, minor, patch] = segments;
  return `${major}.${minor}.${parseInt(patch) + 1}`;
}
/**
 * Bumps the patch version inside the package.json at `packageJsonPath`,
 * writing the file back with 2-space indentation plus a trailing newline.
 * Returns an UpdateResult describing what happened; never throws.
 */
function updatePackageJson(packageJsonPath: string): UpdateResult {
  try {
    const raw = fs.readFileSync(packageJsonPath, 'utf8');
    const pkg: PackageJson = JSON.parse(raw);
    if (!pkg.version) {
      return { success: false, error: 'No version field found in package.json' };
    }
    const oldVersion = pkg.version;
    const newVersion = bumpPatchVersion(oldVersion);
    console.log(`Bumping ${path.basename(path.dirname(packageJsonPath))}: ${oldVersion} -> ${newVersion}`);
    pkg.version = newVersion;
    // Write back to file with proper formatting
    fs.writeFileSync(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
    return { success: true, oldVersion, newVersion };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error(`Error updating ${packageJsonPath}:`, errorMessage);
    return { success: false, error: errorMessage };
  }
}
/**
 * True when `piecePath` differs from origin/main.
 * `git diff --quiet` exits 0 for "no changes" and 1 for "changes"; exit
 * code 1 surfaces here as a thrown error carrying `status === 1`.
 * Any other git failure is rethrown.
 */
function hasChangesComparedToMain(piecePath: string): boolean {
  try {
    execSync(`git diff --quiet origin/main -- ${piecePath}`, { encoding: 'utf8' });
  } catch (error) {
    if (error instanceof Error && 'status' in error && error.status === 1) {
      return true; // Has changes
    }
    throw error;
  }
  return false; // No changes
}
// True when the piece ships translations, i.e. <piecePath>/src/i18n exists
// and is a directory.
function hasI18nDirectory(piecePath: string): boolean {
  const i18nDir = path.join(piecePath, 'src', 'i18n');
  if (!fs.existsSync(i18nDir)) {
    return false;
  }
  return fs.statSync(i18nDir).isDirectory();
}
/**
 * True when the diff against origin/main includes files under the piece's
 * src/i18n directory. A git exit status of 1 also counts as "has changes";
 * any other git failure is swallowed and reported as "no changes".
 */
function hasTranslationChanges(piecePath: string): boolean {
  try {
    const changedFiles = execSync(`git diff --name-only origin/main -- ${piecePath}/src/i18n`, { encoding: 'utf8' }).trim();
    return changedFiles.length > 0;
  } catch (error) {
    if (error instanceof Error && 'status' in error && error.status === 1) {
      return true; // Has changes
    }
    return false; // No changes or error
  }
}
/**
 * True when the piece's current package.json version is strictly greater
 * than the version recorded on origin/main (i.e. already bumped).
 * Returns false when package.json is missing, has no version field, or the
 * main-branch copy cannot be read/parsed.
 */
function hasVersionBeenBumped(piecePath: string): boolean {
  const packageJsonPath = path.join(piecePath, 'package.json');
  try {
    if (!fs.existsSync(packageJsonPath)) {
      return false;
    }
    const localPkg: PackageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
    const localVersion = localPkg.version;
    if (!localVersion) {
      return false;
    }
    // Read the same file as it exists on the main branch.
    const mainRaw = execSync(`git show origin/main:${packageJsonPath}`, { encoding: 'utf8' });
    const mainPkg: PackageJson = JSON.parse(mainRaw);
    // Higher than main means the bump already happened.
    return compareVersions(localVersion, mainPkg.version) > 0;
  } catch (error) {
    // File absent on main (or any git/parse failure): assume not bumped.
    return false;
  }
}
/**
 * Numerically compares two dotted version strings part by part.
 * Missing parts count as 0. Returns 1 when version1 > version2,
 * -1 when version1 < version2, and 0 when equal.
 */
function compareVersions(version1: string, version2: string): number {
  const left = version1.split('.').map(Number);
  const right = version2.split('.').map(Number);
  const width = Math.max(left.length, right.length);
  for (let i = 0; i < width; i++) {
    const a = left[i] || 0;
    const b = right[i] || 0;
    if (a !== b) {
      return a > b ? 1 : -1;
    }
  }
  return 0;
}
// Returns the names of the immediate subdirectories of `piecesDir`;
// each one is assumed to be a piece package.
function getPieceDirectories(piecesDir: string): string[] {
  const entries = fs.readdirSync(piecesDir);
  return entries.filter(entry => fs.statSync(path.join(piecesDir, entry)).isDirectory());
}
// Main function
/**
 * Entry point: finds community pieces whose translations (src/i18n) changed
 * relative to origin/main and bumps each one's package.json patch version.
 * Pieces are skipped (each with its own log line) when they have no
 * changes, no i18n directory, non-translation changes, or an
 * already-bumped version.
 */
function main(): void {
console.log('Finding pieces with translation changes compared to main...');
const piecesWithChanges: string[] = [];
const piecesDir = 'packages/pieces/community';
// Get all piece directories
const pieceDirs = getPieceDirectories(piecesDir);
console.log(`Checking ${pieceDirs.length} pieces for changes...`);
for (const piece of pieceDirs) {
const piecePath = path.join(piecesDir, piece);
// Check if piece has changes compared to main
if (hasChangesComparedToMain(piecePath)) {
// Check if it has i18n directory (indicating translations were added)
if (hasI18nDirectory(piecePath)) {
// Check if the changes are translation-related
if (hasTranslationChanges(piecePath)) {
// Check if version has already been bumped
if (hasVersionBeenBumped(piecePath)) {
console.log(` - ${piece} - has translation changes but version already bumped`);
} else {
// This piece needs its version bumped below.
piecesWithChanges.push(piece);
console.log(`${piece} - has translation changes and needs version bump`);
}
} else {
console.log(` - ${piece} - has changes but not translation-related`);
}
} else {
console.log(` - ${piece} - has changes but no i18n directory`);
}
} else {
console.log(` - ${piece} - no changes`);
}
}
if (piecesWithChanges.length === 0) {
console.log('\nNo pieces with translation changes found.');
return;
}
console.log(`\nFound ${piecesWithChanges.length} pieces with translation changes:`);
piecesWithChanges.forEach(piece => console.log(` - ${piece}`));
console.log('\nBumping patch versions...');
// Bump each selected piece and tally the outcomes.
let successCount = 0;
let errorCount = 0;
for (const piece of piecesWithChanges) {
const packageJsonPath = path.join(piecesDir, piece, 'package.json');
if (fs.existsSync(packageJsonPath)) {
const result = updatePackageJson(packageJsonPath);
if (result.success) {
successCount++;
} else {
errorCount++;
}
} else {
console.error(`Package.json not found for ${piece}`);
errorCount++;
}
}
console.log(`\nSummary:`);
console.log(` Successfully updated: ${successCount} pieces`);
console.log(` Errors: ${errorCount} pieces`);
}
// Run the script
main();

View File

@@ -0,0 +1,50 @@
import assert from 'node:assert'
import { argv } from 'node:process'
import { exec } from '../utils/exec'
import { readPackageJson, readProjectJson } from '../utils/files'
import { findAllPiecesDirectoryInSource } from '../utils/piece-script-utils'
import { isNil } from '@activepieces/shared'
import chalk from 'chalk'
import path from 'node:path'
/**
 * Builds and publishes a single piece to npm under the `latest` tag.
 * `name` is the piece's directory basename under packages/pieces.
 * Logs an error and returns early when no matching directory exists.
 */
export const publishPiece = async (name: string): Promise<void> => {
assert(name, '[publishPiece] parameter "name" is required')
const distPaths = await findAllPiecesDirectoryInSource()
const directory = distPaths.find(p => path.basename(p) === name)
if (isNil(directory)) {
console.error(chalk.red(`[publishPiece] can't find the directory with name ${name}`))
return
}
// Read the version to publish and the nx project name to build.
const { version } = await readPackageJson(directory)
const { name: nxProjectName } = await readProjectJson(directory)
await exec(`npx nx build ${nxProjectName}`)
// Delegate the actual npm publish to the shared publish.mjs script.
const nxPublishProjectCommand = `
node tools/scripts/publish.mjs \
${nxProjectName} \
${version} \
latest
`
await exec(nxPublishProjectCommand)
console.info(chalk.green.bold(`[publishPiece] success, name=${name}, version=${version}`))
}
// CLI entrypoint: `node publish-piece.js <piece-name>`.
const main = async (): Promise<void> => {
    const pieceName = argv[2]
    await publishPiece(pieceName)
}
/*
 * module is entrypoint, not imported i.e. invoked directly
 * see https://nodejs.org/api/modules.html#modules_accessing_the_main_module
 */
if (require.main === module) {
    // BUG FIX: exit non-zero on failure instead of dying with an
    // unhandled promise rejection.
    main().catch((err) => {
        console.error(err)
        process.exit(1)
    })
}

View File

@@ -0,0 +1,20 @@
import { publishNxProject } from '../utils/publish-nx-project'
import { findAllPiecesDirectoryInSource } from '../utils/piece-script-utils'
import { chunk } from '../../../packages/shared/src/lib/common/utils/utils'
// Publishes a single piece project given its source directory path.
const publishPiece = async (nxProjectPath: string): Promise<void> => {
    console.info(`[publishPiece] nxProjectPath=${nxProjectPath}`)
    await publishNxProject(nxProjectPath)
}

// Publishes every piece in batches of 30, pausing 5s between batches to
// avoid overloading the registry.
const main = async () => {
    const piecesSource = await findAllPiecesDirectoryInSource()
    const piecesSourceChunks = chunk(piecesSource, 30)
    // BUG FIX: renamed the loop variable — it previously shadowed the
    // imported `chunk` helper inside the loop body.
    for (const batch of piecesSourceChunks) {
        await Promise.all(batch.map(publishPiece))
        await new Promise(resolve => setTimeout(resolve, 5000))
    }
}

// BUG FIX: fail the process on error instead of an unhandled rejection.
main().catch((err) => {
    console.error(err)
    process.exit(1)
})

View File

@@ -0,0 +1,492 @@
import * as fs from 'fs';
import * as path from 'path';
import { execSync } from 'child_process';
import semVer from 'semver';
const contributors = [
{
"name": "Kishan Parmar",
"login": "kishanprmr"
},
{
"name": "Mohammad AbuAboud",
"login": "abuaboud"
},
{
"name": "Damien HEBERT",
"login": "doskyft"
},
{
"name": "AbdulTheActivePiecer",
"login": "AbdulTheActivePiecer"
},
{
"name": "Moayyad Shaddad",
"login": "MoShizzle"
},
{
"name": "TaskMagicKyle",
"login": "TaskMagicKyle"
},
{
"name": "kylebuilds",
"login": "TaskMagicKyle"
},
{
"name": "Salem-Alaa",
"login": "Salem-Alaa"
},
{
"name": "Khaled Mashaly",
"login": "khaledmashaly"
},
{
"name": "abuaboud",
"login": "abuaboud"
},
{
"name": "Mohammed Abu Aboud",
"login": "abuaboud"
},
{
"name": "Mukewa O. Wekalao",
"login": "kanarelo"
},
{
"name": "Willian",
"login": "Willianwg"
},
{
"name": "Aler Denisov",
"login": "alerdenisov"
},
{
"name": "Abdallah-Alwarawreh",
"login": "Abdallah-Alwarawreh"
},
{
"name": "Shay Punter",
"login": "ShayPunter"
},
{
"name": "i-nithin",
"login": "i-nithin"
},
{
"name": "Joe Workman",
"login": "joeworkman"
},
{
"name": "ShayPunter",
"login": "ShayPunter"
},
{
"name": "Vraj Gohil",
"login": "VrajGohil"
},
{
"name": "Matthew Zeiler",
"login": "zeiler"
},
{
"name": "Alexandros Katechis",
"login": "akatechis"
},
{
"name": "JanHolger",
"login": "JanHolger"
},
{
"name": "Andrei Chirko",
"login": "andchir"
},
{
"name": "Landon Moir",
"login": "landonmoir"
},
{
"name": "bibhuty-did-this",
"login": "bibhuty-did-this"
},
{
"name": "Cyril Selasi",
"login": "cyrilselasi"
},
{
"name": "Gunther Schulz",
"login": "Gunther-Schulz"
},
{
"name": "Osama Zakarneh",
"login": "Ozak93"
},
{
"name": "Owlcept",
"login": "Owlcept"
},
{
"name": "Drew Lewis",
"login": "Owlcept"
},
{
"name": "AbdullahBitar",
"login": "AbdullahBitar"
},
{
"name": "Mohammad Abuaboud",
"login": "abuaboud"
},
{
"name": "BBND",
"login": "BBND"
},
{
"name": "Nilesh",
"login": "Nilesh"
},
{
"name": "Karim Khaleel",
"login": "karimkhaleel"
},
{
"name": "[NULL] Dev",
"login": "Abdallah-Alwarawreh"
},
{
"name": "Pablo Fernandez",
"login": "pfernandez98"
},
{
"name": "BastienMe",
"login": "BastienMe"
},
{
"name": "Olivier Sambourg",
"login": "AdamSelene"
},
{
"name": "MoShizzle",
"login": "MoShizzle"
},
{
"name": "Aasim Sani",
"login": "aasimsani"
},
{
"name": "Abdul-rahman Yasir Khalil",
"login": "AbdulTheActivePiecer"
},
{
"name": "awais",
"login": "awais"
},
{
"name": "Lisander Lopez",
"login": "lisander-lopez"
},
{
"name": "OsamaHaikal",
"login": "OsamaHaikal"
},
{
"name": "Maher",
"login": "abaza738"
},
{
"name": "Maher Abaza",
"login": "abaza738"
},
{
"name": "Mukewa Wekalao",
"login": "kanarelo"
},
{
"name": "Mark van Bellen",
"login": "buttonsbond"
},
{
"name": "Denis Gurskij",
"login": "DGurskij"
},
{
"name": "Thibaut Patel",
"login": "tpatel"
},
{
"name": "Bastien Meffre",
"login": "BastienMe"
},
{
"name": "Abdullah Ranginwala",
"login": "abdullahranginwala"
},
{
"name": "pfernandez98",
"login": "pfernandez98"
},
{
"name": "Vitali Borovi",
"login": "Vitalini"
},
{
"name": "Vitali Borovik",
"login": "Vitalini"
},
{
"name": "Vitalik Borovik",
"login": "Vitalini"
},
{
"name": "Armand Giauffret 4",
"login": "ArmanGiau3"
},
{
"name": "Armand Giauffret 3",
"login": "ArmanGiau3"
},
{
"name": "Salem Alwarawreh",
"login": "Salem-Alaa"
},
{
"name": "MyWay",
"login": "MyWay"
},
{
"name": "leenmashni",
"login": "leenmashni"
},
{
"name": "Fábio Ferreira",
"login": "facferreira"
},
{
"name": "Diego Nijboer",
"login": "lldiegon"
},
{
"name": "Enrike Nur",
"login": "w95"
},
{
"name": "Haithem BOUJRIDA",
"login": "hkboujrida"
},
{
"name": "Willian Guedes",
"login": "Willianwg"
},
{
"name": "Daniel Ostapenko",
"login": "denieler"
},
{
"name": "Yann Petitjean",
"login": "yann120"
},
{
"name": "Lawrence Li",
"login": "la3rence"
},
{
"name": "Mario Meyer",
"login": "mariomeyer"
},
{
"name": "aboudzein",
"login": "aboudzein",
},
{
"name": "aboudzeineddin",
"login": "aboudzein",
},
{
"name": "Alexander Storozhevsky",
"login": "astorozhevsky"
},
{
"name": "dentych",
"login": "dentych"
},
{
"name": "Matt Lung",
"login": "Vitalini"
},
{
"name": "joselupianez",
"login": "joselupianez"
},
{
"name": "Hoang Duc Tan",
"login": "tanoggy"
},
{
"name": "Herman Kudria",
"login": "HKudria"
},
{
"name": "Ahmad Ghosheh",
"login": "BLaidzX"
},
{
"name": "Ben",
"login": "bendersej"
},
{
"name": "Rita Gorokhod",
"login": "rita-gorokhod"
},
{
name: "Dennis Rongo",
login: "dennisrongo"
},
{
"name": "x7airworker",
"login": "x7airworker"
},
{
"name": "Camilo Usuga",
"login": "camilou"
},
{
"name": "Fardeen Panjwani",
"login": "fardeenpanjwani-codeglo"
},
{
"name": "Tân Một Nắng",
"login": "tanoggy"
},
{
"name": "ashrafsamhouri",
"login": "ashrafsamhouri"
},
{
"name": "Ahmad-AbuOsbeh",
"login": "Ahmad-AbuOsbeh"
},
{
"name": "Fastkop",
"login": "abuaboud"
},
{
"name": "Abdul",
"login": "AbdulTheActivePiecer"
},
{
"name": "ahmad jaber",
"login": "creed983",
},
{
"name": "creed983",
"login": "creed983",
},
{
"name": "Activepieces Dev",
"login": "ashrafsamhouri"
},
{
"name": "hiasat",
"login": "abuaboud"
},
{
"name": "Mohammad",
"login": "abuaboud"
},
{
"name": "ActivePieces",
"login": "abuaboud"
},
{
"name": "haseebrehmanpc",
"login": "haseebrehmanpc"
},
{
"name": "Haseeb Rehman",
"login": "haseebrehmanpc"
}
]
function cleanAuthors(authors: string[]) {
const cleanedAuthors: string[] = []
authors.forEach(author => {
const contributor = contributors.find(contributor => contributor.name === author);
if (contributor) {
cleanedAuthors.push(contributor.login);
} else {
throw new Error(`Author ${author} not found`);
}
});
return cleanedAuthors;
}
/**
 * Returns the git author display names (`%aN`) that touched
 * `directoryPath`, validating each one against the contributors table.
 * NOTE(review): git runs with cwd '/workspace/' — assumes the repo is
 * checked out there.
 */
function loadAuthors(directoryPath: string) {
const gitLogCommand = `git log --format="%aN" -- ${directoryPath}`;
const result = execSync(gitLogCommand, { cwd: '/workspace/', encoding: 'utf-8' });
if (result.length === 0) {
return [];
}
const authors = result.trim().split('\n');
// Fail fast on an unmapped author so the contributors table gets updated.
authors.forEach(author => {
if (!contributors.find(contributor => contributor.name === author)) {
throw new Error(`Author ${author} not found in ${directoryPath}`);
}
})
return authors;
}
/**
 * Walks every piece folder under `directoryPath`, recomputes its authors
 * from git history, rewrites the `authors: [...]` array in src/index.ts,
 * and bumps the piece's package.json patch version.
 */
function loadSrcIndexFiles(directoryPath: string) {
const files = fs.readdirSync(directoryPath);
files.forEach(file => {
const filePath = path.join(directoryPath, file);
const stats = fs.statSync(filePath);
// Skip non-piece folders.
if (file === 'tmp' || file === 'framework' || file === 'common') return;
if (stats.isDirectory()) {
const indexPath = path.join(filePath, 'src', 'index.ts');
if (fs.existsSync(indexPath)) {
// Collect authors from the piece's current location AND its historical
// locations — presumably pieces were moved between these layouts over
// time, so old commits live under the replaced paths. TODO confirm.
const authorsOne = cleanAuthors(loadAuthors(filePath));
const authorsTwo = cleanAuthors(loadAuthors(filePath.replace('/community', '')));
const authorsThree = cleanAuthors(loadAuthors(filePath.replace('/community', '/src/lib/apps')));
const authorsFour = cleanAuthors(loadAuthors(filePath.replace('/community', '/src/apps')));
// De-duplicate and order (team members last).
const uniqueAuthors = customSort([...new Set([...authorsOne, ...authorsTwo, ...authorsThree, ...authorsFour])]);
console.log(uniqueAuthors);
const fileContent = fs.readFileSync(indexPath, { encoding: 'utf-8' });
const pattern = /authors: \[(.*?)\]/;
if (!pattern.test(fileContent)) {
throw new Error("Pattern 'authors: [...] not found in the file content. " + indexPath);
}
// Replace the first authors array in the piece's index.ts in place.
const modifiedContent = fileContent.replace(/authors: \[(.*?)\]/, `authors: ${JSON.stringify(uniqueAuthors)}`);
fs.writeFileSync(indexPath, modifiedContent, { encoding: 'utf-8' });
// Bump the patch version so the author change gets published.
const packageJson = path.join(filePath, 'package.json');
const packageJsonContent = JSON.parse(fs.readFileSync(packageJson, { encoding: 'utf-8' }));
packageJsonContent.version = semVer.inc(packageJsonContent.version, 'patch');
fs.writeFileSync(packageJson, JSON.stringify(packageJsonContent, null, 2), { encoding: 'utf-8' });
}
}
});
}
// Sort the official team members last.
const authorsOrder = ['Abdallah-Alwarawreh', 'Salem-Alaa', 'kishanprmr', 'MoShizzle', 'AbdulTheActivePiecer', 'khaledmashaly', 'abuaboud'].map(author => author.toLocaleLowerCase());

/**
 * Sorts logins so community contributors come first and official team
 * members (those in `authorsOrder`) come last, in `authorsOrder` order.
 */
function customSort(authors: string[]): string[] {
    return authors.sort((a, b) => {
        const indexA = authorsOrder.indexOf(a.toLocaleLowerCase());
        const indexB = authorsOrder.indexOf(b.toLocaleLowerCase());
        // BUG FIX: when BOTH authors are community members the old comparator
        // returned -1 unconditionally, making it inconsistent
        // (compare(a,b) < 0 and compare(b,a) < 0) — undefined sort behavior.
        // Treat two non-team authors as equal instead.
        if (indexA === -1 && indexB === -1) return 0;
        // Community authors (not in authorsOrder) sort before team members.
        if (indexA === -1) return -1;
        if (indexB === -1) return 1;
        // Both are team members: sort by their position in authorsOrder.
        return indexA - indexB;
    });
}
// Entrypoint: rewrite the authors arrays for every community piece.
const directoryToTraverse = '/workspace/packages/pieces/community'
loadSrcIndexFiles(directoryToTraverse);

View File

@@ -0,0 +1,70 @@
import assert from 'node:assert';
import { PieceMetadata } from '../../../packages/pieces/community/framework/src';
import { StatusCodes } from 'http-status-codes';
import { HttpHeader } from '../../../packages/pieces/community/common/src';
import { AP_CLOUD_API_BASE, findNewPieces, pieceMetadataExists } from '../utils/piece-script-utils';
import { chunk } from '../../../packages/shared/src/lib/common/utils/utils';
// The cloud admin API key must be provided via the environment.
assert(process.env['AP_CLOUD_API_KEY'], 'API Key is not defined');
const { AP_CLOUD_API_KEY } = process.env;
/**
 * Registers one piece's metadata with the cloud admin API.
 * A CONFLICT response (already registered) is tolerated; any other
 * non-OK status is fatal.
 */
const insertPieceMetadata = async (
pieceMetadata: PieceMetadata
): Promise<void> => {
const body = JSON.stringify(pieceMetadata);
const headers = {
['api-key']: AP_CLOUD_API_KEY,
[HttpHeader.CONTENT_TYPE]: 'application/json'
};
const cloudResponse = await fetch(`${AP_CLOUD_API_BASE}/admin/pieces`, {
method: 'POST',
headers,
body
});
// OK = inserted, CONFLICT = already exists; anything else aborts the run.
if (cloudResponse.status !== StatusCodes.OK && cloudResponse.status !== StatusCodes.CONFLICT) {
throw new Error(await cloudResponse.text());
}
};
// Inserts the piece's metadata into the cloud API unless that exact
// (name, version) pair is already registered.
const insertMetadataIfNotExist = async (pieceMetadata: PieceMetadata) => {
  console.info(
    `insertMetadataIfNotExist, name: ${pieceMetadata.name}, version: ${pieceMetadata.version}`
  );
  const alreadyInserted = await pieceMetadataExists(
    pieceMetadata.name,
    pieceMetadata.version
  );
  if (alreadyInserted) {
    console.info(`insertMetadataIfNotExist, piece metadata already inserted`);
    return;
  }
  await insertPieceMetadata(pieceMetadata);
};
// Inserts metadata in batches of 30, pausing 5s between batches to avoid
// overloading the cloud API.
const insertMetadata = async (piecesMetadata: PieceMetadata[]) => {
  for (const batch of chunk(piecesMetadata, 30)) {
    await Promise.all(batch.map(insertMetadataIfNotExist))
    await new Promise(resolve => setTimeout(resolve, 5000))
  }
};
// Entrypoint: find pieces missing from the cloud API and register them.
const main = async () => {
    console.log('update pieces metadata: started')
    const piecesMetadata = await findNewPieces()
    await insertMetadata(piecesMetadata)
    console.log('update pieces metadata: completed')
    process.exit()
}
// BUG FIX: exit non-zero on failure instead of dying with an unhandled
// promise rejection.
main().catch((err) => {
    console.error(err)
    process.exit(1)
})

View File

@@ -0,0 +1,62 @@
/**
* This is a minimal script to publish your package to "npm".
* This is meant to be used as-is or customize as you see fit.
*
* This script is executed on "dist/path/to/library" as "cwd" by default.
*
* You might need to authenticate with NPM before running this script.
*/
import pkg from '@nx/devkit';
const { readCachedProjectGraph } = pkg;
import { execSync } from 'child_process';
import { readFileSync, writeFileSync } from 'fs';
import chalk from 'chalk';
/**
 * Prints `message` in red and exits the process with code 1 when
 * `condition` is falsy; otherwise does nothing.
 */
function invariant(condition, message) {
  if (condition) {
    return;
  }
  console.error(chalk.bold.red(message));
  process.exit(1);
}
// Executing publish script: node path/to/publish.mjs {name} --version {version} --tag {tag}
// Default "tag" to "next" so we won't publish the "latest" tag by accident.
const [, , name, version, tag = 'next'] = process.argv;
// A simple SemVer validation to validate the version
const validVersion = /^\d+\.\d+\.\d+(-\w+\.\d+)?/;
invariant(
version && validVersion.test(version),
`No version provided or version did not match Semantic Versioning, expected: #.#.#-tag.# or #.#.#, got ${version}.`
);
// Resolve the project's build output path from the cached Nx project graph.
const graph = readCachedProjectGraph();
const project = graph.nodes[name];
invariant(
project,
`Could not find project "${name}" in the workspace. Is the project.json configured correctly?`
);
const outputPath = project.data?.targets?.build?.options?.outputPath;
invariant(
outputPath,
`Could not find "build.options.outputPath" of project "${name}". Is project.json configured correctly?`
);
// Publish from inside the build output directory.
process.chdir(outputPath);
// Updating the version in "package.json" before publishing
try {
const json = JSON.parse(readFileSync(`package.json`).toString());
json.version = version;
writeFileSync(`package.json`, JSON.stringify(json, null, 2));
} catch (e) {
// NOTE(review): the error is only logged and publishing continues with
// whatever version is already in the build output — confirm intended.
console.error(
chalk.bold.red(`Error reading package.json file from library build output.`)
);
}
// Execute "npm publish" to publish
execSync(`npm publish --access public --tag ${tag}`);

View File

@@ -0,0 +1,4 @@
// Promise-based wrapper around child_process.exec: resolves with
// { stdout, stderr }, rejects on non-zero exit.
import { exec as callbackStyleExec } from 'node:child_process'
import { promisify } from 'node:util'

export const exec = promisify(callbackStyleExec)

View File

@@ -0,0 +1,55 @@
import { readFile, writeFile } from 'node:fs/promises'
// Subset of package.json fields consumed by the build/publish scripts.
export type PackageJson = {
name: string
version: string
keywords: string[]
}
// Subset of an Nx project.json consumed by the build/publish scripts.
export type ProjectJson = {
name: string
targets?: {
build?: {
options?: {
// Whether buildable deps land in peerDependencies or dependencies.
buildableProjectDepsInPackageJsonType?: 'peerDependencies' | 'dependencies'
updateBuildableProjectDepsInPackageJson: boolean
}
},
lint: {
options: {
lintFilePatterns: string[]
}
}
}
}
// Parses the JSON file at `path` and casts it to T (no runtime validation).
const readJsonFile = async <T> (path: string): Promise<T> => {
    const raw = await readFile(path, { encoding: 'utf-8' })
    return JSON.parse(raw) as T
}

// Serializes `data` with 2-space indentation and writes it to `path`.
const writeJsonFile = async (path: string, data: unknown): Promise<void> => {
    await writeFile(path, JSON.stringify(data, null, 2), { encoding: 'utf-8' })
}

// Convenience readers/writers for the well-known JSON files of a package.
export const readPackageJson = (path: string): Promise<PackageJson> =>
    readJsonFile(`${path}/package.json`)

export const readProjectJson = (path: string): Promise<ProjectJson> =>
    readJsonFile(`${path}/project.json`)

export const readPackageEslint = (path: string): Promise<any> =>
    readJsonFile(`${path}/.eslintrc.json`)

export const writePackageEslint = (path: string, eslint: any): Promise<void> =>
    writeJsonFile(`${path}/.eslintrc.json`, eslint)

export const writeProjectJson = (path: string, projectJson: ProjectJson): Promise<void> =>
    writeJsonFile(`${path}/project.json`, projectJson)

View File

@@ -0,0 +1,86 @@
import assert from 'node:assert'
import { ExecException } from 'node:child_process'
import axios, { AxiosError } from 'axios'
import { exec } from './exec'
import { readPackageJson } from './files'
/**
 * Fetches the latest published version of `packageName` from the npm
 * registry, retrying transient failures with exponential backoff
 * (2s, 8s, 32s, ...). Returns null when the package has never been
 * published (registry responds 404); rethrows after `maxRetries`
 * transient failures.
 */
const getLatestPublishedVersion = async (packageName: string, maxRetries: number = 5): Promise<string | null> => {
    console.info(`[getLatestPublishedVersion] packageName=${packageName}`);
    const retryDelay = (attempt: number) => Math.pow(4, attempt - 1) * 2000;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            const response = await axios<{ version: string }>(`https://registry.npmjs.org/${packageName}/latest`);
            const version = response.data.version;
            console.info(`[getLatestPublishedVersion] packageName=${packageName}, latestVersion=${version}`);
            return version;
        } catch (e: unknown) {
            // BUG FIX: check for 404 BEFORE the last-attempt rethrow, so an
            // unpublished package detected on the final attempt still returns
            // null instead of throwing.
            if (e instanceof AxiosError && e.response?.status === 404) {
                console.info(`[getLatestPublishedVersion] packageName=${packageName}, latestVersion=null`);
                return null;
            }
            if (attempt === maxRetries) {
                throw e; // Out of retries: propagate the transient error.
            }
            console.warn(`[getLatestPublishedVersion] packageName=${packageName}, attempt=${attempt}, error=${e}`);
            const delay = retryDelay(attempt);
            await new Promise(resolve => setTimeout(resolve, delay)); // Wait for the delay before retrying
        }
    }
    return null; // Unreachable (loop always returns or throws); satisfies the type checker.
};
/**
 * True when the package at `path` differs from origin/main.
 * `git diff --quiet` exits 0 (no diff) or 1 (diff); exit code 1 surfaces
 * as an ExecException with `code === 1`. Any other failure is rethrown.
 * Throws when `path` cannot be normalized to a repo-relative
 * "packages/..." path.
 */
const packageChangedFromMainBranch = async (path: string): Promise<boolean> => {
    // Normalize absolute paths down to the repo-relative "packages/..." form.
    const cleaned = path.includes('/packages') ? `packages/` + path.split('packages/')[1] : path
    if (!cleaned.startsWith('packages/')) {
        throw new Error(`[packageChangedFromMainBranch] path=${cleaned} is not a valid package path`)
    }
    console.info(`[packageChangedFromMainBranch] path=${cleaned}`)
    try {
        // Only the exit code matters; the (empty) output was previously bound
        // to an unused variable.
        await exec(`git diff --quiet origin/main -- ${cleaned}`)
        return false
    }
    catch (e) {
        if ((e as ExecException).code === 1) {
            return true
        }
        throw e
    }
}
/**
 * Validates the package before publishing.
 * returns false if package can be published.
 * returns true if package is already published.
 * throws if validation fails (version not incremented despite changes).
 * @param path path of package to run pre-publishing checks for
 */
export const packagePrePublishChecks = async (path: string): Promise<boolean> => {
console.info(`[packagePrePublishValidation] path=${path}`)
assert(path, '[packagePrePublishValidation] parameter "path" is required')
const { name: packageName, version: currentVersion } = await readPackageJson(path)
const latestPublishedVersion = await getLatestPublishedVersion(packageName)
// Null latest version means the package was never published.
const currentVersionAlreadyPublished = latestPublishedVersion !== null && currentVersion === latestPublishedVersion
if (currentVersionAlreadyPublished) {
// If the source changed but the version was not bumped, publishing would
// silently ship nothing new — fail loudly instead.
const packageChanged = await packageChangedFromMainBranch(path)
if (packageChanged) {
throw new Error(`[packagePrePublishValidation] package version not incremented, path=${path}, version=${currentVersion}`)
}
console.info(`[packagePrePublishValidation] package already published, path=${path}, version=${currentVersion}`)
return true
}
return false
}

View File

@@ -0,0 +1,197 @@
import { readdir, stat } from 'node:fs/promises'
import { resolve, join } from 'node:path'
import { cwd } from 'node:process'
import { extractPieceFromModule } from '@activepieces/shared'
import * as semver from 'semver'
import { readPackageJson } from './files'
import { StatusCodes } from 'http-status-codes'
import { execSync } from 'child_process'
import { pieceTranslation,PieceMetadata } from '@activepieces/pieces-framework'
// Minimal structural view of a loaded piece module used by these scripts.
type SubPiece = {
name: string;
displayName: string;
version: string;
minimumSupportedRelease?: string;
maximumSupportedRelease?: string;
metadata(): Omit<PieceMetadata, 'name' | 'version'>;
};
// Cloud API base URL and well-known repository paths.
export const AP_CLOUD_API_BASE = 'https://cloud.activepieces.com/api/v1';
export const PIECES_FOLDER = 'packages/pieces'
export const COMMUNITY_PIECE_FOLDER = 'packages/pieces/community'
// Packages under packages/pieces that are shared libraries, not pieces.
export const NON_PIECES_PACKAGES = ['@activepieces/pieces-framework', '@activepieces/pieces-common']
// Validates the optional min/max supported-release constraints: each must
// be a valid semver version and min must not exceed max.
const validateSupportedRelease = (minRelease: string | undefined, maxRelease: string | undefined) => {
    const invalid = (release: string | undefined) => release !== undefined && !semver.valid(release)
    if (invalid(minRelease)) {
        throw Error(`[validateSupportedRelease] "minimumSupportedRelease" should be a valid semver version`)
    }
    if (invalid(maxRelease)) {
        throw Error(`[validateSupportedRelease] "maximumSupportedRelease" should be a valid semver version`)
    }
    if (minRelease !== undefined && maxRelease !== undefined && semver.gt(minRelease, maxRelease)) {
        throw Error(`[validateSupportedRelease] "minimumSupportedRelease" should be less than "maximumSupportedRelease"`)
    }
}
// Runs all metadata validations for a piece (currently just the supported
// release range).
const validateMetadata = (pieceMetadata: PieceMetadata): void => {
    console.info(`[validateMetadata] pieceName=${pieceMetadata.name}`)
    const { minimumSupportedRelease, maximumSupportedRelease } = pieceMetadata
    validateSupportedRelease(minimumSupportedRelease, maximumSupportedRelease)
}
// Case-insensitive comparator ordering piece metadata by display name
// using English collation.
const byDisplayNameIgnoreCase = (a: PieceMetadata, b: PieceMetadata) => {
    return a.displayName.toUpperCase().localeCompare(b.displayName.toUpperCase(), 'en');
};
/**
 * Source-tree folder of a community piece: one directory per piece,
 * directly under the community pieces folder.
 */
export function getCommunityPieceFolder(pieceName: string): string {
    const communityRoot = COMMUNITY_PIECE_FOLDER
    return join(communityRoot, pieceName)
}
/**
 * Find every piece directory in the SOURCE tree (not the dist output):
 * all folders under packages/pieces that contain a package.json.
 */
export async function findAllPiecesDirectoryInSource(): Promise<string[]> {
    const piecesRoot = resolve(cwd(), 'packages', 'pieces')
    return traverseFolder(piecesRoot)
}
/**
 * Check whether this exact piece name/version pair is already published,
 * by probing the Activepieces cloud metadata endpoint.
 *
 * 200 -> published, 404 -> not published; any other status is unexpected
 * and surfaces as an Error carrying the response body.
 */
export const pieceMetadataExists = async (
    pieceName: string,
    pieceVersion: string
): Promise<boolean> => {
    const cloudResponse = await fetch(
        `${AP_CLOUD_API_BASE}/pieces/${pieceName}?version=${pieceVersion}`
    );
    switch (cloudResponse.status) {
        case StatusCodes.OK:
            return true;
        case StatusCodes.NOT_FOUND:
            return false;
        default:
            throw new Error(await cloudResponse.text());
    }
};
/**
 * Find every built piece whose (name, version) is not yet published on the cloud.
 * Pieces that fail to load are skipped silently.
 */
export async function findNewPieces(): Promise<PieceMetadata[]> {
    const paths = await findAllDistPaths()
    const changedPieces: PieceMetadata[] = []
    // Adding batches because of memory limit when we have a lot of pieces
    const batchSize = 75
    for (let i = 0; i < paths.length; i += batchSize) {
        const batch = paths.slice(i, i + batchSize)
        const batchResults = await Promise.all(batch.map(async (folderPath) => {
            const packageJson = await readPackageJson(folderPath);
            // framework/common live under pieces/ but are not pieces themselves.
            if (NON_PIECES_PACKAGES.includes(packageJson.name)) {
                return null;
            }
            const exists = await pieceMetadataExists(packageJson.name, packageJson.version)
            if (!exists) {
                try {
                    // `await` is required here: returning the bare promise would let
                    // a rejection escape this try/catch entirely.
                    return await loadPieceFromFolder(folderPath);
                } catch (ex) {
                    return null;
                }
            }
            return null;
        }))
        const validResults = batchResults.filter((piece): piece is PieceMetadata => piece !== null)
        changedPieces.push(...validResults)
    }
    return changedPieces;
}
/**
 * Load every built piece from the dist output, dropping folders that failed
 * to load, sorted case-insensitively by display name for stable output.
 */
export async function findAllPieces(): Promise<PieceMetadata[]> {
    const distPaths = await findAllDistPaths()
    const loaded = await Promise.all(distPaths.map((distPath) => loadPieceFromFolder(distPath)))
    const pieces = loaded.filter((piece): piece is PieceMetadata => piece !== null)
    return pieces.sort(byDisplayNameIgnoreCase)
}
// Folders of built pieces: everything under dist/packages/pieces that
// contains a package.json (nx build output).
async function findAllDistPaths(): Promise<string[]> {
    const distPiecesRoot = resolve(cwd(), 'dist', 'packages', 'pieces')
    return traverseFolder(distPiecesRoot)
}
/**
 * Recursively collect every directory under `folderPath` that directly
 * contains a package.json file. `node_modules` and `dist` subtrees are
 * skipped; a missing or non-directory `folderPath` yields an empty list.
 */
async function traverseFolder(folderPath: string): Promise<string[]> {
    const found: string[] = []
    // stat() rejects when the path does not exist; treat that as "nothing here".
    const stats = await stat(folderPath).catch(() => null)
    if (!stats || !stats.isDirectory()) {
        return found
    }
    for (const entry of await readdir(folderPath)) {
        const entryPath = join(folderPath, entry)
        const entryStats = await stat(entryPath)
        if (entryStats.isDirectory()) {
            if (entry !== 'node_modules' && entry !== 'dist') {
                found.push(...await traverseFolder(entryPath))
            }
        }
        else if (entry === 'package.json') {
            found.push(folderPath)
        }
    }
    return found
}
/**
 * Load a built piece from `folderPath` and return its full metadata,
 * or null when the folder cannot be loaded as a piece (errors are logged,
 * not rethrown, so a single bad piece does not abort a whole scan).
 */
async function loadPieceFromFolder(folderPath: string): Promise<PieceMetadata | null> {
    try {
        const packageJson = await readPackageJson(folderPath);
        // NOTE(review): this stats package.json, which readPackageJson just read
        // successfully, so the guard is effectively always true and `bun install`
        // runs for every folder. The variable name suggests a lockfile
        // (package-lock.json / bun.lockb) may have been intended — confirm.
        const packageLockPath = join(folderPath, 'package.json');
        const packageExists = await stat(packageLockPath).catch(() => null);
        if (packageExists) {
            console.info(`[loadPieceFromFolder] package.json exists, running bun install`)
            execSync('bun install', { cwd: folderPath, stdio: 'inherit' });
        }
        // Dynamically import the piece's entry module and extract the piece object.
        const module = await import(
            join(folderPath, 'src', 'index')
        )
        const { name: pieceName, version: pieceVersion } = packageJson
        const piece = extractPieceFromModule<SubPiece>({
            module,
            pieceName,
            pieceVersion
        });
        const originalMetadata = piece.metadata()
        // Load translations bundled next to the piece, if any.
        const i18n = await pieceTranslation.initializeI18n(folderPath)
        // name/version come from package.json, not from the piece module.
        const metadata = {
            ...originalMetadata,
            name: packageJson.name,
            version: packageJson.version,
            i18n
        };
        metadata.directoryPath = folderPath;
        // Redundant with the literal above, but harmless; kept as-is.
        metadata.name = packageJson.name;
        metadata.version = packageJson.version;
        // Default the supported-release window to "unbounded" when undeclared.
        metadata.minimumSupportedRelease = piece.minimumSupportedRelease ?? '0.0.0';
        metadata.maximumSupportedRelease =
            piece.maximumSupportedRelease ?? '99999.99999.9999';
        validateMetadata(metadata);
        return metadata;
    }
    catch (ex) {
        // Best effort: log and fall through to the null return below.
        console.error(ex)
    }
    return null
}

View File

@@ -0,0 +1,43 @@
import assert from 'node:assert'
import { argv } from 'node:process'
import { exec } from './exec'
import { readPackageJson, readProjectJson } from './files'
import { packagePrePublishChecks } from './package-pre-publish-checks'
export const publishNxProject = async (path: string): Promise<void> => {
console.info(`[publishNxProject] path=${path}`)
assert(path, '[publishNxProject] parameter "path" is required')
const packageAlreadyPublished = await packagePrePublishChecks(path);
if (packageAlreadyPublished) {
return;
}
const { version } = await readPackageJson(path)
const { name: nxProjectName } = await readProjectJson(path)
const nxPublishProjectCommand = `
node tools/scripts/publish.mjs \
${nxProjectName} \
${version} \
latest
`
await exec(nxPublishProjectCommand)
console.info(`[publishNxProject] success, path=${path}, version=${version}`)
}
const main = async (): Promise<void> => {
    // First CLI argument is the path of the nx project to publish.
    const path = argv[2]
    await publishNxProject(path)
}
/*
 * module is entrypoint, not imported i.e. invoked directly
 * see https://nodejs.org/api/modules.html#modules_accessing_the_main_module
 */
if (require.main === module) {
    // Surface failures as a non-zero exit code instead of an
    // unhandled promise rejection.
    main().catch((error) => {
        console.error(error)
        process.exitCode = 1
    })
}

View File

@@ -0,0 +1,23 @@
import { findAllPiecesDirectoryInSource } from './utils/piece-script-utils';
import { packagePrePublishChecks } from './utils/package-pre-publish-checks';
/**
 * Run pre-publish checks for every piece package plus the shared framework
 * packages; when a shared dependency changed, also check `packages/shared`.
 * Exits non-zero when any check rejects.
 */
const main = async () => {
    // Directories of every piece found in the source tree.
    const pieceDirectories = await findAllPiecesDirectoryInSource()
    const sharedDependencies = ['packages/pieces/community/framework', 'packages/pieces/community/common']
    // NOTE(review): pieceDirectories appear to be absolute paths while
    // sharedDependencies are repo-relative, so this filter may never
    // exclude anything — confirm against findAllPiecesDirectoryInSource.
    const validationResults = pieceDirectories
        .filter(p => !sharedDependencies.includes(p))
        .map(p => packagePrePublishChecks(p))
    const sharedDependenciesValidationResults = await Promise.all(sharedDependencies.map(p => packagePrePublishChecks(p)))
    // A shared dependency "changed" when its check reports it is not yet published.
    const isSharedDependenciesChanged = !sharedDependenciesValidationResults.every(p => p)
    if (isSharedDependenciesChanged) {
        validationResults.push(packagePrePublishChecks('packages/shared'))
    }
    // Await the pending checks so failures reject main() instead of being dropped.
    await Promise.all(validationResults)
}
main().catch((error) => {
    console.error(error)
    process.exitCode = 1
})