Add Activepieces integration for workflow automation

- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
poduck
2025-12-18 22:59:37 -05:00
parent 9848268d34
commit 3aa7199503
16292 changed files with 1284892 additions and 4708 deletions

View File

@@ -0,0 +1,31 @@
AP_API_KEY=api-key
AP_DB_TYPE=PGLITE
AP_ENVIRONMENT=test
AP_ENCRYPTION_KEY=7e19fad4c13eaea8f657afb12e8f9c40
AP_FRONTEND_URL=http://localhost:4200
AP_WEBHOOK_TIMEOUT_SECONDS=30
AP_LOG_LEVEL=error
AP_LOG_PRETTY=true
AP_TELEMETRY_ENABLED=false
AP_ENRICH_ERROR_CONTEXT=true
AP_TRIGGER_DEFAULT_POLL_INTERVAL=1
AP_CACHE_PATH=./dev/cache
AP_DEV_PIECES=""
AP_PIECES_SYNC_MODE=NONE
AP_EDITION=ee
AP_REDIS_TYPE=MEMORY
AP_EXECUTION_MODE=UNSANDBOXED
AP_JWT_SECRET=secret
AP_STRIPE_SECRET_KEY=invalid-key
AP_FIREBASE_HASH_PARAMETERS={\"memCost\":14,\"rounds\":8,\"signerKey\":\"YE0dO4bwD4JnJafh6lZZfkp1MtKzuKAXQcDCJNJNyeCHairWHKENOkbh3dzwaCdizzOspwr/FITUVlnOAwPKyw==\",\"saltSeparator\":\"Bw==\"}
# NOTE: duplicate — AP_API_KEY is already defined at the top of this file; keep only one.
AP_API_KEY="api-key"
AP_CLOUD_PLATFORM_ID="cloud-id"
AP_APPSUMO_TOKEN="app-sumo-token"
AP_CONTAINER_TYPE=WORKER_AND_APP
AP_REDIS_HOST=redis
AP_REDIS_PORT=6379
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GOOGLE_GENERATIVE_AI_API_KEY=
REPLICATE_API_TOKEN=
NODE_OPTIONS="--max-old-space-size=8192"

View File

@@ -0,0 +1,144 @@
{
"extends": [
"../../../.eslintrc.json",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/strict",
"plugin:import-x/recommended"
],
"ignorePatterns": [
"!**/*",
"node_modules/**/*"
],
"overrides": [
{
"files": [
"*.ts",
"*.js"
],
"parserOptions": {
"project": [
"packages/server/api/tsconfig.*?.json"
]
},
"rules": {
"import-x/no-unresolved": "off",
"no-console": "error",
"object-shorthand": "error",
"@typescript-eslint/switch-exhaustiveness-check": "error",
"@typescript-eslint/brace-style": [
"error",
"stroustrup"
],
"@typescript-eslint/comma-dangle": [
"error",
"always-multiline"
],
"@typescript-eslint/indent": [
"error",
4
],
"@typescript-eslint/quotes": [
"error",
"single"
],
"@typescript-eslint/semi": [
"error",
"never"
],
"@typescript-eslint/consistent-type-definitions": [
"error",
"type"
],
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-redundant-type-constituents": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/comma-spacing": "error",
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/block-spacing": "error",
"@typescript-eslint/func-call-spacing": "error",
"@typescript-eslint/key-spacing": "error",
"@typescript-eslint/object-curly-spacing": [
"error",
"always"
],
"@typescript-eslint/space-before-blocks": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/member-delimiter-style": [
"error",
{
"multiline": {
"delimiter": "none"
},
"singleline": {
"delimiter": "comma",
"requireLast": false
}
}
],
"@typescript-eslint/no-unused-vars": [
"error",
{
"varsIgnorePattern": "^_",
"argsIgnorePattern": "^_"
}
],
"@typescript-eslint/space-before-function-paren": [
"error",
{
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}
],
"@typescript-eslint/space-infix-ops": "error",
"@typescript-eslint/keyword-spacing": "error",
"@typescript-eslint/explicit-function-return-type": "warn",
"@typescript-eslint/no-floating-promises": "error",
"@typescript-eslint/no-misused-promises": "warn",
"no-return-await": "off",
"@typescript-eslint/return-await": [
"error",
"in-try-catch"
],
"default-case-last": "error",
"import-x/no-duplicates": "error",
"import-x/order": [
"error",
{
"alphabetize": {
"order": "asc"
}
}
],
"sort-imports": [
"error",
{
"ignoreCase": true,
"ignoreDeclarationSort": true,
"ignoreMemberSort": false,
"memberSyntaxSortOrder": [
"none",
"all",
"multiple",
"single"
],
"allowSeparatedGroups": false
}
]
}
}
],
"settings": {
"import-x/ignore": [
"node_modules"
],
"import-x/resolver": {
"typescript": {
"alwaysTryTypes": false
},
"node": true
}
}
}

View File

@@ -0,0 +1,9 @@
# Backend
## scripts
### Generate database migrations
```sh
npx nx db-migration server-api -- --name migration-name
```

View File

@@ -0,0 +1 @@
module.exports = {}; // You can export a mock implementation if needed

View File

@@ -0,0 +1,25 @@
/* eslint-disable */
export default {
displayName: 'server-api',
preset: '../../../jest.preset.js',
globals: {},
testEnvironment: 'node',
setupFiles: ['<rootDir>/jest.setup.js'],
transform: {
'^.+\\.[tj]s$': [
'ts-jest',
{
tsconfig: '<rootDir>/tsconfig.spec.json',
},
],
},
"moduleNameMapper": {
"isolated-vm": "<rootDir>/__mocks__/isolated-vm.js",
"^@activepieces/shared$": "<rootDir>/../../../packages/shared/src/index.ts",
"^@activepieces/ee-shared$": "<rootDir>/../../../packages/ee/shared/src/index.ts"
},
moduleFileExtensions: ['ts', 'js', 'html'],
coverageDirectory: '../../../coverage/packages/server/api',
testTimeout: 200000,
maxWorkers: 1
};

View File

@@ -0,0 +1,4 @@
const path = require('path');
const resolvedPath = path.resolve('packages/server/api/.env.tests');
require('dotenv').config({ path: resolvedPath});
console.log("Configuring jest " + resolvedPath)

View File

@@ -0,0 +1,170 @@
{
"name": "server-api",
"$schema": "../../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "packages/server/api/src",
"projectType": "application",
"tags": [],
"implicitDependencies": ["engine"],
"targets": {
"build": {
"executor": "@nx/esbuild:esbuild",
"outputs": ["{options.outputPath}"],
"defaultConfiguration": "production",
"options": {
"external": ["@electric-sql/pglite"],
"main": "packages/server/api/src/main.ts",
"outputPath": "dist/packages/server/api",
"outputFileName": "main.js",
"tsConfig": "packages/server/api/tsconfig.app.json",
"platform": "node",
"format": ["cjs"],
"bundle": true,
"sourcemap": true,
"generatePackageJson": true,
"assets": [
{
"glob": "packages/server/api/README.md",
"input": ".",
"output": "."
},
"packages/server/api/src/assets"
]
},
"configurations": {
"development": {
"minify": false
},
"production": {
"minify": false
}
}
},
"serve": {
"executor": "@nx/js:node",
"options": {
"buildTarget": "server-api:build",
"host": "0.0.0.0"
},
"configurations": {
"production": {
"buildTarget": "server-api:build:production"
}
}
},
"lint": {
"executor": "@nx/eslint:lint",
"outputs": ["{options.outputFile}"],
"options": {
"lintFilePatterns": ["packages/server/api/**/*.ts"]
}
},
"test-unit": {
"executor": "@nx/jest:jest",
"outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
"options": {
"jestConfig": "packages/server/api/jest.config.ts",
"passWithNoTests": false,
"bail": true,
"testPathPatterns": ["packages/server/api/test/unit"]
}
},
"test-ce-command": {
"executor": "@nx/jest:jest",
"outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
"options": {
"jestConfig": "packages/server/api/jest.config.ts",
"passWithNoTests": false,
"bail": true,
"testPathPatterns": ["packages/server/api/test/integration/ce"]
}
},
"test-cloud-command": {
"executor": "@nx/jest:jest",
"outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
"options": {
"jestConfig": "packages/server/api/jest.config.ts",
"passWithNoTests": false,
"bail": true,
"testPathPatterns": ["packages/server/api/test/integration/cloud"]
}
},
"test-ee-command": {
"executor": "@nx/jest:jest",
"outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
"options": {
"jestConfig": "packages/server/api/jest.config.ts",
"passWithNoTests": false,
"bail": true,
"testPathPatterns": ["packages/server/api/test/integration/ee"]
}
},
"test-ee": {
"executor": "nx:run-commands",
"options": {
"commands": [
"export $(cat packages/server/api/.env.tests | xargs) && NODE_OPTIONS='--experimental-vm-modules' AP_EDITION=ee nx test-ee-command server-api --output-style stream-without-prefixes"
],
"parallel": false
}
},
"test-ce": {
"executor": "nx:run-commands",
"options": {
"commands": [
"nx build server-api",
"export $(cat packages/server/api/.env.tests | xargs) && NODE_OPTIONS='--experimental-vm-modules' AP_EDITION=ce nx test-ce-command server-api --output-style stream-without-prefixes"
],
"parallel": false
}
},
"test-cloud": {
"executor": "nx:run-commands",
"options": {
"commands": [
"nx build server-api",
"export $(cat packages/server/api/.env.tests | xargs) && NODE_OPTIONS='--experimental-vm-modules' AP_EDITION=cloud nx test-cloud-command server-api --output-style stream-without-prefixes"
],
"parallel": false
}
},
"test": {
"executor": "nx:run-commands",
"options": {
"commands": [
"nx test-ee server-api",
"nx test-ce server-api",
"nx test-cloud server-api"
],
"parallel": true
}
},
"test:docker": {
"command": "UID=\"$(id -u)\" GID=\"$(id -g)\" docker compose --profile full -f docker-compose.test.yml up --exit-code-from app --attach app"
},
"db": {
"executor": "nx:run-commands",
"options": {
"command": "ts-node -r tsconfig-paths/register -P packages/server/api/tsconfig.app.json ./node_modules/typeorm/cli.js"
}
},
"db-migration": {
"executor": "nx:run-commands",
"options": {
"command": "nx db server-api -- migration:generate -p -d packages/server/api/src/app/database/migration-data-source.ts packages/server/api/src/app/database/migration/postgres/{args.name} {args.flags}"
}
},
"check-migrations": {
"executor": "nx:run-commands",
"options": {
"commands": [
"echo '🔍 Checking for schema changes without migrations...'",
"export $(cat packages/server/api/.env.tests | xargs) && export AP_DEV_PIECES='' && export AP_ENVIRONMENT=dev && export AP_EDITION=ce && nx db server-api -- migration:run -d packages/server/api/src/app/database/migration-data-source.ts > /dev/null 2>&1",
"echo 'Checking for schema drift...'",
"export $(cat packages/server/api/.env.tests | xargs) && export AP_DEV_PIECES='' && export AP_ENVIRONMENT=dev && export AP_EDITION=ce && nx db-migration server-api --flags='--dryrun --check' || (echo '' && echo '❌ ERROR: Schema changes detected without corresponding migration!' && echo '' && echo 'The PR contains database schema changes without corresponding migration.' && echo 'Please generate a migration using:' && echo ' nx db-migration server-api --name=<DESCRIPTIVE_NAME>' && echo '' && echo 'For more information, see: docs/handbook/engineering/playbooks/database-migration.mdx' && echo '' && exit 1)",
"echo '✅ No missing migrations detected'"
],
"parallel": false
}
}
}
}

View File

@@ -0,0 +1,88 @@
import { AIProviderConfig, AIProviderModel, AIProviderName, AIProviderWithoutSensitiveData, CreateAIProviderRequest, PrincipalType } from '@activepieces/shared'
import { FastifyPluginAsyncTypebox, Type } from '@fastify/type-provider-typebox'
import { StatusCodes } from 'http-status-codes'
import { aiProviderService } from './ai-provider-service'
export const aiProviderController: FastifyPluginAsyncTypebox = async (app) => {
app.get('/', ListAIProviders, async (request) => {
const platformId = request.principal.platform.id
return aiProviderService(app.log).listProviders(platformId)
})
app.get('/:id/config', GetAIProviderConfig, async (request) => {
const platformId = request.principal.platform.id
return aiProviderService(app.log).getConfig(platformId, request.params.id)
})
app.get('/:id/models', ListModels, async (request) => {
const platformId = request.principal.platform.id
return aiProviderService(app.log).listModels(platformId, request.params.id)
})
app.post('/', CreateAIProvider, async (request, reply) => {
const platformId = request.principal.platform.id
await aiProviderService(app.log).upsert(platformId, request.body)
return reply.status(StatusCodes.NO_CONTENT).send()
})
app.delete('/:id', DeleteAIProvider, async (request, reply) => {
const platformId = request.principal.platform.id
await aiProviderService(app.log).delete(platformId, request.params.id)
return reply.status(StatusCodes.NO_CONTENT).send()
})
}
const ListAIProviders = {
config: {
allowedPrincipals: [PrincipalType.USER, PrincipalType.ENGINE] as const,
},
schema: {
response: {
[StatusCodes.OK]: Type.Array(AIProviderWithoutSensitiveData),
},
},
}
const GetAIProviderConfig = {
config: {
allowedPrincipals: [PrincipalType.ENGINE] as const,
},
schema: {
params: Type.Object({
id: Type.Enum(AIProviderName),
}),
response: {
[StatusCodes.OK]: AIProviderConfig,
},
},
}
const ListModels = {
config: {
allowedPrincipals: [PrincipalType.USER, PrincipalType.ENGINE] as const,
},
schema: {
params: Type.Object({
id: Type.Enum(AIProviderName),
}),
response: {
[StatusCodes.OK]: Type.Array(AIProviderModel),
},
},
}
const CreateAIProvider = {
config: {
allowedPrincipals: [PrincipalType.USER] as const,
},
schema: {
body: CreateAIProviderRequest,
},
}
const DeleteAIProvider = {
config: {
allowedPrincipals: [PrincipalType.USER] as const,
},
schema: {
params: Type.Object({
id: Type.Enum(AIProviderName),
}),
},
}

View File

@@ -0,0 +1,54 @@
import { AIProvider, AIProviderName, Platform } from '@activepieces/shared'
import { Static, Type } from '@sinclair/typebox'
import { EntitySchema } from 'typeorm'
import { ApIdSchema, BaseColumnSchemaPart } from '../database/database-common'
import { EncryptedObject } from '../helper/encryption'
const AIProviderEncrypted = Type.Composite([Type.Omit(AIProvider, ['config']), Type.Object({
config: EncryptedObject,
})])
type AIProviderEncrypted = Static<typeof AIProviderEncrypted>
export type AIProviderSchema = AIProviderEncrypted & {
platform: Platform
provider: AIProviderName
}
export const AIProviderEntity = new EntitySchema<AIProviderSchema>({
name: 'ai_provider',
columns: {
...BaseColumnSchemaPart,
config: {
type: 'json',
nullable: false,
},
provider: {
type: String,
nullable: false,
},
platformId: {
...ApIdSchema,
nullable: false,
},
},
indices: [
{
name: 'idx_ai_provider_platform_id_provider',
columns: ['platformId', 'provider'],
unique: true,
},
],
relations: {
platform: {
type: 'many-to-one',
target: 'platform',
cascade: true,
onDelete: 'CASCADE',
joinColumn: {
name: 'platformId',
foreignKeyConstraintName: 'fk_ai_provider_platform_id',
},
},
},
})

View File

@@ -0,0 +1,109 @@
import { ActivepiecesError, AIProviderConfig, AIProviderModel, AIProviderName, AIProviderWithoutSensitiveData,
ApEdition,
apId,
CreateAIProviderRequest,
ErrorCode,
PlatformId,
} from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import cron from 'node-cron'
import { repoFactory } from '../core/db/repo-factory'
import { platformAiCreditsService } from '../ee/platform/platform-plan/platform-ai-credits'
import { encryptUtils } from '../helper/encryption'
import { system } from '../helper/system/system'
import { AIProviderEntity, AIProviderSchema } from './ai-provider-entity'
import { aiProviders } from './providers'
const aiProviderRepo = repoFactory<AIProviderSchema>(AIProviderEntity)
const modelsCache = new Map<string, AIProviderModel[]>()
export const aiProviderService = (log: FastifyBaseLogger) => ({
async setup(): Promise<void> {
cron.schedule('0 0 * * *', () => {
log.info('Clearing AI provider models cache')
modelsCache.clear()
})
},
async listProviders(platformId: PlatformId): Promise<AIProviderWithoutSensitiveData[]> {
const enableOpenRouterProvider = platformAiCreditsService(log).isEnabled()
const configuredProviders = await aiProviderRepo().findBy({ platformId })
const formattedProviders: AIProviderWithoutSensitiveData[] = Object.values(AIProviderName).filter(id => id !== AIProviderName.ACTIVEPIECES).map(id => {
return {
id,
name: aiProviders[id].name,
configured: !!configuredProviders.find(c => c.provider === id),
}
})
if (enableOpenRouterProvider) {
formattedProviders.push({
id: AIProviderName.ACTIVEPIECES,
name: aiProviders[AIProviderName.ACTIVEPIECES].name,
configured: true,
})
}
return formattedProviders
},
async listModels(platformId: PlatformId, providerId: AIProviderName): Promise<AIProviderModel[]> {
const config = await this.getConfig(platformId, providerId)
const cacheKey = `${providerId}-${config.apiKey}`
if (modelsCache.has(cacheKey)) {
return modelsCache.get(cacheKey)!
}
const provider = aiProviders[providerId]
const data = await provider.listModels(config)
modelsCache.set(cacheKey, data.map(model => ({
id: model.id,
name: model.name,
type: model.type,
})))
return modelsCache.get(cacheKey)!
},
async upsert(platformId: PlatformId, request: CreateAIProviderRequest): Promise<void> {
if (request.provider === AIProviderName.AZURE && system.getEdition() !== ApEdition.ENTERPRISE) {
throw new ActivepiecesError({
code: ErrorCode.FEATURE_DISABLED,
params: {
message: 'Azure OpenAI is only available for enterprise customers',
},
})
}
await aiProviderRepo().upsert({
id: apId(),
config: await encryptUtils.encryptObject(request.config),
provider: request.provider,
platformId,
}, ['provider', 'platformId'])
},
async delete(platformId: PlatformId, provider: AIProviderName): Promise<void> {
await aiProviderRepo().delete({
platformId,
provider,
})
},
async getConfig(platformId: PlatformId, providerId: AIProviderName): Promise<GetProviderConfigResponse> {
if (providerId === AIProviderName.ACTIVEPIECES) {
const provisionedKey = await platformAiCreditsService(log).provisionKeyIfNeeded(platformId)
return {
apiKey: provisionedKey.key,
}
}
const aiProvider = await aiProviderRepo().findOneByOrFail({
platformId,
provider: providerId,
})
return encryptUtils.decryptObject<AIProviderConfig>(aiProvider.config)
},
})
export type GetProviderConfigResponse = AIProviderConfig

View File

@@ -0,0 +1,6 @@
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { aiProviderController } from './ai-provider-controller'
export const aiProviderModule: FastifyPluginAsyncTypebox = async (app) => {
await app.register(aiProviderController, { prefix: '/v1/ai-providers' })
}

View File

@@ -0,0 +1,6 @@
import { AIProviderConfig, AIProviderModel } from '@activepieces/shared'
export type AIProviderStrategy<T extends AIProviderConfig> = {
name: string
listModels(config: T): Promise<AIProviderModel[]>
}

View File

@@ -0,0 +1,31 @@
import { httpClient, HttpMethod } from '@activepieces/pieces-common'
import { AIProviderModel, AIProviderModelType, AnthropicProviderConfig } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
export const anthropicProvider: AIProviderStrategy<AnthropicProviderConfig> = {
name: 'Anthropic',
async listModels(config: AnthropicProviderConfig): Promise<AIProviderModel[]> {
const res = await httpClient.sendRequest<{ data: AnthropicModel[] }>({
url: 'https://api.anthropic.com/v1/models',
method: HttpMethod.GET,
headers: {
'x-api-key': config.apiKey,
'Content-Type': 'application/json',
'anthropic-version': '2023-06-01',
},
})
const { data } = res.body
return data.map((model: AnthropicModel) => ({
id: model.id,
name: model.display_name,
type: AIProviderModelType.TEXT,
}))
},
}
type AnthropicModel = {
id: string
display_name: string
}

View File

@@ -0,0 +1,37 @@
import { httpClient, HttpMethod } from '@activepieces/pieces-common'
import { AIProviderModel, AIProviderModelType, AzureProviderConfig } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
export const azureProvider: AIProviderStrategy<AzureProviderConfig> = {
name: 'Azure OpenAI',
async listModels(config: AzureProviderConfig): Promise<AIProviderModel[]> {
const endpoint = `https://${config.resourceName}.openai.azure.com`
const apiKey = config.apiKey
const apiVersion = '2024-10-21'
if (!endpoint || !apiKey) {
return []
}
const res = await httpClient.sendRequest<{ data: AzureModel[] }>({
url: `${endpoint}/openai/deployments?api-version=${apiVersion}`,
method: HttpMethod.GET,
headers: {
'api-key': config.apiKey,
'Content-Type': 'application/json',
},
})
const { data } = res.body
return data.map((deployment: AzureModel) => ({
id: deployment.name,
name: deployment.name,
type: AIProviderModelType.TEXT,
}))
},
}
type AzureModel = {
name: string
}

View File

@@ -0,0 +1,27 @@
import { httpClient, HttpMethod } from '@activepieces/pieces-common'
import { AIProviderModel, AIProviderModelType, GoogleProviderConfig } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
export const googleProvider: AIProviderStrategy<GoogleProviderConfig> = {
name: 'Google',
async listModels(config: GoogleProviderConfig): Promise<AIProviderModel[]> {
const res = await httpClient.sendRequest<{ models: GoogleModel[] }>({
url: 'https://generativelanguage.googleapis.com/v1beta/models?pageSize=1000',
method: HttpMethod.GET,
headers: {
'x-goog-api-key': config.apiKey,
'Content-Type': 'application/json',
},
})
return res.body.models.map((model: GoogleModel) => ({
id: model.name,
name: model.displayName,
type: model.name.includes('image') ? AIProviderModelType.IMAGE : AIProviderModelType.TEXT,
}))
},
}
type GoogleModel = {
name: string
displayName: string
}

View File

@@ -0,0 +1,21 @@
import { AIProviderConfig, AIProviderName } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
import { anthropicProvider } from './anthropic-provider'
import { azureProvider } from './azure-provider'
import { googleProvider } from './google-provider'
import { openaiProvider } from './openai-provider'
import { openRouterProvider } from './openrouter-provider'
export const aiProviders: Record<AIProviderName, AIProviderStrategy<AIProviderConfig>> = {
[AIProviderName.OPENAI]: openaiProvider,
[AIProviderName.ANTHROPIC]: anthropicProvider,
[AIProviderName.OPENROUTER]: openRouterProvider,
[AIProviderName.AZURE]: azureProvider,
[AIProviderName.GOOGLE]: googleProvider,
[AIProviderName.ACTIVEPIECES]: {
...openRouterProvider,
name: 'Activepieces',
},
}
export { AIProviderStrategy } from './ai-provider'

View File

@@ -0,0 +1,35 @@
import { httpClient, HttpMethod } from '@activepieces/pieces-common'
import { AIProviderModel, AIProviderModelType, OpenAIProviderConfig } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
export const openaiProvider: AIProviderStrategy<OpenAIProviderConfig> = {
name: 'OpenAI',
async listModels(config: OpenAIProviderConfig): Promise<AIProviderModel[]> {
const res = await httpClient.sendRequest<{ data: OpenAIModel[] }>({
url: 'https://api.openai.com/v1/models',
method: HttpMethod.GET,
headers: {
'Authorization': `Bearer ${config.apiKey}`,
'Content-Type': 'application/json',
},
})
const { data } = res.body
const openaiImageModels = [
'gpt-image-1',
'dall-e-3',
'dall-e-2',
]
return data.map((model: OpenAIModel) => ({
id: model.id,
name: model.id,
type: openaiImageModels.includes(model.id) ? AIProviderModelType.IMAGE : AIProviderModelType.TEXT,
}))
},
}
type OpenAIModel = {
id: string
}

View File

@@ -0,0 +1,33 @@
import { httpClient, HttpMethod } from '@activepieces/pieces-common'
import { AIProviderModel, AIProviderModelType, OpenRouterProviderConfig } from '@activepieces/shared'
import { AIProviderStrategy } from './ai-provider'
export const openRouterProvider: AIProviderStrategy<OpenRouterProviderConfig> = {
name: 'OpenRouter',
async listModels(config: OpenRouterProviderConfig): Promise<AIProviderModel[]> {
const res = await httpClient.sendRequest<{ data: OpenRouterModel[] }>({
url: 'https://openrouter.ai/api/v1/models/user',
method: HttpMethod.GET,
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${config.apiKey}`,
},
})
const { data } = res.body
return data.map((model: OpenRouterModel) => ({
id: model.id,
name: model.name,
type: model.architecture.output_modalities.includes('image') ? AIProviderModelType.IMAGE : AIProviderModelType.TEXT,
}))
},
}
type OpenRouterModel = {
id: string
name: string
architecture: {
output_modalities: string[]
}
}

View File

@@ -0,0 +1,85 @@
import { FlowActionType, FlowStatus, flowStructureUtil, FlowTriggerType, isNil, PieceAction, PieceTrigger } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { repoFactory } from '../core/db/repo-factory'
import { FlowEntity } from '../flows/flow/flow.entity'
import { flowVersionService } from '../flows/flow-version/flow-version.service'
import { SystemJobName } from '../helper/system-jobs/common'
import { systemJobHandlers } from '../helper/system-jobs/job-handlers'
import { systemJobsSchedule } from '../helper/system-jobs/system-job'
import { pieceMetadataService } from '../pieces/metadata/piece-metadata-service'
import { projectService } from '../project/project-service'
const flowRepo = repoFactory(FlowEntity)
export const piecesAnalyticsService = (log: FastifyBaseLogger) => ({
async init(): Promise<void> {
systemJobHandlers.registerJobHandler(SystemJobName.PIECES_ANALYTICS, async () => {
const flowIds: string[] = (await flowRepo().createQueryBuilder().select('id').where({
status: FlowStatus.ENABLED,
}).getRawMany()).map((flow) => flow.id)
const activeProjects: Record<string, Set<string>> = {}
log.info('Syncing pieces analytics')
for (const flowId of flowIds) {
const flow = await flowRepo().findOneBy({
id: flowId,
})
const publishedVersionId = flow?.publishedVersionId
if (isNil(flow) || isNil(publishedVersionId)) {
continue
}
const flowVersion = await flowVersionService(log).getOne(publishedVersionId)
if (isNil(flowVersion)) {
continue
}
const pieces = flowStructureUtil.getAllSteps(flowVersion.trigger).filter(
(step) =>
step.type === FlowActionType.PIECE || step.type === FlowTriggerType.PIECE,
).map((step) => {
const clonedStep = step as (PieceTrigger | PieceAction)
return {
name: clonedStep.settings.pieceName,
version: clonedStep.settings.pieceVersion,
}
})
const platformId = await projectService.getPlatformId(flow.projectId)
for (const piece of pieces) {
try {
const pieceMetadata = await pieceMetadataService(log).getOrThrow({
name: piece.name,
version: piece.version,
projectId: flow.projectId,
platformId,
})
const pieceId = pieceMetadata.id!
activeProjects[pieceId] = activeProjects[pieceId] || new Set()
activeProjects[pieceId].add(flow.projectId)
}
catch (e) {
log.error({
name: piece.name,
version: piece.version,
}, 'Piece not found in pieces analytics service')
}
}
}
for (const id in activeProjects) {
await pieceMetadataService(log).updateUsage({
id,
usage: activeProjects[id].size,
})
}
log.info('Synced pieces analytics finished')
})
await systemJobsSchedule(log).upsertJob({
job: {
name: SystemJobName.PIECES_ANALYTICS,
data: {},
},
schedule: {
type: 'repeated',
cron: '0 12 * * *',
},
})
},
})

View File

@@ -0,0 +1,75 @@
import { Platform, PlatformAnalyticsReport } from '@activepieces/shared'
import { EntitySchema } from 'typeorm'
import {
BaseColumnSchemaPart,
} from '../database/database-common'
type PlatformAnalyticsReportEntity = PlatformAnalyticsReport & {
platform: Platform
}
export const PlatformAnalyticsReportEntity = new EntitySchema<PlatformAnalyticsReportEntity>({
name: 'platform_analytics_report',
columns: {
...BaseColumnSchemaPart,
estimatedTimeSavedPerStep: {
type: Number,
nullable: true,
},
platformId: {
type: String,
},
outdated: {
type: Boolean,
},
totalFlows: {
type: Number,
},
activeFlows: {
type: Number,
},
totalUsers: {
type: Number,
},
activeUsers: {
type: Number,
},
totalProjects: {
type: Number,
},
activeFlowsWithAI: {
type: Number,
},
totalFlowRuns: {
type: Number,
},
topPieces: {
type: 'jsonb',
nullable: false,
},
topProjects: {
type: 'jsonb',
nullable: false,
},
runsUsage: {
type: 'jsonb',
nullable: false,
},
flowsDetails: {
type: 'jsonb',
nullable: false,
},
},
relations: {
platform: {
target: 'platform',
type: 'one-to-one',
cascade: true,
onDelete: 'CASCADE',
joinColumn: {
name: 'platformId',
referencedColumnName: 'id',
foreignKeyConstraintName: 'fk_platform_analytics_report_platform_id',
},
},
},
})

View File

@@ -0,0 +1,277 @@
import { PieceMetadataModel } from '@activepieces/pieces-framework'
import { AnalyticsFlowReportItem, AnalyticsPieceReportItem, AnalyticsProjectReportItem, AnalyticsRunsUsageItem, apId, assertNotNullOrUndefined, DEFAULT_ESTIMATED_TIME_SAVED_PER_STEP, flowPieceUtil, FlowStatus, FlowVersionState, isNil, PieceCategory, PlatformAnalyticsReport, PlatformId, PopulatedFlow, RunEnvironment, spreadIfDefined, UpdatePlatformReportRequest } from '@activepieces/shared'
import dayjs from 'dayjs'
import { FastifyBaseLogger } from 'fastify'
import { MoreThan } from 'typeorm'
import { repoFactory } from '../core/db/repo-factory'
import { distributedLock } from '../database/redis-connections'
import { flowService } from '../flows/flow/flow.service'
import { flowRunRepo } from '../flows/flow-run/flow-run-service'
import { pieceMetadataService } from '../pieces/metadata/piece-metadata-service'
import { projectRepo } from '../project/project-service'
import { userRepo } from '../user/user-service'
import { PlatformAnalyticsReportEntity } from './platform-analytics-report.entity'
export const platformAnalyticsReportRepo = repoFactory(PlatformAnalyticsReportEntity)
export const platformAnalyticsReportService = (log: FastifyBaseLogger) => ({
refreshReport: async (platformId: PlatformId) => {
await distributedLock(log).runExclusive({
key: `platform-analytics-report-${platformId}`,
timeoutInSeconds: 400,
fn: async () => {
await refreshReport(platformId, log)
},
})
return platformAnalyticsReportRepo().findOneBy({ platformId })
},
update: async (platformId: PlatformId, request: UpdatePlatformReportRequest) => {
await platformAnalyticsReportRepo().update({ platformId }, {
...spreadIfDefined('estimatedTimeSavedPerStep', request.estimatedTimeSavedPerStep),
outdated: request.outdated,
})
},
getOrGenerateReport: async (platformId: PlatformId): Promise<PlatformAnalyticsReport> => {
const report = await platformAnalyticsReportRepo().findOneBy({ platformId })
if (report && !report.outdated) {
return report
}
return refreshReport(platformId, log)
},
})
const refreshReport = async (platformId: PlatformId, log: FastifyBaseLogger): Promise<PlatformAnalyticsReport> => {
const report = await platformAnalyticsReportRepo().findOneBy({ platformId })
const updatedInLastMinute = dayjs().subtract(1, 'minute').toISOString()
if (!isNil(report) && dayjs(report.updated).isAfter(updatedInLastMinute)) {
return report
}
const estimatedTimeSavedPerStep = report?.estimatedTimeSavedPerStep ?? DEFAULT_ESTIMATED_TIME_SAVED_PER_STEP
const flows = await listAllFlows(log, platformId)
const activeFlows = countFlows(flows, FlowStatus.ENABLED)
const totalFlows = countFlows(flows, undefined)
const totalProjects = await countProjects(platformId)
const { totalUsers, activeUsers } = await analyzeUsers(platformId)
const pieceMetadataMap = await pieceMetadataService(log).getAllUnfiltered(platformId)
const topPieces = analyzePieces(flows, pieceMetadataMap)
const activeFlowsWithAI = numberOfFlowsWithAI(flows, pieceMetadataMap)
const topProjects = await analyzeProjects(flows)
const { runsUsage, totalFlowRuns } = await analyzeRuns(platformId, estimatedTimeSavedPerStep)
const flowsDetails = await analyzeFlowsDetails(platformId, estimatedTimeSavedPerStep)
return platformAnalyticsReportRepo().save({
totalUsers,
activeUsers,
activeFlows,
totalFlows,
estimatedTimeSavedPerStep: report?.estimatedTimeSavedPerStep,
totalProjects,
activeFlowsWithAI,
totalFlowRuns,
outdated: false,
topProjects,
topPieces,
runsUsage,
flowsDetails,
platformId,
created: dayjs().toISOString(),
updated: dayjs().toISOString(),
id: report?.id ?? apId(),
})
}
/**
 * Aggregates per-project flow counts (total and enabled) for the given flows.
 * Projects are loaded in one query and looked up via a map to avoid N+1 reads.
 * Throws (via assertNotNullOrUndefined) if a flow references a missing project.
 */
async function analyzeProjects(flows: PopulatedFlow[]): Promise<AnalyticsProjectReportItem[]> {
    const projectIds = [...new Set(flows.map(flow => flow.projectId))]
    const projects = await projectRepo().findBy(projectIds.map(id => ({ id })))
    const projectMap = new Map(projects.map(project => [project.id, project]))
    const projectUsage: Record<string, AnalyticsProjectReportItem> = {}
    for (const flow of flows) {
        const projectId = flow.projectId
        const project = projectMap.get(projectId)
        assertNotNullOrUndefined(project, 'project')
        if (!projectUsage[projectId]) {
            projectUsage[projectId] = { id: projectId, activeFlows: 0, totalFlows: 0, displayName: project.displayName }
        }
        projectUsage[projectId].totalFlows += 1
        if (flow.status === FlowStatus.ENABLED) {
            projectUsage[projectId].activeFlows += 1
        }
    }
    // The accumulated items already have the report shape; no re-mapping needed.
    return Object.values(projectUsage)
}
/**
 * Counts flows whose trigger graph uses at least one piece that is
 * categorized as ARTIFICIAL_INTELLIGENCE in the piece metadata map.
 */
function numberOfFlowsWithAI(flows: PopulatedFlow[], pieceMetadataMap: Map<string, PieceMetadataModel>): number {
    const usesAIPiece = (flow: PopulatedFlow) =>
        flowPieceUtil.getUsedPieces(flow.version.trigger).some((pieceName) => {
            const metadata = pieceMetadataMap.get(pieceName)
            return metadata?.categories?.includes(PieceCategory.ARTIFICIAL_INTELLIGENCE)
        })
    return flows.filter(usesAIPiece).length
}
/**
 * Builds per-piece usage counts across all given flows, sorted by usage
 * (descending). Pieces with no metadata entry are skipped entirely; a step
 * whose piece gained metadata only counts from the first metadata hit onward.
 */
function analyzePieces(flows: PopulatedFlow[], pieceMetadataMap: Map<string, PieceMetadataModel>): AnalyticsPieceReportItem[] {
    const pieces: Record<string, AnalyticsPieceReportItem> = {}
    for (const flow of flows) {
        const usedPieces = flowPieceUtil.getUsedPieces(flow.version.trigger)
        for (const piece of usedPieces) {
            if (!pieces[piece]) {
                const pieceMetadata = pieceMetadataMap.get(piece)
                if (!isNil(pieceMetadata)) {
                    pieces[piece] = {
                        name: piece,
                        displayName: pieceMetadata.displayName,
                        logoUrl: pieceMetadata.logoUrl,
                        usageCount: 0,
                    }
                }
            }
            if (!isNil(pieces[piece])) {
                pieces[piece].usageCount += 1
            }
        }
    }
    // Object.values avoids the entries/destructure round-trip (the keys were unused).
    return Object.values(pieces).sort((a, b) => b.usageCount - a.usageCount)
}
/** Counts the projects that belong to the given platform. */
async function countProjects(platformId: PlatformId): Promise<number> {
    return projectRepo().countBy({
        platformId,
    })
}
/**
 * Counts the platform's users: total, and "active" meaning a lastActiveDate
 * within the past month.
 */
async function analyzeUsers(platformId: PlatformId): Promise<{ totalUsers: number, activeUsers: number }> {
    const oneMonthAgo = dayjs().subtract(1, 'month').toISOString()
    // The two counts are independent, so issue them in parallel.
    const [totalUsers, activeUsers] = await Promise.all([
        userRepo().countBy({
            platformId,
        }),
        userRepo().countBy({
            platformId,
            lastActiveDate: MoreThan(oneMonthAgo),
        }),
    ])
    return {
        activeUsers,
        totalUsers,
    }
}
// Fetches the platform's flows (DRAFT versions, without trigger sources).
// NOTE(review): only the first page of results is returned (cursorRequest is
// null and no follow-up pagination happens) — confirm list() returns all flows
// for a platform-scoped query, or large platforms will be under-counted.
async function listAllFlows(log: FastifyBaseLogger, platformId: PlatformId): Promise<PopulatedFlow[]> {
    const page = await flowService(log).list({
        platformId,
        cursorRequest: null,
        versionState: FlowVersionState.DRAFT,
        includeTriggerSource: false,
    })
    return page.data
}
/**
 * Counts flows, optionally restricted to a single status.
 * Passing a falsy status counts every flow.
 */
function countFlows(flows: PopulatedFlow[], status: FlowStatus | undefined) {
    return status
        ? flows.reduce((total, flow) => total + (flow.status === status ? 1 : 0), 0)
        : flows.length
}
// Aggregates production run counts and minutes saved per calendar day for the
// last 3 months. Minutes saved per run prefer the flow's explicit
// timeSavedPerRun, falling back to stepsCount * estimatedTimeSavedPerStep.
async function analyzeRuns(platformId: PlatformId, estimatedTimeSavedPerStep: number): Promise<{ runsUsage: AnalyticsRunsUsageItem[], totalFlowRuns: number }> {
    // Defensive runtime check — the parameter type is non-nullable, but a
    // nullish value here would silently produce NULL-multiplied SQL results.
    if (isNil(estimatedTimeSavedPerStep)) {
        throw new Error('Estimated time saved per step is required')
    }
    const runsData = await flowRunRepo()
        .createQueryBuilder('flow_run')
        .select('DATE(flow_run.created)', 'day')
        .addSelect('COUNT(*)::int', 'totalRuns')
        // COALESCE twice: per-run fallback to the step estimate, then a 0
        // default when the day has no summable values.
        .addSelect('COALESCE(SUM(COALESCE(flow."timeSavedPerRun", flow_run."stepsCount" * :estimatedTimeSavedPerStep)), 0)::int', 'minutesSaved')
        .innerJoin('project', 'project', 'flow_run."projectId" = project.id')
        .innerJoin('flow', 'flow', 'flow_run."flowId" = flow.id')
        .where('project."platformId" = :platformId', { platformId })
        .andWhere('flow_run.created >= now() - interval \'3 months\'')
        .andWhere('flow_run.environment = :environment', { environment: RunEnvironment.PRODUCTION })
        .groupBy('DATE(flow_run.created)')
        .orderBy('DATE(flow_run.created)', 'ASC')
        .setParameters({ estimatedTimeSavedPerStep })
        .getRawMany()
    let totalFlowRuns = 0
    const runsUsage = runsData.map((row) => {
        const runs = parseInt(row.totalRuns)
        const minutesSaved = parseInt(row.minutesSaved) || 0
        totalFlowRuns += runs
        return {
            day: row.day,
            totalRuns: runs,
            minutesSaved,
        }
    })
    return { runsUsage, totalFlowRuns }
}
// Builds a per-flow report (runs, minutes saved, owning project) over all
// production runs, ordered by run count descending. The display name is taken
// from the most recently created flow version via a correlated subquery.
async function analyzeFlowsDetails(platformId: PlatformId, estimatedTimeSavedPerStep: number): Promise<AnalyticsFlowReportItem[]> {
    // Defensive runtime check — see analyzeRuns for rationale.
    if (isNil(estimatedTimeSavedPerStep)) {
        throw new Error('Estimated time saved per step is required')
    }
    const flowData = await flowRunRepo()
        .createQueryBuilder('flow_run')
        .select('flow.id', 'flowId')
        .addSelect('latest_version."displayName"', 'flowName')
        .addSelect('project.id', 'projectId')
        .addSelect('project."displayName"', 'projectName')
        .addSelect('COUNT(*)::int', 'runs')
        .addSelect('flow."timeSavedPerRun"', 'timeSavedPerRun')
        .addSelect('COALESCE(SUM(COALESCE(flow."timeSavedPerRun", flow_run."stepsCount" * :estimatedTimeSavedPerStep)), 0)::int', 'minutesSaved')
        .innerJoin('project', 'project', 'flow_run."projectId" = project.id')
        .innerJoin('flow', 'flow', 'flow_run."flowId" = flow.id')
        // Picks the newest flow_version row per flow for its display name.
        .innerJoin('flow_version', 'latest_version', 'latest_version."flowId" = flow.id AND latest_version.id = (SELECT fv.id FROM flow_version fv WHERE fv."flowId" = flow.id ORDER BY fv.created DESC LIMIT 1)')
        .where('project."platformId" = :platformId', { platformId })
        .andWhere('flow_run.environment = :environment', { environment: RunEnvironment.PRODUCTION })
        .groupBy('flow.id')
        .addGroupBy('latest_version."displayName"')
        .addGroupBy('project.id')
        .addGroupBy('project."displayName"')
        .orderBy('COUNT(*)', 'DESC')
        .setParameters({ estimatedTimeSavedPerStep })
        .getRawMany()
    return flowData.map((row) => ({
        flowId: row.flowId,
        flowName: row.flowName,
        projectId: row.projectId,
        projectName: row.projectName,
        timeSavedPerRun: {
            // Explicit per-run value if set; otherwise the average of the
            // estimated minutes (null when the flow has no runs).
            value: !isNil(row.timeSavedPerRun) ? parseInt(row.timeSavedPerRun) : (
                row.runs > 0 ? parseInt(row.minutesSaved) / parseInt(row.runs) : null
            ),
            isEstimated: isNil(row.timeSavedPerRun),
        },
        minutesSaved: parseInt(row.minutesSaved),
        runs: parseInt(row.runs),
    }))
}

View File

@@ -0,0 +1,76 @@
import { FlowOperationType, PrincipalType, UpdatePlatformReportRequest, UpdateTimeSavedPerRunRequest } from '@activepieces/shared'
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { platformMustBeOwnedByCurrentUser, platformMustHaveFeatureEnabled } from '../ee/authentication/ee-authorization'
import { flowService } from '../flows/flow/flow.service'
import { projectService } from '../project/project-service'
import { piecesAnalyticsService } from './pieces-analytics.service'
import { platformAnalyticsReportService } from './platform-analytics-report.service'
// Registers the platform analytics routes under /v1/analytics. Every route
// requires the caller to own the platform and the plan's analytics feature
// to be enabled (enforced via the two preHandler hooks).
export const platformAnalyticsModule: FastifyPluginAsyncTypebox = async (app) => {
    app.addHook('preHandler', platformMustBeOwnedByCurrentUser)
    app.addHook('preHandler', platformMustHaveFeatureEnabled((platform) => platform.plan.analyticsEnabled))
    await piecesAnalyticsService(app.log).init()
    await app.register(platformAnalyticsController, { prefix: '/v1/analytics' })
}
const platformAnalyticsController: FastifyPluginAsyncTypebox = async (app) => {
    // GET / — return the cached analytics report, generating it if absent.
    app.get('/', PlatformAnalyticsRequest, async (request) => {
        const { platform } = request.principal
        return platformAnalyticsReportService(request.log).getOrGenerateReport(platform.id)
    })
    // POST / — update the report's configurable fields.
    app.post('/', UpdatePlatformReportRequestSchema, async (request) => {
        const { platform } = request.principal
        return platformAnalyticsReportService(request.log).update(platform.id, request.body)
    })
    // POST /refresh — force a rebuild of the report.
    app.post('/refresh', PlatformAnalyticsRequest, async (request) => {
        const { platform } = request.principal
        return platformAnalyticsReportService(request.log).refreshReport(platform.id)
    })
    // TODO(@chaker): remove this endpoint after solving the issue with removing project id from the principal
    // POST /time-saved-per-run — set or clear a flow's manual time-saved
    // override after verifying the flow belongs to the caller's platform.
    // NOTE(review): these plain Errors surface as 500s; a typed error with
    // 404/403 semantics may be preferable — confirm project convention.
    app.post('/time-saved-per-run', UpdateTimeSavedPerRunRequestSchema, async (request) => {
        const flow = await flowService(request.log).getOneById(request.body.flowId)
        if (!flow) {
            throw new Error('Flow not found')
        }
        const platformId = await projectService.getPlatformId(flow.projectId)
        if (platformId !== request.principal.platform.id) {
            throw new Error('Unauthorized')
        }
        return flowService(request.log).update({
            id: flow.id,
            projectId: flow.projectId,
            userId: request.principal.id,
            platformId: request.principal.platform.id,
            operation: {
                type: FlowOperationType.UPDATE_MINUTES_SAVED,
                // Normalize undefined to null so the override is explicitly cleared.
                request: { timeSavedPerRun: request.body.timeSavedPerRun ?? null },
            },
        })
    })
}
// Route config: authenticated users only; body validated against
// UpdateTimeSavedPerRunRequest.
const UpdateTimeSavedPerRunRequestSchema = {
    config: {
        allowedPrincipals: [PrincipalType.USER] as const,
    },
    schema: {
        body: UpdateTimeSavedPerRunRequest,
    },
}
// Route config: authenticated users only; body validated against
// UpdatePlatformReportRequest.
const UpdatePlatformReportRequestSchema = {
    config: {
        allowedPrincipals: [PrincipalType.USER] as const,
    },
    schema: {
        body: UpdatePlatformReportRequest,
    },
}
// Route config for read/refresh endpoints: authenticated users, no body.
const PlatformAnalyticsRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER] as const,
    },
}

View File

@@ -0,0 +1,703 @@
import { AppSystemProp } from '@activepieces/server-shared'
import {
ActivepiecesError,
ApEdition,
ApEnvironment,
apId,
AppConnection,
AppConnectionId,
AppConnectionOwners,
AppConnectionScope,
AppConnectionStatus,
AppConnectionType,
AppConnectionValue,
AppConnectionWithoutSensitiveData,
ConnectionState,
Cursor,
EngineResponseStatus,
ErrorCode,
isNil,
Metadata,
OAuth2GrantType,
PlatformId,
PlatformRole,
ProjectId,
SeekPage,
spreadIfDefined,
UpsertAppConnectionRequestBody,
User,
UserId,
UserIdentity,
UserWithMetaInformation,
WorkerJobType,
} from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import semver from 'semver'
import { EngineHelperResponse, EngineHelperValidateAuthResult } from 'server-worker'
import { ArrayContains, Equal, FindOperator, FindOptionsWhere, ILike, In } from 'typeorm'
import { repoFactory } from '../../core/db/repo-factory'
import { projectMemberService } from '../../ee/projects/project-members/project-member.service'
import { flowService } from '../../flows/flow/flow.service'
import { encryptUtils } from '../../helper/encryption'
import { buildPaginator } from '../../helper/pagination/build-paginator'
import { paginationHelper } from '../../helper/pagination/pagination-utils'
import { system } from '../../helper/system/system'
import {
getPiecePackageWithoutArchive,
pieceMetadataService,
} from '../../pieces/metadata/piece-metadata-service'
import { projectRepo } from '../../project/project-service'
import { userService } from '../../user/user-service'
import { userInteractionWatcher } from '../../workers/user-interaction-watcher'
import {
AppConnectionEntity,
AppConnectionSchema,
} from '../app-connection.entity'
import { appConnectionHandler } from './app-connection.handler'
import { oauth2Handler } from './oauth2'
import { oauth2Util } from './oauth2/oauth2-util'
// TypeORM repository accessor for app connection rows.
export const appConnectionsRepo = repoFactory(AppConnectionEntity)
export const appConnectionService = (log: FastifyBaseLogger) => ({
    // Creates or updates a connection, matched by externalId + scope +
    // platform (+ projects). The value is validated (OAuth claim / engine
    // validation) and encrypted before persisting; the returned object has
    // the sensitive value stripped.
    async upsert(params: UpsertParams): Promise<AppConnectionWithoutSensitiveData> {
        const { projectIds, externalId, value, displayName, pieceName, ownerId, platformId, scope, type, status, metadata } = params
        // Resolve the current piece version when the caller did not pin one.
        const pieceVersion = params.pieceVersion ?? ( await pieceMetadataService(log).getOrThrow({
            name: pieceName,
            projectId: projectIds[0],
            platformId,
        })).version
        validatePieceVersion(pieceVersion)
        await assertProjectIds(projectIds, platformId)
        const validatedConnectionValue = await validateConnectionValue({
            value,
            pieceName,
            projectId: projectIds[0],
            platformId,
        }, log)
        // NOTE(review): the raw request value is spread AFTER the validated
        // value, so overlapping keys keep the caller-supplied data — confirm
        // this ordering is intentional.
        const encryptedConnectionValue = await encryptUtils.encryptObject({
            ...validatedConnectionValue,
            ...value,
        })
        const existingConnection = await appConnectionsRepo().findOneBy({
            externalId,
            scope,
            platformId,
            ...(projectIds ? { projectIds: ArrayContains(projectIds) } : {}),
        })
        // Reuse the existing row id so upsert() updates instead of inserting.
        const newId = existingConnection?.id ?? apId()
        const connection = {
            displayName,
            ...spreadIfDefined('ownerId', ownerId),
            status: status ?? AppConnectionStatus.ACTIVE,
            value: encryptedConnectionValue,
            externalId,
            pieceName,
            type,
            id: newId,
            scope,
            projectIds,
            platformId,
            ...spreadIfDefined('metadata', metadata),
            pieceVersion,
        }
        await appConnectionsRepo().upsert(connection, ['id'])
        const updatedConnection = await appConnectionsRepo().findOneByOrFail({
            id: newId,
            platformId,
            ...(projectIds ? { projectIds: ArrayContains(projectIds) } : {}),
            scope,
        })
        return this.removeSensitiveData(updatedConnection)
    },
    // Updates a connection's non-secret fields (display name, project scoping,
    // metadata); the encrypted value is never touched here.
    async update(params: UpdateParams): Promise<AppConnectionWithoutSensitiveData> {
        const { projectIds, id, request, scope, platformId } = params
        if (!isNil(request.projectIds)) {
            await assertProjectIds(request.projectIds, platformId)
        }
        const filter: FindOptionsWhere<AppConnectionSchema> = {
            id,
            scope,
            platformId,
            ...(projectIds ? { projectIds: ArrayContains(projectIds) } : {}),
        }
        await appConnectionsRepo().update(filter, {
            displayName: request.displayName,
            ...spreadIfDefined('projectIds', request.projectIds),
            ...spreadIfDefined('metadata', request.metadata),
        })
        // NOTE(review): the re-fetch reuses the PRE-update projectIds filter;
        // if request.projectIds removed the filtering project, this throws —
        // confirm that is the desired behavior.
        const updatedConnection = await appConnectionsRepo().findOneByOrFail(filter)
        return this.removeSensitiveData(updatedConnection)
    },
    // Fetches a decrypted connection by external id within a project,
    // refreshing its token when needed. Returns null when the connection is
    // missing or the refresh fails.
    async getOne({
        projectId,
        platformId,
        externalId,
    }: GetOneByName): Promise<AppConnection | null> {
        const encryptedAppConnection = await appConnectionsRepo().findOne({
            where: {
                projectIds: ArrayContains([projectId]),
                externalId,
                platformId,
            },
        })
        if (isNil(encryptedAppConnection)) {
            return null
        }
        const connection = await this.decryptAndRefreshConnection(encryptedAppConnection, projectId, log)
        if (isNil(connection)) {
            return null
        }
        // Owner meta information is resolved only when an ownerId is set.
        const owner = isNil(connection.ownerId) ? null : await userService.getMetaInformation({
            id: connection.ownerId,
        })
        return {
            ...connection,
            owner,
        }
    },
    // Fetches a connection by id without its encrypted value; throws
    // ENTITY_NOT_FOUND when no row matches the given scope.
    async getOneOrThrowWithoutValue(params: GetOneParams): Promise<AppConnectionWithoutSensitiveData> {
        const connectionById = await appConnectionsRepo().findOneBy({
            id: params.id,
            platformId: params.platformId,
            ...(params.projectId ? { projectIds: ArrayContains([params.projectId]) } : {}),
        })
        if (isNil(connectionById)) {
            throw new ActivepiecesError({
                code: ErrorCode.ENTITY_NOT_FOUND,
                params: {
                    entityType: 'AppConnection',
                    entityId: params.id,
                },
            })
        }
        return this.removeSensitiveData(connectionById)
    },
    // Returns the lightweight state (externalId, pieceName, displayName) of
    // every connection in a project — no decryption performed.
    async getManyConnectionStates(params: GetManyParams): Promise<ConnectionState[]> {
        const connections = await appConnectionsRepo().find({
            where: {
                projectIds: ArrayContains([params.projectId]),
            },
        })
        return connections.map((connection) => ({
            externalId: connection.externalId,
            pieceName: connection.pieceName,
            displayName: connection.displayName,
        }))
    },
    // Replaces usages of one connection with another across a project's flows,
    // then deletes the source connection. Both connections must belong to the
    // same piece.
    async replace(params: ReplaceParams): Promise<void> {
        const { sourceAppConnectionId, targetAppConnectionId, projectId, platformId, userId } = params
        const sourceAppConnection = await this.getOneOrThrowWithoutValue({
            id: sourceAppConnectionId,
            projectId,
            platformId,
        })
        const targetAppConnection = await this.getOneOrThrowWithoutValue({
            id: targetAppConnectionId,
            projectId,
            platformId,
        })
        if (sourceAppConnection.pieceName !== targetAppConnection.pieceName) {
            throw new ActivepiecesError({
                code: ErrorCode.VALIDATION,
                params: {
                    message: 'Connections must be from the same app',
                },
            })
        }
        // NOTE(review): only the first 1000 matching flows are rewritten —
        // confirm that cap is acceptable for large projects.
        const flows = await flowService(log).list({
            projectIds: [projectId],
            cursorRequest: null,
            limit: 1000,
            folderId: undefined,
            name: undefined,
            status: undefined,
            connectionExternalIds: [sourceAppConnection.externalId],
        })
        await appConnectionHandler(log).updateFlowsWithAppConnection(flows.data, {
            appConnection: sourceAppConnection,
            newAppConnection: targetAppConnection,
            userId,
        })
        await this.delete({
            id: sourceAppConnection.id,
            platformId,
            scope: sourceAppConnection.scope,
            projectId,
        })
    },
    // Deletes a connection matching id + platform + scope (and project, when given).
    async delete(params: DeleteParams): Promise<void> {
        await appConnectionsRepo().delete({
            id: params.id,
            platformId: params.platformId,
            scope: params.scope,
            ...(params.projectId ? { projectIds: ArrayContains([params.projectId]) } : {}),
        })
    },
    // Cursor-paginated connection listing with optional filters (piece, name
    // substring, status, external ids). Joins the owner and its identity, and
    // resolves the flow ids that reference each connection.
    // NOTE(review): rows are decrypted via decryptConnection before being
    // returned — confirm callers strip the value before it leaves the API.
    async list({
        projectId,
        pieceName,
        cursorRequest,
        displayName,
        status,
        limit,
        scope,
        platformId,
        externalIds,
    }: ListParams): Promise<SeekPage<AppConnection>> {
        const decodedCursor = paginationHelper.decodeCursor(cursorRequest)
        const paginator = buildPaginator({
            entity: AppConnectionEntity,
            query: {
                limit,
                order: 'ASC',
                afterCursor: decodedCursor.nextCursor,
                beforeCursor: decodedCursor.previousCursor,
            },
        })
        const querySelector: Record<string, string | FindOperator<string>> = {
            ...(projectId ? { projectIds: ArrayContains([projectId]) } : {}),
            ...spreadIfDefined('scope', scope),
            platformId,
        }
        if (!isNil(pieceName)) {
            querySelector.pieceName = Equal(pieceName)
        }
        if (!isNil(displayName)) {
            querySelector.displayName = ILike(`%${displayName}%`)
        }
        if (!isNil(status)) {
            querySelector.status = In(status)
        }
        if (!isNil(externalIds)) {
            querySelector.externalId = In(externalIds)
        }
        const queryBuilder = appConnectionsRepo()
            .createQueryBuilder('app_connection')
            .leftJoinAndSelect('app_connection.owner', 'owner')
            .leftJoinAndSelect('owner.identity', 'owner_identity')
            .where(querySelector)
        const { data, cursor } = await paginator.paginate(queryBuilder)
        const flowIdsByExternalId = await fetchFlowIdsForConnections(log, data)
        const promises = data.map(async (encryptedConnection) => {
            const apConnection: AppConnection = await appConnectionHandler(log).decryptConnection(encryptedConnection)
            const owner = mapToUserWithMetaInformation(encryptedConnection.owner)
            const flowIds = flowIdsByExternalId.get(apConnection.externalId) ?? []
            return {
                ...apConnection,
                owner,
                flowIds,
            }
        })
        const refreshConnections = await Promise.all(promises)
        return paginationHelper.createPage<AppConnection>(
            refreshConnections,
            cursor,
        )
    },
    // Strips the (encrypted or decrypted) value from a connection before it
    // is returned to callers.
    removeSensitiveData: (
        appConnection: AppConnection | AppConnectionSchema,
    ): AppConnectionWithoutSensitiveData => {
        const { value: _, ...appConnectionWithoutSensitiveData } = appConnection
        return appConnectionWithoutSensitiveData as AppConnectionWithoutSensitiveData
    },
    // Decrypts a stored connection and, when the handler deems a refresh
    // necessary, refreshes it under a distributed lock. The refresh token and
    // client secret are always stripped from the returned value; null is
    // returned when the lock-and-refresh path yields nothing.
    async decryptAndRefreshConnection(
        encryptedAppConnection: AppConnectionSchema,
        projectId: ProjectId,
        log: FastifyBaseLogger,
    ): Promise<AppConnection | null> {
        const appConnection = await appConnectionHandler(log).decryptConnection(encryptedAppConnection)
        if (!appConnectionHandler(log).needRefresh(appConnection, log)) {
            return oauth2Util(log).removeRefreshTokenAndClientSecret(appConnection)
        }
        const refreshedConnection = await appConnectionHandler(log).lockAndRefreshConnection({ projectId, externalId: appConnection.externalId, log })
        if (isNil(refreshedConnection)) {
            return null
        }
        return oauth2Util(log).removeRefreshTokenAndClientSecret(refreshedConnection)
    },
    // Removes every PROJECT-scoped connection belonging to the given project.
    async deleteAllProjectConnections(projectId: string) {
        await appConnectionsRepo().delete({
            scope: AppConnectionScope.PROJECT,
            projectIds: ArrayContains([projectId]),
        })
    },
    // Lists potential connection owners: platform admins always, plus — on
    // non-community editions — the project's members (first 1000).
    async getOwners({ projectId, platformId }: { projectId: ProjectId, platformId: PlatformId }): Promise<AppConnectionOwners[]> {
        const platformAdmins = (await userService.getByPlatformRole(platformId, PlatformRole.ADMIN)).map(user => ({
            firstName: user.identity.firstName,
            lastName: user.identity.lastName,
            email: user.identity.email,
        }))
        const edition = system.getOrThrow(AppSystemProp.EDITION)
        if (edition === ApEdition.COMMUNITY) {
            return platformAdmins
        }
        const projectMembers = await projectMemberService(log).list({
            platformId,
            projectId,
            cursorRequest: null,
            limit: 1000,
            projectRoleId: undefined,
        })
        const projectMembersDetails = projectMembers.data.map(pm => ({
            firstName: pm.user.firstName,
            lastName: pm.user.lastName,
            email: pm.user.email,
        }))
        // NOTE(review): an admin who is also a project member appears twice —
        // confirm deduplication is handled by the caller if needed.
        return [...platformAdmins, ...projectMembersDetails]
    },
})
// Ensures every given project id exists under the platform; throws
// ENTITY_NOT_FOUND otherwise.
// NOTE(review): the check compares COUNT against projectIds.length, so
// duplicate ids in the input would fail even when all projects exist —
// confirm callers always pass unique ids.
async function assertProjectIds(projectIds: ProjectId[], platformId: string): Promise<void> {
    const filteredProjects = await projectRepo().countBy({
        id: In(projectIds),
        platformId,
    })
    if (filteredProjects !== projectIds.length) {
        throw new ActivepiecesError({
            code: ErrorCode.ENTITY_NOT_FOUND,
            params: {
                entityType: 'Project',
            },
        })
    }
}
// Validates a connection value before storage. OAuth2 variants exchange the
// authorization code for tokens (claim); plain OAUTH2 additionally runs the
// piece's engine-side auth validation on the claimed tokens. CUSTOM/BASIC/
// SECRET values are engine-validated as-is; NO_AUTH passes through unchanged.
const validateConnectionValue = async (
    params: ValidateConnectionValueParams,
    log: FastifyBaseLogger,
): Promise<AppConnectionValue> => {
    const { value, pieceName, projectId, platformId } = params
    switch (value.type) {
        case AppConnectionType.PLATFORM_OAUTH2: {
            const tokenUrl = await oauth2Util(log).getOAuth2TokenUrl({
                projectId,
                pieceName,
                platformId,
                props: value.props,
            })
            return oauth2Handler[value.type](log).claim({
                projectId,
                platformId,
                pieceName,
                request: {
                    grantType: OAuth2GrantType.AUTHORIZATION_CODE,
                    code: value.code,
                    tokenUrl,
                    clientId: value.client_id,
                    props: value.props,
                    authorizationMethod: value.authorization_method,
                    codeVerifier: value.code_challenge,
                    redirectUrl: value.redirect_url,
                },
            })
        }
        case AppConnectionType.CLOUD_OAUTH2: {
            const tokenUrl = await oauth2Util(log).getOAuth2TokenUrl({
                projectId,
                pieceName,
                platformId,
                props: value.props,
            })
            return oauth2Handler[value.type](log).claim({
                projectId,
                platformId,
                pieceName,
                request: {
                    tokenUrl,
                    grantType: OAuth2GrantType.AUTHORIZATION_CODE,
                    code: value.code,
                    props: value.props,
                    clientId: value.client_id,
                    authorizationMethod: value.authorization_method,
                    codeVerifier: value.code_challenge,
                },
            })
        }
        case AppConnectionType.OAUTH2: {
            const tokenUrl = await oauth2Util(log).getOAuth2TokenUrl({
                projectId,
                pieceName,
                platformId,
                props: value.props,
            })
            const auth = await oauth2Handler[value.type](log).claim({
                projectId,
                platformId,
                pieceName,
                request: {
                    tokenUrl,
                    code: value.code,
                    clientId: value.client_id,
                    props: value.props,
                    grantType: value.grant_type!,
                    redirectUrl: value.redirect_url,
                    clientSecret: value.client_secret,
                    authorizationMethod: value.authorization_method,
                    codeVerifier: value.code_challenge,
                    scope: value.scope,
                },
            })
            // Unlike the platform/cloud variants, the claimed tokens are also
            // validated by the piece's engine-side auth check.
            await engineValidateAuth({
                pieceName,
                projectId,
                platformId,
                auth,
            }, log)
            return auth
        }
        case AppConnectionType.NO_AUTH:
            break
        case AppConnectionType.CUSTOM_AUTH:
        case AppConnectionType.BASIC_AUTH:
        case AppConnectionType.SECRET_TEXT:
            await engineValidateAuth({
                platformId,
                pieceName,
                projectId,
                auth: value,
            }, log)
    }
    // Reached for NO_AUTH and the engine-validated types: store the original value.
    return value
}
// Runs the piece's auth validation inside an engine worker. No-op in the
// TESTING environment. Throws ENGINE_OPERATION_FAILURE when execution itself
// fails, and INVALID_APP_CONNECTION when the credentials are rejected.
const engineValidateAuth = async (
    params: EngineValidateAuthParams,
    log: FastifyBaseLogger,
): Promise<void> => {
    const environment = system.getOrThrow(AppSystemProp.ENVIRONMENT)
    if (environment === ApEnvironment.TESTING) {
        return
    }
    const { pieceName, auth, projectId, platformId } = params
    const pieceMetadata = await pieceMetadataService(log).getOrThrow({
        name: pieceName,
        projectId,
        version: undefined,
        platformId,
    })
    const engineResponse = await userInteractionWatcher(log).submitAndWaitForResponse<EngineHelperResponse<EngineHelperValidateAuthResult>>({
        piece: await getPiecePackageWithoutArchive(log, projectId, platformId, {
            pieceName,
            pieceVersion: pieceMetadata.version,
        }),
        projectId,
        platformId,
        connectionValue: auth,
        jobType: WorkerJobType.EXECUTE_VALIDATION,
    })
    if (engineResponse.status !== EngineResponseStatus.OK) {
        log.error(
            engineResponse,
            '[AppConnectionService#engineValidateAuth] engineResponse',
        )
        throw new ActivepiecesError({
            code: ErrorCode.ENGINE_OPERATION_FAILURE,
            params: {
                message: 'Failed to run engine validate auth',
                context: engineResponse,
            },
        })
    }
    const validateAuthResult = engineResponse.result
    if (!validateAuthResult.valid) {
        throw new ActivepiecesError({
            code: ErrorCode.INVALID_APP_CONNECTION,
            params: {
                error: validateAuthResult.error,
            },
        })
    }
}
// Builds a map from connection externalId to the ids of flows whose populated
// version references that connection, across every project of the given
// connections. Returns an empty map when there is nothing to look up.
async function fetchFlowIdsForConnections(
    log: FastifyBaseLogger,
    connections: AppConnectionSchema[],
): Promise<Map<string, string[]>> {
    const allExternalIds = new Set<string>()
    const allProjectIds = new Set<string>()
    connections.forEach((connection) => {
        allExternalIds.add(connection.externalId)
        connection.projectIds.forEach((projectId) => {
            allProjectIds.add(projectId)
        })
    })
    // Short-circuit: avoids an unfiltered flow query when nothing matches.
    if (allExternalIds.size === 0 || allProjectIds.size === 0) {
        return new Map<string, string[]>()
    }
    const flowsPage = await flowService(log).list({
        projectIds: Array.from(allProjectIds),
        cursorRequest: null,
        connectionExternalIds: Array.from(allExternalIds),
    })
    const flowIdsByExternalId = new Map<string, string[]>()
    flowsPage.data.forEach((flow) => {
        if (flow.version?.connectionIds) {
            flow.version.connectionIds.forEach((connectionExternalId) => {
                if (!flowIdsByExternalId.has(connectionExternalId)) {
                    flowIdsByExternalId.set(connectionExternalId, [])
                }
                flowIdsByExternalId.get(connectionExternalId)!.push(flow.id)
            })
        }
    })
    return flowIdsByExternalId
}
/**
 * Projects a connection owner row (with its joined identity) into the public
 * UserWithMetaInformation shape. Returns null when either the owner or its
 * identity is missing.
 */
function mapToUserWithMetaInformation(owner: (User & { identity?: UserIdentity }) | null): UserWithMetaInformation | null {
    const identity = owner?.identity
    if (owner == null || identity == null) {
        return null
    }
    return {
        id: owner.id,
        email: identity.email,
        firstName: identity.firstName,
        lastName: identity.lastName,
        platformId: owner.platformId,
        platformRole: owner.platformRole,
        status: owner.status,
        externalId: owner.externalId,
        created: owner.created,
        updated: owner.updated,
    }
}
// Rejects piece versions that are not valid semver strings.
function validatePieceVersion(pieceVersion: string): void {
    if (!semver.valid(pieceVersion)) {
        throw new ActivepiecesError({
            code: ErrorCode.VALIDATION,
            params: {
                message: 'Invalid piece version',
            },
        })
    }
}
type UpsertParams = {
projectIds: ProjectId[]
ownerId: string | null
platformId: string
scope: AppConnectionScope
externalId: string
value: UpsertAppConnectionRequestBody['value']
displayName: string
type: AppConnectionType
status?: AppConnectionStatus
pieceName: string
metadata?: Metadata
pieceVersion?: string
}
type GetOneByName = {
projectId: ProjectId
platformId: string
externalId: string
}
type GetOneParams = {
projectId: ProjectId | null
platformId: string
id: string
}
type GetManyParams = {
projectId: ProjectId
}
type DeleteParams = {
projectId: ProjectId | null
scope: AppConnectionScope
id: AppConnectionId
platformId: string
}
type ValidateConnectionValueParams = {
value: UpsertAppConnectionRequestBody['value']
pieceName: string
projectId: ProjectId | undefined
platformId: string
}
type ListParams = {
projectId: ProjectId | null
platformId: string
pieceName: string | undefined
cursorRequest: Cursor | null
scope: AppConnectionScope | undefined
displayName: string | undefined
status: AppConnectionStatus[] | undefined
limit: number
externalIds: string[] | undefined
}
type UpdateParams = {
projectIds: ProjectId[] | null
platformId: string
id: AppConnectionId
scope: AppConnectionScope
request: {
displayName: string
projectIds: ProjectId[] | null
metadata?: Metadata
}
}
type EngineValidateAuthParams = {
pieceName: string
projectId: ProjectId | undefined
platformId: string
auth: AppConnectionValue
}
type ReplaceParams = {
sourceAppConnectionId: AppConnectionId
targetAppConnectionId: AppConnectionId
projectId: ProjectId
platformId: string
userId: UserId
}

View File

@@ -0,0 +1,221 @@
import { exceptionHandler } from '@activepieces/server-shared'
import { AppConnection, AppConnectionStatus, AppConnectionType, AppConnectionValue, AppConnectionWithoutSensitiveData, assertNotNullOrUndefined, Flow, FlowOperationType, flowStructureUtil, FlowVersion, FlowVersionState, isNil, PlatformId, PopulatedFlow, ProjectId, UserId } from '@activepieces/shared'
import dayjs from 'dayjs'
import { FastifyBaseLogger } from 'fastify'
import { ArrayContains } from 'typeorm'
import { distributedLock } from '../../database/redis-connections'
import { flowService } from '../../flows/flow/flow.service'
import { flowVersionService } from '../../flows/flow-version/flow-version.service'
import { encryptUtils } from '../../helper/encryption'
import { projectService } from '../../project/project-service'
import { AppConnectionSchema } from '../app-connection.entity'
import { appConnectionsRepo } from './app-connection-service'
import { oauth2Handler } from './oauth2'
import { oauth2Util } from './oauth2/oauth2-util'
export const appConnectionHandler = (log: FastifyBaseLogger) => ({
    // Rewrites every given flow so references to the old connection point at
    // the new one, handling the locked (published) version first and the draft
    // second. Flows are processed concurrently.
    async updateFlowsWithAppConnection(flows: PopulatedFlow[], params: UpdateFlowsWithAppConnectionParams): Promise<void> {
        const { appConnection, newAppConnection, userId } = params
        await Promise.all(flows.map(async (flow) => {
            const project = await projectService.getOneOrThrow(flow.projectId)
            // Captured BEFORE handleLockedVersion runs, since that call
            // overwrites the draft via IMPORT_FLOW and republishes.
            const lastVersion = await flowVersionService(log).getFlowVersionOrThrow({
                flowId: flow.id,
                versionId: undefined,
            })
            // Don't Change the order of the following two functions
            await handleLockedVersion(flow, userId, flow.projectId, project.platformId, appConnection, newAppConnection, log)
            await handleDraftVersion(flow, lastVersion, userId, flow.projectId, project.platformId, appConnection, newAppConnection, log)
        }))
    },
    // Refreshes OAuth2-based connection values in place; other connection
    // types are returned unchanged. The three cases are textually identical
    // but presumably kept separate so TypeScript narrows value.type per branch
    // when indexing oauth2Handler — confirm before merging them.
    async refresh(connection: AppConnection, projectId: ProjectId, log: FastifyBaseLogger): Promise<AppConnection> {
        switch (connection.value.type) {
            case AppConnectionType.PLATFORM_OAUTH2:
                connection.value = await oauth2Handler[connection.value.type](log).refresh({
                    pieceName: connection.pieceName,
                    platformId: connection.platformId,
                    projectId,
                    connectionValue: connection.value,
                })
                break
            case AppConnectionType.CLOUD_OAUTH2:
                connection.value = await oauth2Handler[connection.value.type](log).refresh({
                    pieceName: connection.pieceName,
                    platformId: connection.platformId,
                    projectId,
                    connectionValue: connection.value,
                })
                break
            case AppConnectionType.OAUTH2:
                connection.value = await oauth2Handler[connection.value.type](log).refresh({
                    pieceName: connection.pieceName,
                    platformId: connection.platformId,
                    projectId,
                    connectionValue: connection.value,
                })
                break
            default:
                break
        }
        return connection
    },
    /**
     * We should make sure this is accessed only once, as a race condition could occur where the token needs to be
     * refreshed and it gets accessed at the same time, which could result in the wrong request saving incorrect data.
     */
    async lockAndRefreshConnection({
        projectId,
        externalId,
        log,
    }: {
        projectId: ProjectId
        externalId: string
        log: FastifyBaseLogger
    }) {
        // Lock key is project + externalId, so distinct connections refresh in parallel.
        return distributedLock(log).runExclusive({
            key: `${projectId}_${externalId}`,
            timeoutInSeconds: 60,
            fn: async () => {
                let appConnection: AppConnection | null = null
                try {
                    const encryptedAppConnection = await appConnectionsRepo().findOneBy({
                        projectIds: ArrayContains([projectId]),
                        externalId,
                    })
                    if (isNil(encryptedAppConnection)) {
                        return encryptedAppConnection
                    }
                    appConnection = await this.decryptConnection(encryptedAppConnection)
                    // Re-check under the lock: another holder may have refreshed already.
                    if (!this.needRefresh(appConnection, log)) {
                        return appConnection
                    }
                    const refreshedAppConnection = await this.refresh(appConnection, projectId, log)
                    await appConnectionsRepo().update(refreshedAppConnection.id, {
                        status: AppConnectionStatus.ACTIVE,
                        value: await encryptUtils.encryptObject(refreshedAppConnection.value),
                    })
                    return refreshedAppConnection
                }
                catch (e) {
                    exceptionHandler.handle(e, log)
                    // Only user-caused OAuth errors flip the connection to ERROR;
                    // transient failures leave the stored status untouched.
                    if (!isNil(appConnection) && oauth2Util(log).isUserError(e)) {
                        appConnection.status = AppConnectionStatus.ERROR
                        await appConnectionsRepo().update(appConnection.id, {
                            status: appConnection.status,
                            updated: dayjs().toISOString(),
                        })
                    }
                }
                // On failure, returns the (possibly stale) decrypted connection or null.
                return appConnection
            },
        })
    },
    // Decrypts a stored connection's value; all other columns pass through.
    async decryptConnection(
        encryptedConnection: AppConnectionSchema,
    ): Promise<AppConnection> {
        const value = await encryptUtils.decryptObject<AppConnectionValue>(encryptedConnection.value)
        const connection: AppConnection = {
            ...encryptedConnection,
            value,
        }
        return connection
    },
    // A connection needs a refresh only when it is an OAuth2 variant with an
    // expired token; connections already in ERROR state are never retried here.
    needRefresh(connection: AppConnection, log: FastifyBaseLogger): boolean {
        if (connection.status === AppConnectionStatus.ERROR) {
            return false
        }
        switch (connection.value.type) {
            case AppConnectionType.PLATFORM_OAUTH2:
            case AppConnectionType.CLOUD_OAUTH2:
            case AppConnectionType.OAUTH2:
                return oauth2Util(log).isExpired(connection.value)
            default:
                return false
        }
    },
})
// For a flow with a published version: imports the last published (LOCKED)
// version with its connection references rewritten — which overwrites the
// current draft — then locks and republishes it. The caller restores the
// original draft afterwards via handleDraftVersion; order matters.
async function handleLockedVersion(flow: PopulatedFlow, userId: UserId, projectId: ProjectId, platformId: PlatformId, appConnection: AppConnectionWithoutSensitiveData, newAppConnection: AppConnectionWithoutSensitiveData, log: FastifyBaseLogger) {
    if (isNil(flow.publishedVersionId)) {
        return
    }
    const lastPublishedVersion = await flowVersionService(log).getLatestVersion(flow.id, FlowVersionState.LOCKED)
    assertNotNullOrUndefined(lastPublishedVersion, `Last published version not found for flow ${flow.id}`)
    await flowService(log).update({
        id: flow.id,
        projectId,
        platformId,
        userId,
        operation: {
            type: FlowOperationType.IMPORT_FLOW,
            request: replaceConnectionInFlowVersion(lastPublishedVersion, appConnection, newAppConnection),
        },
    })
    await flowService(log).update({
        id: flow.id,
        projectId,
        platformId,
        userId,
        operation: {
            type: FlowOperationType.LOCK_AND_PUBLISH,
            request: {},
        },
    })
}
/**
 * Rewrites a flow's draft version so it references the new connection.
 * No-op when the latest version is not a draft (locked versions are handled
 * by handleLockedVersion).
 */
async function handleDraftVersion(flow: Flow, lastVersion: FlowVersion, userId: UserId, projectId: ProjectId, platformId: PlatformId, appConnection: AppConnectionWithoutSensitiveData, newAppConnection: AppConnectionWithoutSensitiveData, log: FastifyBaseLogger) {
    if (lastVersion.state !== FlowVersionState.DRAFT) {
        return
    }
    const rewrittenVersion = replaceConnectionInFlowVersion(lastVersion, appConnection, newAppConnection)
    await flowService(log).update({
        id: flow.id,
        projectId,
        platformId,
        userId,
        operation: {
            type: FlowOperationType.IMPORT_FLOW,
            request: rewrittenVersion,
        },
    })
}
/**
 * Returns a copy of the flow version in which every step whose auth template
 * references the old connection's external id now references the new one.
 * Steps that do not reference the old connection are passed through untouched.
 */
function replaceConnectionInFlowVersion(flowVersion: FlowVersion, appConnection: AppConnectionWithoutSensitiveData, newAppConnection: AppConnectionWithoutSensitiveData) {
    return flowStructureUtil.transferFlow(flowVersion, (step) => {
        const auth = step.settings?.input?.auth
        if (!auth?.includes(appConnection.externalId)) {
            return step
        }
        return {
            ...step,
            settings: {
                ...step.settings,
                input: {
                    ...step.settings?.input,
                    auth: replaceConnectionIdInAuth(auth, appConnection.externalId, newAppConnection.externalId),
                },
            },
        }
    })
}
/**
 * Replaces references to a connection's external id inside a step's auth
 * template, e.g. `{{connections['old-id']}}` -> `{{connections['new-id']}}`.
 *
 * The old id is regex-escaped so ids containing metacharacters ('.', '+',
 * '$', ...) are matched literally rather than as patterns, and '$' in the new
 * id is doubled so String.prototype.replace does not interpret it as a
 * substitution sequence ($&, $1, ...).
 */
function replaceConnectionIdInAuth(auth: string, oldConnectionId: string, newConnectionId: string): string {
    const escapedOldId = oldConnectionId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
    const escapedNewId = newConnectionId.replace(/\$/g, '$$$$')
    return auth.replace(
        new RegExp(`connections\\['${escapedOldId}'\\]`, 'g'),
        `connections['${escapedNewId}']`,
    )
}
// Parameters used when rewriting flows after a connection replacement:
// the connection being replaced, its replacement, and the acting user.
type UpdateFlowsWithAppConnectionParams = {
    appConnection: AppConnectionWithoutSensitiveData
    newAppConnection: AppConnectionWithoutSensitiveData
    userId: UserId
}

View File

@@ -0,0 +1,35 @@
import {
AppConnectionType,
PlatformOAuth2ConnectionValue,
} from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import {
ClaimOAuth2Request,
OAuth2Service,
RefreshOAuth2Request,
} from './oauth2-service'
import { cloudOAuth2Service } from './services/cloud-oauth2-service'
import { credentialsOauth2Service } from './services/credentials-oauth2-service'
/**
 * Default PLATFORM_OAUTH2 service: both operations reject until a real
 * implementation is injected via setPlatformOAuthService.
 */
const unimplementedService = (_log: FastifyBaseLogger): OAuth2Service<PlatformOAuth2ConnectionValue> => {
    const fail = (): never => {
        throw new Error('Unimplemented platform oauth')
    }
    return {
        claim: async (
            _req: ClaimOAuth2Request,
        ): Promise<PlatformOAuth2ConnectionValue> => fail(),
        refresh: async (
            _req: RefreshOAuth2Request<PlatformOAuth2ConnectionValue>,
        ): Promise<PlatformOAuth2ConnectionValue> => fail(),
    }
}
// Maps each OAuth2 connection type to the service that claims/refreshes its
// tokens. PLATFORM_OAUTH2 starts as an unimplemented stub and is swapped in
// at startup via setPlatformOAuthService below.
export const oauth2Handler = {
    [AppConnectionType.CLOUD_OAUTH2]: cloudOAuth2Service,
    [AppConnectionType.OAUTH2]: credentialsOauth2Service,
    [AppConnectionType.PLATFORM_OAUTH2]: unimplementedService,
}
/**
 * Injects the platform OAuth2 implementation, replacing the default
 * unimplemented stub registered for PLATFORM_OAUTH2 connections.
 */
export function setPlatformOAuthService(service: OAuth2Service<PlatformOAuth2ConnectionValue>) {
    oauth2Handler[AppConnectionType.PLATFORM_OAUTH2] = (_log: FastifyBaseLogger) => service
}

View File

@@ -0,0 +1,40 @@
import { OAuth2AuthorizationMethod } from '@activepieces/pieces-framework'
import {
BaseOAuth2ConnectionValue,
OAuth2GrantType,
} from '@activepieces/shared'
/**
 * Contract implemented by each OAuth2 flavor (cloud, credentials, platform):
 * claiming an initial token and refreshing an existing one.
 */
export type OAuth2Service<CONNECTION_VALUE extends BaseOAuth2ConnectionValue> =
    {
        claim(request: ClaimOAuth2Request): Promise<CONNECTION_VALUE>
        refresh(
            request: RefreshOAuth2Request<CONNECTION_VALUE>
        ): Promise<CONNECTION_VALUE>
    }

/** Input for refreshing an existing connection's token. */
export type RefreshOAuth2Request<T extends BaseOAuth2ConnectionValue> = {
    pieceName: string
    projectId: string | undefined
    platformId: string
    connectionValue: T
}

/** Raw OAuth2 token-exchange parameters for a claim request. */
export type OAuth2RequestBody = {
    props?: Record<string, unknown>
    code: string
    clientId: string
    tokenUrl: string
    clientSecret?: string
    redirectUrl?: string
    grantType?: OAuth2GrantType
    authorizationMethod?: OAuth2AuthorizationMethod
    codeVerifier?: string
    scope?: string
}

/** Input for claiming the initial token for a piece's connection. */
export type ClaimOAuth2Request = {
    projectId: string | undefined
    platformId: string
    pieceName: string
    request: OAuth2RequestBody
}

View File

@@ -0,0 +1,114 @@
import { PropertyType } from '@activepieces/pieces-framework'
import { ActivepiecesError,
AppConnection,
AppConnectionType,
assertNotNullOrUndefined,
BaseOAuth2ConnectionValue,
deleteProps,
ErrorCode,
OAuth2GrantType,
PlatformId,
resolveValueFromProps,
} from '@activepieces/shared'
import { isAxiosError } from 'axios'
import { FastifyBaseLogger } from 'fastify'
import { pieceMetadataService } from '../../../pieces/metadata/piece-metadata-service'
/**
 * Shared OAuth2 helpers: token-response normalization, expiry checks,
 * user-error classification, token-URL resolution, and redaction of
 * sensitive fields before a connection leaves the server.
 */
export const oauth2Util = (log: FastifyBaseLogger) => ({
    /**
     * Stamps a raw token-endpoint response with `claimed_at` (seconds since
     * epoch) and keeps any non-standard provider fields under `data`, with
     * the standard OAuth2 fields removed from that copy so they only live at
     * the top level of the connection value.
     */
    formatOAuth2Response: (response: Omit<BaseOAuth2ConnectionValue, 'claimed_at'>): BaseOAuth2ConnectionValue => {
        const secondsSinceEpoch = Math.round(Date.now() / 1000)
        const formattedResponse: BaseOAuth2ConnectionValue = {
            ...response,
            // Copy the response before pruning: the previous code aliased the
            // caller's object here, so deleteProps below mutated the caller's
            // argument as a side effect.
            data: { ...response },
            claimed_at: secondsSinceEpoch,
        }
        deleteProps(formattedResponse.data, [
            'access_token',
            'expires_in',
            'refresh_token',
            'scope',
            'token_type',
        ])
        return formattedResponse
    },
    isExpired: (connection: BaseOAuth2ConnectionValue): boolean => {
        const secondsSinceEpoch = Math.round(Date.now() / 1000)
        const grantType = connection.grant_type ?? OAuth2GrantType.AUTHORIZATION_CODE
        // An authorization-code connection without a refresh token cannot be
        // refreshed, so it is never reported as expired.
        if (
            grantType === OAuth2GrantType.AUTHORIZATION_CODE &&
            !connection.refresh_token
        ) {
            return false
        }
        // Default to a 1-hour lifetime when the provider omitted expires_in,
        // and treat the token as expired 15 minutes ahead of the real expiry
        // so it gets refreshed before it actually stops working.
        const expiresIn = connection.expires_in ?? 60 * 60
        const refreshThreshold = 15 * 60
        return (
            secondsSinceEpoch + refreshThreshold >= connection.claimed_at + expiresIn
        )
    },
    // Only 'invalid_grant' is classified as a user-fixable error; every other
    // OAuth2 error code (and any non-Axios error) returns false.
    isUserError: (e: unknown): boolean => {
        if (isAxiosError(e)) {
            const error = e.response?.data.error
            switch (error) {
                case 'invalid_grant':
                    return true
                case 'invalid_request':
                case 'invalid_client':
                case 'invalid_scope':
                case 'unauthorized_client':
                case 'unsupported_grant_type':
                default:
                    return false
            }
        }
        return false
    },
    /**
     * Resolves the token URL from the piece's OAUTH2 auth metadata; `props`
     * can substitute placeholders in the URL via resolveValueFromProps.
     * Throws INVALID_APP_CONNECTION when the piece has no OAUTH2 auth.
     */
    getOAuth2TokenUrl: async ({
        projectId,
        platformId,
        pieceName,
        props,
    }: OAuth2TokenUrlParams): Promise<string> => {
        const pieceMetadata = await pieceMetadataService(log).getOrThrow({
            name: pieceName,
            projectId,
            platformId,
            version: undefined,
        })
        // A piece may declare several auth options; pick the OAUTH2 one.
        const pieceAuth = Array.isArray(pieceMetadata.auth) ? pieceMetadata.auth.find(auth => auth.type === PropertyType.OAUTH2) : pieceMetadata.auth
        assertNotNullOrUndefined(pieceAuth, 'auth')
        switch (pieceAuth.type) {
            case PropertyType.OAUTH2:
                return resolveValueFromProps(props, pieceAuth.tokenUrl)
            default:
                throw new ActivepiecesError({
                    code: ErrorCode.INVALID_APP_CONNECTION,
                    params: {
                        error: 'invalid auth type',
                    },
                })
        }
    },
    // Redacts fields that must never leave the server: the client secret for
    // client-credentials grants and the refresh token for every OAuth2 flavor.
    // Note: mutates and returns the given connection.
    removeRefreshTokenAndClientSecret: (connection: AppConnection): AppConnection => {
        if (connection.value.type === AppConnectionType.OAUTH2 && connection.value.grant_type === OAuth2GrantType.CLIENT_CREDENTIALS) {
            connection.value.client_secret = '(REDACTED)'
        }
        if (connection.value.type === AppConnectionType.OAUTH2
            || connection.value.type === AppConnectionType.CLOUD_OAUTH2
            || connection.value.type === AppConnectionType.PLATFORM_OAUTH2) {
            connection.value = {
                ...connection.value,
                refresh_token: '(REDACTED)',
            }
        }
        return connection
    },
})
// Parameters for resolving a piece's OAuth2 token URL from its metadata;
// `props` may fill placeholders in the URL.
type OAuth2TokenUrlParams = {
    projectId: string | undefined
    platformId: PlatformId
    pieceName: string
    props?: Record<string, unknown>
}

View File

@@ -0,0 +1,92 @@
import { OAuth2AuthorizationMethod } from '@activepieces/pieces-framework'
import { apAxios } from '@activepieces/server-shared'
import {
ActivepiecesError,
AppConnectionType,
CloudOAuth2ConnectionValue,
ErrorCode,
} from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { system } from '../../../../helper/system/system'
import {
ClaimOAuth2Request,
OAuth2Service,
RefreshOAuth2Request,
} from '../oauth2-service'
/**
 * OAuth2 service that delegates the token exchange to the hosted
 * Activepieces secrets service (secrets.activepieces.com).
 */
export const cloudOAuth2Service = (log: FastifyBaseLogger): OAuth2Service<CloudOAuth2ConnectionValue> => ({
    /** Refreshes the access token through the hosted secrets service. */
    async refresh({
        pieceName,
        connectionValue,
    }: RefreshOAuth2Request<CloudOAuth2ConnectionValue>): Promise<CloudOAuth2ConnectionValue> {
        const refreshResponse = await apAxios.post(
            'https://secrets.activepieces.com/refresh',
            {
                refreshToken: connectionValue.refresh_token,
                pieceName,
                clientId: connectionValue.client_id,
                edition: system.getEdition(),
                authorizationMethod: connectionValue.authorization_method,
                tokenUrl: connectionValue.token_url,
            },
            {
                timeout: 20000,
            },
        )
        // Layer the fresh token fields over the stored value, preserving the
        // original props and connection type.
        return {
            ...connectionValue,
            ...refreshResponse.data,
            props: connectionValue.props,
            type: AppConnectionType.CLOUD_OAUTH2,
        }
    },
    /** Claims the initial token; any failure surfaces as INVALID_CLOUD_CLAIM. */
    async claim({
        request,
        pieceName,
    }: ClaimOAuth2Request): Promise<CloudOAuth2ConnectionValue> {
        try {
            const cloudRequest: ClaimWithCloudRequest = {
                code: request.code,
                codeVerifier: request.codeVerifier,
                authorizationMethod: request.authorizationMethod,
                clientId: request.clientId,
                tokenUrl: request.tokenUrl,
                pieceName,
                edition: system.getEdition(),
            }
            const claimResponse = await apAxios.post<CloudOAuth2ConnectionValue>(
                'https://secrets.activepieces.com/claim',
                cloudRequest,
                {
                    timeout: 10000,
                },
            )
            return {
                ...claimResponse.data,
                token_url: request.tokenUrl,
                props: request.props,
            }
        }
        catch (e: unknown) {
            log.error(e)
            throw new ActivepiecesError({
                code: ErrorCode.INVALID_CLOUD_CLAIM,
                params: {
                    pieceName,
                },
            })
        }
    },
})
// Payload sent to the hosted secrets service when claiming a token.
type ClaimWithCloudRequest = {
    pieceName: string
    code: string
    codeVerifier: string | undefined
    authorizationMethod: OAuth2AuthorizationMethod | undefined
    edition: string
    clientId: string
    tokenUrl: string
}

View File

@@ -0,0 +1,200 @@
import { OAuth2AuthorizationMethod } from '@activepieces/pieces-framework'
import { apAxios } from '@activepieces/server-shared'
import { ActivepiecesError,
AppConnectionType,
BaseOAuth2ConnectionValue,
ErrorCode,
isNil,
OAuth2ConnectionValueWithApp,
OAuth2GrantType,
resolveValueFromProps,
} from '@activepieces/shared'
import { AxiosError } from 'axios'
import { FastifyBaseLogger } from 'fastify'
import {
ClaimOAuth2Request,
OAuth2Service,
RefreshOAuth2Request,
} from '../oauth2-service'
import { oauth2Util } from '../oauth2-util'
/**
 * OAuth2 service for connections that use the customer's own client
 * credentials: exchanges the authorization code (or client credentials)
 * directly against the piece's token URL, and refreshes tokens near expiry.
 */
export const credentialsOauth2Service = (log: FastifyBaseLogger): OAuth2Service<OAuth2ConnectionValueWithApp> => ({
    /**
     * Exchanges the authorization code or client credentials for tokens.
     * Throws ActivepiecesError(INVALID_CLAIM) when the token endpoint
     * rejects the request.
     */
    async claim({
        request,
    }: ClaimOAuth2Request): Promise<OAuth2ConnectionValueWithApp> {
        try {
            const grantType = request.grantType ?? OAuth2GrantType.AUTHORIZATION_CODE
            // Form body sent to the token endpoint, built up per grant type.
            const body: Record<string, unknown> = {
                grant_type: grantType,
            }
            switch (grantType) {
                case OAuth2GrantType.AUTHORIZATION_CODE: {
                    body.redirect_uri = request.redirectUrl!
                    body.code = request.code
                    break
                }
                case OAuth2GrantType.CLIENT_CREDENTIALS:
                    // Scope may contain placeholders resolved against the
                    // connection props via resolveValueFromProps.
                    if (request.scope) {
                        body.scope = resolveValueFromProps(request.props, request.scope)
                    }
                    // Extra props are forwarded verbatim as form fields.
                    if (request.props) {
                        Object.entries(request.props).forEach(([key, value]) => {
                            body[key] = value
                        })
                    }
                    break
            }
            // PKCE: include the verifier when the frontend supplied one.
            if (request.codeVerifier) {
                body.code_verifier = request.codeVerifier
            }
            const headers: Record<string, string> = {
                'content-type': 'application/x-www-form-urlencoded',
                accept: 'application/json',
            }
            // Client credentials travel either in the form body or as an
            // HTTP Basic authorization header, per the piece's declaration.
            const authorizationMethod =
                request.authorizationMethod || OAuth2AuthorizationMethod.BODY
            switch (authorizationMethod) {
                case OAuth2AuthorizationMethod.BODY:
                    body.client_id = request.clientId
                    body.client_secret = request.clientSecret!
                    break
                case OAuth2AuthorizationMethod.HEADER:
                    headers.authorization = `Basic ${Buffer.from(
                        `${request.clientId}:${request.clientSecret}`,
                    ).toString('base64')}`
                    break
                default:
                    throw new Error(`Unknown authorization method: ${authorizationMethod}`)
            }
            const urlSearchParams = new URLSearchParams(Object.fromEntries(Object.entries(body).map(([key, value]) => [key, String(value)])))
            const response = (
                await apAxios.post(request.tokenUrl, urlSearchParams, {
                    headers,
                })
            ).data
            // Persist everything needed to refresh later alongside the tokens.
            return {
                type: AppConnectionType.OAUTH2,
                ...oauth2Util(log).formatOAuth2Response(response),
                token_url: request.tokenUrl,
                client_id: request.clientId,
                client_secret: request.clientSecret!,
                redirect_url: request.redirectUrl!,
                grant_type: grantType,
                props: request.props,
                authorization_method: authorizationMethod,
            }
        }
        catch (e: unknown) {
            if (e instanceof AxiosError) {
                log.error('Axios Error:')
                log.error(e.response?.data)
                log.error({
                    clientId: request.clientId,
                    tokenUrl: request.tokenUrl,
                })
            }
            else {
                log.error('Unknown Error:')
                log.error(e)
            }
            throw new ActivepiecesError({
                code: ErrorCode.INVALID_CLAIM,
                params: {
                    clientId: request.clientId,
                    tokenUrl: request.tokenUrl,
                    redirectUrl: request.redirectUrl ?? '',
                    message: e instanceof AxiosError ? e.response?.data.error_description : 'unknown error',
                },
            })
        }
    },
    /**
     * Refreshes the access token when it is close to expiry; returns the
     * connection untouched when the current token is still fresh.
     */
    async refresh({
        connectionValue,
    }: RefreshOAuth2Request<OAuth2ConnectionValueWithApp>): Promise<OAuth2ConnectionValueWithApp> {
        const appConnection = connectionValue
        // Skip the network round-trip when the token has not expired yet.
        if (!oauth2Util(log).isExpired(appConnection)) {
            return appConnection
        }
        const grantType =
            connectionValue.grant_type ?? OAuth2GrantType.AUTHORIZATION_CODE
        const body: Record<string, string> = {}
        switch (grantType) {
            case OAuth2GrantType.AUTHORIZATION_CODE: {
                body.grant_type = 'refresh_token'
                body.refresh_token = appConnection.refresh_token
                break
            }
            case OAuth2GrantType.CLIENT_CREDENTIALS: {
                // No refresh token in this grant: simply request a new token
                // with the same client credentials and props.
                body.grant_type = OAuth2GrantType.CLIENT_CREDENTIALS
                if (appConnection.scope) {
                    body.scope = resolveValueFromProps(appConnection.props, appConnection.scope)
                }
                if (appConnection.props) {
                    Object.entries(appConnection.props).forEach(([key, value]) => {
                        body[key] = String(value)
                    })
                }
                break
            }
            default:
                throw new Error(`Unknown grant type: ${grantType}`)
        }
        const headers: Record<string, string> = {
            'content-type': 'application/x-www-form-urlencoded',
            accept: 'application/json',
        }
        const authorizationMethod =
            appConnection.authorization_method || OAuth2AuthorizationMethod.BODY
        switch (authorizationMethod) {
            case OAuth2AuthorizationMethod.BODY:
                body.client_id = appConnection.client_id
                body.client_secret = appConnection.client_secret
                break
            case OAuth2AuthorizationMethod.HEADER:
                headers.authorization = `Basic ${Buffer.from(
                    `${appConnection.client_id}:${appConnection.client_secret}`,
                ).toString('base64')}`
                break
            default:
                throw new Error(`Unknown authorization method: ${authorizationMethod}`)
        }
        const response = (
            await apAxios.post(appConnection.token_url, new URLSearchParams(body), {
                headers,
                timeout: 20000,
            })
        ).data
        // Merge only non-null response fields so a provider that omits
        // refresh_token does not wipe the stored one (see mergeNonNull).
        const mergedObject = mergeNonNull(
            appConnection,
            oauth2Util(log).formatOAuth2Response({ ...response }),
        )
        return {
            ...mergedObject,
            props: appConnection.props,
        }
    },
})
/**
 * Merges the OAuth2 response over the stored connection, skipping null and
 * undefined response fields. A provider that legitimately returns no
 * refresh_token would otherwise overwrite the stored refresh token with a
 * null/undefined value, breaking all future refreshes.
 */
function mergeNonNull(
    appConnection: OAuth2ConnectionValueWithApp,
    oAuth2Response: BaseOAuth2ConnectionValue,
): OAuth2ConnectionValueWithApp {
    const merged: Record<string, unknown> = { ...appConnection }
    for (const [key, value] of Object.entries(oAuth2Response)) {
        if (!isNil(value)) {
            merged[key] = value
        }
    }
    return merged as OAuth2ConnectionValueWithApp
}

View File

@@ -0,0 +1,50 @@
import {
ActivepiecesError,
AppConnection,
assertNotNullOrUndefined,
EnginePrincipal,
ErrorCode,
GetAppConnectionForWorkerRequestQuery,
isNil,
PrincipalType,
} from '@activepieces/shared'
import {
FastifyPluginAsyncTypebox,
} from '@fastify/type-provider-typebox'
import { appConnectionService } from './app-connection-service/app-connection-service'
/**
 * Engine-facing route used by workers to fetch a connection (including its
 * value) by external id, scoped to the engine principal's project.
 */
export const appConnectionWorkerController: FastifyPluginAsyncTypebox = async (app) => {
    app.get('/:externalId', GetAppConnectionRequest, async (request): Promise<AppConnection> => {
        const enginePrincipal = (request.principal as EnginePrincipal)
        assertNotNullOrUndefined(enginePrincipal.projectId, 'projectId')
        const appConnection = await appConnectionService(request.log).getOne({
            projectId: enginePrincipal.projectId,
            platformId: enginePrincipal.platform.id,
            externalId: request.params.externalId,
        })
        // 404 with a descriptive entity id rather than returning null.
        if (isNil(appConnection)) {
            throw new ActivepiecesError({
                code: ErrorCode.ENTITY_NOT_FOUND,
                params: {
                    entityId: `externalId=${request.params.externalId}`,
                    entityType: 'AppConnection',
                },
            })
        }
        return appConnection
    },
    )
}
// Route options for the worker lookup: engine principals only, params
// validated against the worker request schema.
const GetAppConnectionRequest = {
    config: {
        allowedPrincipals: [PrincipalType.ENGINE],
    },
    schema: {
        params: GetAppConnectionForWorkerRequestQuery,
    },
}

View File

@@ -0,0 +1,236 @@
import { ApplicationEventName } from '@activepieces/ee-shared'
import {
ApId,
AppConnectionOwners,
AppConnectionScope,
AppConnectionWithoutSensitiveData,
ListAppConnectionOwnersRequestQuery,
ListAppConnectionsRequestQuery,
Permission,
PrincipalType,
ReplaceAppConnectionsRequestBody,
SeekPage,
SERVICE_KEY_SECURITY_OPENAPI,
UpdateConnectionValueRequestBody,
UpsertAppConnectionRequestBody,
} from '@activepieces/shared'
import {
FastifyPluginCallbackTypebox,
Type,
} from '@fastify/type-provider-typebox'
import { StatusCodes } from 'http-status-codes'
import { eventsHooks } from '../helper/application-events'
import { securityHelper } from '../helper/security-helper'
import { appConnectionService } from './app-connection-service/app-connection-service'
/**
 * User/service-facing routes for project-scoped app connections:
 * upsert, update, list, list owners, replace, and delete.
 */
export const appConnectionController: FastifyPluginCallbackTypebox = (app, _opts, done) => {
    // Create-or-update a connection; emits a CONNECTION_UPSERTED audit event.
    app.post('/', UpsertAppConnectionRequest, async (request, reply) => {
        const appConnection = await appConnectionService(request.log).upsert({
            platformId: request.principal.platform.id,
            projectIds: [request.principal.projectId],
            type: request.body.type,
            externalId: request.body.externalId,
            value: request.body.value,
            displayName: request.body.displayName,
            pieceName: request.body.pieceName,
            ownerId: await securityHelper.getUserIdFromRequest(request),
            scope: AppConnectionScope.PROJECT,
            metadata: request.body.metadata,
            pieceVersion: request.body.pieceVersion,
        })
        eventsHooks.get(request.log).sendUserEventFromRequest(request, {
            action: ApplicationEventName.CONNECTION_UPSERTED,
            data: {
                connection: appConnection,
            },
        })
        await reply
            .status(StatusCodes.CREATED)
            .send(appConnection)
    })
    // Update a connection's display name / metadata (value is not changed here).
    app.post('/:id', UpdateConnectionValueRequest, async (request) => {
        const appConnection = await appConnectionService(request.log).update({
            id: request.params.id,
            platformId: request.principal.platform.id,
            projectIds: [request.principal.projectId],
            scope: AppConnectionScope.PROJECT,
            request: {
                displayName: request.body.displayName,
                projectIds: null,
                metadata: request.body.metadata,
            },
        })
        return appConnection
    })
    // Paginated listing filterable by piece, name, status, and scope;
    // sensitive values are stripped before the page is returned.
    app.get('/', ListAppConnectionsRequest, async (request): Promise<SeekPage<AppConnectionWithoutSensitiveData>> => {
        const { displayName, pieceName, status, cursor, limit, scope } = request.query
        const appConnections = await appConnectionService(request.log).list({
            pieceName,
            displayName,
            status,
            scope,
            platformId: request.principal.platform.id,
            projectId: request.principal.projectId,
            cursorRequest: cursor ?? null,
            limit: limit ?? DEFAULT_PAGE_SIZE,
            externalIds: undefined,
        })
        const appConnectionsWithoutSensitiveData: SeekPage<AppConnectionWithoutSensitiveData> = {
            ...appConnections,
            data: appConnections.data.map(appConnectionService(request.log).removeSensitiveData),
        }
        return appConnectionsWithoutSensitiveData
    },
    )
    // Owners of the project's connections, returned as a single unpaged page.
    app.get('/owners', ListAppConnectionOwnersRequest, async (request): Promise<SeekPage<AppConnectionOwners>> => {
        const owners = await appConnectionService(request.log).getOwners({
            projectId: request.principal.projectId,
            platformId: request.principal.platform.id,
        })
        return {
            data: owners,
            next: null,
            previous: null,
        }
    },
    )
    // Replace the source connection with the target connection across the project.
    app.post('/replace', ReplaceAppConnectionsRequest, async (request, reply) => {
        const { sourceAppConnectionId, targetAppConnectionId } = request.body
        await appConnectionService(request.log).replace({
            sourceAppConnectionId,
            targetAppConnectionId,
            projectId: request.principal.projectId,
            platformId: request.principal.platform.id,
            userId: request.principal.id,
        })
        await reply.status(StatusCodes.OK).send()
    })
    // Delete a connection. The audit event is emitted before deletion so the
    // connection payload is still available for the event body.
    app.delete('/:id', DeleteAppConnectionRequest, async (request, reply): Promise<void> => {
        const connection = await appConnectionService(request.log).getOneOrThrowWithoutValue({
            id: request.params.id,
            platformId: request.principal.platform.id,
            projectId: request.principal.projectId,
        })
        eventsHooks.get(request.log).sendUserEventFromRequest(request, {
            action: ApplicationEventName.CONNECTION_DELETED,
            data: {
                connection,
            },
        })
        await appConnectionService(request.log).delete({
            id: request.params.id,
            platformId: request.principal.platform.id,
            scope: AppConnectionScope.PROJECT,
            projectId: request.principal.projectId,
        })
        await reply.status(StatusCodes.NO_CONTENT).send()
    })
    done()
}
// Default number of connections returned by the list endpoint when the
// caller does not pass an explicit limit.
const DEFAULT_PAGE_SIZE = 10
// Route options for POST / (upsert a connection).
const UpsertAppConnectionRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.WRITE_APP_CONNECTION,
    },
    schema: {
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        description: 'Upsert an app connection based on the app name',
        body: UpsertAppConnectionRequestBody,
        // Fastify expects the lowercase `response` key (as every sibling
        // route const here uses); the previous capital-R `Response` was
        // silently ignored, so the 201 schema never applied.
        response: {
            [StatusCodes.CREATED]: AppConnectionWithoutSensitiveData,
        },
    },
}
// Route options for POST /:id (update display name / metadata).
const UpdateConnectionValueRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.WRITE_APP_CONNECTION,
    },
    schema: {
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        description: 'Update an app connection value',
        body: UpdateConnectionValueRequestBody,
        params: Type.Object({
            id: ApId,
        }),
    },
}
// Route options for POST /replace (swap one connection for another); 204 on success.
const ReplaceAppConnectionsRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.WRITE_APP_CONNECTION,
    },
    schema: {
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        description: 'Replace app connections',
        body: ReplaceAppConnectionsRequestBody,
        response: {
            [StatusCodes.NO_CONTENT]: Type.Never(),
        },
    },
}
// Route options for GET / (paginated listing, READ permission).
const ListAppConnectionsRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.READ_APP_CONNECTION,
    },
    schema: {
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        querystring: ListAppConnectionsRequestQuery,
        description: 'List app connections',
        response: {
            [StatusCodes.OK]: SeekPage(AppConnectionWithoutSensitiveData),
        },
    },
}
// Route options for GET /owners (connection owners, READ permission).
const ListAppConnectionOwnersRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.READ_APP_CONNECTION,
    },
    schema: {
        querystring: ListAppConnectionOwnersRequestQuery,
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        description: 'List app connection owners',
        response: {
            [StatusCodes.OK]: SeekPage(AppConnectionOwners),
        },
    },
}
// Route options for DELETE /:id; 204 on success.
const DeleteAppConnectionRequest = {
    config: {
        allowedPrincipals: [PrincipalType.USER, PrincipalType.SERVICE] as const,
        permission: Permission.WRITE_APP_CONNECTION,
    },
    schema: {
        tags: ['app-connections'],
        security: [SERVICE_KEY_SECURITY_OPENAPI],
        description: 'Delete an app connection',
        params: Type.Object({
            id: ApId,
        }),
        response: {
            [StatusCodes.NO_CONTENT]: Type.Never(),
        },
    },
}

View File

@@ -0,0 +1,88 @@
import {
AppConnection,
AppConnectionStatus,
User,
UserIdentity,
} from '@activepieces/shared'
import { EntitySchema } from 'typeorm'
import {
BaseColumnSchemaPart,
} from '../database/database-common'
import { EncryptedObject } from '../helper/encryption'
// Database row shape for `app_connection`: the runtime `value` is stored
// encrypted, and `owner` is an optional joined user relation.
export type AppConnectionSchema = Omit<AppConnection, 'value'> & {
    value: EncryptedObject
    owner?: (User & { identity?: UserIdentity })
}
/**
 * TypeORM entity for the `app_connection` table. The `value` column stores
 * the encrypted connection payload (see AppConnectionSchema).
 */
export const AppConnectionEntity = new EntitySchema<AppConnectionSchema>({
    name: 'app_connection',
    columns: {
        ...BaseColumnSchemaPart,
        displayName: {
            type: String,
        },
        // Caller-facing identifier; indexed together with platformId below.
        externalId: {
            type: String,
        },
        type: {
            type: String,
        },
        status: {
            type: String,
            default: AppConnectionStatus.ACTIVE,
        },
        platformId: {
            type: String,
            nullable: false,
        },
        pieceName: {
            type: String,
        },
        // Nullable so connections survive owner deletion (FK is SET NULL).
        ownerId: {
            type: String,
            nullable: true,
        },
        // A connection can be attached to multiple projects.
        projectIds: {
            type: String,
            array: true,
            nullable: false,
        },
        scope: {
            type: String,
        },
        // Encrypted connection value (EncryptedObject) stored as jsonb.
        value: {
            type: 'jsonb',
        },
        metadata: {
            type: 'jsonb',
            nullable: true,
        },
        pieceVersion: {
            type: String,
        },
    },
    indices: [
        {
            name: 'idx_app_connection_platform_id_and_external_id',
            columns: ['platformId', 'externalId'],
        },
        {
            name: 'idx_app_connection_owner_id',
            columns: ['ownerId'],
        },
    ],
    relations: {
        // Deleting the owning user nulls out ownerId rather than cascading
        // the delete to the connection.
        owner: {
            type: 'many-to-one',
            target: 'user',
            cascade: true,
            onDelete: 'SET NULL',
            joinColumn: {
                name: 'ownerId',
                foreignKeyConstraintName: 'fk_app_connection_owner_id',
            },
        },
    },
})

View File

@@ -0,0 +1,14 @@
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { entitiesMustBeOwnedByCurrentProject } from '../authentication/authorization'
import { appConnectionWorkerController } from './app-connection-worker-controller'
import { appConnectionController } from './app-connection.controller'
/**
 * Registers the app-connection HTTP routes: the user/service-facing
 * controller and the engine-facing worker controller.
 */
export const appConnectionModule: FastifyPluginAsyncTypebox = async (app) => {
    // Reject any connection not owned by the caller's project before serialization.
    app.addHook('preSerialization', entitiesMustBeOwnedByCurrentProject)
    const controllers = [
        { plugin: appConnectionController, prefix: '/v1/app-connections' },
        { plugin: appConnectionWorkerController, prefix: '/v1/worker/app-connections' },
    ]
    for (const { plugin, prefix } of controllers) {
        await app.register(plugin, { prefix })
    }
}

View File

@@ -0,0 +1,376 @@
import { ApplicationEventName, AuthenticationEvent, ConnectionEvent, FlowCreatedEvent, FlowDeletedEvent, FlowRunEvent, FlowUpdatedEvent, FolderEvent, GitRepoWithoutSensitiveData, ProjectMember, ProjectReleaseEvent, ProjectRoleEvent, SigningKeyEvent, SignUpEvent } from '@activepieces/ee-shared'
import { PieceMetadata } from '@activepieces/pieces-framework'
import { AppSystemProp, exceptionHandler, rejectedPromiseHandler } from '@activepieces/server-shared'
import { ApEdition, ApEnvironment, AppConnectionWithoutSensitiveData, Flow, FlowRun, Folder, ProjectRelease, ProjectWithLimits, spreadIfDefined, Template, UserInvitation, UserWithMetaInformation } from '@activepieces/shared'
import swagger from '@fastify/swagger'
import { createAdapter } from '@socket.io/redis-adapter'
import { FastifyInstance, FastifyRequest, HTTPMethods } from 'fastify'
import fastifySocketIO from 'fastify-socket.io'
import { Socket } from 'socket.io'
import { aiProviderService } from './ai/ai-provider-service'
import { aiProviderModule } from './ai/ai-provider.module'
import { platformAnalyticsModule } from './analytics/platform-analytics.module'
import { setPlatformOAuthService } from './app-connection/app-connection-service/oauth2'
import { appConnectionModule } from './app-connection/app-connection.module'
import { authenticationModule } from './authentication/authentication.module'
import { rateLimitModule } from './core/security/rate-limit'
import { securityHandlerChain } from './core/security/security-handler-chain'
import { websocketService } from './core/websockets.service'
import { distributedLock, redisConnections } from './database/redis-connections'
import { alertsModule } from './ee/alerts/alerts-module'
import { apiKeyModule } from './ee/api-keys/api-key-module'
import { platformOAuth2Service } from './ee/app-connections/platform-oauth2-service'
import { appCredentialModule } from './ee/app-credentials/app-credentials.module'
import { appSumoModule } from './ee/appsumo/appsumo.module'
import { auditEventModule } from './ee/audit-logs/audit-event-module'
import { auditLogService } from './ee/audit-logs/audit-event-service'
import { enterpriseLocalAuthnModule } from './ee/authentication/enterprise-local-authn/enterprise-local-authn-module'
import { federatedAuthModule } from './ee/authentication/federated-authn/federated-authn-module'
import { otpModule } from './ee/authentication/otp/otp-module'
import { rbacMiddleware } from './ee/authentication/project-role/rbac-middleware'
import { authnSsoSamlModule } from './ee/authentication/saml-authn/authn-sso-saml-module'
import { connectionKeyModule } from './ee/connection-keys/connection-key.module'
import { customDomainModule } from './ee/custom-domains/custom-domain.module'
import { domainHelper } from './ee/custom-domains/domain-helper'
import { enterpriseFlagsHooks } from './ee/flags/enterprise-flags.hooks'
import { globalConnectionModule } from './ee/global-connections/global-connection-module'
import { licenseKeysModule } from './ee/license-keys/license-keys-module'
import { managedAuthnModule } from './ee/managed-authn/managed-authn-module'
import { oauthAppModule } from './ee/oauth-apps/oauth-app.module'
import { platformPieceModule } from './ee/pieces/platform-piece-module'
import { adminPlatformModule } from './ee/platform/admin/admin-platform.controller'
import { adminPlatformTemplatesCloudModule } from './ee/platform/admin/templates/admin-platform-templates-cloud.module'
import { platformPlanModule } from './ee/platform/platform-plan/platform-plan.module'
import { projectEnterpriseHooks } from './ee/projects/ee-project-hooks'
import { platformProjectModule } from './ee/projects/platform-project-module'
import { projectMemberModule } from './ee/projects/project-members/project-member.module'
import { gitRepoModule } from './ee/projects/project-release/git-sync/git-sync.module'
import { projectReleaseModule } from './ee/projects/project-release/project-release.module'
import { projectRoleModule } from './ee/projects/project-role/project-role.module'
import { signingKeyModule } from './ee/signing-key/signing-key-module'
import { solutionsModule } from './ee/solutions/solutions.module'
import { userModule } from './ee/users/user.module'
import { fileModule } from './file/file.module'
import { flagModule } from './flags/flag.module'
import { flagHooks } from './flags/flags.hooks'
import { flowBackgroundJobs } from './flows/flow/flow.jobs'
import { humanInputModule } from './flows/flow/human-input/human-input.module'
import { flowRunModule } from './flows/flow-run/flow-run-module'
import { flowModule } from './flows/flow.module'
import { folderModule } from './flows/folder/folder.module'
import { eventsHooks } from './helper/application-events'
import { openapiModule } from './helper/openapi/openapi.module'
import { system } from './helper/system/system'
import { SystemJobName } from './helper/system-jobs/common'
import { systemJobHandlers } from './helper/system-jobs/job-handlers'
import { systemJobsSchedule } from './helper/system-jobs/system-job'
import { validateEnvPropsOnStartup } from './helper/system-validator'
import { mcpServerModule } from './mcp/mcp-module'
import { communityPiecesModule } from './pieces/community-piece-module'
import { pieceModule } from './pieces/metadata/piece-metadata-controller'
import { pieceMetadataService } from './pieces/metadata/piece-metadata-service'
import { pieceSyncService } from './pieces/piece-sync-service'
import { tagsModule } from './pieces/tags/tags-module'
import { platformModule } from './platform/platform.module'
import { projectHooks } from './project/project-hooks'
import { projectModule } from './project/project-module'
import { storeEntryModule } from './store-entry/store-entry.module'
import { tablesModule } from './tables/tables.module'
import { templateModule } from './template/template.module'
import { todoActivityModule } from './todos/activity/todos-activity.module'
import { todoModule } from './todos/todo.module'
import { appEventRoutingModule } from './trigger/app-event-routing/app-event-routing.module'
import { triggerModule } from './trigger/trigger.module'
import { platformUserModule } from './user/platform/platform-user-module'
import { invitationModule } from './user-invitations/user-invitation.module'
import { webhookModule } from './webhooks/webhook-module'
import { engineResponseWatcher } from './workers/engine-response-watcher'
import { queueMetricsModule } from './workers/queue/metrics/queue-metrics.module'
import { migrateQueuesAndRunConsumers, workerModule } from './workers/worker-module'
export const setupApp = async (app: FastifyInstance): Promise<FastifyInstance> => {
app.addContentTypeParser('application/octet-stream', { parseAs: 'buffer' }, async (_request: FastifyRequest, payload: unknown) => {
return payload as Buffer
})
await app.register(swagger, {
hideUntagged: true,
openapi: {
servers: [
{
url: 'https://cloud.activepieces.com/api',
description: 'Production Server',
},
],
components: {
securitySchemes: {
apiKey: {
type: 'http',
description: 'Use your api key generated from the admin console',
scheme: 'bearer',
},
},
schemas: {
[ApplicationEventName.FLOW_CREATED]: FlowCreatedEvent,
[ApplicationEventName.FLOW_UPDATED]: FlowUpdatedEvent,
[ApplicationEventName.FLOW_DELETED]: FlowDeletedEvent,
[ApplicationEventName.CONNECTION_UPSERTED]: ConnectionEvent,
[ApplicationEventName.CONNECTION_DELETED]: ConnectionEvent,
[ApplicationEventName.FOLDER_CREATED]: FolderEvent,
[ApplicationEventName.FOLDER_UPDATED]: FolderEvent,
[ApplicationEventName.FOLDER_DELETED]: FolderEvent,
[ApplicationEventName.FLOW_RUN_STARTED]: FlowRunEvent,
[ApplicationEventName.FLOW_RUN_FINISHED]: FlowRunEvent,
[ApplicationEventName.USER_SIGNED_UP]: SignUpEvent,
[ApplicationEventName.USER_SIGNED_IN]: AuthenticationEvent,
[ApplicationEventName.USER_PASSWORD_RESET]: AuthenticationEvent,
[ApplicationEventName.USER_EMAIL_VERIFIED]: AuthenticationEvent,
[ApplicationEventName.SIGNING_KEY_CREATED]: SigningKeyEvent,
[ApplicationEventName.PROJECT_ROLE_CREATED]: ProjectRoleEvent,
[ApplicationEventName.PROJECT_RELEASE_CREATED]: ProjectReleaseEvent,
'template': Template,
'folder': Folder,
'user': UserWithMetaInformation,
'user-invitation': UserInvitation,
'project-member': ProjectMember,
project: ProjectWithLimits,
flow: Flow,
'flow-run': FlowRun,
'app-connection': AppConnectionWithoutSensitiveData,
piece: PieceMetadata,
'git-repo': GitRepoWithoutSensitiveData,
'project-release': ProjectRelease,
'global-connection': AppConnectionWithoutSensitiveData,
},
},
info: {
title: 'Activepieces Documentation',
version: '0.0.0',
},
externalDocs: {
url: 'https://www.activepieces.com/docs',
description: 'Find more info here',
},
},
})
await app.register(rateLimitModule)
app.addHook('onResponse', async (request, reply) => {
// eslint-disable-next-line
reply.header('x-request-id', request.id)
})
app.addHook('onRequest', async (request, reply) => {
const route = app.hasRoute({
method: request.method as HTTPMethods,
url: request.routeOptions.url!,
})
if (!route) {
return reply.code(404).send({
statusCode: 404,
error: 'Not Found',
message: 'Route not found',
})
}
})
app.addHook('preHandler', securityHandlerChain)
app.addHook('preHandler', rbacMiddleware)
await systemJobsSchedule(app.log).init()
await app.register(fileModule)
await app.register(flagModule)
await app.register(storeEntryModule)
await app.register(folderModule)
await pieceSyncService(app.log).setup()
await pieceMetadataService(app.log).setup()
await app.register(pieceModule)
await app.register(flowModule)
await app.register(flowRunModule)
await app.register(webhookModule)
await app.register(appConnectionModule)
await app.register(openapiModule)
await app.register(appEventRoutingModule)
await app.register(authenticationModule)
await app.register(triggerModule)
await app.register(platformModule)
await app.register(humanInputModule)
await app.register(tagsModule)
await app.register(mcpServerModule)
await app.register(platformUserModule)
await app.register(alertsModule)
await app.register(invitationModule)
await app.register(workerModule)
await aiProviderService(app.log).setup()
await app.register(aiProviderModule)
await app.register(licenseKeysModule)
await app.register(tablesModule)
await app.register(userModule)
await app.register(todoModule)
await app.register(todoActivityModule)
await app.register(solutionsModule)
await app.register(templateModule)
await app.register(platformAnalyticsModule)
systemJobHandlers.registerJobHandler(SystemJobName.DELETE_FLOW, (data) => flowBackgroundJobs(app.log).deleteHandler(data))
systemJobHandlers.registerJobHandler(SystemJobName.UPDATE_FLOW_STATUS, (data) => flowBackgroundJobs(app.log).updateStatusHandler(data))
app.get(
'/redirect',
async (
request: FastifyRequest<{ Querystring: { code: string } }>,
reply,
) => {
const params = {
code: request.query.code,
}
if (!params.code) {
return reply.send('The code is missing in url')
}
else {
return reply
.type('text/html')
.send(
`<script>if(window.opener){window.opener.postMessage({ 'code': '${encodeURIComponent(
params.code,
)}' },'*')}</script> <html>Redirect succuesfully, this window should close now</html>`,
)
}
},
)
await app.register(fastifySocketIO, {
cors: {
origin: '*',
},
maxHttpBufferSize: 1e8,
...spreadIfDefined('adapter', await getAdapter()),
transports: ['websocket'],
})
app.io.use((socket: Socket, next: (err?: Error) => void) => {
websocketService
.verifyPrincipal(socket)
.then(() => next())
.catch(() => next(new Error('Authentication error')))
})
app.io.on('connection', (socket: Socket) => rejectedPromiseHandler(websocketService.init(socket, app.log), app.log))
app.io.on('disconnect', (socket: Socket) => rejectedPromiseHandler(websocketService.onDisconnect(socket), app.log))
await validateEnvPropsOnStartup(app.log)
const edition = system.getEdition()
app.log.info({
edition,
}, 'Activepieces Edition')
switch (edition) {
case ApEdition.CLOUD:
await app.register(adminPlatformModule)
await app.register(adminPlatformTemplatesCloudModule)
await app.register(appCredentialModule)
await app.register(connectionKeyModule)
await app.register(platformProjectModule)
await app.register(platformPlanModule)
await app.register(projectMemberModule)
await app.register(appSumoModule)
await app.register(customDomainModule)
await app.register(signingKeyModule)
await app.register(authnSsoSamlModule)
await app.register(managedAuthnModule)
await app.register(oauthAppModule)
await app.register(platformPieceModule)
await app.register(otpModule)
await app.register(enterpriseLocalAuthnModule)
await app.register(federatedAuthModule)
await app.register(apiKeyModule)
await app.register(gitRepoModule)
await app.register(auditEventModule)
await app.register(projectRoleModule)
await app.register(projectReleaseModule)
await app.register(globalConnectionModule)
setPlatformOAuthService(platformOAuth2Service(app.log))
projectHooks.set(projectEnterpriseHooks)
eventsHooks.set(auditLogService)
flagHooks.set(enterpriseFlagsHooks)
exceptionHandler.initializeSentry(system.get(AppSystemProp.SENTRY_DSN))
break
case ApEdition.ENTERPRISE:
await app.register(platformPlanModule)
await app.register(customDomainModule)
await app.register(platformProjectModule)
await app.register(projectMemberModule)
await app.register(signingKeyModule)
await app.register(authnSsoSamlModule)
await app.register(managedAuthnModule)
await app.register(oauthAppModule)
await app.register(platformPieceModule)
await app.register(otpModule)
await app.register(enterpriseLocalAuthnModule)
await app.register(federatedAuthModule)
await app.register(apiKeyModule)
await app.register(gitRepoModule)
await app.register(auditEventModule)
await app.register(projectRoleModule)
await app.register(projectReleaseModule)
await app.register(globalConnectionModule)
await app.register(queueMetricsModule)
setPlatformOAuthService(platformOAuth2Service(app.log))
projectHooks.set(projectEnterpriseHooks)
eventsHooks.set(auditLogService)
flagHooks.set(enterpriseFlagsHooks)
break
case ApEdition.COMMUNITY:
await app.register(projectModule)
await app.register(communityPiecesModule)
await app.register(queueMetricsModule)
break
}
app.addHook('onClose', async () => {
app.log.info('Shutting down')
await systemJobsSchedule(app.log).close()
await redisConnections.destroy()
await distributedLock(app.log).destroy()
await engineResponseWatcher(app.log).shutdown()
})
return app
}
// Builds the Redis-backed socket.io adapter so websocket events fan out
// across server instances. The adapter contract requires dedicated pub and
// sub clients, so we duplicate the shared Redis connection twice.
async function getAdapter() {
    const baseConnection = await redisConnections.useExisting()
    const pubClient = baseConnection.duplicate()
    const subClient = baseConnection.duplicate()
    return createAdapter(pubClient, subClient, {
        requestsTimeout: 30000,
    })
}
/**
 * Runs once the HTTP server is listening: prints the startup banner,
 * migrates the job queues, starts the queue consumers, and warns when the
 * server runs in development mode (where only pieces listed in
 * AP_DEV_PIECES are loaded).
 */
export async function appPostBoot(app: FastifyInstance): Promise<void> {
    app.log.info(`
_____ _______ _____ __ __ ______ _____ _____ ______ _____ ______ _____
/\\ / ____| |__ __| |_ _| \\ \\ / / | ____| | __ \\ |_ _| | ____| / ____| | ____| / ____|
/ \\ | | | | | | \\ \\ / / | |__ | |__) | | | | |__ | | | |__ | (___
/ /\\ \\ | | | | | | \\ \\/ / | __| | ___/ | | | __| | | | __| \\___ \\
/ ____ \\ | |____ | | _| |_ \\ / | |____ | | _| |_ | |____ | |____ | |____ ____) |
/_/ \\_\\ \\_____| |_| |_____| \\/ |______| |_| |_____| |______| \\_____| |______| |_____/
The application started on ${await domainHelper.getPublicApiUrl({ path: '' })}, as specified by the AP_FRONTEND_URL variables.`)
    const environment = system.get(AppSystemProp.ENVIRONMENT)
    const pieces = process.env.AP_DEV_PIECES
    await migrateQueuesAndRunConsumers(app)
    app.log.info('Queues migrated and consumers run')
    if (environment === ApEnvironment.DEVELOPMENT) {
        app.log.warn(
            `[WARNING]: The application is running in ${environment} mode.`,
        )
        app.log.warn(
            `[WARNING]: Only pieces listed in the AP_DEV_PIECES environment variable (${pieces}) are shown.`,
        )
    }
}

View File

@@ -0,0 +1,228 @@
import { AppSystemProp } from '@activepieces/server-shared'
import { ActivepiecesError, ApEdition, ApEnvironment, AuthenticationResponse, EndpointScope, EnginePrincipal, ErrorCode, isNil, PrincipalType, Project, ServicePrincipal, TelemetryEventName, User, UserIdentity, UserIdentityProvider, UserPrincipal, UserStatus } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { system } from '../helper/system/system'
import { telemetry } from '../helper/telemetry.utils'
import { platformService } from '../platform/platform.service'
import { projectService } from '../project/project-service'
import { userService } from '../user/user-service'
import { userInvitationsService } from '../user-invitations/user-invitation.service'
import { accessTokenManager } from './lib/access-token-manager'
import { userIdentityService } from './user-identity/user-identity-service'
/**
 * Shared helpers used by the authentication controllers/services:
 * invitation checks, SSO domain/email-auth policy checks, token issuance,
 * telemetry, and newsletter subscription.
 */
export const authenticationUtils = {
    /**
     * Throws INVITATION_ONLY_SIGN_UP unless the email holds at least one
     * accepted invitation on the platform.
     */
    async assertUserIsInvitedToPlatformOrProject(log: FastifyBaseLogger, {
        email,
        platformId,
    }: AssertUserIsInvitedToPlatformOrProjectParams): Promise<void> {
        const isInvited = await userInvitationsService(log).hasAnyAcceptedInvitations({
            platformId,
            email,
        })
        if (!isInvited) {
            throw new ActivepiecesError({
                code: ErrorCode.INVITATION_ONLY_SIGN_UP,
                params: {
                    message: 'User is not invited to the platform',
                },
            })
        }
    },
    /**
     * Resolves the user's project (first available one when `projectId` is
     * null), validates identity/account state, and mints an access token.
     * Throws when no project is found, the email is unverified, or the user
     * is inactive.
     */
    async getProjectAndToken(params: GetProjectAndTokenParams): Promise<AuthenticationResponse> {
        const user = await userService.getOneOrFail({ id: params.userId })
        const projects = await projectService.getAllForUser({
            platformId: params.platformId,
            userId: params.userId,
            scope: params.scope,
        })
        const project = isNil(params.projectId) ? projects?.[0] : projects.find((project) => project.id === params.projectId)
        if (isNil(project)) {
            // NOTE(review): reuses INVITATION_ONLY_SIGN_UP for the
            // "no project" case — confirm that is intentional.
            throw new ActivepiecesError({
                code: ErrorCode.INVITATION_ONLY_SIGN_UP,
                params: {
                    message: 'No project found for user',
                },
            })
        }
        const identity = await userIdentityService(system.globalLogger()).getOneOrFail({ id: user.identityId })
        if (!identity.verified) {
            throw new ActivepiecesError({
                code: ErrorCode.EMAIL_IS_NOT_VERIFIED,
                params: {
                    email: identity.email,
                },
            })
        }
        if (user.status === UserStatus.INACTIVE) {
            throw new ActivepiecesError({
                code: ErrorCode.USER_IS_INACTIVE,
                params: {
                    email: identity.email,
                },
            })
        }
        // tokenVersion lets token invalidation happen identity-wide.
        const token = await accessTokenManager.generateToken({
            id: user.id,
            type: PrincipalType.USER,
            projectId: project.id,
            platform: {
                id: params.platformId,
            },
            tokenVersion: identity.tokenVersion,
        })
        return {
            ...user,
            firstName: identity.firstName,
            lastName: identity.lastName,
            email: identity.email,
            trackEvents: identity.trackEvents,
            newsLetter: identity.newsLetter,
            verified: identity.verified,
            token,
            projectId: project.id,
        }
    },
    /**
     * Enforces the platform's allowed-auth-domains policy (SSO feature).
     * No-ops for community edition or platforms without SSO enabled.
     */
    async assertDomainIsAllowed({
        email,
        platformId,
    }: AssertDomainIsAllowedParams): Promise<void> {
        const edition = system.getEdition()
        if (edition === ApEdition.COMMUNITY) {
            return
        }
        const platform = await platformService.getOneWithPlanOrThrow(platformId)
        if (!platform.plan.ssoEnabled) {
            return
        }
        const emailDomain = email.split('@')[1]
        const isAllowedDomain =
            !platform.enforceAllowedAuthDomains ||
            platform.allowedAuthDomains.includes(emailDomain)
        if (!isAllowedDomain) {
            throw new ActivepiecesError({
                code: ErrorCode.DOMAIN_NOT_ALLOWED,
                params: {
                    domain: emailDomain,
                },
            })
        }
    },
    /**
     * Rejects email/password sign-in when the platform has disabled email
     * auth (SSO-only platforms). No-ops for community edition, non-SSO
     * plans, or non-email providers.
     */
    async assertEmailAuthIsEnabled({
        platformId,
        provider,
    }: AssertEmailAuthIsEnabledParams): Promise<void> {
        const edition = system.getEdition()
        if (edition === ApEdition.COMMUNITY) {
            return
        }
        const platform = await platformService.getOneWithPlanOrThrow(platformId)
        if (!platform.plan.ssoEnabled) {
            return
        }
        if (provider !== UserIdentityProvider.EMAIL) {
            return
        }
        if (!platform.emailAuthEnabled) {
            throw new ActivepiecesError({
                code: ErrorCode.EMAIL_AUTH_DISABLED,
                params: {},
            })
        }
    },
    /**
     * Best-effort: identifies the user and tracks a SIGNED_UP event.
     * Failures are logged as warnings, never raised to the caller.
     */
    async sendTelemetry({
        user,
        identity,
        project,
        log,
    }: SendTelemetryParams): Promise<void> {
        try {
            await telemetry(log).identify(user, identity, project.id)
            await telemetry(log).trackProject(project.id, {
                name: TelemetryEventName.SIGNED_UP,
                payload: {
                    userId: identity.id,
                    email: identity.email,
                    firstName: identity.firstName,
                    lastName: identity.lastName,
                    projectId: project.id,
                },
            })
        }
        catch (e) {
            log.warn({ name: 'AuthenticationService#sendTelemetry', error: e })
        }
    },
    /**
     * Best-effort newsletter signup. Skipped outside production and for
     * embedding-enabled (white-label) platforms; network errors only warn.
     */
    async saveNewsLetterSubscriber(user: User, platformId: string, identity: UserIdentity, log: FastifyBaseLogger): Promise<void> {
        const platform = await platformService.getOneWithPlanOrThrow(platformId)
        const environment = system.get(AppSystemProp.ENVIRONMENT)
        if (environment !== ApEnvironment.PRODUCTION) {
            return
        }
        if (platform.plan.embeddingEnabled) {
            return
        }
        try {
            const response = await fetch(
                'https://us-central1-activepieces-b3803.cloudfunctions.net/addContact',
                {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json',
                    },
                    body: JSON.stringify({ email: identity.email }),
                },
            )
            await response.json()
        }
        catch (error) {
            log.warn(error)
        }
    },
    /**
     * Maps a principal to a concrete user id: USER principals are their own
     * id; service/engine principals resolve to the project owner.
     */
    async extractUserIdFromPrincipal(
        principal: UserPrincipal | ServicePrincipal | EnginePrincipal,
    ): Promise<string> {
        if (principal.type === PrincipalType.USER) {
            return principal.id
        }
        // TODO currently it's same as api service, but it's better to get it from api key service, in case we introduced more admin users
        const project = await projectService.getOneOrThrow(principal.projectId)
        return project.ownerId
    },
}
// Inputs for the post-signup telemetry (identify + SIGNED_UP track event).
type SendTelemetryParams = {
    identity: UserIdentity
    user: User
    project: Project
    log: FastifyBaseLogger
}
// Inputs for the allowed-auth-domains policy check.
type AssertDomainIsAllowedParams = {
    email: string
    platformId: string
}
// Inputs for checking that email/password auth is enabled on the platform.
type AssertEmailAuthIsEnabledParams = {
    platformId: string
    provider: UserIdentityProvider
}
// Inputs for verifying the email holds an accepted invitation on the platform.
type AssertUserIsInvitedToPlatformOrProjectParams = {
    email: string
    platformId: string
}
// Inputs for resolving a user's project and minting an access token.
// A null projectId selects the user's first available project.
type GetProjectAndTokenParams = {
    userId: string
    platformId: string
    projectId: string | null
    scope?: EndpointScope
}

View File

@@ -0,0 +1,141 @@
import { ApplicationEventName } from '@activepieces/ee-shared'
import { AppSystemProp, networkUtils } from '@activepieces/server-shared'
import {
ALL_PRINCIPAL_TYPES,
assertNotNullOrUndefined,
EndpointScope,
PlatformRole,
PrincipalType,
SignInRequest,
SignUpRequest,
SwitchPlatformRequest,
SwitchProjectRequest,
UserIdentityProvider,
} from '@activepieces/shared'
import { RateLimitOptions } from '@fastify/rate-limit'
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { eventsHooks } from '../helper/application-events'
import { system } from '../helper/system/system'
import { platformUtils } from '../platform/platform.utils'
import { userService } from '../user/user-service'
import { authenticationService } from './authentication.service'
/**
 * HTTP routes for email/password authentication plus platform/project
 * switching. All routes share the authn rate limit defined below.
 */
export const authenticationController: FastifyPluginAsyncTypebox = async (
    app,
) => {
    // Creates identity/user via the auth service, then emits a
    // USER_SIGNED_UP application event with the caller's real IP.
    app.post('/sign-up', SignUpRequestOptions, async (request) => {
        const platformId = await platformUtils.getPlatformIdForRequest(request)
        const signUpResponse = await authenticationService(request.log).signUp({
            ...request.body,
            provider: UserIdentityProvider.EMAIL,
            platformId: platformId ?? null,
        })
        eventsHooks.get(request.log).sendUserEvent({
            platformId: signUpResponse.platformId!,
            userId: signUpResponse.id,
            projectId: signUpResponse.projectId,
            ip: networkUtils.extractClientRealIp(request, system.get(AppSystemProp.CLIENT_REAL_IP_HEADER)),
        }, {
            action: ApplicationEventName.USER_SIGNED_UP,
            data: {
                source: 'credentials',
            },
        })
        return signUpResponse
    })
    // Verifies credentials and emits a USER_SIGNED_IN application event.
    app.post('/sign-in', SignInRequestOptions, async (request) => {
        const predefinedPlatformId = await platformUtils.getPlatformIdForRequest(request)
        const response = await authenticationService(request.log).signInWithPassword({
            email: request.body.email,
            password: request.body.password,
            predefinedPlatformId,
        })
        const responsePlatformId = response.platformId
        assertNotNullOrUndefined(responsePlatformId, 'Platform ID is required')
        eventsHooks.get(request.log).sendUserEvent({
            platformId: responsePlatformId,
            userId: response.id,
            projectId: response.projectId,
            ip: networkUtils.extractClientRealIp(request, system.get(AppSystemProp.CLIENT_REAL_IP_HEADER)),
        }, {
            action: ApplicationEventName.USER_SIGNED_IN,
            data: {},
        })
        return response
    })
    // Re-issues a session token scoped to another platform the caller's
    // identity belongs to.
    app.post('/switch-platform', SwitchPlatformRequestOptions, async (request) => {
        const user = await userService.getOneOrFail({ id: request.principal.id })
        return authenticationService(request.log).switchPlatform({
            identityId: user.identityId,
            platformId: request.body.platformId,
        })
    })
    // Re-issues a session token scoped to another project. Platform admins
    // get PLATFORM endpoint scope so they can reach any project.
    app.post('/switch-project', SwitchProjectRequestOptions, async (request) => {
        const user = await userService.getOneOrFail({ id: request.principal.id })
        const isPrivilegedUser = user.platformRole === PlatformRole.ADMIN
        return authenticationService(request.log).switchProject({
            identityId: user.identityId,
            projectId: request.body.projectId,
            currentPlatformId: request.principal.platform.id,
            scope: isPrivilegedUser ? EndpointScope.PLATFORM : undefined,
        })
    })
}
// Throttle settings shared by every authentication endpoint, driven by the
// platform-level rate-limit system props.
const rateLimitOptions: RateLimitOptions = {
    max: Number.parseInt(system.getOrThrow(AppSystemProp.API_RATE_LIMIT_AUTHN_MAX), 10),
    timeWindow: system.getOrThrow(AppSystemProp.API_RATE_LIMIT_AUTHN_WINDOW),
}

// Project switching: signed-in users only.
const SwitchProjectRequestOptions = {
    schema: {
        body: SwitchProjectRequest,
    },
    config: {
        allowedPrincipals: [PrincipalType.USER] as const,
        rateLimit: rateLimitOptions,
    },
}

// Platform switching: signed-in users only.
const SwitchPlatformRequestOptions = {
    schema: {
        body: SwitchPlatformRequest,
    },
    config: {
        allowedPrincipals: [PrincipalType.USER] as const,
        rateLimit: rateLimitOptions,
    },
}

// Sign-up is reachable by any principal (including anonymous callers).
const SignUpRequestOptions = {
    schema: {
        body: SignUpRequest,
    },
    config: {
        allowedPrincipals: ALL_PRINCIPAL_TYPES,
        rateLimit: rateLimitOptions,
    },
}

// Sign-in is reachable by any principal (including anonymous callers).
const SignInRequestOptions = {
    schema: {
        body: SignInRequest,
    },
    config: {
        allowedPrincipals: ALL_PRINCIPAL_TYPES,
        rateLimit: rateLimitOptions,
    },
}

View File

@@ -0,0 +1,12 @@
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { authenticationController } from './authentication.controller'
import { djangoTrustAuthController } from './django-trust-auth.controller'
/**
 * Mounts both authentication controllers (standard email/password and the
 * Django trust bridge) under a single /v1/authentication prefix.
 */
export const authenticationModule: FastifyPluginAsyncTypebox = async (app) => {
    const prefix = '/v1/authentication'
    await app.register(authenticationController, { prefix })
    await app.register(djangoTrustAuthController, { prefix })
}

View File

@@ -0,0 +1,313 @@
import { OtpType } from '@activepieces/ee-shared'
import { cryptoUtils } from '@activepieces/server-shared'
import { ActivepiecesError, ApEdition, ApFlagId, assertNotNullOrUndefined, AuthenticationResponse, EndpointScope, ErrorCode, isNil, PlatformRole, PlatformWithoutSensitiveData, ProjectType, User, UserIdentity, UserIdentityProvider } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { otpService } from '../ee/authentication/otp/otp-service'
import { flagService } from '../flags/flag.service'
import { system } from '../helper/system/system'
import { platformService } from '../platform/platform.service'
import { platformUtils } from '../platform/platform.utils'
import { projectService } from '../project/project-service'
import { userService } from '../user/user-service'
import { userInvitationsService } from '../user-invitations/user-invitation.service'
import { authenticationUtils } from './authentication-utils'
import { userIdentityService } from './user-identity/user-identity-service'
/**
 * Core authentication flows: sign-up (with/without a target platform),
 * password sign-in, federated (SSO) sign-in, and platform/project switching.
 * All flows end by minting a token via authenticationUtils.getProjectAndToken.
 */
export const authenticationService = (log: FastifyBaseLogger) => ({
    /**
     * Creates a user identity and user. With a platformId the caller must be
     * invited (invite-only sign-up) and the identity is auto-verified;
     * without one, a fresh personal platform + project is provisioned.
     */
    async signUp(params: SignUpParams): Promise<AuthenticationResponse> {
        if (!isNil(params.platformId)) {
            await authenticationUtils.assertEmailAuthIsEnabled({
                platformId: params.platformId,
                provider: params.provider,
            })
            await authenticationUtils.assertDomainIsAllowed({
                email: params.email,
                platformId: params.platformId,
            })
        }
        if (isNil(params.platformId)) {
            // Federated providers are pre-verified; email identities must
            // still verify (handled later by edition-specific logic).
            const userIdentity = await userIdentityService(log).create({
                ...params,
                verified: params.provider === UserIdentityProvider.GOOGLE || params.provider === UserIdentityProvider.JWT || params.provider === UserIdentityProvider.SAML,
            })
            return createUserAndPlatform(userIdentity, log)
        }
        await authenticationUtils.assertUserIsInvitedToPlatformOrProject(log, {
            email: params.email,
            platformId: params.platformId,
        })
        const userIdentity = await userIdentityService(log).create({
            ...params,
            verified: true,
        })
        const user = await userService.getOrCreateWithProject({
            identity: userIdentity,
            platformId: params.platformId,
        })
        await userInvitationsService(log).provisionUserInvitation({
            email: params.email,
            user,
        })
        return authenticationUtils.getProjectAndToken({
            userId: user.id,
            platformId: params.platformId,
            projectId: null,
        })
    },
    /**
     * Verifies email/password, resolves the target platform (predefined or
     * the identity's personal one), enforces platform auth policies, and
     * issues a token for the user's first project.
     */
    async signInWithPassword(params: SignInWithPasswordParams): Promise<AuthenticationResponse> {
        const identity = await userIdentityService(log).verifyIdentityPassword(params)
        const platformId = isNil(params.predefinedPlatformId) ? await getPersonalPlatformIdForIdentity(identity.id) : params.predefinedPlatformId
        if (isNil(platformId)) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHENTICATION,
                params: {
                    message: 'No platform found for identity',
                },
            })
        }
        await authenticationUtils.assertEmailAuthIsEnabled({
            platformId,
            provider: UserIdentityProvider.EMAIL,
        })
        await authenticationUtils.assertDomainIsAllowed({
            email: params.email,
            platformId,
        })
        const user = await userService.getOneByIdentityAndPlatform({
            identityId: identity.id,
            platformId,
        })
        assertNotNullOrUndefined(user, 'User not found')
        return authenticationUtils.getProjectAndToken({
            userId: user.id,
            platformId,
            projectId: null,
        })
    },
    /**
     * SSO sign-in entry point. Falls back to signUp when the identity (or
     * the target platform) does not exist yet; otherwise attaches the
     * identity to the platform and issues a token.
     */
    async federatedAuthn(params: FederatedAuthnParams): Promise<AuthenticationResponse> {
        const platformId = isNil(params.predefinedPlatformId) ? await getPersonalPlatformIdForFederatedAuthn(params.email, log) : params.predefinedPlatformId
        const userIdentity = await userIdentityService(log).getIdentityByEmail(params.email)
        if (isNil(platformId)) {
            if (!isNil(userIdentity)) {
                // User already exists, create a new personal platform and return token
                return createUserAndPlatform(userIdentity, log)
            }
            // Create New Identity and Platform
            return authenticationService(log).signUp({
                email: params.email,
                firstName: params.firstName,
                lastName: params.lastName,
                newsLetter: params.newsLetter,
                trackEvents: params.trackEvents,
                provider: params.provider,
                platformId: null,
                password: await cryptoUtils.generateRandomPassword(),
            })
        }
        if (isNil(userIdentity)) {
            return authenticationService(log).signUp({
                email: params.email,
                firstName: params.firstName,
                lastName: params.lastName,
                newsLetter: params.newsLetter,
                trackEvents: params.trackEvents,
                provider: params.provider,
                platformId,
                password: await cryptoUtils.generateRandomPassword(),
            })
        }
        const user = await userService.getOrCreateWithProject({
            identity: userIdentity,
            platformId,
        })
        await userInvitationsService(log).provisionUserInvitation({
            email: params.email,
            user,
        })
        return authenticationUtils.getProjectAndToken({
            userId: user.id,
            platformId,
            projectId: null,
        })
    },
    /**
     * Issues a token for another platform the identity is a member of.
     * Dedicated-domain restrictions are enforced by the helper below.
     */
    async switchPlatform(params: SwitchPlatformParams): Promise<AuthenticationResponse> {
        const platforms = await platformService.listPlatformsForIdentityWithAtleastProject({ identityId: params.identityId })
        const platform = platforms.find((platform) => platform.id === params.platformId)
        await assertUserCanSwitchToPlatform(null, platform)
        assertNotNullOrUndefined(platform, 'Platform not found')
        const user = await getUserForPlatform(params.identityId, platform)
        return authenticationUtils.getProjectAndToken({
            userId: user.id,
            platformId: platform.id,
            projectId: null,
        })
    },
    /**
     * Issues a token for a specific project, possibly on another platform.
     * `scope` widens the token to platform scope for privileged users.
     */
    async switchProject(params: SwitchProjectParams): Promise<AuthenticationResponse> {
        const project = await projectService.getOneOrThrow(params.projectId)
        const projectPlatform = await platformService.getOneWithPlanOrThrow(project.platformId)
        await assertUserCanSwitchToPlatform(params.currentPlatformId, projectPlatform)
        const user = await getUserForPlatform(params.identityId, projectPlatform)
        return authenticationUtils.getProjectAndToken({
            userId: user.id,
            platformId: project.platformId,
            projectId: params.projectId,
            scope: params.scope,
        })
    },
})
/**
 * Validates a platform-switch target. Throws AUTHORIZATION when the target
 * platform is missing, and AUTHENTICATION when the target is a customer
 * platform pinned to a dedicated domain and the caller is not already on it.
 */
async function assertUserCanSwitchToPlatform(currentPlatformId: string | null, platform: PlatformWithoutSensitiveData | undefined): Promise<void> {
    if (isNil(platform)) {
        throw new ActivepiecesError({
            code: ErrorCode.AUTHORIZATION,
            params: {
                message: 'The user is not a member of the platform',
            },
        })
    }
    // Customers on a dedicated domain may only remain on that platform.
    const stayingOnSamePlatform = platform.id === currentPlatformId
    const blockedByDedicatedDomain = platformUtils.isCustomerOnDedicatedDomain(platform) && !stayingOnSamePlatform
    if (blockedByDedicatedDomain) {
        throw new ActivepiecesError({
            code: ErrorCode.AUTHENTICATION,
            params: {
                message: 'The user is not a member of the platform',
            },
        })
    }
}
/**
 * Looks up the user record linking an identity to a platform, throwing an
 * AUTHORIZATION error when no membership exists.
 */
async function getUserForPlatform(identityId: string, platform: PlatformWithoutSensitiveData): Promise<User> {
    const member = await userService.getOneByIdentityAndPlatform({
        identityId,
        platformId: platform.id,
    })
    if (!isNil(member)) {
        return member
    }
    throw new ActivepiecesError({
        code: ErrorCode.AUTHORIZATION,
        params: {
            message: 'User is not member of the platform',
        },
    })
}
/**
 * Provisions a fresh personal workspace for a new identity: admin user,
 * owned platform, default PERSONAL project, edition-appropriate email
 * verification (cloud sends an OTP, others auto-verify), flags, telemetry,
 * and newsletter signup. Returns the first session token.
 */
async function createUserAndPlatform(userIdentity: UserIdentity, log: FastifyBaseLogger): Promise<AuthenticationResponse> {
    // User is created first without a platform, then promoted to owner once
    // the platform exists (circular ownership requires two steps).
    const user = await userService.create({
        identityId: userIdentity.id,
        platformRole: PlatformRole.ADMIN,
        platformId: null,
    })
    const platform = await platformService.create({
        ownerId: user.id,
        name: userIdentity.firstName + '\'s Platform',
    })
    await userService.addOwnerToPlatform({
        platformId: platform.id,
        id: user.id,
    })
    const defaultProject = await projectService.create({
        displayName: userIdentity.firstName + '\'s Project',
        ownerId: user.id,
        platformId: platform.id,
        type: ProjectType.PERSONAL,
    })
    const cloudEdition = system.getEdition()
    switch (cloudEdition) {
        case ApEdition.CLOUD:
            // Cloud requires the user to verify via an emailed OTP.
            await otpService(log).createAndSend({
                platformId: platform.id,
                email: userIdentity.email,
                type: OtpType.EMAIL_VERIFICATION,
            })
            break
        case ApEdition.COMMUNITY:
        case ApEdition.ENTERPRISE:
            // Self-hosted editions trust the address immediately.
            await userIdentityService(log).verify(userIdentity.id)
            break
    }
    // Flip the "first user created" flag used by onboarding/setup screens.
    await flagService.save({
        id: ApFlagId.USER_CREATED,
        value: true,
    })
    await authenticationUtils.sendTelemetry({
        identity: userIdentity,
        user,
        project: defaultProject,
        log,
    })
    await authenticationUtils.saveNewsLetterSubscriber(user, platform.id, userIdentity, log)
    return authenticationUtils.getProjectAndToken({
        userId: user.id,
        platformId: platform.id,
        projectId: defaultProject.id,
    })
}
/**
 * Resolves the personal platform for an SSO email, or null when no identity
 * with that email exists yet.
 */
async function getPersonalPlatformIdForFederatedAuthn(email: string, log: FastifyBaseLogger): Promise<string | null> {
    const identity = await userIdentityService(log).getIdentityByEmail(email)
    return isNil(identity) ? null : getPersonalPlatformIdForIdentity(identity.id)
}
async function getPersonalPlatformIdForIdentity(identityId: string): Promise<string | null> {
const edition = system.getEdition()
if (edition === ApEdition.CLOUD) {
const platforms = await platformService.listPlatformsForIdentityWithAtleastProject({ identityId })
const platform = platforms.find((platform) => !platformUtils.isCustomerOnDedicatedDomain(platform))
return platform?.id ?? null
}
return null
}
// Inputs for SSO (federated) sign-in; predefinedPlatformId null lets the
// service resolve the identity's personal platform.
type FederatedAuthnParams = {
    email: string
    firstName: string
    lastName: string
    newsLetter: boolean
    trackEvents: boolean
    provider: UserIdentityProvider
    predefinedPlatformId: string | null
}
// Inputs for sign-up; platformId null provisions a fresh personal platform.
type SignUpParams = {
    email: string
    firstName: string
    lastName: string
    password: string
    platformId: string | null
    trackEvents: boolean
    newsLetter: boolean
    provider: UserIdentityProvider
}
// Inputs for email/password sign-in.
type SignInWithPasswordParams = {
    email: string
    password: string
    predefinedPlatformId: string | null
}
// Inputs for re-issuing a token on another platform.
type SwitchPlatformParams = {
    identityId: string
    platformId: string
}
// Inputs for re-issuing a token on another project; optional scope widens
// the token for platform admins.
type SwitchProjectParams = {
    identityId: string
    currentPlatformId: string
    projectId: string
    scope?: EndpointScope
}

View File

@@ -0,0 +1,75 @@
import {
ActivepiecesError,
ErrorCode,
isNil,
isObject,
PrincipalType,
} from '@activepieces/shared'
import { preSerializationHookHandler } from 'fastify'
/**
 * Pulls the top-level resource segment out of an API url, e.g.
 * '/v1/flows/123?cursor=abc' -> 'flows'. Returns undefined when the path
 * does not match the '/v1/<resource>' shape. The query string is ignored.
 */
export function extractResourceName(url: string): string | undefined {
    const pathOnly = url.split('?')[0]
    const match = /\/v1\/(.+?)(\/|$)/.exec(pathOnly)
    return match?.[1]
}
/**
* Throws an authz error if response entities contain a `projectId` property and
* the `projectId` property value does not match the principal's `projectId`.
* Otherwise, does nothing.
*/
export const entitiesMustBeOwnedByCurrentProject: preSerializationHookHandler<
Payload | null
> = (request, _response, payload, done) => {
request.log.trace(
{ payload, principal: request.principal, route: request.routeOptions.config },
'entitiesMustBeOwnedByCurrentProject',
)
const principalProjectId = request.principal.type === PrincipalType.USER
|| request.principal.type === PrincipalType.ENGINE
? request.principal.projectId : undefined
if (isObject(payload) && !isNil(principalProjectId)) {
let verdict: AuthzVerdict = 'ALLOW'
if ('projectId' in payload) {
if (payload.projectId !== principalProjectId) {
verdict = 'DENY'
}
}
else if ('data' in payload && Array.isArray(payload.data)) {
const someEntityNotOwnedByCurrentProject = payload.data.some((entity) => {
return 'projectId' in entity && entity.projectId !== principalProjectId
})
if (someEntityNotOwnedByCurrentProject) {
verdict = 'DENY'
}
}
if (verdict === 'DENY') {
throw new ActivepiecesError({
code: ErrorCode.AUTHORIZATION,
params: {
message: 'not owned by current project',
},
})
}
}
done()
}
// A response entity that may carry the id of its owning project.
type SingleEntity = {
    projectId?: string
}
// A paginated response wrapping multiple entities.
type MultipleEntities = {
    data: SingleEntity[]
}
// The payload shapes the ownership guard knows how to inspect.
type Payload = SingleEntity | MultipleEntities
// Outcome of the ownership check.
type AuthzVerdict = 'ALLOW' | 'DENY'

View File

@@ -0,0 +1,168 @@
/**
* Django Trust Authentication Controller
*
* This is a clean-room implementation for authenticating users from Django.
* Django signs a JWT with the shared AP_JWT_SECRET, and this endpoint verifies
* it and returns an Activepieces session token.
*
* This code is MIT licensed and does not use any EE code.
*/
import { cryptoUtils } from '@activepieces/server-shared'
import {
ALL_PRINCIPAL_TYPES,
AuthenticationResponse,
isNil,
PlatformRole,
PrincipalType,
ProjectType,
UserIdentityProvider,
} from '@activepieces/shared'
import { Static, Type } from '@sinclair/typebox'
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import { jwtUtils } from '../helper/jwt-utils'
import { platformService } from '../platform/platform.service'
import { projectService } from '../project/project-service'
import { userService } from '../user/user-service'
import { accessTokenManager } from './lib/access-token-manager'
import { userIdentityService } from './user-identity/user-identity-service'
/**
 * Claims carried by the Django-issued trust JWT. The token is signed with
 * the shared AP_JWT_SECRET (HS256) and issuer 'smoothschedule'.
 */
const DjangoTrustTokenPayload = Type.Object({
    tenant_id: Type.String(),
    tenant_name: Type.String(),
    project_id: Type.Optional(Type.String()), // Activepieces project ID if already created
})
type DjangoTrustTokenPayload = Static<typeof DjangoTrustTokenPayload>
// Request body for POST /v1/authentication/django-trust.
const DjangoTrustRequest = Type.Object({
    token: Type.String(),
})
type DjangoTrustRequest = Static<typeof DjangoTrustRequest>
export const djangoTrustAuthController: FastifyPluginAsyncTypebox = async (app) => {
    /**
     * POST /authentication/django-trust
     *
     * Accepts a Django-signed JWT and returns an Activepieces session.
     * The token must be signed with AP_JWT_SECRET using HS256.
     *
     * Flow: verify token -> resolve platform -> get-or-create the tenant's
     * identity, user and project -> issue a 7-day access token.
     */
    app.post('/django-trust', DjangoTrustRequestOptions, async (request) => {
        const { token } = request.body
        const log = request.log
        // Verify the Django token using the shared secret. The issuer claim
        // must match what Django sets, so tokens minted for other audiences
        // are rejected.
        const secret = await jwtUtils.getJwtSecret()
        let payload: DjangoTrustTokenPayload
        try {
            payload = await jwtUtils.decodeAndVerify<DjangoTrustTokenPayload>({
                jwt: token,
                key: secret,
                issuer: 'smoothschedule', // Django sets this issuer
            })
        }
        catch (error) {
            log.error({ error }, 'Failed to verify Django trust token')
            throw new Error('Invalid token')
        }
        // One platform hosts all SmoothSchedule tenants; it must already
        // exist. `const` (was `let`): never reassigned after the lookup.
        const platform = await platformService.getOldestPlatform()
        if (isNil(platform)) {
            throw new Error('No platform configured in Activepieces')
        }
        // Each tenant shares a single synthetic user, keyed by a
        // deterministic internal email derived from the tenant id.
        const tenantEmail = `tenant-${payload.tenant_id}@smoothschedule.internal`
        // Get or create the identity for this tenant. A random password is
        // generated because the identity is only used via this trust flow.
        let identity = await userIdentityService(log).getIdentityByEmail(tenantEmail)
        if (isNil(identity)) {
            identity = await userIdentityService(log).create({
                email: tenantEmail,
                password: await cryptoUtils.generateRandomPassword(),
                firstName: payload.tenant_name,
                lastName: 'Automations',
                trackEvents: false,
                newsLetter: false,
                provider: UserIdentityProvider.JWT,
                verified: true,
            })
        }
        // Get or create the platform user backing the identity.
        let user = await userService.getOneByIdentityAndPlatform({
            identityId: identity.id,
            platformId: platform.id,
        })
        if (isNil(user)) {
            user = await userService.create({
                identityId: identity.id,
                platformId: platform.id,
                platformRole: PlatformRole.MEMBER,
            })
        }
        // Get or create the tenant's project, keyed by externalId so the
        // tenant -> project mapping is stable across calls.
        let project = await projectService.getByPlatformIdAndExternalId({
            platformId: platform.id,
            externalId: payload.tenant_id,
        })
        if (isNil(project)) {
            project = await projectService.create({
                displayName: `${payload.tenant_name} Automations`,
                ownerId: user.id,
                platformId: platform.id,
                externalId: payload.tenant_id,
                type: ProjectType.TEAM,
            })
        }
        // Issue an Activepieces access token scoped to the tenant's project.
        const SEVEN_DAYS_IN_SECONDS = 7 * 24 * 60 * 60
        const accessToken = await accessTokenManager.generateToken({
            id: user.id,
            type: PrincipalType.USER,
            projectId: project.id,
            platform: {
                id: platform.id,
            },
            tokenVersion: identity.tokenVersion,
        }, SEVEN_DAYS_IN_SECONDS)
        const response: AuthenticationResponse = {
            id: user.id,
            platformRole: user.platformRole,
            status: user.status,
            externalId: user.externalId,
            platformId: user.platformId,
            firstName: identity.firstName,
            lastName: identity.lastName,
            email: identity.email,
            trackEvents: identity.trackEvents,
            newsLetter: identity.newsLetter,
            verified: identity.verified,
            token: accessToken,
            projectId: project.id,
        }
        return response
    })
}
// Route options for POST /django-trust: any principal type (including
// anonymous) may call it, because the Django-signed token itself is the
// credential being exchanged.
const DjangoTrustRequestOptions = {
    config: {
        allowedPrincipals: ALL_PRINCIPAL_TYPES,
    },
    schema: {
        body: DjangoTrustRequest,
    },
}

View File

@@ -0,0 +1,99 @@
import { ActivepiecesError, apId, assertNotNullOrUndefined, EnginePrincipal, ErrorCode, PlatformId, Principal, PrincipalType, ProjectId, UserStatus, WorkerPrincipal } from '@activepieces/shared'
import dayjs from 'dayjs'
import { jwtUtils } from '../../helper/jwt-utils'
import { system } from '../../helper/system/system'
import { userService } from '../../user/user-service'
import { userIdentityService } from '../user-identity/user-identity-service'
/**
 * Issues and verifies the JWTs used as Activepieces access tokens.
 * All tokens are signed with the shared secret from jwtUtils.getJwtSecret().
 */
export const accessTokenManager = {
    // Signs `principal` into a JWT; defaults to a 7-day expiry.
    async generateToken(principal: Principal, expiresInSeconds: number = dayjs.duration(7, 'day').asSeconds()): Promise<string> {
        const secret = await jwtUtils.getJwtSecret()
        return jwtUtils.sign({
            payload: principal,
            key: secret,
            expiresInSeconds,
        })
    },
    // Long-lived (100-year) token for an engine run; id defaults to a fresh
    // apId when no jobId is provided.
    async generateEngineToken({ jobId, projectId, platformId }: GenerateEngineTokenParams): Promise<string> {
        const enginePrincipal: EnginePrincipal = {
            id: jobId ?? apId(),
            type: PrincipalType.ENGINE,
            projectId,
            platform: {
                id: platformId,
            },
        }
        const secret = await jwtUtils.getJwtSecret()
        return jwtUtils.sign({
            payload: enginePrincipal,
            key: secret,
            expiresInSeconds: dayjs.duration(100, 'year').asSeconds(),
        })
    },
    // Long-lived (100-year) token identifying a worker process.
    async generateWorkerToken(): Promise<string> {
        const workerPrincipal: WorkerPrincipal = {
            id: apId(),
            type: PrincipalType.WORKER,
        }
        const secret = await jwtUtils.getJwtSecret()
        return jwtUtils.sign({
            payload: workerPrincipal,
            key: secret,
            expiresInSeconds: dayjs.duration(100, 'year').asSeconds(),
        })
    },
    // Verifies a token's signature and, for USER principals, that the
    // backing session is still valid (see assertUserSession). Any failure
    // other than an ActivepiecesError is normalized to INVALID_BEARER_TOKEN
    // so callers cannot distinguish bad signatures from expired sessions.
    async verifyPrincipal(token: string): Promise<Principal> {
        const secret = await jwtUtils.getJwtSecret()
        try {
            const decoded = await jwtUtils.decodeAndVerify<Principal>({
                jwt: token,
                key: secret,
            })
            assertNotNullOrUndefined(decoded.type, 'decoded.type')
            await assertUserSession(decoded)
            return decoded
        }
        catch (e) {
            if (e instanceof ActivepiecesError) {
                throw e
            }
            throw new ActivepiecesError({
                code: ErrorCode.INVALID_BEARER_TOKEN,
                params: {
                    message: 'invalid access token or session expired',
                },
            })
        }
    },
}
/**
 * Rejects USER tokens whose backing session is no longer valid: the
 * identity's tokenVersion has rotated, the user is inactive, or the email
 * is unverified. Non-USER principals are not session-bound.
 */
async function assertUserSession(decoded: Principal): Promise<void> {
    if (decoded.type !== PrincipalType.USER) {
        return
    }
    const user = await userService.getOneOrFail({ id: decoded.id })
    const identity = await userIdentityService(system.globalLogger()).getOneOrFail({ id: user.identityId })
    const tokenVersionMatches = (identity.tokenVersion ?? null) === (decoded.tokenVersion ?? null)
    const sessionIsValid = tokenVersionMatches
        && user.status !== UserStatus.INACTIVE
        && identity.verified
    if (sessionIsValid) {
        return
    }
    throw new ActivepiecesError({
        code: ErrorCode.SESSION_EXPIRED,
        params: {
            message: 'The session has expired or the user is not verified.',
        },
    })
}
// Parameters for accessTokenManager.generateEngineToken.
type GenerateEngineTokenParams = {
    projectId: ProjectId
    // Engine job id; a fresh apId is generated when omitted.
    jobId?: string
    platformId: PlatformId
}

View File

@@ -0,0 +1,52 @@
import { AppSystemProp } from '@activepieces/server-shared'
import { assertNotNullOrUndefined } from '@activepieces/shared'
import bcrypt from 'bcrypt'
import { FirebaseScrypt } from 'firebase-scrypt'
import { system } from '../../helper/system/system'
// bcrypt cost factor for newly hashed passwords.
const SALT_ROUNDS = 10
// Separator between the raw hash and its salt in imported Firebase-style
// '$scrypt$<hash>~<salt>' password strings.
const SCRYPT_SEPARATOR = '~'
/**
 * Hashes passwords with bcrypt and verifies passwords against either
 * bcrypt hashes or legacy Firebase scrypt hashes (imported accounts).
 */
export const passwordHasher = {
    // Hashes `plainTextPassword` with bcrypt using SALT_ROUNDS.
    hash: async (plainTextPassword: string): Promise<string> => {
        return bcrypt.hash(plainTextPassword, SALT_ROUNDS)
    },
    /**
     * Compares a plaintext password against a stored hash, dispatching on
     * the hash format. Returns false for unknown or malformed formats
     * instead of throwing.
     */
    compare: async (
        plainTextPassword: string,
        hashedPassword: string,
    ): Promise<boolean> => {
        assertNotNullOrUndefined(plainTextPassword, 'plainTextPassword')
        assertNotNullOrUndefined(hashedPassword, 'hashedPassword')
        if (isBcryptHash(hashedPassword)) {
            return bcrypt.compare(plainTextPassword, hashedPassword)
        }
        if (isScrypt(hashedPassword)) {
            const [hashPart, salt] = hashedPassword.split(SCRYPT_SEPARATOR)
            // Fix: a malformed scrypt value without a '~' separator used to
            // reach the scrypt verifier with an undefined salt and fail at
            // runtime; treat it as a non-match instead.
            if (salt === undefined) {
                return false
            }
            const rawHashedPassword = hashPart.substring('$scrypt$'.length)
            return compareScrypt(plainTextPassword, salt, rawHashedPassword)
        }
        return false
    },
}
async function compareScrypt(
password: string,
salt: string,
hashedPassword: string,
): Promise<boolean> {
const firebaseParameter = JSON.parse(
system.getOrThrow(AppSystemProp.FIREBASE_HASH_PARAMETERS),
)
const firebaseScrypt = new FirebaseScrypt(firebaseParameter)
return firebaseScrypt.verify(password, salt, hashedPassword)
}
// bcrypt hashes use a modular-crypt prefix of the form $2a$ / $2b$ / $2y$.
function isBcryptHash(hash: string): boolean {
    return hash.slice(0, 2) === '$2'
}
// Imported Firebase password hashes are stored with a '$scrypt$' prefix.
function isScrypt(hash: string): boolean {
    return hash.indexOf('$scrypt$') === 0
}

View File

@@ -0,0 +1,54 @@
import { UserIdentity } from '@activepieces/shared'
import { EntitySchema } from 'typeorm'
import { BaseColumnSchemaPart } from '../../database/database-common'
// TypeORM schema for the `user_identity` table: one row per unique email,
// holding credentials and profile data shared across platforms.
export const UserIdentityEntity = new EntitySchema<UserIdentity>({
    name: 'user_identity',
    columns: {
        ...BaseColumnSchemaPart,
        // Login email, stored normalized (lowercased/trimmed by the
        // service layer); globally unique.
        email: {
            type: String,
            nullable: false,
            unique: true,
        },
        // Password hash (bcrypt, or legacy Firebase scrypt for imported
        // accounts) — never the plaintext.
        password: {
            type: String,
        },
        trackEvents: {
            type: Boolean,
            nullable: true,
        },
        newsLetter: {
            type: Boolean,
            nullable: true,
        },
        // Email-verification flag; unverified identities cannot log in.
        verified: {
            type: Boolean,
            nullable: false,
            default: false,
        },
        firstName: {
            type: String,
            nullable: false,
        },
        lastName: {
            type: String,
            nullable: false,
        },
        // Session-invalidation nonce: rotated on password change and
        // compared against the value embedded in access tokens.
        tokenVersion: {
            type: String,
            nullable: true,
        },
        // How the identity authenticates (e.g. email/password, JWT trust).
        provider: {
            type: String,
            nullable: false,
        },
    },
    indices: [
        // Unique index backing the email lookup used on every login.
        {
            name: 'idx_user_identity_email',
            columns: ['email'],
            unique: true,
        },
    ],
})

View File

@@ -0,0 +1,131 @@
import { ActivepiecesError, apId, ErrorCode, isNil, UserIdentity } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { nanoid } from 'nanoid'
import { repoFactory } from '../../core/db/repo-factory'
import { passwordHasher } from '../lib/password-hasher'
import { UserIdentityEntity } from './user-identity-entity'
// Lazily-bound TypeORM repository for the user_identity table.
export const userIdentityRepository = repoFactory(UserIdentityEntity)
/**
 * CRUD and credential operations for user identities (the email/password
 * record shared across platforms).
 */
export const userIdentityService = (log: FastifyBaseLogger) => ({
    /**
     * Creates a new identity with a bcrypt-hashed password and a fresh
     * tokenVersion. Throws EXISTING_USER when the (normalized) email is
     * already taken.
     */
    async create(params: Pick<UserIdentity, 'email' | 'password' | 'firstName' | 'lastName' | 'trackEvents' | 'newsLetter' | 'provider' | 'verified'>): Promise<UserIdentity> {
        log.info({
            email: params.email,
        }, 'Creating user identity')
        const cleanedEmail = params.email.toLowerCase().trim()
        // Check for duplicates before hashing: bcrypt is deliberately slow,
        // so don't pay for it when the request will be rejected anyway.
        const userByEmail = await userIdentityRepository().findOne({ where: { email: cleanedEmail } })
        if (userByEmail) {
            throw new ActivepiecesError({
                code: ErrorCode.EXISTING_USER,
                params: {
                    email: cleanedEmail,
                    platformId: null,
                },
            })
        }
        const hashedPassword = await passwordHasher.hash(params.password)
        const newUserIdentity: UserIdentity = {
            firstName: params.firstName,
            lastName: params.lastName,
            provider: params.provider,
            email: cleanedEmail,
            created: new Date().toISOString(),
            updated: new Date().toISOString(),
            verified: params.verified,
            id: apId(),
            password: hashedPassword,
            trackEvents: params.trackEvents,
            newsLetter: params.newsLetter,
            // Rotating this value invalidates all outstanding sessions.
            tokenVersion: nanoid(),
        }
        return userIdentityRepository().save(newUserIdentity)
    },
    /**
     * Verifies an email/password pair. Throws INVALID_CREDENTIALS when the
     * identity is missing or the password does not match, and
     * EMAIL_IS_NOT_VERIFIED for unverified accounts.
     */
    async verifyIdentityPassword(params: VerifyIdentityPasswordParams): Promise<UserIdentity> {
        const userIdentity = await getIdentityByEmail(params.email)
        if (isNil(userIdentity)) {
            throw new ActivepiecesError({
                code: ErrorCode.INVALID_CREDENTIALS,
                params: null,
            })
        }
        if (!userIdentity.verified) {
            throw new ActivepiecesError({
                code: ErrorCode.EMAIL_IS_NOT_VERIFIED,
                params: {
                    email: userIdentity.email,
                },
            })
        }
        const passwordMatches = await passwordHasher.compare(params.password, userIdentity.password)
        if (!passwordMatches) {
            throw new ActivepiecesError({
                code: ErrorCode.INVALID_CREDENTIALS,
                params: null,
            })
        }
        return userIdentity
    },
    // Looks up an identity by email. Delegates to the module-level helper
    // so email normalization lives in exactly one place.
    async getIdentityByEmail(email: string): Promise<UserIdentity | null> {
        return getIdentityByEmail(email)
    },
    // Fetches an identity by id, throwing if it does not exist.
    async getOneOrFail(params: GetOneOrFailParams): Promise<UserIdentity> {
        return userIdentityRepository().findOneByOrFail({ id: params.id })
    },
    // Returns the non-sensitive subset of an identity (no password hash).
    async getBasicInformation(id: string): Promise<Pick<UserIdentity, 'email' | 'firstName' | 'lastName' | 'trackEvents' | 'newsLetter'>> {
        const user = await userIdentityRepository().findOneByOrFail({ id })
        return {
            email: user.email,
            firstName: user.firstName,
            lastName: user.lastName,
            trackEvents: user.trackEvents,
            newsLetter: user.newsLetter,
        }
    },
    /**
     * Replaces the password hash and rotates tokenVersion so every
     * existing session for this identity is invalidated.
     */
    async updatePassword(params: UpdatePasswordParams): Promise<void> {
        const hashedPassword = await passwordHasher.hash(params.newPassword)
        await userIdentityRepository().update(params.id, {
            password: hashedPassword,
            tokenVersion: nanoid(),
        })
    },
    // Marks an identity as verified; throws AUTHORIZATION if it already is.
    async verify(id: string): Promise<UserIdentity> {
        const user = await userIdentityRepository().findOneByOrFail({ id })
        if (user.verified) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHORIZATION,
                params: {
                    message: 'User is already verified',
                },
            })
        }
        return userIdentityRepository().save({
            ...user,
            verified: true,
        })
    },
})
// Module-level email lookup: normalizes the address the same way create()
// stores it, then queries by the unique email column.
async function getIdentityByEmail(email: string): Promise<UserIdentity | null> {
    const normalizedEmail = email.toLowerCase().trim()
    return userIdentityRepository().findOneBy({ email: normalizedEmail })
}
// Lookup key for getOneOrFail.
type GetOneOrFailParams = {
    id: string
}
// Inputs for updatePassword; `newPassword` is hashed before storage.
type UpdatePasswordParams = {
    id: string
    newPassword: string
}
// Credentials checked by verifyIdentityPassword.
type VerifyIdentityPasswordParams = {
    email: string
    password: string
}

View File

@@ -0,0 +1,40 @@
import {
EntityManager,
EntitySchema,
ObjectLiteral,
Repository,
} from 'typeorm'
import { databaseConnection } from '../../database/database-connection'
/**
 * If given an {@link EntityManager}, returns a {@link Repository} for the current transaction.
 * Otherwise, returns the {@link Repository} for the default connection.
 */
type RepoGetter<T extends ObjectLiteral = ObjectLiteral> = (
    entityManager?: EntityManager
) => Repository<T>
// Memoized getters, one per entity schema, so repeated repoFactory(entity)
// calls return the same function instance.
const instances = new Map<EntitySchema, RepoGetter>()
/**
 * Creates (and memoizes) a {@link RepoGetter} for the given entity.
 * @param entity The entity to create a {@link RepoGetter} for.
 * @returns A {@link RepoGetter} for the given entity.
 */
export const repoFactory = <T extends ObjectLiteral>(
    entity: EntitySchema<T>,
): RepoGetter<T> => {
    const cached = instances.get(entity)
    if (cached !== undefined) {
        return cached as RepoGetter<T>
    }
    const getter: RepoGetter<T> = (entityManager?: EntityManager) => {
        // Bind to the transaction's manager when supplied, otherwise to the
        // default connection.
        const source = entityManager ?? databaseConnection()
        return source.getRepository(entity)
    }
    instances.set(entity, getter as RepoGetter)
    return getter
}

View File

@@ -0,0 +1,8 @@
import { EntityManager } from 'typeorm'
import { databaseConnection } from '../../database/database-connection'
// Runs `operation` inside a database transaction on the default connection.
export const transaction = async <T>(
    operation: (entityManager: EntityManager) => Promise<T>,
): Promise<T> => {
    const connection = databaseConnection()
    return connection.transaction(operation)
}

View File

@@ -0,0 +1,15 @@
import { isObject } from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
export const requestUtils = {
    /**
     * Pulls a `projectId` out of a request, preferring the body over the
     * query string. Returns undefined when neither carries one.
     */
    extractProjectId(request: FastifyRequest): string | undefined {
        const candidates = [request.body, request.query]
        for (const candidate of candidates) {
            if (isObject(candidate) && 'projectId' in candidate) {
                return candidate.projectId as string
            }
        }
        return undefined
    },
}

View File

@@ -0,0 +1,44 @@
import { ActivepiecesError, ErrorCode, isNil, PrincipalType } from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { accessTokenManager } from '../../../authentication/lib/access-token-manager'
import { userService } from '../../../user/user-service'
import { BaseSecurityHandler } from '../security-handler'
/**
 * Authenticates requests carrying a `Bearer <jwt>` access token: verifies
 * the token and attaches the resulting principal to the request. Also
 * bumps the last-active timestamp for USER principals.
 */
export class AccessTokenAuthnHandler extends BaseSecurityHandler {
    private static readonly HEADER_NAME = 'authorization'
    private static readonly HEADER_PREFIX = 'Bearer '
    protected canHandle(request: FastifyRequest): Promise<boolean> {
        const authorizationHeader = request.headers[AccessTokenAuthnHandler.HEADER_NAME]
        const isBearer = authorizationHeader?.startsWith(AccessTokenAuthnHandler.HEADER_PREFIX) === true
        const skipAuth = request.routeOptions.config?.skipAuth === true
        return Promise.resolve(isBearer && !skipAuth)
    }
    protected async doHandle(request: FastifyRequest): Promise<void> {
        const token = this.extractAccessTokenOrThrow(request)
        const principal = await accessTokenManager.verifyPrincipal(token)
        if (principal.type === PrincipalType.USER) {
            // Track activity for real users only.
            await userService.updateLastActiveDate({ id: principal.id })
        }
        request.principal = principal
    }
    private extractAccessTokenOrThrow(request: FastifyRequest): string {
        const authorizationHeader = request.headers[AccessTokenAuthnHandler.HEADER_NAME]
        const token = authorizationHeader?.substring(AccessTokenAuthnHandler.HEADER_PREFIX.length)
        if (isNil(token)) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHENTICATION,
                params: {
                    message: 'missing access token',
                },
            })
        }
        return token
    }
}

View File

@@ -0,0 +1,27 @@
import {
apId,
isNil,
Principal,
PrincipalType,
} from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { BaseSecurityHandler } from '../security-handler'
export class AnonymousAuthnHandler extends BaseSecurityHandler {
protected canHandle(_request: FastifyRequest): Promise<boolean> {
return Promise.resolve(true)
}
protected doHandle(request: FastifyRequest): Promise<void> {
const principal = request.principal as Principal | undefined
if (isNil(principal)) {
request.principal = {
id: `ANONYMOUS_${apId()}`,
type: PrincipalType.UNKNOWN,
}
}
return Promise.resolve()
}
}

View File

@@ -0,0 +1,222 @@
import { ApiKey } from '@activepieces/ee-shared'
import {
ActivepiecesError,
assertNotNullOrUndefined,
EndpointScope,
ErrorCode,
isNil,
isObject,
Principal,
PrincipalType,
Project,
ProjectId,
} from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { nanoid } from 'nanoid'
import { AppConnectionEntity } from '../../../app-connection/app-connection.entity'
import { extractResourceName } from '../../../authentication/authorization'
import { databaseConnection } from '../../../database/database-connection'
import { apiKeyService } from '../../../ee/api-keys/api-key-service'
import { ProjectMemberEntity } from '../../../ee/projects/project-members/project-member.entity'
import { FlowEntity } from '../../../flows/flow/flow.entity'
import { FlowRunEntity } from '../../../flows/flow-run/flow-run-entity'
import { FolderEntity } from '../../../flows/folder/folder.entity'
import { projectService } from '../../../project/project-service'
import { requestUtils } from '../../request/request-utils'
import { BaseSecurityHandler } from '../security-handler'
/**
 * Authenticates requests carrying a platform API key (`Bearer sk-...`).
 * Resolves the key to a SERVICE principal and, for project-scoped routes,
 * also resolves and validates the target project id.
 */
export class PlatformApiKeyAuthnHandler extends BaseSecurityHandler {
    private static readonly HEADER_NAME = 'authorization'
    private static readonly HEADER_PREFIX = 'Bearer '
    private static readonly API_KEY_PREFIX = 'sk-'
    // Applies only to bearer tokens starting with the 'sk-' prefix, on
    // routes that have not opted out of auth.
    protected canHandle(request: FastifyRequest): Promise<boolean> {
        const prefix = `${PlatformApiKeyAuthnHandler.HEADER_PREFIX}${PlatformApiKeyAuthnHandler.API_KEY_PREFIX}`
        const routeMatches = request.headers[PlatformApiKeyAuthnHandler.HEADER_NAME]?.startsWith(prefix) ?? false
        const skipAuth = request.routeOptions.config?.skipAuth ?? false
        return Promise.resolve(routeMatches && !skipAuth)
    }
    // Looks up the API key and attaches the resulting principal. Lookup
    // failures are normalized to a generic AUTHENTICATION error so callers
    // cannot probe for valid keys.
    protected async doHandle(request: FastifyRequest): Promise<void> {
        const apiKeyValue = this.extractApiKeyValue(request)
        let apiKey: ApiKey | null = null
        try {
            apiKey = await apiKeyService.getByValueOrThrow(apiKeyValue)
        }
        catch (e) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHENTICATION,
                params: {
                    message: 'invalid api key',
                },
            })
        }
        const principal = await this.createPrincipal(request, apiKey)
        request.principal = principal
    }
    // Strips the 'Bearer ' prefix from the authorization header; throws
    // when the header is absent.
    private extractApiKeyValue(request: FastifyRequest): string {
        const header = request.headers[PlatformApiKeyAuthnHandler.HEADER_NAME]
        const prefix = PlatformApiKeyAuthnHandler.HEADER_PREFIX
        const apiKeyValue = header?.substring(prefix.length)
        if (isNil(apiKeyValue)) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHENTICATION,
                params: {
                    message: 'missing api key',
                },
            })
        }
        return apiKeyValue
    }
    // Builds a SERVICE principal for the key's platform. Platform-scoped
    // endpoints get a placeholder project id; project-scoped endpoints must
    // resolve a real project that belongs to the key's platform.
    private async createPrincipal(
        request: FastifyRequest,
        apiKey: ApiKey,
    ): Promise<Principal> {
        const principal: Principal = {
            id: apiKey.id,
            type: PrincipalType.SERVICE,
            projectId: 'ANONYMOUS_' + nanoid(),
            platform: {
                id: apiKey.platformId,
            },
        }
        if (request.routeOptions.config?.scope === EndpointScope.PLATFORM) {
            return principal
        }
        const projectId = await this.extractProjectIdOrThrow(request)
        try {
            const project = await projectService.getOneOrThrow(projectId)
            this.assertApiKeyAndProjectBelongToSamePlatform(project, apiKey)
            principal.projectId = projectId
            return principal
        }
        catch (e) {
            // Cross-platform access and missing projects are both reported
            // as the same generic authorization failure.
            throw new ActivepiecesError({
                code: ErrorCode.AUTHORIZATION,
                params: {
                    message: 'invalid api key',
                },
            })
        }
    }
    // Resolution order: for ':id' routes, derive the project id from the
    // referenced resource (404 if it cannot be found); otherwise require an
    // explicit projectId in the body or query string.
    private async extractProjectIdOrThrow(
        request: FastifyRequest,
    ): Promise<ProjectId> {
        const projectIdFromRequest = requestUtils.extractProjectId(request)
        const routerPath = request.routeOptions.url
        assertNotNullOrUndefined(routerPath, 'routerPath is undefined' )
        const hasIdParam = routerPath.includes(':id') &&
            isObject(request.params) &&
            'id' in request.params &&
            typeof request.params.id === 'string'
        if (hasIdParam) {
            const projectIdFromResource = await this.extractProjectIdFromResource(request)
            if (!isNil(projectIdFromResource)) {
                return projectIdFromResource
            }
            const resourceName = extractResourceName(routerPath)
            const resourceId = (request.params as { id: string }).id
            throw new ActivepiecesError({
                code: ErrorCode.ENTITY_NOT_FOUND,
                params: {
                    message: `${resourceId} not found`,
                    entityType: resourceName,
                    entityId: resourceId,
                },
            })
        }
        if (isNil(projectIdFromRequest)) {
            throw new ActivepiecesError({
                code: ErrorCode.VALIDATION,
                params: {
                    message: 'missing project id in request',
                },
            })
        }
        return projectIdFromRequest
    }
    // For routes addressing a single resource by ':id', loads that resource
    // and returns its owning projectId (undefined when not resolvable).
    private async extractProjectIdFromResource(
        request: FastifyRequest,
    ): Promise<string | undefined> {
        const routerPath = request.routeOptions.url
        assertNotNullOrUndefined(routerPath, 'routerPath is undefined' )
        const oneResourceRoute =
            routerPath.includes(':id') &&
            isObject(request.params) &&
            'id' in request.params &&
            typeof request.params.id === 'string'
        if (!oneResourceRoute) {
            return undefined
        }
        const resourceName = extractResourceName(routerPath)
        const { id } = request.params as { id: string }
        return this.getProjectIdFromResource(resourceName, id)
    }
    // Fetches the entity by id from the table mapped to the resource name
    // and reads its projectId column.
    private async getProjectIdFromResource(
        resource: string | undefined,
        id: string,
    ): Promise<string | undefined> {
        const tableName = this.getTableNameFromResource(resource)
        if (isNil(tableName)) {
            return undefined
        }
        const entity = await databaseConnection().getRepository(tableName).findOneBy({
            id,
        })
        return entity?.projectId
    }
    // URL resource segment -> entity table name, for the resources whose
    // rows carry a projectId. Unknown resources return undefined.
    private getTableNameFromResource(
        resource: string | undefined,
    ): string | undefined {
        if (isNil(resource)) {
            return undefined
        }
        switch (resource) {
            case 'flow-runs':
                return FlowRunEntity.options.name
            case 'flows':
                return FlowEntity.options.name
            case 'app-connections':
                return AppConnectionEntity.options.name
            case 'project-members':
                return ProjectMemberEntity.options.name
            case 'folders':
                return FolderEntity.options.name
        }
        return undefined
    }
    // Guards against using a key from one platform to reach a project that
    // lives on another.
    private assertApiKeyAndProjectBelongToSamePlatform(
        project: Project,
        apiKey: ApiKey,
    ): void {
        if (project.platformId !== apiKey.platformId) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHORIZATION,
                params: {
                    message: 'invalid project id and platform id',
                },
            })
        }
    }
}

View File

@@ -0,0 +1,42 @@
import {
ActivepiecesError,
assertNotNullOrUndefined,
ErrorCode,
} from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { BaseSecurityHandler } from '../security-handler'
/**
 * Rejects requests whose principal type is not in the route's
 * `allowedPrincipals` list. Infrastructure routes (favicon, docs,
 * redirect, UI assets) are exempt.
 */
export class PrincipalTypeAuthzHandler extends BaseSecurityHandler {
    private static readonly IGNORED_ROUTES = [
        '/favicon.ico',
        '/v1/docs',
        '/redirect',
    ]
    protected canHandle(request: FastifyRequest): Promise<boolean> {
        const routerPath = request.routeOptions.url
        assertNotNullOrUndefined(routerPath, 'routerPath is undefined')
        const isIgnored = PrincipalTypeAuthzHandler.IGNORED_ROUTES.includes(routerPath)
            || routerPath.startsWith('/ui')
        return Promise.resolve(!isIgnored)
    }
    protected doHandle(request: FastifyRequest): Promise<void> {
        const allowedPrincipals = request.routeOptions.config?.allowedPrincipals
        assertNotNullOrUndefined(allowedPrincipals, 'configuredPrincipals is undefined')
        if (!allowedPrincipals.includes(request.principal.type)) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHORIZATION,
                params: {
                    message: 'invalid route for principal type',
                },
            })
        }
        return Promise.resolve()
    }
}

View File

@@ -0,0 +1,55 @@
import {
ActivepiecesError,
assertNotNullOrUndefined,
ErrorCode,
PrincipalType,
} from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { requestUtils } from '../../request/request-utils'
import { BaseSecurityHandler } from '../security-handler'
/**
 * Ensures a request that names a projectId targets the caller's own
 * project. Worker and anonymous principals are exempt, as are routes that
 * legitimately operate across projects or platforms.
 */
export class ProjectAuthzHandler extends BaseSecurityHandler {
    private static readonly IGNORED_ROUTES = [
        '/v1/admin/pieces',
        '/v1/admin/platforms',
        '/v1/app-credentials',
        '/v1/authentication/switch-project',
        '/v1/authentication/switch-platform',
        '/v1/webhooks',
        '/v1/webhooks/:flowId',
        '/v1/webhooks/:flowId/test',
        '/v1/webhooks/:flowId/sync',
        // This works for both platform and project, we have to check this manually
        '/v1/user-invitations',
        '/v1/audit-events',
    ]
    protected canHandle(request: FastifyRequest): Promise<boolean> {
        const routerPath = request.routeOptions.url
        assertNotNullOrUndefined(routerPath, 'routerPath is undefined')
        const isIgnored = ProjectAuthzHandler.IGNORED_ROUTES.includes(routerPath)
        return Promise.resolve(!isIgnored)
    }
    protected doHandle(request: FastifyRequest): Promise<void> {
        const { principal } = request
        const isExempt = principal.type === PrincipalType.WORKER
            || principal.type === PrincipalType.UNKNOWN
        if (isExempt) {
            return Promise.resolve()
        }
        const requestedProjectId = requestUtils.extractProjectId(request)
        if (requestedProjectId && requestedProjectId !== principal.projectId) {
            throw new ActivepiecesError({
                code: ErrorCode.AUTHORIZATION,
                params: {
                    message: 'invalid project id',
                },
            })
        }
        return Promise.resolve()
    }
}

View File

@@ -0,0 +1,22 @@
import { AppSystemProp, networkUtils } from '@activepieces/server-shared'
import RateLimitPlugin from '@fastify/rate-limit'
import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
import FastifyPlugin from 'fastify-plugin'
import { redisConnections } from '../../database/redis-connections'
import { system } from '../../helper/system/system'
// Whether per-IP rate limiting is enabled; read once at module load from
// the system configuration.
const API_RATE_LIMIT_AUTHN_ENABLED = system.getBoolean(
    AppSystemProp.API_RATE_LIMIT_AUTHN_ENABLED,
)
// Registers @fastify/rate-limit backed by Redis. `global: false` means
// routes opt in individually via their route config rather than being
// limited by default.
export const rateLimitModule: FastifyPluginAsyncTypebox = FastifyPlugin(
    async (app) => {
        if (API_RATE_LIMIT_AUTHN_ENABLED) {
            await app.register(RateLimitPlugin, {
                global: false,
                // Key on the client's real IP, honoring the configured
                // proxy header when present.
                keyGenerator: (req) => networkUtils.extractClientRealIp(req, system.get(AppSystemProp.CLIENT_REAL_IP_HEADER)),
                redis: await redisConnections.create(),
            })
        }
    },
)

View File

@@ -0,0 +1,50 @@
import { Principal } from '@activepieces/shared'
import { FastifyRequest } from 'fastify'
import { AccessTokenAuthnHandler } from './authn/access-token-authn-handler'
import { AnonymousAuthnHandler } from './authn/anonymous-authn-handler'
import { PlatformApiKeyAuthnHandler } from './authn/platform-api-key-authn-handler'
import { PrincipalTypeAuthzHandler } from './authz/principal-type-authz-handler'
import { ProjectAuthzHandler } from './authz/project-authz-handler'
// Authentication handlers, tried in order; the first one that populates
// request.principal wins (API key before JWT, anonymous as the fallback).
const AUTHN_HANDLERS = [
    new PlatformApiKeyAuthnHandler(),
    new AccessTokenAuthnHandler(),
    new AnonymousAuthnHandler(),
]
// Authorization handlers; all of them run on every request.
const AUTHZ_HANDLERS = [
    new PrincipalTypeAuthzHandler(),
    new ProjectAuthzHandler(),
]
export const securityHandlerChain = async (
request: FastifyRequest,
): Promise<void> => {
await executeAuthnHandlers(request)
await executeAuthzHandlers(request)
}
/**
* Executes authn handlers in order, if one of the handlers populates the principal,
* the remaining handlers are skipped.
*/
const executeAuthnHandlers = async (request: FastifyRequest): Promise<void> => {
for (const handler of AUTHN_HANDLERS) {
await handler.handle(request)
const principalPopulated = checkWhetherPrincipalIsPopulated(request)
if (principalPopulated) {
return
}
}
}
const executeAuthzHandlers = async (request: FastifyRequest): Promise<void> => {
for (const handler of AUTHZ_HANDLERS) {
await handler.handle(request)
}
}
const checkWhetherPrincipalIsPopulated = (request: FastifyRequest): boolean => {
const principal = request.principal as Principal | undefined
return principal !== undefined
}

View File

@@ -0,0 +1,16 @@
import { FastifyRequest } from 'fastify'
// Contract for one link in the security chain: inspect the request and
// either mutate it (e.g. attach a principal) or throw an authn/authz error.
export type SecurityHandler = {
    handle(request: FastifyRequest): Promise<void>
}
/**
 * Template-method base class: `handle` consults `canHandle` and only then
 * runs `doHandle`, so concrete handlers implement just those two hooks.
 */
export abstract class BaseSecurityHandler implements SecurityHandler {
    async handle(request: FastifyRequest): Promise<void> {
        const applicable = await this.canHandle(request)
        if (!applicable) {
            return
        }
        await this.doHandle(request)
    }
    protected abstract canHandle(request: FastifyRequest): Promise<boolean>
    protected abstract doHandle(request: FastifyRequest): Promise<void>
}

View File

@@ -0,0 +1,90 @@
import { rejectedPromiseHandler } from '@activepieces/server-shared'
import { ActivepiecesError, ErrorCode, Principal, PrincipalForType, PrincipalType, WebsocketServerEvent } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import { Socket } from 'socket.io'
import { accessTokenManager } from '../authentication/lib/access-token-manager'
import { app } from '../server'
// A websocket event listener factory: given the socket, returns the actual
// handler, which receives the event payload, the verified principal, and an
// optional ack callback.
export type WebsocketListener<T, PR extends PrincipalType.USER | PrincipalType.WORKER>
= (socket: Socket) => (data: T, principal: PrincipalForType<PR>, callback?: (data: unknown) => void) => Promise<void>
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type ListenerMap<PR extends PrincipalType.USER | PrincipalType.WORKER> = Partial<Record<WebsocketServerEvent, WebsocketListener<any, PR>>>
// Registered listeners, bucketed by principal type (see addListener below).
const listener = {
    [PrincipalType.USER]: {} as ListenerMap<PrincipalType.USER >,
    [PrincipalType.WORKER]: {} as ListenerMap<PrincipalType.WORKER>,
}
/**
 * Socket.IO glue: verifies the connecting principal, joins the socket to
 * its room (project id for users, worker id for workers), and wires up the
 * listeners registered via addListener.
 */
export const websocketService = {
    // Broadcast helper targeting a worker's room.
    to: (workerId: string) => app!.io.to(workerId),
    async init(socket: Socket, log: FastifyBaseLogger): Promise<void> {
        const principal = await websocketService.verifyPrincipal(socket)
        const type = principal.type
        // Only USER and WORKER principals get websocket listeners.
        if (![PrincipalType.USER, PrincipalType.WORKER].includes(type)) {
            return
        }
        const castedType = type as keyof typeof listener
        switch (type) {
            case PrincipalType.USER: {
                log.info({
                    message: 'User connected',
                    userId: principal.id,
                    projectId: principal.projectId,
                })
                // Users share a room per project so project-wide events fan out.
                await socket.join(principal.projectId)
                break
            }
            case PrincipalType.WORKER: {
                const workerId = socket.handshake.auth.workerId
                log.info({
                    message: 'Worker connected',
                    workerId,
                })
                // Workers get their own room, addressed by workerId.
                await socket.join(workerId)
                break
            }
            default: {
                // Unreachable given the includes() guard above; kept as a
                // defensive fail-closed branch.
                throw new ActivepiecesError({
                    code: ErrorCode.AUTHENTICATION,
                    params: {
                        message: 'Invalid principal type',
                    },
                })
            }
        }
        // Wire every registered event to its handler, with rejected promises
        // routed through rejectedPromiseHandler instead of crashing the socket.
        for (const [event, handler] of Object.entries(listener[castedType])) {
            socket.on(event, async (data, callback) => rejectedPromiseHandler(handler(socket)(data, principal, callback), log))
        }
        // NOTE(review): listener[castedType][CONNECT] is a single function
        // (or undefined), and Object.values() of a function is always [] —
        // so this loop never executes a CONNECT handler. Looks like dead
        // code or a latent bug; confirm intent before changing.
        for (const handler of Object.values(listener[castedType][WebsocketServerEvent.CONNECT] ?? {})) {
            handler(socket)
        }
    },
    async onDisconnect(socket: Socket): Promise<void> {
        const principal = await websocketService.verifyPrincipal(socket)
        const castedType = principal.type as keyof typeof listener
        // NOTE(review): same Object.values-over-a-function pattern as the
        // CONNECT loop above — this body never runs; confirm intent.
        for (const handler of Object.values(listener[castedType][WebsocketServerEvent.DISCONNECT] ?? {})) {
            handler(socket)
        }
    },
    // Verifies the JWT supplied in the socket handshake auth payload.
    async verifyPrincipal(socket: Socket): Promise<Principal> {
        return accessTokenManager.verifyPrincipal(socket.handshake.auth.token)
    },
    // Registers (or replaces) the single handler for an event within the
    // given principal-type bucket.
    addListener<T, PR extends PrincipalType.WORKER | PrincipalType.USER>(principalType: PR, event: WebsocketServerEvent, handler: WebsocketListener<T, PR>): void {
        switch (principalType) {
            case PrincipalType.USER: {
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                listener[PrincipalType.USER][event] = handler as unknown as WebsocketListener<any, PrincipalType.USER>
                break
            }
            case PrincipalType.WORKER: {
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                listener[PrincipalType.WORKER][event] = handler as unknown as WebsocketListener<any, PrincipalType.WORKER>
                break
            }
        }
    },
    // Emits to a worker's room and waits (up to 4s) for its acknowledgment.
    emitWithAck<T = unknown>(event: WebsocketServerEvent, workerId: string, data?: unknown): Promise<T> {
        return app!.io.to([workerId]).timeout(4000).emitWithAck(event, data)
    },
}

View File

@@ -0,0 +1,34 @@
import { AppSystemProp, DatabaseType } from '@activepieces/server-shared'
import { ApEdition } from '@activepieces/shared'
import { EntitySchemaColumnOptions } from 'typeorm'
import { system } from '../helper/system/system'
// Database backend selected via the DB_TYPE system property.
const databaseType = system.get(AppSystemProp.DB_TYPE)
// Natural-sort collation used on Postgres; PGlite runs without the custom
// 'en_natural' collation, so fall back to the column default there.
export const COLLATION = databaseType === DatabaseType.PGLITE ? undefined : 'en_natural'
// Column definition for the 21-character Activepieces ids (apId).
export const ApIdSchema = {
    type: String,
    length: 21,
} as EntitySchemaColumnOptions
// Columns shared by every entity: an apId primary key plus TypeORM-managed
// created/updated timestamps. Spread into each EntitySchema's `columns`.
export const BaseColumnSchemaPart = {
    id: {
        ...ApIdSchema,
        primary: true,
    } as EntitySchemaColumnOptions,
    created: {
        name: 'created',
        type: 'timestamp with time zone',
        createDate: true,
    } as EntitySchemaColumnOptions,
    updated: {
        name: 'updated',
        type: 'timestamp with time zone',
        updateDate: true,
    } as EntitySchemaColumnOptions,
}
/**
 * Returns true when the current deployment edition is NOT one of `editions`.
 * Used to short-circuit edition-specific migrations and seeds.
 */
export function isNotOneOfTheseEditions(editions: ApEdition[]): boolean {
    const currentEdition = system.getEdition()
    return !editions.some((edition) => edition === currentEdition)
}

View File

@@ -0,0 +1,134 @@
import { AppSystemProp, DatabaseType } from '@activepieces/server-shared'
import { isNil } from '@activepieces/shared'
import {
DataSource,
EntitySchema,
} from 'typeorm'
import { AIProviderEntity } from '../ai/ai-provider-entity'
import { PlatformAnalyticsReportEntity } from '../analytics/platform-analytics-report.entity'
import { AppConnectionEntity } from '../app-connection/app-connection.entity'
import { UserIdentityEntity } from '../authentication/user-identity/user-identity-entity'
import { AlertEntity } from '../ee/alerts/alerts-entity'
import { ApiKeyEntity } from '../ee/api-keys/api-key-entity'
import { AppCredentialEntity } from '../ee/app-credentials/app-credentials.entity'
import { AppSumoEntity } from '../ee/appsumo/appsumo.entity'
import { AuditEventEntity } from '../ee/audit-logs/audit-event-entity'
import { OtpEntity } from '../ee/authentication/otp/otp-entity'
import { ConnectionKeyEntity } from '../ee/connection-keys/connection-key.entity'
import { CustomDomainEntity } from '../ee/custom-domains/custom-domain.entity'
import { OAuthAppEntity } from '../ee/oauth-apps/oauth-app.entity'
import { PlatformPlanEntity } from '../ee/platform/platform-plan/platform-plan.entity'
import { ProjectMemberEntity } from '../ee/projects/project-members/project-member.entity'
import { ProjectPlanEntity } from '../ee/projects/project-plan/project-plan.entity'
import { GitRepoEntity } from '../ee/projects/project-release/git-sync/git-sync.entity'
import { ProjectReleaseEntity } from '../ee/projects/project-release/project-release.entity'
import { ProjectRoleEntity } from '../ee/projects/project-role/project-role.entity'
import { SigningKeyEntity } from '../ee/signing-key/signing-key-entity'
import { FileEntity } from '../file/file.entity'
import { FlagEntity } from '../flags/flag.entity'
import { FlowEntity } from '../flows/flow/flow.entity'
import { FlowRunEntity } from '../flows/flow-run/flow-run-entity'
import { FlowVersionEntity } from '../flows/flow-version/flow-version-entity'
import { FolderEntity } from '../flows/folder/folder.entity'
import { system } from '../helper/system/system'
import { McpServerEntity } from '../mcp/mcp-entity'
import { PieceMetadataEntity } from '../pieces/metadata/piece-metadata-entity'
import { PieceTagEntity } from '../pieces/tags/pieces/piece-tag.entity'
import { TagEntity } from '../pieces/tags/tag-entity'
import { PlatformEntity } from '../platform/platform.entity'
import { ProjectEntity } from '../project/project-entity'
import { StoreEntryEntity } from '../store-entry/store-entry-entity'
import { FieldEntity } from '../tables/field/field.entity'
import { CellEntity } from '../tables/record/cell.entity'
import { RecordEntity } from '../tables/record/record.entity'
import { TableWebhookEntity } from '../tables/table/table-webhook.entity'
import { TableEntity } from '../tables/table/table.entity'
import { TemplateEntity } from '../template/template.entity'
import { TodoActivityEntity } from '../todos/activity/todos-activity.entity'
import { TodoEntity } from '../todos/todo.entity'
import { AppEventRoutingEntity } from '../trigger/app-event-routing/app-event-routing.entity'
import { TriggerEventEntity } from '../trigger/trigger-events/trigger-event.entity'
import { TriggerSourceEntity } from '../trigger/trigger-source/trigger-source-entity'
import { UserEntity } from '../user/user-entity'
import { UserInvitationEntity } from '../user-invitations/user-invitation.entity'
import { createPGliteDataSource } from './pglite-connection'
import { createPostgresDataSource } from './postgres-connection'
// AP_DB_TYPE decides which DataSource factory is used (PGlite vs Postgres).
const databaseType = system.get(AppSystemProp.DB_TYPE)
// Registry of every entity schema handed to the DataSource. All editions load
// the full list; edition gating happens at runtime, not at schema level.
function getEntities(): EntitySchema<unknown>[] {
    return [
        TriggerEventEntity,
        AppEventRoutingEntity,
        FileEntity,
        FlagEntity,
        FlowEntity,
        FlowVersionEntity,
        FlowRunEntity,
        ProjectEntity,
        StoreEntryEntity,
        UserEntity,
        AppConnectionEntity,
        FolderEntity,
        PieceMetadataEntity,
        PlatformEntity,
        TagEntity,
        PieceTagEntity,
        AlertEntity,
        UserInvitationEntity,
        AIProviderEntity,
        ProjectRoleEntity,
        TableEntity,
        FieldEntity,
        RecordEntity,
        CellEntity,
        TableWebhookEntity,
        UserIdentityEntity,
        TodoEntity,
        McpServerEntity,
        TodoActivityEntity,
        TriggerSourceEntity,
        // Enterprise
        ProjectMemberEntity,
        ProjectPlanEntity,
        CustomDomainEntity,
        SigningKeyEntity,
        OAuthAppEntity,
        OtpEntity,
        ApiKeyEntity,
        TemplateEntity,
        GitRepoEntity,
        AuditEventEntity,
        ProjectReleaseEntity,
        PlatformAnalyticsReportEntity,
        // CLOUD
        AppSumoEntity,
        ConnectionKeyEntity,
        AppCredentialEntity,
        PlatformPlanEntity,
    ]
}
// DataSource options shared by both backends (entity list, no subscribers).
export const commonProperties = {
    subscribers: [],
    entities: getEntities(),
}
// Process-wide singleton; created lazily by databaseConnection().
let _databaseConnection: DataSource | null = null
// Picks the DataSource factory matching AP_DB_TYPE.
// Unknown/unset values fall back to Postgres.
const createDataSource = (): DataSource => {
    switch (databaseType) {
        case DatabaseType.PGLITE:
            return createPGliteDataSource()
        case DatabaseType.POSTGRES:
        default:
            return createPostgresDataSource()
    }
}
/**
 * Returns the process-wide TypeORM DataSource, creating it on first use.
 * Lazy construction lets the app control startup sequencing (see the
 * migration-CLI file's warning about eager initialization).
 */
export const databaseConnection = (): DataSource => {
    const existing = _databaseConnection
    if (!isNil(existing)) {
        return existing
    }
    const fresh = createDataSource()
    _databaseConnection = fresh
    return fresh
}

View File

@@ -0,0 +1,10 @@
import { databaseConnection } from './database-connection'
import { databaseSeeds } from './seeds'
/**
 * Initializes the lazily-created DataSource, optionally applies pending
 * TypeORM migrations, then runs database seeds.
 *
 * @param runMigrations - when true, pending migrations execute before seeding.
 */
export async function initializeDatabase({ runMigrations }: { runMigrations: boolean }): Promise<void> {
    await databaseConnection().initialize()
    if (runMigrations) {
        await databaseConnection().runMigrations()
    }
    await databaseSeeds.run()
}

View File

@@ -0,0 +1,14 @@
/**
* This file exists solely for TypeORM CLI migration generation.
* It exports a DataSource instance that the CLI can use.
*
* Usage: nx db-migration server-api --name=<MIGRATION_NAME>
*
* DO NOT import this file in application code - use databaseConnection() instead.
* Importing this file triggers immediate DataSource initialization at module load time,
* bypassing the lazy initialization that the app relies on for proper startup sequencing.
*/
import { databaseConnection } from './database-connection'
// Eagerly-built DataSource exported for the TypeORM CLI only (see header note).
export default databaseConnection()

View File

@@ -0,0 +1,161 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class AddPieceTypeAndPackageTypeToFlowVersion1696245170061
implements MigrationInterface {
    /**
     * Backfills `packageType`/`pieceType` on every PIECE and PIECE_TRIGGER
     * step of every flow version, walking each trigger tree in place.
     * Rows are fetched one at a time to keep memory bounded.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        // Fetch only the IDs first; full rows are loaded per-iteration below.
        const flowVersionIds = await queryRunner.query(
            'SELECT id FROM flow_version',
        )
        log.info(
            'AddPieceTypeAndPackageTypeToFlowVersion1696245170061: found ' +
            flowVersionIds.length +
            ' versions',
        )
        let updatedFlows = 0
        for (const { id } of flowVersionIds) {
            // Fetch FlowVersion record by ID
            const flowVersion = await queryRunner.query(
                'SELECT * FROM flow_version WHERE id = $1',
                [id],
            )
            if (flowVersion.length > 0) {
                const updated = traverseAndUpdateSubFlow(
                    addPackageTypeAndPieceTypeToPieceStepSettings,
                    flowVersion[0].trigger,
                )
                if (updated) {
                    await queryRunner.query(
                        'UPDATE flow_version SET trigger = $1 WHERE id = $2',
                        [flowVersion[0].trigger, flowVersion[0].id],
                    )
                }
            }
            // NOTE(review): counts every scanned version, not only mutated ones.
            updatedFlows++
            if (updatedFlows % 100 === 0) {
                log.info(
                    'AddPieceTypeAndPackageTypeToFlowVersion1696245170061: ' +
                    updatedFlows +
                    ' flows updated',
                )
            }
        }
        log.info('AddPieceTypeAndPackageTypeToFlowVersion1696245170061: up')
    }
    /** Reverts the backfill by deleting both settings keys from piece steps. */
    public async down(queryRunner: QueryRunner): Promise<void> {
        // Execute raw SQL query to fetch IDs of FlowVersion records
        const flowVersionIds = await queryRunner.query(
            'SELECT id FROM flow_version',
        )
        for (const { id } of flowVersionIds) {
            // Fetch FlowVersion record by ID
            const flowVersion = await queryRunner.query(
                'SELECT * FROM flow_version WHERE id = $1',
                [id],
            )
            if (flowVersion.length > 0) {
                const updated = traverseAndUpdateSubFlow(
                    removePackageTypeAndPieceTypeFromPieceStepSettings,
                    flowVersion[0].trigger,
                )
                if (updated) {
                    await queryRunner.query(
                        'UPDATE flow_version SET trigger = $1 WHERE id = $2',
                        [flowVersion[0].trigger, flowVersion[0].id],
                    )
                }
            }
        }
        log.info('AddPieceTypeAndPackageTypeToFlowVersion1696245170061: down')
    }
}
/**
 * Walks a flow step tree depth-first and applies `updater` to every PIECE /
 * PIECE_TRIGGER step. Returns true when at least one piece step was visited.
 */
const traverseAndUpdateSubFlow = (
    updater: (s: PieceStep) => void,
    root?: Step,
): boolean => {
    if (!root) {
        return false
    }
    let touched = false
    if (root.type === 'BRANCH') {
        const successTouched = traverseAndUpdateSubFlow(updater, root.onSuccessAction)
        const failureTouched = traverseAndUpdateSubFlow(updater, root.onFailureAction)
        touched = successTouched || failureTouched
    }
    else if (root.type === 'LOOP_ON_ITEMS') {
        touched = traverseAndUpdateSubFlow(updater, root.firstLoopAction)
    }
    else if (root.type === 'PIECE' || root.type === 'PIECE_TRIGGER') {
        updater(root)
        touched = true
    }
    const nextTouched = traverseAndUpdateSubFlow(updater, root.nextAction)
    return touched || nextTouched
}
// Stamps the default registry/official package metadata onto a piece step.
const addPackageTypeAndPieceTypeToPieceStepSettings = (
    pieceStep: PieceStep,
): void => {
    pieceStep.settings.pieceType = 'OFFICIAL'
    pieceStep.settings.packageType = 'REGISTRY'
}
// Strips the package metadata again (rollback path).
const removePackageTypeAndPieceTypeFromPieceStepSettings = (
    pieceStep: PieceStep,
): void => {
    const { settings } = pieceStep
    delete settings.pieceType
    delete settings.packageType
}
// Minimal structural types mirroring the flow-version JSON at migration time.
type StepType =
    | 'BRANCH'
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'MISSING'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'WEBHOOK'
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}
type BranchStep = BaseStep<'BRANCH'> & {
    onFailureAction?: Step
    onSuccessAction?: Step
}
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}
type PieceStep = BaseStep<'PIECE' | 'PIECE_TRIGGER'> & {
    settings: {
        packageType?: 'REGISTRY' | 'ARCHIVE'
        pieceType?: 'OFFICIAL' | 'CUSTOM'
    }
}
type GenericStep = BaseStep<'CODE' | 'EMPTY' | 'MISSING' | 'WEBHOOK'>
type Step = BranchStep | LoopOnItemsStep | GenericStep | PieceStep

View File

@@ -0,0 +1,134 @@
import { ApEdition } from '@activepieces/shared'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
import { isNotOneOfTheseEditions } from '../../database-common'
const log = system.globalLogger()
export class AddPieceTypeAndPackageTypeToFlowTemplate1696245170062
implements MigrationInterface {
    /**
     * Same backfill as the flow-version migration, applied to flow templates.
     * Cloud-only: other editions have no flow_template table to touch.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        if (isNotOneOfTheseEditions([ApEdition.CLOUD])) {
            return
        }
        const connection = queryRunner.connection
        const templates = await connection.query('SELECT * FROM flow_template')
        for (const template of templates) {
            const updated = traverseAndUpdateSubFlow(
                addPackageTypeAndPieceTypeToPieceStepSettings,
                template.template.trigger,
            )
            if (updated) {
                await connection.query(
                    'UPDATE flow_template SET template = $1 WHERE id = $2',
                    [template.template, template.id],
                )
            }
        }
        log.info('AddPieceTypeAndPackageTypeToFlowTemplate1696245170062: up')
    }
    /** Removes the backfilled keys from every template's piece steps. */
    public async down(queryRunner: QueryRunner): Promise<void> {
        if (isNotOneOfTheseEditions([ApEdition.CLOUD])) {
            return
        }
        const connection = queryRunner.connection
        const templates = await connection.query('SELECT * FROM flow_template')
        for (const template of templates) {
            const updated = traverseAndUpdateSubFlow(
                removePackageTypeAndPieceTypeFromPieceStepSettings,
                template.template.trigger,
            )
            if (updated) {
                await connection.query(
                    'UPDATE flow_template SET template = $1 WHERE id = $2',
                    [template.template, template.id],
                )
            }
        }
        log.info('AddPieceTypeAndPackageTypeToFlowTemplate1696245170062: down')
    }
}
/**
 * Walks a flow step tree depth-first and applies `updater` to every PIECE /
 * PIECE_TRIGGER step. Returns true when at least one piece step was visited.
 */
const traverseAndUpdateSubFlow = (
    updater: (s: PieceStep) => void,
    root?: Step,
): boolean => {
    if (!root) {
        return false
    }
    let touched = false
    if (root.type === 'BRANCH') {
        const successTouched = traverseAndUpdateSubFlow(updater, root.onSuccessAction)
        const failureTouched = traverseAndUpdateSubFlow(updater, root.onFailureAction)
        touched = successTouched || failureTouched
    }
    else if (root.type === 'LOOP_ON_ITEMS') {
        touched = traverseAndUpdateSubFlow(updater, root.firstLoopAction)
    }
    else if (root.type === 'PIECE' || root.type === 'PIECE_TRIGGER') {
        updater(root)
        touched = true
    }
    const nextTouched = traverseAndUpdateSubFlow(updater, root.nextAction)
    return touched || nextTouched
}
// Stamps the default registry/official package metadata onto a piece step.
const addPackageTypeAndPieceTypeToPieceStepSettings = (
    pieceStep: PieceStep,
): void => {
    pieceStep.settings.pieceType = 'OFFICIAL'
    pieceStep.settings.packageType = 'REGISTRY'
}
// Strips the package metadata again (rollback path).
const removePackageTypeAndPieceTypeFromPieceStepSettings = (
    pieceStep: PieceStep,
): void => {
    const { settings } = pieceStep
    delete settings.pieceType
    delete settings.packageType
}
// Minimal structural types mirroring the template JSON at migration time.
type StepType =
    | 'BRANCH'
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'MISSING'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'WEBHOOK'
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}
type BranchStep = BaseStep<'BRANCH'> & {
    onFailureAction?: Step
    onSuccessAction?: Step
}
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}
type PieceStep = BaseStep<'PIECE' | 'PIECE_TRIGGER'> & {
    settings: {
        packageType?: 'REGISTRY' | 'ARCHIVE'
        pieceType?: 'OFFICIAL' | 'CUSTOM'
    }
}
type GenericStep = BaseStep<'CODE' | 'EMPTY' | 'MISSING' | 'WEBHOOK'>
type Step = BranchStep | LoopOnItemsStep | GenericStep | PieceStep

View File

@@ -0,0 +1,267 @@
import { isNil } from '@activepieces/shared'
import decompress from 'decompress'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
// Signature shared by the up/down step transformers: receives a CODE step,
// a QueryRunner for file lookups, and flow identifiers used in log output.
// Positional order matters — flowId comes before flowVersionId.
type FunctionTransformer = (
    s: CodeStep,
    fileRepo: QueryRunner,
    flowId: string,
    flowVersionId: string
) => Promise<void>
/**
 * Moves CODE-step sources out of zipped `file` rows and inlines them into the
 * flow JSON as `settings.sourceCode` (up); down removes the inlined field.
 */
export class StoreCodeInsideFlow1697969398200 implements MigrationInterface {
    public async up(queryRunner: QueryRunner): Promise<void> {
        await this.processFlowVersions(queryRunner, flattenCodeStep)
        await this.processFlowTemplates(queryRunner, flattenCodeStep)
        log.info('StoreCodeInsideFlow1697969398200: up finished')
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await this.processFlowVersions(queryRunner, removeNewCodeField)
        await this.processFlowTemplates(queryRunner, removeNewCodeField)
        log.info('StoreCodeInsideFlow1697969398200: down finished')
    }
    // Applies stepFunction to every CODE step of every flow version,
    // persisting versions whose tree was touched.
    private async processFlowVersions(
        queryRunner: QueryRunner,
        stepFunction: FunctionTransformer,
    ) {
        const flowVersionIds = await queryRunner.query(
            'SELECT id FROM flow_version',
        )
        for (const { id } of flowVersionIds) {
            const flowVersion = await this.findFlowVersionById(queryRunner, id)
            if (flowVersion) {
                const updated = await traverseAndUpdateSubFlow(
                    stepFunction,
                    flowVersion.trigger,
                    queryRunner,
                    flowVersion.flowId,
                    flowVersion.id,
                )
                if (updated) {
                    await this.updateFlowVersion(
                        queryRunner,
                        flowVersion.id,
                        flowVersion,
                    )
                }
            }
        }
    }
    // Same transformation for flow templates, if the (cloud-only) table exists.
    private async processFlowTemplates(
        queryRunner: QueryRunner,
        stepFunction: FunctionTransformer,
    ) {
        // Check if the "flow_template" table exists
        const doesTableExist = await queryRunner.hasTable('flow_template')
        if (doesTableExist) {
            log.info(
                'StoreCodeInsideFlow1697969398200: flow template table exists',
            )
            const templates = await queryRunner.query('SELECT * FROM flow_template')
            for (const template of templates) {
                const updated = await traverseAndUpdateSubFlow(
                    stepFunction,
                    template.template.trigger,
                    queryRunner,
                    // NOTE(review): projectId is passed where the traversal expects
                    // a flowId — only affects log messages; confirm intended.
                    template.projectId,
                    template.id,
                )
                if (updated) {
                    await queryRunner.query(
                        'UPDATE flow_template SET template = $1 WHERE id = $2',
                        [template.template, template.id],
                    )
                }
            }
        }
    }
    private async findFlowVersionById(
        queryRunner: QueryRunner,
        id: string,
    ): Promise<FlowVersion | undefined> {
        const flowVersion = await queryRunner.query(
            'SELECT * FROM flow_version WHERE id = $1',
            [id],
        )
        return flowVersion[0]
    }
    // Persists the (mutated-in-place) trigger tree; flowId is rewritten too,
    // although it is never changed by the transformers.
    private async updateFlowVersion(
        queryRunner: QueryRunner,
        id: string,
        flowVersion: FlowVersion,
    ): Promise<void> {
        await queryRunner.query(
            'UPDATE flow_version SET "flowId" = $1, trigger = $2 WHERE id = $3',
            [flowVersion.flowId, flowVersion.trigger, id],
        )
    }
}
/**
 * Async depth-first walk over a flow step tree, applying `updater` to every
 * CODE step. Returns true when at least one CODE step was visited.
 * BRANCH descends into both arms; LOOP_ON_ITEMS into its body; all step kinds
 * continue through `nextAction`.
 */
const traverseAndUpdateSubFlow = async (
    updater: FunctionTransformer,
    root: Step | undefined,
    queryRunner: QueryRunner,
    flowId: string,
    flowVersionId: string,
): Promise<boolean> => {
    if (!root) {
        return false
    }
    let updated = false
    switch (root.type) {
        case 'BRANCH':
            updated =
                (await traverseAndUpdateSubFlow(
                    updater,
                    root.onSuccessAction,
                    queryRunner,
                    flowId,
                    flowVersionId,
                )) || updated
            updated =
                (await traverseAndUpdateSubFlow(
                    updater,
                    root.onFailureAction,
                    queryRunner,
                    flowId,
                    flowVersionId,
                )) || updated
            break
        case 'LOOP_ON_ITEMS':
            updated =
                (await traverseAndUpdateSubFlow(
                    updater,
                    root.firstLoopAction,
                    queryRunner,
                    flowId,
                    flowVersionId,
                )) || updated
            break
        case 'CODE':
            await updater(root, queryRunner, flowId, flowVersionId)
            updated = true
            break
        default:
            break
    }
    updated =
        (await traverseAndUpdateSubFlow(
            updater,
            root.nextAction,
            queryRunner,
            flowId,
            flowVersionId,
        )) || updated
    return updated
}
/**
 * Inlines a CODE step's zipped artifact into `settings.sourceCode`.
 *
 * Looks up the archived file referenced by `artifactSourceId`, decompresses
 * it, and copies index.ts/index.js plus package.json into the step. Steps
 * that already carry `sourceCode` (or have no artifact id) are left untouched.
 *
 * Fixed: parameter names were swapped relative to `FunctionTransformer`
 * (callers pass flowId third, flowVersionId fourth), so the warn logs
 * reported the two ids swapped; the log prefix also named the wrong
 * migration timestamp (…398100 instead of …398200).
 */
const flattenCodeStep = async (
    codeStep: CodeStep,
    queryRunner: QueryRunner,
    flowId: string,
    flowVersionId: string,
): Promise<void> => {
    const sourceCodeId = codeStep.settings.artifactSourceId
    const sourceCode = codeStep.settings.sourceCode
    if (!isNil(sourceCodeId) && isNil(sourceCode)) {
        const [file] = await queryRunner.query('SELECT * FROM file WHERE id = $1', [
            sourceCodeId,
        ])
        if (isNil(file)) {
            log.warn(
                `StoreCodeInsideFlow1697969398200: file not found for file id ${sourceCodeId} in flow ${flowId} of flow version ${flowVersionId}`,
            )
            return
        }
        // The artifact is a zip; extract the entry-point source and manifest.
        const buffer = await decompress(file.data)
        const code = buffer.find(
            (f: { path: string | string[] }) =>
                f.path.includes('index.ts') || f.path.includes('index.js'),
        )
        const packageJson = buffer.find((f: { path: string | string[] }) =>
            f.path.includes('package.json'),
        )
        if (isNil(code) || isNil(packageJson)) {
            log.warn(
                `StoreCodeInsideFlow1697969398200: code or package.json not found for file ${file.id} in flow ${flowId} of flow version ${flowVersionId}`,
            )
            return
        }
        codeStep.settings.sourceCode = {
            code: code.data.toString('utf-8'),
            packageJson: packageJson.data.toString('utf-8'),
        }
    }
}
/** Rollback transformer: drops the inlined sourceCode from a CODE step. */
const removeNewCodeField = async (
    codeStep: CodeStep,
    _queryRunner: QueryRunner,
): Promise<void> => {
    delete codeStep.settings.sourceCode
}
// Minimal structural types mirroring the flow JSON at migration time.
type StepType =
    | 'BRANCH'
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'MISSING'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'WEBHOOK'
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}
type BranchStep = BaseStep<'BRANCH'> & {
    onFailureAction?: Step
    onSuccessAction?: Step
}
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}
type CodeStep = BaseStep<'CODE'> & {
    settings: {
        // Id of the zipped artifact row in the "file" table.
        artifactSourceId: string
        // Inlined source; present only after this migration's up() ran.
        sourceCode?: {
            code: string
            packageJson: string
        }
    }
}
type GenericStep = BaseStep<
    'PIECE' | 'PIECE_TRIGGER' | 'EMPTY' | 'MISSING' | 'WEBHOOK'
>
type Step = BranchStep | LoopOnItemsStep | GenericStep | CodeStep
type FlowVersion = {
    id: string
    flowId: string
    trigger?: Step
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
/** Renames the user status value SHADOW to INVITED (data-only migration). */
export class UpdateUserStatusRenameShadowToInvited1699818680567
implements MigrationInterface {
    name = 'UpdateUserStatusRenameShadowToInvited1699818680567'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            UPDATE "user"
            SET "status" = 'INVITED'
            WHERE "status" = 'SHADOW'
        `)
        log.info('UpdateUserStatusRenameShadowToInvited1699818680567 up')
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            UPDATE "user"
            SET "status" = 'SHADOW'
            WHERE "status" = 'INVITED'
        `)
        log.info('UpdateUserStatusRenameShadowToInvited1699818680567 down')
    }
}

View File

@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class AddPartialUniqueIndexForEmailAndPlatformIdIsNull1701096458822
implements MigrationInterface {
    name = 'AddPartialUniqueIndexForEmailAndPlatformIdIsNull1701096458822'
    /**
     * Enforces email uniqueness among users without a platform
     * (platformId IS NULL) via a partial unique index.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            CREATE UNIQUE INDEX "idx_user_partial_unique_email_platform_id_is_null"
            ON "user"("email") WHERE "platformId" IS NULL
        `)
        log.info(
            'AddPartialUniqueIndexForEmailAndPlatformIdIsNull1701096458822 up',
        )
    }
    /**
     * Drops the index created by up().
     * Fixed: previously dropped "user_partial_unique_email_platform_id_is_null"
     * (missing the "idx_" prefix), a name up() never created, so the rollback
     * always failed.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            DROP INDEX "idx_user_partial_unique_email_platform_id_is_null"
        `)
        log.info(
            'AddPartialUniqueIndexForEmailAndPlatformIdIsNull1701096458822 down',
        )
    }
}

View File

@@ -0,0 +1,106 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
/**
 * Backfills `testStrategy` onto every trigger of every piece-metadata row,
 * derived from the trigger type (polling/app-webhook -> TEST_FUNCTION,
 * webhook -> SIMULATION). down() deletes the field again.
 */
export class AddTriggerTestStrategy1707087022764 implements MigrationInterface {
    name = 'AddTriggerTestStrategy1707087022764'
    public async up(queryRunner: QueryRunner): Promise<void> {
        const ids = await getAllPieceMetadataIds(queryRunner)
        for (const id of ids) {
            const pieceMetadata = await getPieceMetadataById(queryRunner, id)
            addTestStrategyToTriggers(pieceMetadata)
            await updatePieceMetadata(queryRunner, pieceMetadata)
        }
        log.info({ name: 'AddTriggerTestStrategy1707087022764' }, 'up')
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        const ids = await getAllPieceMetadataIds(queryRunner)
        for (const id of ids) {
            const pieceMetadata = await getPieceMetadataById(queryRunner, id)
            removeTestStrategyFromTriggers(pieceMetadata)
            await updatePieceMetadata(queryRunner, pieceMetadata)
        }
        log.info({ name: 'AddTriggerTestStrategy1707087022764' }, 'down')
    }
}
// Fetches only the ids, so full rows can be loaded one at a time.
const getAllPieceMetadataIds = async (
    queryRunner: QueryRunner,
): Promise<string[]> => {
    const queryResult: { id: string }[] = await queryRunner.query(
        'SELECT id FROM piece_metadata',
    )
    return queryResult.map(({ id }) => id)
}
// Loads just the columns this migration touches (id + triggers).
const getPieceMetadataById = async (
    queryRunner: QueryRunner,
    id: string,
): Promise<PieceMetadata> => {
    const queryResult = await queryRunner.query(
        'SELECT id, triggers FROM piece_metadata WHERE id = $1',
        [id],
    )
    return queryResult[0]
}
// Trigger type -> test strategy used when backfilling `testStrategy`.
const TEST_STRATEGY_BY_TRIGGER_TYPE: Record<TriggerType, TriggerTestStrategy> = {
    POLLING: 'TEST_FUNCTION',
    WEBHOOK: 'SIMULATION',
    APP_WEBHOOK: 'TEST_FUNCTION',
}
/**
 * Backfills `testStrategy` on every trigger of the piece, based on its type.
 * Also normalizes `triggers` from a JSON string into an object.
 */
const addTestStrategyToTriggers = (pieceMetadata: PieceMetadata): void => {
    const triggers = parseTriggers(pieceMetadata.triggers)
    pieceMetadata.triggers = triggers
    Object.values(triggers).forEach((trigger) => {
        trigger.testStrategy = TEST_STRATEGY_BY_TRIGGER_TYPE[trigger.type]
    })
}
/**
 * Reverts the backfill by deleting `testStrategy` from every trigger.
 * Also normalizes `triggers` from a JSON string into an object.
 */
const removeTestStrategyFromTriggers = (pieceMetadata: PieceMetadata): void => {
    const triggers = parseTriggers(pieceMetadata.triggers)
    pieceMetadata.triggers = triggers
    Object.values(triggers).forEach((trigger) => {
        delete trigger.testStrategy
    })
}
// Persists the mutated triggers map; always serialized back to JSON text.
const updatePieceMetadata = async (
    queryRunner: QueryRunner,
    pieceMetadata: PieceMetadata,
): Promise<void> => {
    await queryRunner.query(
        'UPDATE piece_metadata SET triggers = $1 WHERE id = $2',
        [JSON.stringify(pieceMetadata.triggers), pieceMetadata.id],
    )
}
/**
 * Normalizes the `triggers` column, which may arrive as JSON text or as an
 * already-parsed object, into a trigger map. Objects pass through unchanged.
 */
const parseTriggers = (triggers: string | Record<string, Trigger>): Record<string, Trigger> => {
    return typeof triggers === 'string' ? JSON.parse(triggers) : triggers
}
// How the trigger fires.
type TriggerType = 'POLLING' | 'WEBHOOK' | 'APP_WEBHOOK'
// How the trigger is exercised in the builder's test panel.
type TriggerTestStrategy = 'SIMULATION' | 'TEST_FUNCTION'
type Trigger = {
    type: TriggerType
    testStrategy?: TriggerTestStrategy
}
// Shape of the piece_metadata columns this migration reads/writes.
type PieceMetadata = {
    id: string
    triggers: Record<string, Trigger> | string
}

View File

@@ -0,0 +1,83 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const FLOW_VERSION_TABLE = 'flow_version'
const log = system.globalLogger()
/**
 * Converts legacy WEBHOOK triggers into PIECE_TRIGGER steps backed by the
 * official webhook piece's catch_request trigger. Handles flow versions whose
 * trigger column is stored either as JSON text or as a parsed object, and
 * writes it back in the same representation.
 */
export class MigrateWebhook1709581196563 implements MigrationInterface {
    name = 'MigrateWebhook1709581196563'
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('MigrateWebhook1709581196563, started')
        let count = 0
        const flowVersionsIds = await queryRunner.query('SELECT id FROM flow_version')
        for (const { id } of flowVersionsIds) {
            const [flowVersion] = await queryRunner.query('SELECT * FROM flow_version WHERE id = $1', [id])
            const step = parseJson(flowVersion.trigger)
            // Remember the original representation so we write back the same way.
            const isString = typeof flowVersion.trigger === 'string'
            if (step.type === 'WEBHOOK') {
                step.type = 'PIECE_TRIGGER'
                step.settings = {
                    input: {},
                    'inputUiInfo': step.settings.inputUiInfo,
                    triggerName: 'catch_request',
                    pieceName: '@activepieces/piece-webhook',
                    pieceVersion: '0.0.1',
                    'pieceType': 'OFFICIAL',
                    'packageType': 'REGISTRY',
                }
                count++
                const result = isString ? JSON.stringify(step) : step
                await queryRunner.query(
                    `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                    [result, flowVersion.id],
                )
            }
        }
        log.info('MigrateWebhook1709581196563, migrated flows ' + count)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('rolling back MigrateWebhook1709581196563, started')
        let count = 0
        const flowVersionsIds = await queryRunner.query('SELECT id FROM flow_version')
        for (const { id } of flowVersionsIds) {
            const [flowVersion] = await queryRunner.query('SELECT * FROM flow_version WHERE id = $1', [id])
            const step = parseJson(flowVersion.trigger)
            const isString = typeof flowVersion.trigger === 'string'
            if (step.type === 'PIECE_TRIGGER') {
                // Only revert triggers that this migration's up() produced.
                if (step.settings.pieceName === '@activepieces/piece-webhook') {
                    step.type = 'WEBHOOK'
                    step.settings = {
                        'inputUiInfo': step.settings.inputUiInfo,
                    }
                    count++
                    const result = isString ? JSON.stringify(step) : step
                    await queryRunner.query(
                        `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                        [result, flowVersion.id],
                    )
                }
            }
        }
        log.info(
            'rolling back MigrateWebhook1709581196563, finished flows ' + count,
        )
    }
}
/**
 * Best-effort JSON parse: returns the parsed value, or the input unchanged
 * when it is not valid JSON (already-parsed objects land here too).
 */
const parseJson = (json: string) => {
    try {
        return JSON.parse(json)
    }
    catch {
        return json
    }
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/** Drops the platform.showActivityLog flag; down restores it as NOT NULL false. */
export class RemoveShowActivityLog1716105958530 implements MigrationInterface {
    name = 'RemoveShowActivityLog1716105958530'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
        ALTER TABLE platform
            DROP COLUMN "showActivityLog";
    `)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        // Re-add as nullable, backfill, then tighten to NOT NULL.
        await queryRunner.query(`
        ALTER TABLE platform
        ADD COLUMN "showActivityLog" BOOLEAN;
    `)
        await queryRunner.query(`
        UPDATE platform
        SET "showActivityLog" = false;
    `)
        await queryRunner.query(`
        ALTER TABLE platform
        ALTER COLUMN "showActivityLog" SET NOT NULL;
    `)
    }
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/** Adds a nullable integer duration column to flow_run. */
export class AddDurationForRuns1716725027424 implements MigrationInterface {
    name = 'AddDurationForRuns1716725027424'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "flow_run"
            ADD "duration" integer
        `)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "flow_run" DROP COLUMN "duration"
        `)
    }
}

View File

@@ -0,0 +1,35 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/**
 * Widens app-event routing uniqueness to include flowId, so multiple flows
 * can subscribe to the same (appName, project, identifier, event) tuple.
 */
export class ChangeEventRoutingConstraint1723549873495 implements MigrationInterface {
    name = 'ChangeEventRoutingConstraint1723549873495'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            DROP INDEX "idx_app_event_project_id_appName_identifier_value_event"
        `)
        await queryRunner.query(`
            CREATE UNIQUE INDEX "idx_app_event_flow_id_project_id_appName_identifier_value_event" ON "app_event_routing" (
                "appName",
                "projectId",
                "flowId",
                "identifierValue",
                "event"
            )
        `)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            DROP INDEX "idx_app_event_flow_id_project_id_appName_identifier_value_event"
        `)
        await queryRunner.query(`
            CREATE UNIQUE INDEX "idx_app_event_project_id_appName_identifier_value_event" ON "app_event_routing" (
                "appName",
                "projectId",
                "identifierValue",
                "event"
            )
        `)
    }
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
/**
 * Replaces the unique step_file index with a plain (non-unique) one of the
 * same name, allowing duplicate (project, flow, step, name) rows.
 */
export class RemoveUniqueConstraintOnStepFile1725570317713 implements MigrationInterface {
    name = 'RemoveUniqueConstraintOnStepFile1725570317713'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            DROP INDEX "step_file_project_id_flow_id_step_name_name"
        `)
        await queryRunner.query(`
            CREATE INDEX "step_file_project_id_flow_id_step_name_name" ON "step_file" ("projectId", "flowId", "stepName", "name")
        `)
        log.info({ name: 'RemoveUniqueConstraintOnStepFile1725570317713' }, 'up')
    }
    // NOTE: down fails if duplicate rows were created while the index was
    // non-unique.
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            DROP INDEX "step_file_project_id_flow_id_step_name_name"
        `)
        await queryRunner.query(`
            CREATE UNIQUE INDEX "step_file_project_id_flow_id_step_name_name" ON "step_file" ("projectId", "flowId", "stepName", "name")
        `)
        log.info({ name: 'RemoveUniqueConstraintOnStepFile1725570317713' }, 'down')
    }
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/** Adds a nullable tokenVersion column to "user" (session invalidation support). */
export class AddUserSessionId1727130193726 implements MigrationInterface {
    name = 'AddUserSessionId1727130193726'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "user"
            ADD "tokenVersion" character varying
        `)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "user" DROP COLUMN "tokenVersion"
        `)
    }
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/** Adds a nullable licenseKey column to platform. */
export class AddLicenseKeyIntoPlatform1728827704109 implements MigrationInterface {
    name = 'AddLicenseKeyIntoPlatform1728827704109'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "platform"
            ADD "licenseKey" character varying
        `)
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            ALTER TABLE "platform" DROP COLUMN "licenseKey"
        `)
    }
}

View File

@@ -0,0 +1,36 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
/**
 * Rebuilds the (platformId, externalId) unique index as a partial index that
 * ignores soft-deleted projects, so an externalId can be reused after delete.
 */
export class ChangeProjectUniqueConstraintToPartialIndex1729098769827 implements MigrationInterface {
    name = 'ChangeProjectUniqueConstraintToPartialIndex1729098769827'
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info({ name: this.name }, 'Up')
        await queryRunner.query(`
            DROP INDEX "idx_project_platform_id_external_id";
        `)
        await queryRunner.query(`
            CREATE UNIQUE INDEX "idx_project_platform_id_external_id"
            ON "project" ("platformId", "externalId")
            WHERE "deleted" IS NULL;
        `)
    }
    // NOTE: down fails if deleted and live projects now share an externalId.
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info({
            name: this.name,
        }, 'down')
        await queryRunner.query(`
            DROP INDEX "idx_project_platform_id_external_id";
        `)
        await queryRunner.query(`
            CREATE UNIQUE INDEX "idx_project_platform_id_external_id"
            ON "project" ("platformId", "externalId");
        `)
    }
}

View File

@@ -0,0 +1,235 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable no-case-declarations */
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
/**
 * Converts legacy BRANCH steps into ROUTER steps for every flow version that
 * has no schemaVersion yet, then stamps schemaVersion = '1'. Each conversion
 * is validated (step count preserved, no BRANCH left) before persisting; a
 * validation failure aborts the migration. Irreversible — down() throws.
 */
export class SwitchToRouter1731019013340 implements MigrationInterface {
    public async up(queryRunner: QueryRunner): Promise<void> {
        const flowVersionIds = await queryRunner.query(
            'SELECT id FROM flow_version WHERE "schemaVersion" IS NULL',
        )
        log.info(
            'SwitchToRouter1731019013340: found ' +
            flowVersionIds.length +
            ' versions',
        )
        let updatedFlows = 0
        for (const { id } of flowVersionIds) {
            const flowVersion = await queryRunner.query(
                'SELECT * FROM flow_version WHERE id = $1',
                [id],
            )
            if (flowVersion.length > 0) {
                // Trigger may be stored as JSON text or an object; normalize first.
                const trigger = typeof flowVersion[0].trigger === 'string' ? JSON.parse(flowVersion[0].trigger) : flowVersion[0].trigger
                const originalStepCount = countSteps(trigger)
                const updatedTrigger = traverseAndUpdateSubFlow(
                    convertBranchToRouter,
                    JSON.parse(JSON.stringify(trigger)), // Deep clone to avoid modifying original
                )
                // Sanity checks: conversion must neither drop steps nor leave
                // any BRANCH behind.
                const updatedStepCount = countSteps(updatedTrigger)
                if (originalStepCount !== updatedStepCount) {
                    throw new Error(`Step count mismatch for flow ${id}: original=${originalStepCount}, updated=${updatedStepCount}`)
                }
                if (hasBranchType(updatedTrigger)) {
                    throw new Error(`Flow ${id} still contains BRANCH type after migration`)
                }
                await queryRunner.query(
                    'UPDATE flow_version SET trigger = $1, "schemaVersion" = $2 WHERE id = $3',
                    [JSON.stringify(updatedTrigger), '1', flowVersion[0].id],
                )
                updatedFlows++
            }
        }
        log.info({
            name: 'SwitchToRouter1731019013340: up',
            updatedFlows,
        })
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        throw new Error('SwitchToRouter1731019013340: down - no rollback supported')
    }
}
/**
 * Counts a step plus every step reachable from it: router children, legacy
 * branch arms, loop bodies, and the chain of nextAction siblings.
 * Tolerates null/undefined entries (router children may hold null).
 */
const countSteps = (step: Step | undefined): number => {
    if (step == null) {
        return 0
    }
    // Steps nested *inside* this step, depending on its kind.
    let nested = 0
    if (step.type === 'ROUTER') {
        for (const child of step.children) {
            nested += countSteps(child)
        }
    }
    else if (step.type === 'BRANCH') {
        nested = countSteps(step.onSuccessAction) + countSteps(step.onFailureAction)
    }
    else if (step.type === 'LOOP_ON_ITEMS') {
        nested = countSteps(step.firstLoopAction)
    }
    // The step itself, its nested steps, then its following siblings.
    return 1 + nested + countSteps(step.nextAction)
}
/**
 * Returns true when the given step, or anything reachable from it (router
 * children, loop bodies, nextAction chain), is still a legacy BRANCH step.
 */
const hasBranchType = (step: Step | undefined): boolean => {
    if (!step) {
        return false
    }
    if (step.type === 'BRANCH') {
        return true
    }
    // Look inside container steps, then along the sibling chain.
    const nestedHasBranch =
        step.type === 'ROUTER' ? step.children.some(child => hasBranchType(child))
            : step.type === 'LOOP_ON_ITEMS' ? hasBranchType(step.firstLoopAction)
                : false
    return nestedHasBranch || hasBranchType(step.nextAction)
}
/**
 * Returns a deep copy of the step tree in which `updater` has been applied to
 * every legacy BRANCH step (children first, then the branch itself). Other
 * step kinds are traversed but never handed to the updater. The input tree is
 * never mutated.
 */
const traverseAndUpdateSubFlow = (
    updater: (s: Step) => void,
    root: Step | undefined,
): Step | undefined => {
    if (!root) {
        return undefined
    }
    // Work on a structural copy so the caller's object graph stays untouched.
    const copy = JSON.parse(JSON.stringify(root))
    const recurse = (child: Step | undefined): Step | undefined =>
        child ? traverseAndUpdateSubFlow(updater, child) : undefined
    switch (copy.type) {
        case 'ROUTER': {
            // Router children may legitimately be null; preserve those slots.
            const children: (Step | null)[] = []
            for (const branch of copy.children) {
                children.push(branch ? (recurse(branch) ?? null) : null)
            }
            copy.children = children
            break
        }
        case 'BRANCH':
            copy.onSuccessAction = recurse(copy.onSuccessAction)
            copy.onFailureAction = recurse(copy.onFailureAction)
            // Arms are rewritten before the branch itself is converted.
            updater(copy)
            break
        case 'LOOP_ON_ITEMS':
            copy.firstLoopAction = recurse(copy.firstLoopAction)
            break
        default:
            break
    }
    copy.nextAction = recurse(copy.nextAction)
    return copy
}
/**
 * In-place updater that rewrites a legacy BRANCH step into the equivalent
 * two-branch ROUTER step. Steps of any other type are left untouched.
 */
const convertBranchToRouter = (step: any): void => {
    if (step.type !== 'BRANCH') {
        return
    }
    // Capture the old conditions before settings are replaced.
    const successBranch = {
        conditions: step.settings.conditions,
        branchType: 'CONDITION',
        branchName: 'On Success',
    }
    const fallbackBranch = {
        branchType: 'FALLBACK',
        branchName: 'Otherwise',
    }
    step.type = 'ROUTER'
    step.settings = {
        branches: [successBranch, fallbackBranch],
        executionType: 'EXECUTE_FIRST_MATCH',
        inputUiInfo: {
            sampleDataFileId: undefined,
            sampleDataInputFileId: undefined,
            lastTestDate: undefined,
            customizedInputs: undefined,
            currentSelectedData: undefined,
        },
    }
    // The old success/failure arms become the router's ordered children.
    step.children = [step.onSuccessAction, step.onFailureAction]
    step.onSuccessAction = undefined
    step.onFailureAction = undefined
}
// Local structural copies of the flow step model as it looked when this
// migration shipped, so the migration stays valid if shared models evolve.
type StepType =
    | 'BRANCH'
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'MISSING'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'WEBHOOK'
    | 'ROUTER'

// Fields shared by every step; nextAction links to the next sibling step.
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}

// Legacy two-way branch step; this migration rewrites it into a ROUTER.
type BranchStep = BaseStep<'BRANCH'> & {
    onFailureAction?: Step
    onSuccessAction?: Step
    settings: {
        conditions: unknown[]
    }
}

// Multi-branch router step that BRANCH steps are converted into.
type RouterStep = BaseStep<'ROUTER'> & {
    children: Step[]
    settings: {
        branches: {
            conditions?: unknown[]
            branchType: 'CONDITION' | 'FALLBACK'
            branchName: string
        }[]
        executionType: 'EXECUTE_FIRST_MATCH'
        inputUiInfo: {
            sampleDataFileId?: string
            sampleDataInputFileId?: string
            lastTestDate?: string
            customizedInputs?: Record<string, unknown>
            currentSelectedData?: unknown
        }
    }
}

// Loop step; firstLoopAction is the head of the loop body chain.
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}

// Piece action/trigger step; settings content is piece-specific.
type PieceStep = BaseStep<'PIECE' | 'PIECE_TRIGGER'> & {
    settings: Record<string, unknown>
}

// Step kinds with no payload relevant to this migration.
type GenericStep = BaseStep<'CODE' | 'EMPTY' | 'MISSING' | 'WEBHOOK'>

type Step = BranchStep | LoopOnItemsStep | GenericStep | PieceStep | RouterStep

View File

@@ -0,0 +1,196 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class ChangeExternalIdsForTables1747346473001 implements MigrationInterface {
name = 'ChangeExternalIdsForTables1747346473001'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
UPDATE "table" SET "externalId" = "id"
`)
const flowVersionIds = await queryRunner.query(
'SELECT id FROM "flow_version" WHERE CAST("trigger" AS TEXT) LIKE \'%@activepieces/piece-tables%\'',
)
log.info(
'ChangeExternalIdsForTables1747346473001: found ' +
flowVersionIds.length +
' versions',
)
let updatedFlows = 0
for (const { id } of flowVersionIds) {
// Fetch FlowVersion record by ID
const flowVersion = await queryRunner.query(
'SELECT * FROM flow_version WHERE id = $1',
[id],
)
if (flowVersion.length > 0) {
const trigger = typeof flowVersion[0].trigger === 'string' ? JSON.parse(flowVersion[0].trigger) : flowVersion[0].trigger
const updated = traverseAndUpdateSubFlow(
updateVersionOfTablesStep,
trigger,
)
if (updated) {
await queryRunner.connection.getRepository('flow_version').update(flowVersion[0].id, { trigger: updated })
}
}
updatedFlows++
if (updatedFlows % 100 === 0) {
log.info(
'ChangeExternalIdsForTables1747346473001: ' +
updatedFlows +
' flows updated',
)
}
}
log.info('ChangeExternalIdsForTables1747346473001: up')
}
public async down(queryRunner: QueryRunner): Promise<void> {
const flowVersionIds = await queryRunner.query(
'SELECT id FROM "flow_version" WHERE CAST("trigger" AS TEXT) LIKE \'%@activepieces/piece-tables%\'',
)
log.info(
'ChangeExternalIdsForTables1747346473001 down: found ' +
flowVersionIds.length +
' versions',
)
let updatedFlows = 0
for (const { id } of flowVersionIds) {
// Fetch FlowVersion record by ID
const flowVersion = await queryRunner.query(
'SELECT * FROM flow_version WHERE id = $1',
[id],
)
if (flowVersion.length > 0) {
const trigger = typeof flowVersion[0].trigger === 'string' ? JSON.parse(flowVersion[0].trigger) : flowVersion[0].trigger
const updated = traverseAndUpdateSubFlow(
downgradeVersionOfTablesStep,
trigger,
)
if (updated) {
await queryRunner.connection.getRepository('flow_version').update(flowVersion[0].id, { trigger: updated })
}
}
updatedFlows++
if (updatedFlows % 100 === 0) {
log.info(
'ChangeExternalIdsForTables1747346473001 down: ' +
updatedFlows +
' flows updated',
)
}
}
log.info('ChangeExternalIdsForTables1747346473001: down')
}
}
/**
 * Walks a flow step tree depth-first and applies `updater` to every step:
 * the node itself first, then router children, loop bodies, and the
 * nextAction chain. Works in place (no cloning) and returns the updated root.
 */
const traverseAndUpdateSubFlow = (
    updater: (s: Step) => Step,
    root: Step | undefined,
): Step | undefined => {
    if (!root) {
        return undefined
    }
    // The updater runs on the node before its subtrees are visited.
    const current = updater(root)
    const recurse = (child: Step | undefined): Step | undefined =>
        child ? traverseAndUpdateSubFlow(updater, child) : undefined
    if (current.type === 'ROUTER') {
        // Null router children are preserved as-is.
        current.children = current.children.map(branch =>
            branch ? (traverseAndUpdateSubFlow(updater, branch) ?? null) : null)
    }
    else if (current.type === 'LOOP_ON_ITEMS') {
        current.firstLoopAction = recurse(current.firstLoopAction)
    }
    current.nextAction = recurse(current.nextAction)
    return current
}
/**
 * Step updater: pins any step using the Tables piece to version 0.1.0.
 * Mutates the step in place and returns it; other steps pass through.
 */
const updateVersionOfTablesStep = (
    step: Step,
): Step => {
    const isPieceStep = step.type === 'PIECE' || step.type === 'PIECE_TRIGGER'
    if (isPieceStep) {
        const pieceStep = step as PieceStep
        if (pieceStep.settings.pieceName === '@activepieces/piece-tables') {
            pieceStep.settings.pieceVersion = '0.1.0'
        }
    }
    return step
}
/**
 * Step updater used by down(): restores the Tables piece to version 0.0.6.
 * Mutates the step in place and returns it; other steps pass through.
 */
const downgradeVersionOfTablesStep = (
    step: Step,
): Step => {
    const isPieceStep = step.type === 'PIECE' || step.type === 'PIECE_TRIGGER'
    if (isPieceStep) {
        const pieceStep = step as PieceStep
        if (pieceStep.settings.pieceName === '@activepieces/piece-tables') {
            pieceStep.settings.pieceVersion = '0.0.6'
        }
    }
    return step
}
// Local structural copies of the flow step model, kept in the migration so it
// stays valid even if the shared flow models change later.
type StepType =
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'ROUTER'

// Fields shared by every step; nextAction links to the next sibling step.
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}

// Router step; children slots may be null for empty branches.
type RouterStep = BaseStep<'ROUTER'> & {
    children: (Step | null)[]
    settings: {
        branches: {
            conditions?: unknown[]
            branchType: 'CONDITION' | 'FALLBACK'
            branchName: string
        }[]
        executionType: 'EXECUTE_FIRST_MATCH'
        inputUiInfo: {
            sampleDataFileId?: string
            sampleDataInputFileId?: string
            lastTestDate?: string
            customizedInputs?: Record<string, unknown>
            currentSelectedData?: unknown
        }
    }
}

// Loop step; firstLoopAction is the head of the loop body chain.
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}

// Piece action/trigger step; settings carry pieceName/pieceVersion among others.
type PieceStep = BaseStep<'PIECE' | 'PIECE_TRIGGER'> & {
    settings: Record<string, unknown>
}

// Step kinds with no payload relevant to this migration.
type GenericStep = BaseStep<'CODE' | 'EMPTY'>

type Step = LoopOnItemsStep | GenericStep | PieceStep | RouterStep

View File

@@ -0,0 +1,175 @@
import { gt } from 'semver'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()

/**
 * Repair migration: re-pins every piece step in flow versions referencing the
 * Tables piece to the latest known piece version. Only needed on databases
 * where the buggy ChangeExternalIdsForTables1747346473000 migration ran.
 */
export class UpgradePieceVersionsToLatest1748253670449 implements MigrationInterface {
    name = 'UpgradePieceVersionsToLatest1748253670449'

    public async up(queryRunner: QueryRunner): Promise<void> {
        // Skip when the faulty predecessor migration was never applied.
        const migrationAlreadyRan = await queryRunner.query(
            'SELECT * FROM "migrations" WHERE "name" = \'ChangeExternalIdsForTables1747346473000\'',
        )
        if (migrationAlreadyRan.length === 0) {
            // Fixed log message: the original claimed the migration "already ran"
            // on exactly the path where its row was absent.
            log.info('UpgradePieceVersionsToLatest1748253670449: skipped, ChangeExternalIdsForTables1747346473000 did not run')
            return
        }
        const flowVersionIds = await queryRunner.query(
            'SELECT id FROM "flow_version" WHERE CAST("trigger" AS TEXT) LIKE \'%@activepieces/piece-tables%\'',
        )
        const allPieceVersions = await queryRunner.query('SELECT name, version FROM piece_metadata')
        // Create a map of piece names to their latest versions
        const pieceNameToLatestVersion = new Map<string, string>()
        for (const piece of allPieceVersions) {
            const currentLatest = pieceNameToLatestVersion.get(piece.name)
            if (!currentLatest || gt(piece.version, currentLatest)) {
                pieceNameToLatestVersion.set(piece.name, piece.version)
            }
        }
        log.info(
            'UpgradePieceVersionsToLatest1748253670449: found ' +
            flowVersionIds.length +
            ' versions',
        )
        let updatedFlows = 0
        for (const { id } of flowVersionIds) {
            // Fetch FlowVersion record by ID
            const flowVersion = await queryRunner.query(
                'SELECT * FROM flow_version WHERE id = $1',
                [id],
            )
            if (flowVersion.length > 0) {
                // jsonb may arrive as a string or an object depending on driver.
                const trigger = typeof flowVersion[0].trigger === 'string' ? JSON.parse(flowVersion[0].trigger) : flowVersion[0].trigger
                const updated = traverseAndUpdateSubFlow(
                    (step) => updateVersionOfPieceStep(step, pieceNameToLatestVersion),
                    trigger,
                )
                if (updated) {
                    await queryRunner.connection.getRepository('flow_version').update(flowVersion[0].id, { trigger: updated })
                }
            }
            updatedFlows++
            if (updatedFlows % 100 === 0) {
                log.info(
                    'UpgradePieceVersionsToLatest1748253670449: ' +
                    updatedFlows +
                    ' flows updated',
                )
            }
        }
        log.info('UpgradePieceVersionsToLatest1748253670449: up')
    }

    public async down(_queryRunner: QueryRunner): Promise<void> {
        // Irreversible: versions were bumped in place and originals not recorded.
    }
}
/**
 * Applies `updater` to every step in the tree, depth-first: the node itself,
 * then router children, loop bodies, and the nextAction chain. Operates in
 * place (no cloning) and returns the updated root.
 */
const traverseAndUpdateSubFlow = (
    updater: (s: Step) => Step,
    root: Step | undefined,
): Step | undefined => {
    if (!root) {
        return undefined
    }
    // The updater runs on the node before its subtrees are visited.
    const current = updater(root)
    const recurse = (child: Step | undefined): Step | undefined =>
        child ? traverseAndUpdateSubFlow(updater, child) : undefined
    if (current.type === 'ROUTER') {
        // Null router children are preserved as-is.
        current.children = current.children.map(branch =>
            branch ? (traverseAndUpdateSubFlow(updater, branch) ?? null) : null)
    }
    else if (current.type === 'LOOP_ON_ITEMS') {
        current.firstLoopAction = recurse(current.firstLoopAction)
    }
    current.nextAction = recurse(current.nextAction)
    return current
}
/**
 * Step updater: rewrites a piece step's version to the latest version found
 * in `pieceNameToLatestVersion`. Non-piece steps pass through unchanged.
 * Throws when the piece has no known version, aborting the migration.
 */
const updateVersionOfPieceStep = (
    step: Step,
    pieceNameToLatestVersion: Map<string, string>,
): Step => {
    if (step.type !== 'PIECE' && step.type !== 'PIECE_TRIGGER') {
        return step
    }
    const pieceStep = step as PieceStep
    const latestVersion = pieceNameToLatestVersion.get(pieceStep.settings.pieceName as string)
    if (!latestVersion) {
        throw new Error(`Piece ${pieceStep.settings.pieceName} not found`)
    }
    pieceStep.settings.pieceVersion = latestVersion
    return step
}
// Local structural copies of the flow step model, kept in the migration so it
// stays valid even if the shared flow models change later.
type StepType =
    | 'CODE'
    | 'EMPTY'
    | 'LOOP_ON_ITEMS'
    | 'PIECE'
    | 'PIECE_TRIGGER'
    | 'ROUTER'

// Fields shared by every step; nextAction links to the next sibling step.
type BaseStep<T extends StepType> = {
    type: T
    nextAction?: Step
}

// Router step; children slots may be null for empty branches.
type RouterStep = BaseStep<'ROUTER'> & {
    children: (Step | null)[]
    settings: {
        branches: {
            conditions?: unknown[]
            branchType: 'CONDITION' | 'FALLBACK'
            branchName: string
        }[]
        executionType: 'EXECUTE_FIRST_MATCH'
        inputUiInfo: {
            sampleDataFileId?: string
            sampleDataInputFileId?: string
            lastTestDate?: string
            customizedInputs?: Record<string, unknown>
            currentSelectedData?: unknown
        }
    }
}

// Loop step; firstLoopAction is the head of the loop body chain.
type LoopOnItemsStep = BaseStep<'LOOP_ON_ITEMS'> & {
    firstLoopAction?: Step
}

// Piece action/trigger step; settings carry pieceName/pieceVersion among others.
type PieceStep = BaseStep<'PIECE' | 'PIECE_TRIGGER'> & {
    settings: Record<string, unknown>
}

// Step kinds with no payload relevant to this migration.
type GenericStep = BaseStep<'CODE' | 'EMPTY'>

type Step = LoopOnItemsStep | GenericStep | PieceStep | RouterStep

View File

@@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
/**
 * Deprecates the approval piece by capping the platform release it supports,
 * which hides it from installations newer than 0.57.9.
 */
export class DeprecateApproval1748648340742 implements MigrationInterface {
    name = 'DeprecateApproval1748648340742'

    public async up(queryRunner: QueryRunner): Promise<void> {
        const log = system.globalLogger()
        log.info({
            name: this.name,
        }, 'up')
        await queryRunner.query(`
            UPDATE piece_metadata SET "maximumSupportedRelease" = '0.57.9' WHERE "name" = '@activepieces/piece-approval'
        `)
        // Fixed copy-paste bug: this completion log previously said 'down'
        // even though it runs at the end of up().
        log.info({
            name: this.name,
        }, 'up finished')
    }

    public async down(_queryRunner: QueryRunner): Promise<void> {
        // do nothing
    }
}

View File

@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/**
 * Replaces the two legacy app_connection indexes with a single composite
 * ("platformId", "externalId") index; down() restores the originals.
 */
export class RemoveProjectIdFromIndex1750712746125 implements MigrationInterface {
    name = 'RemoveProjectIdFromIndex1750712746125'

    public async up(queryRunner: QueryRunner): Promise<void> {
        const statements = [
            'DROP INDEX "idx_app_connection_project_ids_and_external_id"',
            'DROP INDEX "idx_app_connection_platform_id"',
            'CREATE INDEX "idx_app_connection_platform_id_and_external_id" ON "app_connection" ("platformId", "externalId")',
        ]
        for (const statement of statements) {
            await queryRunner.query(statement)
        }
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        const statements = [
            'DROP INDEX "idx_app_connection_platform_id_and_external_id"',
            'CREATE INDEX "idx_app_connection_platform_id" ON "app_connection" ("platformId")',
            'CREATE INDEX "idx_app_connection_project_ids_and_external_id" ON "app_connection" ("projectIds", "externalId")',
        ]
        for (const statement of statements) {
            await queryRunner.query(statement)
        }
    }
}

View File

@@ -0,0 +1,90 @@
import { apId } from '@activepieces/shared'
import { gt } from 'semver'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
// Shape of the legacy mcp_tool.pieceMetadata payload, where a single row
// bundled several actions of the same piece.
type OldMcpPieceToolData = {
    pieceName: string
    pieceVersion: string
    actionNames: string[]
    logoUrl: string
    connectionExternalId: string | null
}

// Mirrors the mcp_tool "type" column values at the time of this migration.
enum McpToolType {
    PIECE = 'PIECE',
    FLOW = 'FLOW',
}

// Subset of a piece action's metadata needed to resolve display names.
type PieceMetadata = {
    name: string
    displayName: string
}
/**
 * Splits legacy mcp_tool rows that bundled several piece actions in a single
 * "pieceMetadata" payload into one row per action, resolving each action's
 * display name from the latest known piece version, then deletes the legacy
 * row.
 */
export class SplitUpPieceMetadataIntoTools1752004202722 implements MigrationInterface {
    name = 'SplitUpPieceMetadataIntoTools1752004202722'

    public async up(queryRunner: QueryRunner): Promise<void> {
        system.globalLogger().info({
            name: this.name,
        }, 'Starting migration')
        const mcpTools = await queryRunner.query(`
            SELECT * FROM "mcp_tool" WHERE "pieceMetadata" IS NOT NULL
        `)
        const allPieceVersions = await queryRunner.query('SELECT name, version, actions FROM piece_metadata')
        // Maps "<pieceName>:<actionName>" -> display name, taken from the
        // highest piece version processed so far.
        const pieceNameToDisplayName = new Map<string, string>()
        const pieceNameToLatestVersion = new Map<string, string>()
        for (const piece of allPieceVersions) {
            const currentLatest = pieceNameToLatestVersion.get(piece.name)
            if (!currentLatest || gt(piece.version, currentLatest)) {
                pieceNameToLatestVersion.set(piece.name, piece.version)
                // NOTE(review): "actions" looks like a record keyed by action
                // name (Object.values also works if it is an array) — confirm.
                Object.values(piece.actions as PieceMetadata[]).forEach((action: PieceMetadata) => {
                    const actionName = `${piece.name}:${action.name}`
                    pieceNameToDisplayName.set(actionName, action.displayName)
                })
            }
        }
        for (const mcpTool of mcpTools) {
            const { pieceMetadata: pieceMetadataString, ...rest } = mcpTool
            // jsonb may arrive as a string or an object depending on the driver.
            const pieceMetadata = typeof pieceMetadataString === 'string'
                ? JSON.parse(pieceMetadataString) as OldMcpPieceToolData
                : pieceMetadataString as OldMcpPieceToolData
            const { actionNames, ...restPieceMetadata } = pieceMetadata
            // Insert one new row per action previously bundled in this tool.
            for (const actionName of actionNames) {
                const pieceNameWithActionName = `${pieceMetadata.pieceName}:${actionName}`
                const actionDisplayName = pieceNameToDisplayName.get(pieceNameWithActionName) ?? actionName
                const tool = {
                    ...rest,
                    pieceMetadata: {
                        ...restPieceMetadata,
                        actionName,
                        actionDisplayName,
                    },
                }
                const toolId = apId()
                await queryRunner.query(`
                    INSERT INTO "mcp_tool"
                    ("id", "mcpId", "type", "pieceMetadata", "flowId", "created", "updated")
                    VALUES ($1, $2, $3, $4, $5, $6, $7)
                `, [toolId, tool.mcpId, McpToolType.PIECE, JSON.stringify(tool.pieceMetadata), tool.flowId, tool.created, tool.updated])
            }
            // Remove the legacy bundled row once its replacements exist.
            await queryRunner.query(`
                DELETE FROM "mcp_tool" WHERE "id" = $1
            `, [mcpTool.id])
        }
        system.globalLogger().info({
            name: this.name,
        }, 'finished')
    }

    public async down(_queryRunner: QueryRunner): Promise<void> {
        // no down
    }
}

View File

@@ -0,0 +1,18 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/**
 * Adds a composite index to speed up issue listings filtered by project and
 * status and ordered by last update.
 */
export class AddIndexToIssues1756775080449 implements MigrationInterface {
    name = 'AddIndexToIssues1756775080449'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'CREATE INDEX "idx_issue_projectId_status_updated" ON "issue" ("projectId", "status", "updated")',
        )
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'DROP INDEX "idx_issue_projectId_status_updated"',
        )
    }
}

View File

@@ -0,0 +1,24 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/**
 * Adds lookup indexes on trigger_source for its flowId and projectId columns.
 */
export class AddFlowIndexToTriggerSource1757555419075 implements MigrationInterface {
    name = 'AddFlowIndexToTriggerSource1757555419075'

    public async up(queryRunner: QueryRunner): Promise<void> {
        for (const statement of [
            'CREATE INDEX "idx_trigger_flow_id" ON "trigger_source" ("flowId")',
            'CREATE INDEX "idx_trigger_project_id" ON "trigger_source" ("projectId")',
        ]) {
            await queryRunner.query(statement)
        }
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        // Dropped in reverse creation order.
        for (const statement of [
            'DROP INDEX "idx_trigger_project_id"',
            'DROP INDEX "idx_trigger_flow_id"',
        ]) {
            await queryRunner.query(statement)
        }
    }
}

View File

@@ -0,0 +1,18 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
/**
 * Adds a composite index used when routing incoming app events to their
 * registered (appName, identifierValue, event) listeners.
 */
export class AddIndexForAppEvents1759392852559 implements MigrationInterface {
    name = 'AddIndexForAppEvents1759392852559'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'CREATE INDEX "idx_app_event_appName_identifier_event" ON "app_event_routing" ("appName", "identifierValue", "event")',
        )
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'DROP INDEX "idx_app_event_appName_identifier_event"',
        )
    }
}

View File

@@ -0,0 +1,124 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()

/**
 * Backfills the then-new "projectId" column on flow rows, on files referenced
 * by CODE-step artifacts, and on flow-run log files, by walking each flow's
 * owning collection.
 */
export class FlowAndFileProjectId1674788714498 implements MigrationInterface {
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('FlowAndFileProjectId1674788714498: started')
        // Fresh databases never had the legacy schema; skip when "flow" is absent.
        const flowTableExistsQueryResponse: { exists: boolean }[] =
            await queryRunner.query(
                `SELECT exists (
                    SELECT FROM information_schema.tables
                    WHERE table_schema = 'public'
                    AND table_name = 'flow'
                )`,
            )
        const flowTableNotExist =
            flowTableExistsQueryResponse &&
            flowTableExistsQueryResponse.length > 0 &&
            !flowTableExistsQueryResponse[0].exists
        if (flowTableNotExist) {
            log.info('FlowAndFileProjectId1674788714498: skipped')
            return
        }
        // Pass 1: each flow inherits the projectId of its collection.
        const flows = await queryRunner.query('SELECT * FROM flow')
        for (let i = 0; i < flows.length; ++i) {
            const currentFlow = flows[i]
            const collection = await queryRunner.query(
                'SELECT * FROM collection WHERE id = $1',
                [currentFlow.collectionId],
            )
            currentFlow.projectId = collection[0].projectId
            await queryRunner.query(
                'UPDATE flow SET "projectId" = $1 WHERE id = $2',
                [currentFlow.projectId, currentFlow.id],
            )
        }
        // Pass 2: files referenced by CODE-step artifacts inherit the flow's
        // projectId. Dangling file ids are logged and skipped.
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (let i = 0; i < flowVersions.length; ++i) {
            const currentFlowVersion = flowVersions[i]
            const currentFlow = await queryRunner.query(
                'SELECT * FROM flow WHERE id = $1',
                [currentFlowVersion.flowId],
            )
            let action = currentFlowVersion.trigger?.nextAction
            while (action !== undefined && action !== null) {
                if (action.type === 'CODE') {
                    const packagedFileId = action.settings.artifactPackagedId
                    if (packagedFileId !== undefined && packagedFileId !== null) {
                        const packagedFileToUpdate = await queryRunner.query(
                            'SELECT * FROM file WHERE id = $1',
                            [packagedFileId],
                        )
                        if (packagedFileToUpdate.length === 0) {
                            log.error(
                                'Found an old packaged artifact file id without file ' +
                                packagedFileId +
                                ' and for flow ' +
                                currentFlow[0].id,
                            )
                        }
                        else {
                            packagedFileToUpdate[0].projectId = currentFlow[0].projectId
                            await queryRunner.query(
                                'UPDATE file SET "projectId" = $1 WHERE id = $2',
                                [packagedFileToUpdate[0].projectId, packagedFileId],
                            )
                        }
                    }
                    const sourceFileId = action.settings.artifactSourceId
                    if (sourceFileId !== undefined && sourceFileId !== null) {
                        const sourceFileToUpdate = await queryRunner.query(
                            'SELECT * FROM file WHERE id = $1',
                            [sourceFileId],
                        )
                        if (sourceFileToUpdate.length === 0) {
                            log.error(
                                'Found an old source artifact file id without file ' +
                                sourceFileId +
                                ' and for flow ' +
                                currentFlow[0].id,
                            )
                        }
                        else {
                            sourceFileToUpdate[0].projectId = currentFlow[0].projectId
                            await queryRunner.query(
                                'UPDATE file SET "projectId" = $1 WHERE id = $2',
                                [sourceFileToUpdate[0].projectId, sourceFileId],
                            )
                        }
                    }
                }
                action = action.nextAction
            }
        }
        // Pass 3: flow-run log files inherit the run's projectId.
        const flowRuns = await queryRunner.query('SELECT * FROM flow_run')
        for (let i = 0; i < flowRuns.length; ++i) {
            const currentFlowRun = flowRuns[i]
            if (
                currentFlowRun.logsFileId !== undefined &&
                currentFlowRun.logsFileId !== null
            ) {
                const logFlowRunFile = await queryRunner.query(
                    'SELECT * FROM file WHERE id = $1',
                    [currentFlowRun.logsFileId],
                )
                if (logFlowRunFile.length === 0) {
                    // Fixed: previously indexed [0] unconditionally, so a dangling
                    // logsFileId crashed the whole migration; now handled like the
                    // artifact-file lookups above.
                    log.error(
                        'Found an old logs file id without file ' +
                        currentFlowRun.logsFileId +
                        ' and for flow run ' +
                        currentFlowRun.id,
                    )
                }
                else {
                    logFlowRunFile[0].projectId = currentFlowRun.projectId
                    await queryRunner.query(
                        'UPDATE file SET "projectId" = $1 WHERE id = $2',
                        [logFlowRunFile[0].projectId, logFlowRunFile[0].id],
                    )
                }
            }
        }
    }

    // eslint-disable-next-line @typescript-eslint/no-empty-function
    public async down(): Promise<void> {}
}

View File

@@ -0,0 +1,215 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class initializeSchema1676238396411 implements MigrationInterface {
name = 'initializeSchema1676238396411'
public async up(queryRunner: QueryRunner): Promise<void> {
log.info('initializeSchema1676238396411: started')
const userTableExistsQueryResponse: { exists: boolean }[] =
await queryRunner.query(
`SELECT exists (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'user'
)`,
)
const userTableExists =
userTableExistsQueryResponse &&
userTableExistsQueryResponse.length > 0 &&
userTableExistsQueryResponse[0].exists
if (userTableExists) {
log.info('initializeSchema1676238396411: skipped')
return
}
await queryRunner.query(
'CREATE TABLE "collection" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, CONSTRAINT "PK_ad3f485bbc99d875491f44d7c85" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_collection_project_id" ON "collection" ("projectId") ',
)
await queryRunner.query(
'CREATE TABLE "collection_version" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "displayName" character varying NOT NULL, "collectionId" character varying(21) NOT NULL, "configs" jsonb NOT NULL, "state" character varying NOT NULL, CONSTRAINT "PK_76c769e96c091b478e3c338a0ac" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_collection_version_collection_id" ON "collection_version" ("collectionId") ',
)
await queryRunner.query(
'CREATE TABLE "file" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21), "data" bytea NOT NULL, CONSTRAINT "PK_36b46d232307066b3a2c9ea3a1d" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE TABLE "flag" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "value" jsonb NOT NULL, CONSTRAINT "PK_17b74257294fdfd221178a132d4" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE TABLE "flow" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21), "collectionId" character varying(21) NOT NULL, CONSTRAINT "PK_6c2ad4a3e86394cd9bb7a80a228" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_flow_collection_id" ON "flow" ("collectionId") ',
)
await queryRunner.query(
'CREATE TABLE "flow_version" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "flowId" character varying(21) NOT NULL, "displayName" character varying NOT NULL, "trigger" jsonb, "valid" boolean NOT NULL, "state" character varying NOT NULL, CONSTRAINT "PK_2f20a52dcddf98d3fafe621a9f5" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_flow_version_flow_id" ON "flow_version" ("flowId") ',
)
await queryRunner.query(
'CREATE TABLE "instance" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "collectionId" character varying(21) NOT NULL, "collectionVersionId" character varying(21) NOT NULL, "flowIdToVersionId" jsonb NOT NULL, "status" character varying NOT NULL, CONSTRAINT "REL_183c020130aa172f58c6a0c647" UNIQUE ("collectionVersionId"), CONSTRAINT "REL_6b75536fbdf7d8dc967fc350ff" UNIQUE ("collectionId"), CONSTRAINT "PK_eaf60e4a0c399c9935413e06474" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_instance_project_id" ON "instance" ("projectId") ',
)
await queryRunner.query(
'CREATE UNIQUE INDEX "idx_instance_collection_id" ON "instance" ("collectionId") ',
)
await queryRunner.query(
'CREATE TABLE "flow_run" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "flowId" character varying(21) NOT NULL, "collectionId" character varying(21) NOT NULL, "flowVersionId" character varying(21) NOT NULL, "collectionVersionId" character varying(21) NOT NULL, "environment" character varying, "flowDisplayName" character varying NOT NULL, "collectionDisplayName" character varying NOT NULL, "logsFileId" character varying(21), "status" character varying NOT NULL, "startTime" TIMESTAMP WITH TIME ZONE NOT NULL, "finishTime" TIMESTAMP WITH TIME ZONE, CONSTRAINT "PK_858b1dd0d1055c44261ae00d45b" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_run_project_id" ON "flow_run" ("projectId") ',
)
await queryRunner.query(
'CREATE TABLE "project" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "ownerId" character varying(21) NOT NULL, "displayName" character varying NOT NULL, CONSTRAINT "PK_4d68b1358bb5b766d3e78f32f57" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE INDEX "idx_project_owner_id" ON "project" ("ownerId") ',
)
await queryRunner.query(
'CREATE TABLE "store-entry" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "key" character varying NOT NULL, "collectionId" character varying(21) NOT NULL, "value" jsonb NOT NULL, CONSTRAINT "PK_afb44ca7c0b4606b19deb1680d6" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE TABLE "user" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "email" character varying NOT NULL, "firstName" character varying NOT NULL, "lastName" character varying NOT NULL, "password" character varying NOT NULL, "status" character varying NOT NULL, "trackEvents" boolean, "newsLetter" boolean, CONSTRAINT "UQ_e12875dfb3b1d92d7d7c5377e22" UNIQUE ("email"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE TABLE "app_connection" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "name" character varying NOT NULL, "appName" character varying NOT NULL, "projectId" character varying(21) NOT NULL, "value" jsonb NOT NULL, CONSTRAINT "PK_9efa2d6633ecc57cc5adeafa039" PRIMARY KEY ("id"))',
)
await queryRunner.query(
'CREATE UNIQUE INDEX "idx_app_connection_project_id_and_app_name_and_name" ON "app_connection" ("projectId", "appName", "name") ',
)
await queryRunner.query(
'CREATE UNIQUE INDEX "idx_app_connection_project_id_and_name" ON "app_connection" ("projectId", "name") ',
)
await queryRunner.query(
'ALTER TABLE "collection" ADD CONSTRAINT "fk_collection_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "collection_version" ADD CONSTRAINT "fk_collection_version_collection_id" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "file" ADD CONSTRAINT "fk_file_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_collection_id" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow_version" ADD CONSTRAINT "fk_flow_version_flow" FOREIGN KEY ("flowId") REFERENCES "flow"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "instance" ADD CONSTRAINT "fk_instance_collection_version" FOREIGN KEY ("collectionVersionId") REFERENCES "collection_version"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "instance" ADD CONSTRAINT "fk_instance_collection" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_flow_id" FOREIGN KEY ("flowId") REFERENCES "flow"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_collection_id" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_collection_version_id" FOREIGN KEY ("collectionVersionId") REFERENCES "collection_version"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "project" ADD CONSTRAINT "fk_project_owner_id" FOREIGN KEY ("ownerId") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "app_connection" ADD CONSTRAINT "fk_app_connection_app_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
log.info('initializeSchema1676238396411: completed')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'ALTER TABLE "app_connection" DROP CONSTRAINT "fk_app_connection_app_project_id"',
)
await queryRunner.query(
'ALTER TABLE "project" DROP CONSTRAINT "fk_project_owner_id"',
)
await queryRunner.query(
'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_collection_version_id"',
)
await queryRunner.query(
'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_collection_id"',
)
await queryRunner.query(
'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_flow_id"',
)
await queryRunner.query(
'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_project_id"',
)
await queryRunner.query(
'ALTER TABLE "instance" DROP CONSTRAINT "fk_instance_collection"',
)
await queryRunner.query(
'ALTER TABLE "instance" DROP CONSTRAINT "fk_instance_collection_version"',
)
await queryRunner.query(
'ALTER TABLE "flow_version" DROP CONSTRAINT "fk_flow_version_flow"',
)
await queryRunner.query(
'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_collection_id"',
)
await queryRunner.query(
'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_project_id"',
)
await queryRunner.query(
'ALTER TABLE "file" DROP CONSTRAINT "fk_file_project_id"',
)
await queryRunner.query(
'ALTER TABLE "collection_version" DROP CONSTRAINT "fk_collection_version_collection_id"',
)
await queryRunner.query(
'ALTER TABLE "collection" DROP CONSTRAINT "fk_collection_project_id"',
)
await queryRunner.query(
'DROP INDEX "idx_app_connection_project_id_and_name"',
)
await queryRunner.query(
'DROP INDEX "idx_app_connection_project_id_and_app_name_and_name"',
)
await queryRunner.query('DROP TABLE "app_connection"')
await queryRunner.query('DROP TABLE "user"')
await queryRunner.query('DROP TABLE "store-entry"')
await queryRunner.query('DROP INDEX "idx_project_owner_id"')
await queryRunner.query('DROP TABLE "project"')
await queryRunner.query('DROP INDEX "idx_run_project_id"')
await queryRunner.query('DROP TABLE "flow_run"')
await queryRunner.query('DROP INDEX "idx_instance_collection_id"')
await queryRunner.query('DROP INDEX "idx_instance_project_id"')
await queryRunner.query('DROP TABLE "instance"')
await queryRunner.query('DROP INDEX "idx_flow_version_flow_id"')
await queryRunner.query('DROP TABLE "flow_version"')
await queryRunner.query('DROP INDEX "idx_flow_collection_id"')
await queryRunner.query('DROP TABLE "flow"')
await queryRunner.query('DROP TABLE "flag"')
await queryRunner.query('DROP TABLE "file"')
await queryRunner.query(
'DROP INDEX "idx_collection_version_collection_id"',
)
await queryRunner.query('DROP TABLE "collection_version"')
await queryRunner.query('DROP INDEX "idx_collection_project_id"')
await queryRunner.query('DROP TABLE "collection"')
}
}

View File

@@ -0,0 +1,40 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { encryptUtils } from '../../../helper/encryption'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class encryptCredentials1676505294811 implements MigrationInterface {
    /**
     * Encrypts the stored `value` of every app connection in place.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('encryptCredentials1676505294811 up: started')
        const connections = await queryRunner.query('SELECT * FROM app_connection')
        for (const currentConnection of connections) {
            currentConnection.value = encryptUtils.encryptObject(currentConnection.value)
            // Parameterized query: the previous string interpolation compared the
            // varchar "id" column against an unquoted literal (invalid SQL) and
            // broke whenever the serialized value contained a single quote.
            await queryRunner.query(
                'UPDATE app_connection SET value = $1 WHERE id = $2',
                [JSON.stringify(currentConnection.value), currentConnection.id],
            )
        }
        log.info('encryptCredentials1676505294811 up: finished')
    }
    /**
     * Decrypts every app connection `value`, logging (and skipping) any row
     * that fails to decrypt so a single bad row does not abort the rollback.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('encryptCredentials1676505294811 down: started')
        const connections = await queryRunner.query('SELECT * FROM app_connection')
        for (const currentConnection of connections) {
            try {
                currentConnection.value = encryptUtils.decryptObject(currentConnection.value)
                await queryRunner.query(
                    'UPDATE app_connection SET value = $1 WHERE id = $2',
                    [JSON.stringify(currentConnection.value), currentConnection.id],
                )
            }
            catch (e) {
                log.error(e)
            }
        }
        log.info('encryptCredentials1676505294811 down: finished')
    }
}

View File

@@ -0,0 +1,82 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class removeStoreAction1676649852890 implements MigrationInterface {
    /**
     * Converts legacy STORAGE steps into equivalent "storage" PIECE steps
     * (operation -> actionName, key/value nested under input).
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running migration removeStoreAction1676649852890')
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        let count = 0
        for (const currentFlowVersion of flowVersions) {
            let action = currentFlowVersion.trigger?.nextAction
            let changed = false
            while (action !== undefined && action !== null) {
                if (action.type === 'STORAGE') {
                    action.type = 'PIECE'
                    const operation = action.settings.operation === 'GET' ? 'get' : 'put'
                    const key = action.settings.key
                    const value = action.settings.value
                    count++
                    action.settings = {
                        pieceName: 'storage',
                        actionName: operation,
                        input: {
                            key,
                            value,
                        },
                        inputUiInfo: {},
                    }
                    changed = true
                }
                action = action.nextAction
            }
            if (changed) {
                // Parameterized query: the old interpolation compared the varchar
                // "id" column against an unquoted literal and broke on quotes
                // inside the serialized trigger JSON.
                await queryRunner.query(
                    'UPDATE flow_version SET trigger = $1 WHERE id = $2',
                    [currentFlowVersion.trigger, currentFlowVersion.id],
                )
            }
        }
        log.info(
            'Finished running migration removeStoreAction1676649852890, changed ' +
                count +
                ' actions',
        )
    }
    /**
     * Reverses `up`: turns "storage" PIECE steps back into STORAGE steps.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const currentFlowVersion of flowVersions) {
            let changed = false
            let action = currentFlowVersion.trigger?.nextAction
            while (action !== undefined && action !== null) {
                if (
                    action.type === 'PIECE' &&
                    action.settings.pieceName === 'storage'
                ) {
                    action.type = 'STORAGE'
                    // Reverse the exact `up` mapping: `up` stores the operation in
                    // settings.actionName and key/value under settings.input. The
                    // previous code read the misspelled `action.setings` (a runtime
                    // TypeError) and the pre-`up` field locations, so the rollback
                    // could never have produced a valid STORAGE step.
                    action.settings = {
                        operation: action.settings.actionName.toUpperCase(),
                        key: action.settings.input.key,
                        value: action.settings.input.value,
                    }
                    changed = true
                }
                action = action.nextAction
            }
            if (changed) {
                await queryRunner.query(
                    'UPDATE flow_version SET trigger = $1 WHERE id = $2',
                    [currentFlowVersion.trigger, currentFlowVersion.id],
                )
            }
        }
    }
}

View File

@@ -0,0 +1,60 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class billing1677286751592 implements MigrationInterface {
    name = 'billing1677286751592'

    /**
     * Creates the billing tables ("project_plan", "project_usage") with their
     * unique indexes and project foreign keys, and drops the obsolete
     * composite unique index on "app_connection".
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running migration billing1677286751592')
        const upStatements = [
            'DROP INDEX "idx_app_connection_project_id_and_app_name_and_name"',
            'CREATE TABLE "project_plan" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "name" character varying NOT NULL, "stripeCustomerId" character varying NOT NULL, "stripeSubscriptionId" character varying NOT NULL, "tasks" integer NOT NULL, "subscriptionStartDatetime" TIMESTAMP WITH TIME ZONE NOT NULL, CONSTRAINT "REL_4f52e89612966d95843e4158bb" UNIQUE ("projectId"), CONSTRAINT "PK_759d33fce71c95de832df935841" PRIMARY KEY ("id"))',
            'CREATE UNIQUE INDEX "idx_plan_project_id" ON "project_plan" ("projectId") ',
            'CREATE UNIQUE INDEX "idx_plan_stripe_customer_id" ON "project_plan" ("stripeCustomerId") ',
            'CREATE TABLE "project_usage" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "consumedTasks" integer NOT NULL, "nextResetDatetime" TIMESTAMP WITH TIME ZONE NOT NULL, CONSTRAINT "REL_c407fc9b2bfb44515af69d575a" UNIQUE ("projectId"), CONSTRAINT "PK_100c1959e9dc487c4cadbf9cb56" PRIMARY KEY ("id"))',
            'CREATE UNIQUE INDEX "idx_project_usage_project_id" ON "project_usage" ("projectId") ',
            'ALTER TABLE "project_plan" ADD CONSTRAINT "fk_project_plan_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
            'ALTER TABLE "project_usage" ADD CONSTRAINT "fk_project_usage_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        ]
        for (const statement of upStatements) {
            await queryRunner.query(statement)
        }
        log.info('Finished migration billing1677286751592')
    }

    /**
     * Drops the billing tables and restores the composite unique index on
     * "app_connection".
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('rolling back migration billing1677286751592')
        const downStatements = [
            'ALTER TABLE "project_usage" DROP CONSTRAINT "fk_project_usage_project_id"',
            'ALTER TABLE "project_plan" DROP CONSTRAINT "fk_project_plan_project_id"',
            'DROP INDEX "idx_project_usage_project_id"',
            'DROP TABLE "project_usage"',
            'DROP INDEX "idx_plan_stripe_customer_id"',
            'DROP INDEX "idx_plan_project_id"',
            'DROP TABLE "project_plan"',
            'CREATE UNIQUE INDEX "idx_app_connection_project_id_and_app_name_and_name" ON "app_connection" ("name", "appName", "projectId") ',
        ]
        for (const statement of downStatements) {
            await queryRunner.query(statement)
        }
        log.info('Finished rolling back billing1677286751592')
    }
}

View File

@@ -0,0 +1,66 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
const FLOW_VERSION_TABLE = 'flow_version'
const PIECE_TYPE = 'PIECE'
export class addVersionToPieceSteps1677521257188 implements MigrationInterface {
    /**
     * Stamps pieceVersion '0.0.0' onto every PIECE step of every flow version
     * and persists the modified trigger chain.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('addVersionToPieceSteps1677521257188, started')
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const flowVersion of flowVersions) {
            let needsPersist = false
            // Walk the linked list of steps rooted at the trigger.
            for (let current = flowVersion.trigger; current; current = current.nextAction) {
                if (current.type === PIECE_TYPE) {
                    current.settings.pieceVersion = '0.0.0'
                    needsPersist = true
                }
            }
            if (needsPersist) {
                await queryRunner.query(
                    `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                    [flowVersion.trigger, flowVersion.id],
                )
            }
        }
        log.info('addVersionToPieceSteps1677521257188, finished')
    }
    /**
     * Removes the pieceVersion field from every PIECE step again.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('addVersionToPieceSteps1677521257188, started')
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const flowVersion of flowVersions) {
            let needsPersist = false
            for (let current = flowVersion.trigger; current; current = current.nextAction) {
                if (current.type === PIECE_TYPE) {
                    delete current.settings.pieceVersion
                    needsPersist = true
                }
            }
            if (needsPersist) {
                await queryRunner.query(
                    `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                    [flowVersion.trigger, flowVersion.id],
                )
            }
        }
        log.info('addVersionToPieceSteps1677521257188, finished')
    }
}

View File

@@ -0,0 +1,63 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
export class productEmbed1677894800372 implements MigrationInterface {
    name = 'productEmbed1677894800372'
    /**
     * Creates the "app_credential" and "connection_key" tables with their
     * indexes and project foreign keys. Skips entirely if "app_credential"
     * already exists (idempotent re-run guard).
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        const log = system.globalLogger()
        const appCredentialExistsQuery: { exists: boolean }[] =
            await queryRunner.query(
                `SELECT exists (
            SELECT FROM information_schema.tables
            WHERE table_schema = 'public'
            AND table_name = 'app_credential'
        )`,
            )
        const appCredentialExists =
            appCredentialExistsQuery &&
            appCredentialExistsQuery.length > 0 &&
            appCredentialExistsQuery[0].exists
        if (appCredentialExists) {
            // Fixed log message: it previously named the wrong migration
            // ('initializeSchema1676238396411'), a copy-paste slip.
            log.info('productEmbed1677894800372: skipped')
            return
        }
        await queryRunner.query(
            'CREATE TABLE "app_credential" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "appName" character varying NOT NULL, "projectId" character varying(21) NOT NULL, "settings" jsonb NOT NULL, CONSTRAINT "PK_62eb102bb75a05d2951796a3b46" PRIMARY KEY ("id"))',
        )
        await queryRunner.query(
            'CREATE UNIQUE INDEX "idx_app_credentials_projectId_appName" ON "app_credential" ("appName", "projectId") ',
        )
        await queryRunner.query(
            'CREATE TABLE "connection_key" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "settings" jsonb NOT NULL, CONSTRAINT "PK_4dcf1d9ae4ba5eb261a6c775ad2" PRIMARY KEY ("id"))',
        )
        await queryRunner.query(
            'CREATE INDEX "idx_connection_key_project_id" ON "connection_key" ("projectId") ',
        )
        await queryRunner.query(
            'ALTER TABLE "app_credential" ADD CONSTRAINT "FK_d82bfb4c7432a69dc2419083a0e" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE NO ACTION ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "connection_key" ADD CONSTRAINT "FK_03177dc6779e6e147866d43c050" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE NO ACTION ON UPDATE NO ACTION',
        )
    }
    /**
     * Reverses `up`. Constraints are dropped FIRST: the previous version
     * dropped the tables and then tried to ALTER them to drop their foreign
     * keys, which fails because the tables no longer exist.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'ALTER TABLE "connection_key" DROP CONSTRAINT "FK_03177dc6779e6e147866d43c050"',
        )
        await queryRunner.query(
            'ALTER TABLE "app_credential" DROP CONSTRAINT "FK_d82bfb4c7432a69dc2419083a0e"',
        )
        await queryRunner.query(
            'DROP INDEX "idx_connection_key_project_id"',
        )
        await queryRunner.query('DROP TABLE "connection_key"')
        await queryRunner.query(
            'DROP INDEX "idx_app_credentials_projectId_appName"',
        )
        await queryRunner.query('DROP TABLE "app_credential"')
    }
}

View File

@@ -0,0 +1,34 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class addEventRouting1678382946390 implements MigrationInterface {
    name = 'addEventRouting1678382946390'

    /**
     * Creates the "app_event_routing" table plus its flow index and the
     * composite unique index used for event dispatch lookups.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running migration addEventRouting1678382946390')
        const createStatements = [
            'CREATE TABLE "app_event_routing" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "appName" character varying NOT NULL, "projectId" character varying(21) NOT NULL, "flowId" character varying(21) NOT NULL, "identifierValue" character varying NOT NULL, "event" character varying NOT NULL, CONSTRAINT "PK_2107df2b2faf9d50435f9d5acd7" PRIMARY KEY ("id"))',
            'CREATE INDEX "idx_app_event_routing_flow_id" ON "app_event_routing" ("flowId") ',
            'CREATE UNIQUE INDEX "idx_app_event_project_id_appName_identifier_value_event" ON "app_event_routing" ("appName", "projectId", "identifierValue", "event") ',
        ]
        for (const statement of createStatements) {
            await queryRunner.query(statement)
        }
        log.info('Finished migration addEventRouting1678382946390')
    }

    /**
     * Drops the event-routing indexes and table.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('Rolling Back migration addEventRouting1678382946390')
        const dropStatements = [
            'DROP INDEX "idx_app_event_project_id_appName_identifier_value_event"',
            'DROP INDEX "idx_app_event_routing_flow_id"',
            'DROP TABLE "app_event_routing"',
        ]
        for (const statement of dropStatements) {
            await queryRunner.query(statement)
        }
        log.info('Finished Rolling Back migration addEventRouting1678382946390')
    }
}

View File

@@ -0,0 +1,86 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class removeCollectionVersion1678492809093
implements MigrationInterface {
    name = 'removeCollectionVersion1678492809093'
    /**
     * Drops collection versioning: removes "collectionVersionId" from
     * "instance" and "flow_run", and copies each collection's latest version
     * displayName onto a new "collection"."displayName" column.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running migration removeCollectionVersion1678492809093')
        await queryRunner.query(
            'ALTER TABLE "instance" DROP CONSTRAINT "fk_instance_collection_version"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_collection_version_id"',
        )
        await queryRunner.query(
            'ALTER TABLE "instance" DROP CONSTRAINT "REL_183c020130aa172f58c6a0c647"',
        )
        await queryRunner.query(
            'ALTER TABLE "instance" DROP COLUMN "collectionVersionId"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" DROP COLUMN "collectionVersionId"',
        )
        await queryRunner.query(
            'ALTER TABLE "collection" ADD "displayName" character varying',
        )
        const collections = await queryRunner.query(
            'SELECT * FROM collection',
        )
        for (const currentCollection of collections) {
            // Latest version wins; fall back to 'Untitled' when a collection
            // has no versions at all.
            const [latestCollectionVersion] = await queryRunner.query(
                'SELECT * FROM collection_version WHERE "collectionId" = $1 ORDER BY created DESC LIMIT 1',
                [currentCollection.id],
            )
            const displayName = latestCollectionVersion
                ? latestCollectionVersion.displayName
                : 'Untitled'
            // "displayName" must be quoted: the previous unquoted identifier was
            // folded to lowercase by Postgres ("column displayname does not
            // exist") even though the column was created quoted above. The
            // parameterized value also survives names containing quotes.
            await queryRunner.query(
                'UPDATE "collection" SET "displayName" = $1 WHERE "id" = $2',
                [displayName, currentCollection.id],
            )
        }
        log.info('Finished migration removeCollectionVersion1678492809093')
    }
    /**
     * Restores the "collectionVersionId" columns and their constraints, and
     * drops "collection"."displayName".
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'ALTER TABLE "collection" DROP COLUMN "displayName"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" ADD "collectionVersionId" character varying(21) NOT NULL',
        )
        await queryRunner.query(
            'ALTER TABLE "instance" ADD "collectionVersionId" character varying(21) NOT NULL',
        )
        await queryRunner.query(
            'ALTER TABLE "instance" ADD CONSTRAINT "REL_183c020130aa172f58c6a0c647" UNIQUE ("collectionVersionId")',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_collection_version_id" FOREIGN KEY ("collectionVersionId") REFERENCES "collection_version"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "instance" ADD CONSTRAINT "fk_instance_collection_version" FOREIGN KEY ("collectionVersionId") REFERENCES "collection_version"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
    }
}

View File

@@ -0,0 +1,36 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class addtriggerevents1678621361185 implements MigrationInterface {
    name = 'addtriggerevents1678621361185'

    /**
     * Creates the "trigger_event" table with its flow index and the project /
     * flow foreign keys (both cascading on delete).
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('addtriggerevents1678621361185 up: started')
        const createStatements = [
            'CREATE TABLE "trigger_event" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "flowId" character varying(21) NOT NULL, "projectId" character varying(21) NOT NULL, "sourceName" character varying NOT NULL, "payload" jsonb NOT NULL, CONSTRAINT "PK_79bbc8c2af95776e801c7eaab11" PRIMARY KEY ("id"))',
            'CREATE INDEX "idx_trigger_event_flow_id" ON "trigger_event" ("flowId") ',
            'ALTER TABLE "trigger_event" ADD CONSTRAINT "fk_trigger_event_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
            'ALTER TABLE "trigger_event" ADD CONSTRAINT "fk_trigger_event_flow_id" FOREIGN KEY ("flowId") REFERENCES "flow"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        ]
        for (const statement of createStatements) {
            await queryRunner.query(statement)
        }
        log.info('addtriggerevents1678621361185 up: finished')
    }

    /**
     * Drops the trigger-event constraints, index, and table.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        const dropStatements = [
            'ALTER TABLE "trigger_event" DROP CONSTRAINT "fk_trigger_event_flow_id"',
            'ALTER TABLE "trigger_event" DROP CONSTRAINT "fk_trigger_event_project_id"',
            'DROP INDEX "idx_trigger_event_flow_id"',
            'DROP TABLE "trigger_event"',
        ]
        for (const statement of dropStatements) {
            await queryRunner.query(statement)
        }
    }
}

View File

@@ -0,0 +1,151 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
const FLOW_VERSION_TABLE = 'flow_version'
const APP_CONNECTION_TABLE = 'app_connection'
const PIECE_TYPE = 'PIECE'
const PIECE_TRIGGER_TYPE = 'PIECE_TRIGGER'
const BRANCH_TYPE = 'BRANCH'
type Step = {
type: string
settings: {
pieceName: string
pieceVersion: string
}
onFailureAction?: Step
onSuccessAction?: Step
nextAction?: Step
}
export class bumpFixPieceVersions1678928503715 implements MigrationInterface {
    /**
     * Normalizes piece names (underscores/dots -> dashes, storage -> store)
     * and bumps piece versions in every flow version, then applies the same
     * renames to stored app connections.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('bumpFixPieceVersions1678928503715, started')
        let count = 0
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const flowVersion of flowVersions) {
            if (updateStep(flowVersion.trigger)) {
                count++
                await queryRunner.query(
                    `UPDATE ${FLOW_VERSION_TABLE} SET "trigger" = $1 WHERE id = $2`,
                    [flowVersion.trigger, flowVersion.id],
                )
            }
        }
        let connectionCount = 0
        // Same rename table the step logic applies, for connection appName.
        const connectionRenames: Record<string, string> = {
            'google_sheets': 'google-sheets',
            'google_calendar': 'google-calendar',
            'google_contacts': 'google-contacts',
            'google_drive': 'google-drive',
            'google_tasks': 'google-tasks',
            'cal.com': 'cal-com',
            'storage': 'store',
            'telegram_bot': 'telegram-bot',
        }
        const appConnections = await queryRunner.query(
            `SELECT * FROM ${APP_CONNECTION_TABLE}`,
        )
        for (const appConnection of appConnections) {
            const renamedTo = connectionRenames[appConnection.appName]
            if (renamedTo === undefined) {
                continue
            }
            appConnection.appName = renamedTo
            connectionCount++
            await queryRunner.query(
                `UPDATE ${APP_CONNECTION_TABLE} SET "appName" = $1 WHERE id = $2`,
                [appConnection.appName, appConnection.id],
            )
        }
        log.info(
            'bumpFixPieceVersions1678928503715, finished bumping ' +
                count +
                ' flows ' +
                ' and connections count ' +
                connectionCount,
        )
    }
    public async down(): Promise<void> {
        // Ignored
    }
}
/**
 * Walks a step chain (following nextAction) renaming legacy piece names and
 * stamping a concrete pieceVersion on every PIECE / PIECE_TRIGGER step.
 * Recurses into both arms of BRANCH steps.
 *
 * @param step head of the chain (a trigger or action), or undefined.
 * @returns true if any step in the chain (including branch children) changed.
 */
function updateStep(step: Step | undefined): boolean {
    let update = false
    while (step) {
        if (step.type === PIECE_TYPE || step.type === PIECE_TRIGGER_TYPE) {
            if (step.settings.pieceName === 'google_sheets') {
                step.settings.pieceName = 'google-sheets'
            }
            if (step.settings.pieceName === 'google_calendar') {
                step.settings.pieceName = 'google-calendar'
            }
            if (step.settings.pieceName === 'google_contacts') {
                step.settings.pieceName = 'google-contacts'
            }
            if (step.settings.pieceName === 'google_drive') {
                step.settings.pieceName = 'google-drive'
            }
            if (step.settings.pieceName === 'google_tasks') {
                step.settings.pieceName = 'google-tasks'
            }
            if (step.settings.pieceName === 'cal.com') {
                step.settings.pieceName = 'cal-com'
            }
            if (step.settings.pieceName === 'storage') {
                step.settings.pieceName = 'store'
            }
            if (step.settings.pieceName === 'telegram_bot') {
                step.settings.pieceName = 'telegram-bot'
            }
            if (step.settings.pieceName === 'youtube') {
                // Youtube latest version is 0.1.4
                step.settings.pieceVersion = '0.1.4'
            }
            else {
                step.settings.pieceVersion = '0.1.3'
            }
            update = true
        }
        if (step.type === BRANCH_TYPE) {
            // Run the recursive calls unconditionally: the previous
            // `update = update || updateStep(...)` short-circuited once
            // `update` was already true, silently skipping every step inside
            // the branch arms.
            if (step.onSuccessAction) {
                const changed = updateStep(step.onSuccessAction)
                update = update || changed
            }
            if (step.onFailureAction) {
                const changed = updateStep(step.onFailureAction)
                update = update || changed
            }
        }
        step = step.nextAction
    }
    return update
}

View File

@@ -0,0 +1,61 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
const FLOW_VERSION_TABLE = 'flow_version'
export class migrateSchedule1679014156667 implements MigrationInterface {
    /**
     * Converts legacy SCHEDULE triggers into the schedule piece's
     * cron_expression PIECE_TRIGGER.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('migrateSchedule1679014156667, started')
        let migratedCount = 0
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const flowVersion of flowVersions) {
            const trigger = flowVersion.trigger
            if (trigger.type !== 'SCHEDULE') {
                continue
            }
            trigger.type = 'PIECE_TRIGGER'
            trigger.settings = {
                input: {
                    cronExpression: trigger.settings.cronExpression,
                },
                triggerName: 'cron_expression',
                pieceName: 'schedule',
                pieceVersion: '0.0.2',
            }
            migratedCount++
            await queryRunner.query(
                `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                [flowVersion.trigger, flowVersion.id],
            )
        }
        log.info('migrateSchedule1679014156667, finished flows ' + migratedCount)
    }
    /**
     * Converts schedule piece triggers back into legacy SCHEDULE triggers.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        log.info('rolling back migrateSchedule1679014156667, started')
        let migratedCount = 0
        const flowVersions = await queryRunner.query('SELECT * FROM flow_version')
        for (const flowVersion of flowVersions) {
            const trigger = flowVersion.trigger
            const isSchedulePiece =
                trigger.type === 'PIECE_TRIGGER' &&
                trigger.settings.pieceName === 'schedule'
            if (!isSchedulePiece) {
                continue
            }
            trigger.type = 'SCHEDULE'
            trigger.settings = {
                cronExpression: trigger.settings.input.cronExpression,
            }
            migratedCount++
            await queryRunner.query(
                `UPDATE ${FLOW_VERSION_TABLE} SET trigger = $1 WHERE id = $2`,
                [flowVersion.trigger, flowVersion.id],
            )
        }
        log.info(
            'rolling back migrateSchedule1679014156667, finished flows ' + migratedCount,
        )
    }
}

View File

@@ -0,0 +1,25 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class addNotificationsStatus1680563747425 implements MigrationInterface {
    name = 'addNotificationsStatus1680563747425'

    /**
     * Adds a nullable "notifications" column to "project" and backfills every
     * existing row with 'ALWAYS'.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running migration: addNotificationsStatus1680563747425')
        const statements = [
            'ALTER TABLE "project" ADD "notifications" character varying',
            'UPDATE "project" SET "notifications" = \'ALWAYS\'',
        ]
        for (const statement of statements) {
            await queryRunner.query(statement)
        }
        log.info('Completed migration: addNotificationsStatus1680563747425')
    }

    /**
     * Drops the "notifications" column again.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(
            'ALTER TABLE "project" DROP COLUMN "notifications"',
        )
    }
}

View File

@@ -0,0 +1,22 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
export class CreateWebhookSimulationSchema1680698259291
implements MigrationInterface {
    name = 'CreateWebhookSimulationSchema1680698259291'

    /**
     * Creates the "webhook_simulation" table with a unique index on flowId
     * (at most one active simulation per flow).
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        const createStatements = [
            'CREATE TABLE "webhook_simulation" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "flowId" character varying(21) NOT NULL, "projectId" character varying(21) NOT NULL, CONSTRAINT "PK_6854a1ac9a5b24810b29aaf0f43" PRIMARY KEY ("id"))',
            'CREATE UNIQUE INDEX "idx_webhook_simulation_flow_id" ON "webhook_simulation" ("flowId") ',
        ]
        for (const statement of createStatements) {
            await queryRunner.query(statement)
        }
    }

    /**
     * Drops the webhook-simulation index and table.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        const dropStatements = [
            'DROP INDEX "idx_webhook_simulation_flow_id"',
            'DROP TABLE "webhook_simulation"',
        ]
        for (const statement of dropStatements) {
            await queryRunner.query(statement)
        }
    }
}

View File

@@ -0,0 +1,193 @@
import { apId } from '@activepieces/shared'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class RemoveCollections1680986182074 implements MigrationInterface {
name = 'RemoveCollections1680986182074'
    /**
     * Removes the collection layer from the data model: re-scopes
     * "store-entry" rows from collections to projects, replaces collections
     * with "folder" rows for grouping flows, and splits each per-collection
     * "instance" row into one "flow_instance" row per flow.
     * Order matters throughout: data is copied before columns are renamed or
     * dropped, and foreign keys are added only after the referenced rows exist.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running RemoveCollections1680986182074 migration')
        // Data Queries
        // Re-point store entries at the owning project while "collectionId"
        // still exists; the column is renamed to "projectId" further below.
        await queryRunner.query(`
        UPDATE "store-entry"
        SET "collectionId" = "collection"."projectId"
        FROM "collection"
        WHERE "store-entry"."collectionId" = "collection"."id";
    `)
        await queryRunner.query(
            'CREATE TABLE "folder" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "displayName" character varying NOT NULL, "projectId" character varying(21) NOT NULL, CONSTRAINT "PK_6278a41a706740c94c02e288df8" PRIMARY KEY ("id"))',
        )
        await queryRunner.query(
            'CREATE INDEX "idx_folder_project_id" ON "folder" ("projectId") ',
        )
        await queryRunner.query(
            'ALTER TABLE "flow" ADD "folderId" character varying(21)',
        )
        // Create one folder per collection and re-home that collection's
        // flows into it.
        let countFolders = 0
        const collections = await queryRunner.query('SELECT * FROM "collection"')
        for (const collection of collections) {
            const randomId = apId()
            await queryRunner.query(
                'INSERT INTO "folder" ("id", "created", "updated", "displayName", "projectId") VALUES ($1, NOW(), NOW(), $2, $3)',
                [randomId, collection.displayName, collection.projectId],
            )
            // NOTE(review): interpolated ids here are apId()-generated or come
            // from the database itself, so no untrusted input reaches this SQL —
            // but a parameterized query would still be safer.
            await queryRunner.query(
                `UPDATE "flow" SET "folderId" = '${randomId}' WHERE "collectionId" = '${collection.id}'`,
            )
            countFolders++
        }
        log.info(
            `RemoveCollections1680986182074 Migrated ${countFolders} folders`,
        )
        // Schema Queries
        await queryRunner.query(
            'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_collection_id"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" DROP CONSTRAINT "fk_flow_run_collection_id"',
        )
        await queryRunner.query('DROP INDEX "idx_flow_collection_id"')
        await queryRunner.query(
            'ALTER TABLE "store-entry" RENAME COLUMN "collectionId" TO "projectId"',
        )
        await queryRunner.query(
            'CREATE TABLE "flow_instance" ("id" character varying(21) NOT NULL, "created" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "projectId" character varying(21) NOT NULL, "flowId" character varying(21) NOT NULL, "flowVersionId" character varying(21) NOT NULL, "status" character varying NOT NULL, CONSTRAINT "REL_cb897f5e48cc3cba1418966326" UNIQUE ("flowId"), CONSTRAINT "REL_ec72f514c21734fb7a08797d75" UNIQUE ("flowVersionId"), CONSTRAINT "PK_5b0308060b7de5abec61ac5d2db" PRIMARY KEY ("id"))',
        )
        await queryRunner.query(
            'CREATE UNIQUE INDEX "idx_flow_instance_project_id_flow_id" ON "flow_instance" ("projectId", "flowId") ',
        )
        await queryRunner.query('ALTER TABLE "flow" DROP COLUMN "collectionId"')
        await queryRunner.query(
            'ALTER TABLE "flow_run" DROP COLUMN "collectionId"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_run" DROP COLUMN "collectionDisplayName"',
        )
        // Drop and re-add the flow->project FK so "projectId" can be made
        // NOT NULL in between.
        await queryRunner.query(
            'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_project_id"',
        )
        await queryRunner.query(
            'ALTER TABLE "flow" ALTER COLUMN "projectId" SET NOT NULL',
        )
        await queryRunner.query(
            'CREATE INDEX "idx_flow_project_id" ON "flow" ("projectId") ',
        )
        await queryRunner.query(
            'CREATE INDEX "idx_flow_folder_id" ON "flow" ("folderId") ',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_instance" ADD CONSTRAINT "fk_flow_instance_flow" FOREIGN KEY ("flowId") REFERENCES "flow"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "flow_instance" ADD CONSTRAINT "fk_flow_instance_flow_version" FOREIGN KEY ("flowVersionId") REFERENCES "flow_version"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_folder_id" FOREIGN KEY ("folderId") REFERENCES "folder"("id") ON DELETE SET NULL ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
        await queryRunner.query(
            'ALTER TABLE "folder" ADD CONSTRAINT "fk_folder_project" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
        )
        // Migrate Flow Instances
        const instances = await queryRunner.query('SELECT * FROM "instance"')
        let count = 0
        let failed = 0
        for (const instance of instances) {
            // Each legacy instance holds a map of flowId -> enabled flowVersionId;
            // every entry becomes its own flow_instance row.
            const flowIdToVersionId = instance.flowIdToVersionId
            for (const flowId of Object.keys(flowIdToVersionId)) {
                const flowVersionId = flowIdToVersionId[flowId]
                const randomId = apId()
                // Guard against dangling references before inserting, since the
                // FKs added above would otherwise reject the row.
                const flowExists = await queryRunner.query(
                    `SELECT EXISTS(SELECT 1 FROM "flow" WHERE "id" = '${flowId}')`,
                )
                const flowVersionExists = await queryRunner.query(
                    `SELECT EXISTS(SELECT 1 FROM "flow_version" WHERE "id" = '${flowVersionId}')`,
                )
                if (!flowExists[0].exists || !flowVersionExists[0].exists) {
                    failed++
                    log.info(
                        `Skipping flow instance ${instance.id} because flow ${flowId} or flow version ${flowVersionId} does not exist`,
                    )
                }
                else {
                    // NOTE(review): 'NOW()' is passed as a quoted string literal,
                    // not the now() function — Postgres must cast it to a
                    // timestamp; confirm this yields the current time rather
                    // than an input-syntax error on the target server.
                    await queryRunner.query(
                        `INSERT INTO "flow_instance" ("id", "created", "updated", "projectId", "flowId", "flowVersionId", "status") VALUES ('${randomId}', 'NOW()', 'NOW()', '${instance.projectId}', '${flowId}', '${flowVersionId}', '${instance.status}')`,
                    )
                    count++
                }
            }
        }
        log.info(
            `Finished Running RemoveCollections1680986182074 migration with ${count} flow instances migrated and ${failed} failed`,
        )
    }
public async down(queryRunner: QueryRunner): Promise<void> {
// Schema Queries
await queryRunner.query(
'ALTER TABLE "folder" DROP CONSTRAINT "fk_folder_project"',
)
await queryRunner.query(
'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_project_id"',
)
await queryRunner.query(
'ALTER TABLE "flow" DROP CONSTRAINT "fk_flow_folder_id"',
)
await queryRunner.query(
'ALTER TABLE "flow_instance" DROP CONSTRAINT "fk_flow_instance_flow_version"',
)
await queryRunner.query(
'ALTER TABLE "flow_instance" DROP CONSTRAINT "fk_flow_instance_flow"',
)
await queryRunner.query('DROP INDEX "idx_flow_folder_id"')
await queryRunner.query('DROP INDEX "idx_flow_project_id"')
await queryRunner.query(
'ALTER TABLE "flow" ALTER COLUMN "projectId" DROP NOT NULL',
)
await queryRunner.query(
'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_project_id" FOREIGN KEY ("projectId") REFERENCES "project"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query('ALTER TABLE "flow" DROP COLUMN "folderId"')
await queryRunner.query(
'ALTER TABLE "flow_run" ADD "collectionDisplayName" character varying NOT NULL',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD "collectionId" character varying(21) NOT NULL',
)
await queryRunner.query(
'ALTER TABLE "flow" ADD "collectionId" character varying(21) NOT NULL',
)
await queryRunner.query('DROP INDEX "idx_folder_project_id"')
await queryRunner.query('DROP TABLE "folder"')
await queryRunner.query(
'DROP INDEX "idx_flow_instance_project_id_flow_id"',
)
await queryRunner.query('DROP TABLE "flow_instance"')
await queryRunner.query(
'ALTER TABLE "store-entry" RENAME COLUMN "projectId" TO "collectionId"',
)
await queryRunner.query(
'CREATE INDEX "idx_flow_collection_id" ON "flow" ("collectionId") ',
)
await queryRunner.query(
'ALTER TABLE "flow_run" ADD CONSTRAINT "fk_flow_run_collection_id" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
await queryRunner.query(
'ALTER TABLE "flow" ADD CONSTRAINT "fk_flow_collection_id" FOREIGN KEY ("collectionId") REFERENCES "collection"("id") ON DELETE CASCADE ON UPDATE NO ACTION',
)
// Data queries
await queryRunner.query(`
UPDATE "store-entry"
SET "collectionId" = "collection"."id"
FROM "collection"
WHERE "store-entry"."collectionId" = "collection"."projectId";`)
}
}

View File

@@ -0,0 +1,34 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
export class StoreAllPeriods1681019096716 implements MigrationInterface {
    name = 'StoreAllPeriods1681019096716'

    /**
     * Allows multiple usage rows per project (one per period): drops the
     * one-row-per-project unique constraint and replaces the unique index
     * on projectId with a plain index.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('Running StoreAllPeriods1681019096716')
        const statements = [
            'ALTER TABLE "project_usage" DROP CONSTRAINT "REL_c407fc9b2bfb44515af69d575a"',
            'DROP INDEX "idx_project_usage_project_id"',
            'CREATE INDEX "idx_project_usage_project_id" ON "project_usage" ("projectId") ',
        ]
        for (const statement of statements) {
            await queryRunner.query(statement)
        }
        log.info('Finished Running StoreAllPeriods1681019096716')
    }

    /**
     * Restores the unique constraint and the unique index on projectId.
     */
    public async down(queryRunner: QueryRunner): Promise<void> {
        const statements = [
            'ALTER TABLE "project_usage" ADD CONSTRAINT "REL_c407fc9b2bfb44515af69d575a" UNIQUE ("projectId")',
            'DROP INDEX "idx_project_usage_project_id"',
            'CREATE UNIQUE INDEX "idx_project_usage_project_id" ON "project_usage" ("projectId") ',
        ]
        for (const statement of statements) {
            await queryRunner.query(statement)
        }
    }
}

View File

@@ -0,0 +1,71 @@
import { FlowVersion } from '@activepieces/shared'
import { MigrationInterface, QueryRunner } from 'typeorm'
import { system } from '../../../helper/system/system'
const log = system.globalLogger()
// Minimal structural view of a step as stored inside legacy flow_version
// rows. Only the fields this migration reads or writes are modeled.
type Step = {
    type: string
    settings: {
        // Missing on legacy rows; this migration backfills it with {}.
        inputUiInfo?: Record<string, unknown>
    }
    // BRANCH steps fan out into failure/success sub-chains; every step
    // may also have a linear successor via nextAction.
    onFailureAction?: Step
    onSuccessAction?: Step
    nextAction?: Step
}
// Name of the table holding serialized flow versions.
const FLOW_VERSION_TABLE = 'flow_version'
// Legacy flow versions have no inputUiInfo, so we should add it
export class AddInputUiInfo1681107443963 implements MigrationInterface {
    /**
     * Backfills `settings.inputUiInfo` with an empty object on every
     * PIECE / PIECE_TRIGGER step of every stored flow version, writing a
     * version back only when at least one of its steps was patched.
     */
    public async up(queryRunner: QueryRunner): Promise<void> {
        log.info('AddInputUiInfo1681107443963, started')
        let migratedCount = 0
        const flowVersions = await queryRunner.query(
            `SELECT * FROM ${FLOW_VERSION_TABLE}`,
        )
        for (const flowVersion of flowVersions) {
            // Steps are references into flowVersion, so patching them in
            // place mutates the row object we later persist.
            const steps = getAllSteps(flowVersion as FlowVersion)
            let patched = false
            for (const step of steps) {
                const isPieceStep
                    = step.type === 'PIECE_TRIGGER' || step.type === 'PIECE'
                if (isPieceStep && !step.settings.inputUiInfo) {
                    step.settings.inputUiInfo = {}
                    patched = true
                }
            }
            if (!patched) {
                continue
            }
            migratedCount++
            // NOTE(review): this writes the whole row object into a column
            // named "flow_version" — confirm that column exists in the
            // schema; the table itself is also named flow_version.
            await queryRunner.query(
                `UPDATE ${FLOW_VERSION_TABLE} SET flow_version = $1 WHERE id = $2`,
                [flowVersion, flowVersion.id],
            )
        }
        log.info('AddInputUiInfo1681107443963, finished flows ' + migratedCount)
    }

    /** Irreversible: the added empty objects are harmless to keep. */
    public async down(): Promise<void> {
        log.info('no rolling back AddInputUiInfo1681107443963')
    }
}
/**
 * Walks the linked chain starting at `step`, collecting every step it
 * visits. BRANCH steps additionally contribute their failure and success
 * sub-chains (depth-first) before the walk continues with the branch's
 * own linear successor.
 */
function traverseFlowInternal(step: Step | undefined): Step[] {
    const visited: Step[] = []
    let current = step
    // `!= null` deliberately covers both undefined and null chain ends.
    while (current != null) {
        visited.push(current)
        if (current.type === 'BRANCH') {
            visited.push(
                ...traverseFlowInternal(current.onFailureAction),
                ...traverseFlowInternal(current.onSuccessAction),
            )
        }
        current = current.nextAction
    }
    return visited
}
/** Flattens a flow version's trigger chain into a list of all its steps. */
function getAllSteps(flowVersion: FlowVersion): Step[] {
    const rootStep = flowVersion.trigger as Step
    return traverseFlowInternal(rootStep)
}

Some files were not shown because too many files have changed in this diff Show More