Add Activepieces integration for workflow automation
- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
27
activepieces-fork/packages/server/shared/src/index.ts
Normal file
@@ -0,0 +1,27 @@
export * from './lib/memory-lock'
export * from './lib/network-utils'
export * from './lib/pieces/file-pieces-utils'
export * from './lib/job'
export * from './lib/exception-handler'
export * from './lib/crypto'
export * from './lib/semaphore'
export * from './lib/file-compressor'
export * from './lib/file-system-utils'
export * from './lib/promise-handler'
export * from './lib/logger'
export * from './lib/database-type'
export * from './lib/webhook-secrets-util'
export * from './lib/exec'
export * from './lib/system-props'
export * from './lib/env-migrations'
export * from './lib/ap-axios'

export * from './lib/redis/distributed-lock-factory'
export * from './lib/redis/distributed-store-factory'
export * from './lib/redis/keys'
export * from './lib/redis/types'
export * from './lib/redis/index'
export * from './lib/dayjs-helper'
export * from './lib/trigger-run-stats'
export * from './lib/pubsub'
export * from './lib/system-usage'
22
activepieces-fork/packages/server/shared/src/lib/ap-axios.ts
Normal file
@@ -0,0 +1,22 @@
import { isNil } from '@activepieces/shared'
import axios, { AxiosError } from 'axios'
import axiosRetry from 'axios-retry'


export const apAxios = axios.create({
    baseURL: 'https://api.activepieces.com',
    headers: {
        'Content-Type': 'application/json',
    },
})

axiosRetry(apAxios, {
    retryDelay: (_retryCount: number) => {
        return 2000
    },
    retries: 3,
    retryCondition: (error: AxiosError) => {
        return !isNil(error.response?.status) && error.response.status >= 500 && error.response.status < 600
    },
})
23
activepieces-fork/packages/server/shared/src/lib/crypto.ts
Normal file
@@ -0,0 +1,23 @@
import { createHash, randomBytes } from 'node:crypto'
import { promisify } from 'node:util'

const randomBytesPromisified = promisify(randomBytes)

const generateRandomPassword = async (): Promise<string> => {
    const passwordBytes = await randomBytesPromisified(32)
    return passwordBytes.toString('hex')
}

function hashSHA256(input: string): string {
    const hash = createHash('sha256')
    hash.update(input)
    return hash.digest('hex')
}

export const cryptoUtils = {
    generateRandomPassword,
    hashSHA256,
    async hashObject(obj: Record<string, unknown>): Promise<string> {
        return hashSHA256(JSON.stringify(obj))
    },
}
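
A minimal usage sketch for `cryptoUtils` (the package import path is an assumption; note that `hashObject` hashes the `JSON.stringify` output, so key order affects the digest):

import { cryptoUtils } from '@activepieces/server-shared' // assumed export path

const password = await cryptoUtils.generateRandomPassword() // 64 hex chars (32 random bytes)
const digest = cryptoUtils.hashSHA256('hello')
const objectDigest = await cryptoUtils.hashObject({ a: 1, b: 2 }) // key-order sensitive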
5
activepieces-fork/packages/server/shared/src/lib/database-type.ts
Normal file
@@ -0,0 +1,5 @@
export enum DatabaseType {
    POSTGRES = 'POSTGRES',
    PGLITE = 'PGLITE',
}
24
activepieces-fork/packages/server/shared/src/lib/dayjs-helper.ts
Normal file
@@ -0,0 +1,24 @@
import dayjs from 'dayjs'
import duration, { DurationUnitType } from 'dayjs/plugin/duration'
import timezone from 'dayjs/plugin/timezone'
import utc from 'dayjs/plugin/utc'

dayjs.extend(utc)
dayjs.extend(timezone)
dayjs.extend(duration)

export function apDayjs(
    time: undefined | number | string = undefined,
): dayjs.Dayjs {
    if (time === undefined) {
        return dayjs()
    }
    return dayjs(time)
}

export function apDayjsDuration(
    value: number,
    unit: DurationUnitType,
) {
    return dayjs.duration(value, unit)
}
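
A usage sketch for the dayjs helpers above (relative import path assumed):

import { apDayjs, apDayjsDuration } from './dayjs-helper'

const now = apDayjs()                      // wraps dayjs() with utc/timezone/duration plugins loaded
const later = apDayjs(Date.now() + 60_000) // accepts epoch millis or an ISO string
const backoff = apDayjsDuration(8, 'minute').asMilliseconds() // 480000, as used by the runs-metadata queue below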
44
activepieces-fork/packages/server/shared/src/lib/env-migrations.ts
Normal file
@@ -0,0 +1,44 @@
import { ExecutionMode } from '@activepieces/shared'
import { DatabaseType } from './database-type'
import { RedisType } from './redis/types'
import { AppSystemProp } from './system-props'

const envPrefix = (prop: string): string => `AP_${prop}`

export const environmentMigrations = {
    migrate(): Record<string, string | undefined> {
        return {
            ...process.env,
            [envPrefix(AppSystemProp.EXECUTION_MODE)]: migrateExecutionMode(getRawValue(AppSystemProp.EXECUTION_MODE)),
            [envPrefix(AppSystemProp.REDIS_TYPE)]: migrateRedisType(getRawValue(AppSystemProp.REDIS_TYPE)),
            [envPrefix(AppSystemProp.DB_TYPE)]: migrateDbType(getRawValue(AppSystemProp.DB_TYPE)),
        }
    },
}

function migrateRedisType(currentRedisType: string | undefined): string | undefined {
    const queueMode = process.env['AP_QUEUE_MODE']
    if (queueMode === 'MEMORY') {
        return RedisType.MEMORY
    }
    return currentRedisType
}

function migrateExecutionMode(currentExecutionMode: string | undefined): string | undefined {
    if (currentExecutionMode === 'SANDBOXED') {
        return ExecutionMode.SANDBOX_PROCESS
    }
    return currentExecutionMode
}

function migrateDbType(currentDbType: string | undefined): string | undefined {
    if (currentDbType === 'SQLITE3') {
        return DatabaseType.PGLITE
    }
    return currentDbType
}

function getRawValue(prop: string): string | undefined {
    return process.env[envPrefix(prop)]
}
32
activepieces-fork/packages/server/shared/src/lib/exception-handler.ts
Normal file
@@ -0,0 +1,32 @@
import * as Sentry from '@sentry/node'
import { FastifyBaseLogger } from 'fastify'

let sentryInitialized = false

export const exceptionHandler = {
    initializeSentry: (sentryDsn: string | undefined) => {
        if (!sentryDsn) {
            return
        }
        sentryInitialized = true
        Sentry.init({
            dsn: sentryDsn,
            beforeSend: (event) => {
                if (event?.exception?.values?.[0].type === 'AxiosError') {
                    return null
                }
                const value = event?.exception?.values?.[0]?.value
                if (value && ['EXECUTION_TIMEOUT', 'ENTITY_NOT_FOUND'].includes(value)) {
                    return null
                }
                return event
            },
        })
    },
    handle: (e: unknown, log: FastifyBaseLogger): void => {
        log.error(e)
        if (sentryInitialized) {
            Sentry.captureException(e)
        }
    },
}
94
activepieces-fork/packages/server/shared/src/lib/exec.ts
Normal file
@@ -0,0 +1,94 @@
import { exec as execCallback, spawn } from 'node:child_process'
import type { SpawnOptions } from 'node:child_process'
import { promisify } from 'node:util'
import treeKill from 'tree-kill'

export const execPromise = promisify(execCallback)

export async function spawnWithKill({
    cmd,
    options = {},
    printOutput,
    timeoutMs,
}: SpawnWithKillParams): Promise<CommandOutput> {
    return new Promise((resolve, reject) => {
        const [command, ...args] = cmd.split(' ')
        const cp = spawn(command, args, {
            detached: true,
            shell: true,
            ...options,
        })

        let stdout = ''
        let stderr = ''

        if (cp.stdout) {
            cp.stdout.on('data', data => {
                if (printOutput) process.stdout.write(data)
                stdout += data
            })
        }

        if (cp.stderr) {
            cp.stderr.on('data', data => {
                if (printOutput) process.stderr.write(data)
                stderr += data
            })
        }

        let finished = false
        let timeoutHandler: NodeJS.Timeout | undefined

        const finish = (err?: Error | null) => {
            if (finished) return
            finished = true

            if (timeoutHandler) clearTimeout(timeoutHandler)

            if (!cp.pid) {
                return err ? reject(err) : resolve({ stdout, stderr })
            }

            treeKill(cp.pid, 'SIGKILL', () => {
                if (err) reject(err)
                else resolve({ stdout, stderr })
            })
        }

        if (timeoutMs && timeoutMs > 0) {
            timeoutHandler = setTimeout(() => {
                finish(
                    new Error(
                        `Timeout after ${timeoutMs}ms\nstdout: ${stdout}\nstderr: ${stderr}`,
                    ),
                )
            }, timeoutMs)
        }

        cp.on('error', err => finish(err))
        cp.on('close', (code, signal) => {
            if (code !== 0) {
                return finish(
                    new Error(
                        `Exit ${code}${signal ? ` (signal ${signal})` : ''}\nstdout: ${stdout}\nstderr: ${stderr}`,
                    ),
                )
            }
            finish()
        })
    })
}

type SpawnWithKillParams = {
    cmd: string
    options?: SpawnOptions
    printOutput?: boolean
    timeoutMs?: number
}

export type CommandOutput = {
    stdout: string
    stderr: string
}
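
A usage sketch for `spawnWithKill` (command and import path illustrative): on success, failure, or timeout the whole child process tree is killed with SIGKILL via `tree-kill`, so detached grandchildren do not leak.

import { spawnWithKill } from './exec'

const { stdout, stderr } = await spawnWithKill({
    cmd: 'npx nx build engine', // illustrative command
    timeoutMs: 60_000,          // reject (and kill the tree) after 60s
    printOutput: true,          // also stream output to the parent's stdio
})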
31
activepieces-fork/packages/server/shared/src/lib/file-compressor.ts
Normal file
@@ -0,0 +1,31 @@
import { promisify } from 'node:util'
import { gzip as gzipCallback, unzip as unzipCallback } from 'node:zlib'
import { FileCompression } from '@activepieces/shared'

const gzip = promisify(gzipCallback)
const unzip = promisify(unzipCallback)

export const fileCompressor = {
    async compress({ data, compression }: Params): Promise<Buffer> {
        switch (compression) {
            case FileCompression.NONE:
                return data
            case FileCompression.GZIP:
                return gzip(data)
        }
    },

    async decompress({ data, compression }: Params): Promise<Buffer> {
        switch (compression) {
            case FileCompression.NONE:
                return data
            case FileCompression.GZIP:
                return unzip(data)
        }
    },
}

type Params = {
    data: Buffer
    compression: FileCompression
}
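
A round-trip sketch for `fileCompressor` (import paths assumed):

import { FileCompression } from '@activepieces/shared'
import { fileCompressor } from './file-compressor'

const original = Buffer.from('hello world')
const compressed = await fileCompressor.compress({ data: original, compression: FileCompression.GZIP })
const restored = await fileCompressor.decompress({ data: compressed, compression: FileCompression.GZIP })
// restored.equals(original) === true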
40
activepieces-fork/packages/server/shared/src/lib/file-system-utils.ts
Normal file
@@ -0,0 +1,40 @@
import { access, mkdir, unlink } from 'node:fs/promises'


export const INFINITE_LOCK_TIMEOUT = 60 * 60 * 1000

export const fileSystemUtils = {
    fileExists: async (path: string): Promise<boolean> => {
        try {
            await access(path)
            return true
        }
        catch (e) {
            const castedError = e as Error
            if ('code' in castedError && castedError.code === 'ENOENT') {
                return false
            }

            throw e
        }
    },

    threadSafeMkdir: async (path: string): Promise<void> => {
        try {
            await mkdir(path, { recursive: true })
        }
        catch (e) {
            const castedError = e as Error
            if ('code' in castedError && castedError.code === 'EEXIST') {
                return
            }
            throw e
        }
    },

    deleteFile: async (path: string): Promise<void> => {
        if (await fileSystemUtils.fileExists(path)) {
            await unlink(path)
        }
    },
}
80
activepieces-fork/packages/server/shared/src/lib/job/index.ts
Normal file
@@ -0,0 +1,80 @@
import {
    EngineOperationType,
    JobData,
    ProgressUpdateType,
    RunEnvironment,
} from '@activepieces/shared'
import { Static, Type } from '@sinclair/typebox'

export * from './runs-metadata-queue-factory'

export enum JobStatus {
    COMPLETED = 'COMPLETED',
    FAILED = 'FAILED',
}

export enum QueueName {
    WORKER_JOBS = 'workerJobs',
    RUNS_METADATA = 'runsMetadata',
}

export const getPlatformQueueName = (platformId: string): string => {
    return `platform-${platformId}-jobs`
}

export const ApQueueJob = Type.Object({
    id: Type.String(),
    data: JobData,
    engineToken: Type.String(),
    attempsStarted: Type.Number(),
})

export type ApQueueJob = Static<typeof ApQueueJob>

export const SendEngineUpdateRequest = Type.Object({
    workerServerId: Type.String(),
    requestId: Type.String(),
    response: Type.Unknown(),
})
export type SendEngineUpdateRequest = Static<typeof SendEngineUpdateRequest>

export const MigrateJobsRequest = Type.Object({
    jobData: Type.Record(Type.String(), Type.Unknown()),
})
export type MigrateJobsRequest = Static<typeof MigrateJobsRequest>

export const SavePayloadRequest = Type.Object({
    flowId: Type.String(),
    projectId: Type.String(),
    payloads: Type.Array(Type.Unknown()),
})
export type SavePayloadRequest = Static<typeof SavePayloadRequest>

export const SubmitPayloadsRequest = Type.Object({
    flowVersionId: Type.String(),
    projectId: Type.String(),
    progressUpdateType: Type.Enum(ProgressUpdateType),
    synchronousHandlerId: Type.Optional(Type.String()),
    httpRequestId: Type.Optional(Type.String()),
    payloads: Type.Array(Type.Unknown()),
    environment: Type.Enum(RunEnvironment),
    parentRunId: Type.Optional(Type.String()),
    failParentOnFailure: Type.Optional(Type.Boolean()),
    platformId: Type.String(),
})

export type SubmitPayloadsRequest = Static<typeof SubmitPayloadsRequest>

export function getEngineTimeout(operationType: EngineOperationType, flowTimeoutSandbox: number, triggerTimeoutSandbox: number): number {
    switch (operationType) {
        case EngineOperationType.EXECUTE_FLOW:
            return flowTimeoutSandbox
        case EngineOperationType.EXECUTE_PROPERTY:
        case EngineOperationType.EXECUTE_VALIDATE_AUTH:
        case EngineOperationType.EXTRACT_PIECE_METADATA:
        case EngineOperationType.EXECUTE_TRIGGER_HOOK:
            return triggerTimeoutSandbox
    }
}
112
activepieces-fork/packages/server/shared/src/lib/job/runs-metadata-queue-factory.ts
Normal file
@@ -0,0 +1,112 @@
import { apId, ApId, FlowRun as FlowRunSchema } from '@activepieces/shared'
import { Static, Type } from '@sinclair/typebox'
import { Value } from '@sinclair/typebox/value'
import { Queue } from 'bullmq'
import { BullMQOtel } from 'bullmq-otel'
import Redis from 'ioredis'
import { apDayjsDuration } from '../dayjs-helper'
import { DistributedStore } from '../redis/distributed-store-factory'
import { QueueName } from './index'

export const redisMetadataKey = (runId: ApId): string => `runs_metadata:${runId}`

export const runsMetadataQueueFactory = ({
    createRedisConnection,
    distributedStore,
}: RunsMetadataQueueFactoryParams) => {
    let queueInstance: Queue<RunsMetadataJobData> | undefined = undefined

    return {
        async init(config: RunsMetadataQueueConfig): Promise<void> {
            queueInstance = new Queue<RunsMetadataJobData>(QueueName.RUNS_METADATA, {
                connection: await createRedisConnection(),
                telemetry: config.isOtelEnabled ? new BullMQOtel(QueueName.RUNS_METADATA) : undefined,
                defaultJobOptions: {
                    attempts: 5,
                    backoff: {
                        type: 'exponential',
                        delay: apDayjsDuration(8, 'minute').asMilliseconds(),
                    },
                    removeOnComplete: true,
                    removeOnFail: {
                        age: apDayjsDuration(config.redisFailedJobRetentionDays, 'day').asSeconds(),
                        count: config.redisFailedJobRetentionMaxCount,
                    },
                },
            })
            await queueInstance.waitUntilReady()
        },

        async add(params: RunsMetadataUpsertData): Promise<void> {
            if (!queueInstance) {
                throw new Error('Runs metadata queue not initialized')
            }

            const cleanedParams = Value.Clean(RunsMetadataUpsertData, params) as RunsMetadataUpsertData

            await distributedStore.merge(redisMetadataKey(cleanedParams.id), {
                ...cleanedParams,
                requestId: apId(),
            })

            await queueInstance.add(
                'update-run-metadata',
                { runId: cleanedParams.id, projectId: cleanedParams.projectId },
                { deduplication: { id: cleanedParams.id } },
            )
        },

        get(): Queue<RunsMetadataJobData> {
            if (!queueInstance) {
                throw new Error('Runs metadata queue not initialized')
            }
            return queueInstance
        },

        isInitialized(): boolean {
            return queueInstance !== undefined
        },
    }
}

type RunsMetadataQueueFactoryParams = {
    createRedisConnection: () => Promise<Redis>
    distributedStore: DistributedStore
}

export type RunsMetadataJobData = {
    runId: string
    projectId: string
}

export type RunsMetadataQueueConfig = {
    isOtelEnabled: boolean
    redisFailedJobRetentionDays: number
    redisFailedJobRetentionMaxCount: number
}

export const RunsMetadataUpsertData = Type.Composite([
    Type.Required(Type.Pick(FlowRunSchema, ['id', 'projectId'])),
    Type.Partial(Type.Pick(FlowRunSchema, [
        'flowId',
        'flowVersionId',
        'environment',
        'startTime',
        'finishTime',
        'status',
        'tags',
        'pauseMetadata',
        'failedStep',
        'stepNameToTest',
        'parentRunId',
        'failParentOnFailure',
        'logsFileId',
        'updated',
        'stepsCount',
    ])),
    Type.Object({
        requestId: Type.Optional(Type.String()),
    }),
])

export type RunsMetadataUpsertData = Static<typeof RunsMetadataUpsertData>
33
activepieces-fork/packages/server/shared/src/lib/logger/hyperdx-pino.ts
Normal file
@@ -0,0 +1,33 @@
import * as HyperDX from '@hyperdx/node-opentelemetry'
import { Level, Logger, pino, transport, TransportTargetOptions } from 'pino'

export type HyperDXCredentials = {
    token: string | undefined
}

export const createHyperDXTransport = (level: Level, targets: TransportTargetOptions[], hyperdx?: HyperDXCredentials): Logger | null => {
    if (!hyperdx) {
        return null
    }
    const token = hyperdx.token
    if (!token) {
        return null
    }
    HyperDX.init({
        apiKey: token,
        service: 'activepieces',
    })

    return pino(
        { level, mixin: HyperDX.getPinoMixinFunction },
        transport({
            targets: [
                HyperDX.getPinoTransport(level, {
                    detectResources: true,
                    queueSize: 1000,
                }),
                ...targets,
            ],
        }),
    )
}
58
activepieces-fork/packages/server/shared/src/lib/logger/index.ts
Normal file
@@ -0,0 +1,58 @@
import { FastifyBaseLogger } from 'fastify'
import pino, { Level, Logger } from 'pino'
import 'pino-loki'
import { createHyperDXTransport, HyperDXCredentials } from './hyperdx-pino'
import { createLokiTransport, LokiCredentials } from './loki-pino'

export const pinoLogging = {
    initLogger: (loggerLevel: Level | undefined, logPretty: boolean, loki: LokiCredentials, hyperdx: HyperDXCredentials): Logger => {
        const level: Level = loggerLevel ?? 'info'
        const pretty = logPretty ?? false

        if (pretty) {
            return pino({
                level,
                transport: {
                    target: 'pino-pretty',
                    options: {
                        translateTime: 'HH:MM:ss Z',
                        colorize: true,
                        ignore: 'pid,hostname',
                    },
                },
            })
        }

        const defaultTargets = [
            {
                target: 'pino/file',
                level,
                options: {},
            },
        ]

        const hyperdxLogger = createHyperDXTransport(level, defaultTargets, hyperdx)
        if (hyperdxLogger) {
            return hyperdxLogger
        }

        const lokiLogger = createLokiTransport(level, defaultTargets, loki)
        if (lokiLogger) {
            return lokiLogger
        }

        // Default logger
        return pino({
            level,
            transport: {
                targets: defaultTargets,
            },
        })
    },
    createRunContextLog: ({ log, runId, webhookId, flowId, flowVersionId }: { log: FastifyBaseLogger, runId: string, webhookId: string | undefined, flowId: string, flowVersionId: string }) => {
        return log.child({ runId, webhookId, flowId, flowVersionId })
    },
    createWebhookContextLog: ({ log, webhookId, flowId }: { log: FastifyBaseLogger, webhookId: string, flowId: string }) => {
        return log.child({ webhookId, flowId })
    },
}
42
activepieces-fork/packages/server/shared/src/lib/logger/loki-pino.ts
Normal file
@@ -0,0 +1,42 @@
import { Level, Logger, pino, TransportTargetOptions } from 'pino'
import 'pino-loki'

export type LokiCredentials = {
    url: string | undefined
    username: string | undefined
    password: string | undefined
}

export const createLokiTransport = (level: Level, targets: TransportTargetOptions[], loki: LokiCredentials): Logger | null => {
    const lokiUrl = loki.url
    const lokiUsername = loki.username
    const lokiPassword = loki.password
    if (!lokiUrl) {
        return null
    }

    return pino({
        level,
        transport: {
            targets: [
                {
                    target: 'pino-loki',
                    level,
                    options: {
                        batching: true,
                        interval: 5,
                        host: lokiUrl,
                        basicAuth:
                            lokiUsername && lokiPassword
                                ? {
                                    username: lokiUsername,
                                    password: lokiPassword,
                                }
                                : undefined,
                    },
                },
                ...targets,
            ],
        },
    })
}
45
activepieces-fork/packages/server/shared/src/lib/memory-lock.ts
Normal file
@@ -0,0 +1,45 @@
import { E_TIMEOUT, Mutex, MutexInterface, withTimeout } from 'async-mutex'

const memoryLocks = new Map<string, MutexInterface>()

export const memoryLock = {
    acquire: async (key: string, timeout?: number): Promise<ApLock> => {
        let lock = memoryLocks.get(key)
        if (!lock) {
            if (timeout) {
                lock = withTimeout(new Mutex(), timeout)
            }
            else {
                lock = new Mutex()
            }
            memoryLocks.set(key, lock)
        }
        const release = await lock.acquire()
        return {
            release: async () => {
                release()
            },
        }
    },
    isTimeoutError: (e: unknown): boolean => {
        return e === E_TIMEOUT
    },
    runExclusive: async <T>({ key, fn }: RunExclusiveParams<T>): Promise<T> => {
        const lock = await memoryLock.acquire(key)
        try {
            return await fn()
        }
        finally {
            await lock.release()
        }
    },
}

type RunExclusiveParams<T> = {
    key: string
    fn: () => Promise<T>
}

export type ApLock = {
    release(): Promise<unknown>
}
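
A usage sketch for `memoryLock.runExclusive` (key name illustrative): callers on the same key are serialized within this process only; cross-process coordination is what the Redis-based distributed lock further down is for.

import { memoryLock } from './memory-lock'

const result = await memoryLock.runExclusive({
    key: 'pieces-cache', // illustrative key
    fn: async () => {
        // critical section: only one caller per key runs at a time
        return 42
    },
})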
85
activepieces-fork/packages/server/shared/src/lib/network-utils.ts
Normal file
@@ -0,0 +1,85 @@
import dns from 'node:dns/promises'
import os from 'os'
import { isNil } from '@activepieces/shared'
import { FastifyRequest } from 'fastify'

const GOOGLE_DNS = '216.239.32.10'
const PUBLIC_IP_ADDRESS_QUERY = 'o-o.myaddr.l.google.com'

type IpMetadata = {
    ip: string
}

let ipMetadata: IpMetadata | undefined

const getLocalIp = (): string | null => {
    const networkInterfaces = os.networkInterfaces()
    for (const interfaceName of Object.keys(networkInterfaces)) {
        const networkInterface = networkInterfaces[interfaceName]
        if (networkInterface) {
            for (const iface of networkInterface) {
                if (iface.family === 'IPv4' && !iface.internal) {
                    return iface.address
                }
            }
        }
    }
    return null
}

const getPublicIp = async (): Promise<IpMetadata> => {
    if (ipMetadata !== undefined) {
        return ipMetadata
    }

    try {
        dns.setServers([GOOGLE_DNS])

        const ipList = await dns.resolve(PUBLIC_IP_ADDRESS_QUERY, 'TXT')

        ipMetadata = {
            ip: ipList[0][0],
        }

        return ipMetadata
    }
    catch (error) {
        const localIp = getLocalIp()
        if (localIp) {
            ipMetadata = {
                ip: localIp,
            }
            return ipMetadata
        }
        throw error
    }
}

const extractClientRealIp = (request: FastifyRequest, clientIpHeader: string | undefined): string => {
    if (isNil(clientIpHeader)) {
        return request.ip
    }
    return request.headers[clientIpHeader] as string
}

export const networkUtils = {
    extractClientRealIp,
    getPublicIp,
    combineUrl(url: string, path: string) {
        const cleanedUrl = cleanTrailingSlash(url)
        const cleanedPath = cleanLeadingSlash(path)
        return `${cleanedUrl}/${cleanedPath}`
    },
}

function cleanLeadingSlash(url: string): string {
    return url.startsWith('/') ? url.slice(1) : url
}

function cleanTrailingSlash(url: string): string {
    return url.endsWith('/') ? url.slice(0, -1) : url
}
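
A usage sketch for `networkUtils.combineUrl`, which normalizes the join so exactly one slash separates base and path:

import { networkUtils } from './network-utils'

networkUtils.combineUrl('https://api.example.com/', '/v1/flows') // 'https://api.example.com/v1/flows'
networkUtils.combineUrl('https://api.example.com', 'v1/flows')   // same result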
127
activepieces-fork/packages/server/shared/src/lib/pieces/file-pieces-utils.ts
Normal file
@@ -0,0 +1,127 @@
import { readdir, readFile, stat } from 'node:fs/promises'
import { join, resolve } from 'node:path'
import { cwd } from 'node:process'
import { sep } from 'path'
import importFresh from '@activepieces/import-fresh-webpack'
import { Piece, PieceMetadata, pieceTranslation } from '@activepieces/pieces-framework'
import { extractPieceFromModule } from '@activepieces/shared'
import clearModule from 'clear-module'
import { FastifyBaseLogger } from 'fastify'

const DIST_PIECES_PATH = resolve(cwd(), 'dist', 'packages', 'pieces')
const SOURCE_PIECES_PATH = resolve(cwd(), 'packages', 'pieces')

export const filePiecesUtils = (log: FastifyBaseLogger) => ({
    getPackageNameFromFolderPath: async (folderPath: string): Promise<string> => {
        const packageJson = await readFile(join(folderPath, 'package.json'), 'utf-8').then(JSON.parse)
        return packageJson.name
    },

    getProjectJsonFromFolderPath: async (folderPath: string): Promise<string> => {
        return join(folderPath, 'project.json')
    },

    getPieceDependencies: async (folderPath: string): Promise<Record<string, string> | null> => {
        try {
            const packageJson = await readFile(join(folderPath, 'package.json'), 'utf-8').then(JSON.parse)
            if (!packageJson.dependencies) {
                return null
            }
            return packageJson.dependencies
        }
        catch (e) {
            return null
        }
    },

    findDistPiecePathByPackageName: async (packageName: string): Promise<string | null> => {
        const paths = await findAllPiecesFolder(DIST_PIECES_PATH)
        for (const path of paths) {
            try {
                const packageJsonName = await filePiecesUtils(log).getPackageNameFromFolderPath(path)
                if (packageJsonName === packageName) {
                    return path
                }
            }
            catch (e) {
                log.error({
                    name: 'findDistPiecePathByPackageName',
                    message: JSON.stringify(e),
                }, 'Error finding dist piece path by package name')
            }
        }
        return null
    },

    findSourcePiecePathByPieceName: async (pieceName: string): Promise<string | null> => {
        const piecesPath = await findAllPiecesFolder(SOURCE_PIECES_PATH)
        const piecePath = piecesPath.find((p) => p.endsWith(sep + pieceName))
        return piecePath ?? null
    },

    loadDistPiecesMetadata: async (piecesNames: string[]): Promise<PieceMetadata[]> => {
        try {
            const paths = (await findAllPiecesFolder(DIST_PIECES_PATH)).filter(path => piecesNames.some(name => path.endsWith(sep + name)))
            const pieces = await Promise.all(paths.map((p) => loadPieceFromFolder(p)))
            return pieces.filter((p): p is PieceMetadata => p !== null)
        }
        catch (e) {
            const err = e as Error
            log.warn({ name: 'FilePieceMetadataService#loadPiecesFromFolder', message: err.message, stack: err.stack })
            return []
        }
    },
})

const findAllPiecesFolder = async (folderPath: string): Promise<string[]> => {
    const paths = []
    const files = await readdir(folderPath)

    const ignoredFiles = ['node_modules', 'dist', 'framework', 'common']
    for (const file of files) {
        const filePath = join(folderPath, file)
        const fileStats = await stat(filePath)
        if (
            fileStats.isDirectory() &&
            !ignoredFiles.includes(file)
        ) {
            paths.push(...(await findAllPiecesFolder(filePath)))
        }
        else if (file === 'package.json') {
            paths.push(folderPath)
        }
    }
    return paths
}

const loadPieceFromFolder = async (
    folderPath: string,
): Promise<PieceMetadata | null> => {
    const indexPath = join(folderPath, 'src', 'index')
    clearModule(indexPath)
    const packageJson = importFresh<Record<string, string>>(
        join(folderPath, 'package.json'),
    )
    const module = importFresh<Record<string, unknown>>(
        indexPath,
    )
    const { name: pieceName, version: pieceVersion } = packageJson
    const piece = extractPieceFromModule<Piece>({
        module,
        pieceName,
        pieceVersion,
    })
    const originalMetadata = piece.metadata()
    const i18n = await pieceTranslation.initializeI18n(folderPath)
    const metadata: PieceMetadata = {
        ...originalMetadata,
        name: pieceName,
        version: pieceVersion,
        authors: piece.authors,
        directoryPath: folderPath,
        i18n,
    }

    return metadata
}
7
activepieces-fork/packages/server/shared/src/lib/promise-handler.ts
Normal file
@@ -0,0 +1,7 @@
import { FastifyBaseLogger } from 'fastify'

export function rejectedPromiseHandler(promise: Promise<unknown>, log: FastifyBaseLogger) {
    promise.catch((error) => {
        log.error(error)
    })
}
69
activepieces-fork/packages/server/shared/src/lib/pubsub.ts
Normal file
@@ -0,0 +1,69 @@
import { isNil } from '@activepieces/shared'
import { Mutex } from 'async-mutex'
import Redis from 'ioredis'

let redisClientSubscriber: Redis | null = null
let redisClientPublisher: Redis | null = null
const mutexLock = new Mutex()

export const pubsubFactory = (redisFactory: () => Promise<Redis>) => ({
    async subscribe(
        channel: string,
        listener: (message: string) => void,
    ): Promise<void> {
        const redisClientSubscriber = await getRedisClientSubscriber(redisFactory)
        await redisClientSubscriber.subscribe(channel)
        redisClientSubscriber.on('message', (_channel, message) => {
            if (_channel === channel) {
                listener(message)
            }
        })
    },
    async publish(channel: string, message: string): Promise<void> {
        const redisClientPublisher = await getRedisClientPublisher(redisFactory)
        await redisClientPublisher.publish(channel, message)
    },
    async unsubscribe(channel: string): Promise<void> {
        const redisClientSubscriber = await getRedisClientSubscriber(redisFactory)
        await redisClientSubscriber.unsubscribe(channel)
    },
    async close(): Promise<void> {
        if (!isNil(redisClientSubscriber)) {
            await redisClientSubscriber.quit()
            redisClientSubscriber = null
        }
        if (!isNil(redisClientPublisher)) {
            await redisClientPublisher.quit()
            redisClientPublisher = null
        }
    },
})

async function getRedisClientSubscriber(redisFactory: () => Promise<Redis>): Promise<Redis> {
    if (!isNil(redisClientSubscriber)) {
        return redisClientSubscriber
    }

    return mutexLock.runExclusive(async () => {
        if (!isNil(redisClientSubscriber)) {
            return redisClientSubscriber
        }
        redisClientSubscriber = await redisFactory()
        return redisClientSubscriber
    })
}

async function getRedisClientPublisher(redisFactory: () => Promise<Redis>): Promise<Redis> {
    if (!isNil(redisClientPublisher)) {
        return redisClientPublisher
    }

    return mutexLock.runExclusive(async () => {
        if (!isNil(redisClientPublisher)) {
            return redisClientPublisher
        }
        redisClientPublisher = await redisFactory()
        return redisClientPublisher
    })
}
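
A usage sketch for `pubsubFactory` (the Redis factory wiring below is an assumption; any `() => Promise<Redis>` works). Subscriber and publisher connections are created lazily behind the mutex and shared module-wide:

import Redis from 'ioredis'
import { pubsubFactory } from './pubsub'

const pubsub = pubsubFactory(async () => new Redis()) // illustrative factory
await pubsub.subscribe('flow-events', (message) => console.log('received', message))
await pubsub.publish('flow-events', JSON.stringify({ type: 'event_created' }))
await pubsub.close() // quits both shared clients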
61
activepieces-fork/packages/server/shared/src/lib/redis/default-redis.ts
Normal file
@@ -0,0 +1,61 @@
import fs from 'fs'
import { assertNotNullOrUndefined, isNil } from '@activepieces/shared'
import Redis, { RedisOptions } from 'ioredis'
import { RedisConnectionSettings } from './types'


export async function createDefaultRedisConnection(settings: RedisConnectionSettings): Promise<Redis> {
    const config: Partial<RedisOptions> = {
        maxRetriesPerRequest: null,
    }

    const url = settings.REDIS_URL
    if (isNil(url)) {
        return createStandaloneRedisConnection(settings, config)
    }
    return createRedisConnectionUsingUrl(settings, config)
}

function createRedisConnectionUsingUrl(settings: RedisConnectionSettings, config: Partial<RedisOptions>): Redis {
    const url = settings.REDIS_URL
    assertNotNullOrUndefined(url, 'URL is required')
    const client = new Redis(url, config)
    return client
}

function createStandaloneRedisConnection(settings: RedisConnectionSettings, config: Partial<RedisOptions>): Redis {
    const host = settings.REDIS_HOST
    const serializedPort = settings.REDIS_PORT
    assertNotNullOrUndefined(host, 'Host is required')
    assertNotNullOrUndefined(serializedPort, 'Port is required')
    const username = settings.REDIS_USER
    const password = settings.REDIS_PASSWORD
    const port = Number.parseInt(serializedPort, 10)
    const db = settings.REDIS_DB ?? 0
    const useSsl = settings.REDIS_USE_SSL ?? false
    const sslCaFile = settings.REDIS_SSL_CA_FILE

    const client = new Redis({
        ...config,
        host,
        port,
        username,
        password,
        db,
        retryStrategy: (times) => Math.min(times * 50, 2000),
        tls: useSsl ? {
            ca: readCAFile(sslCaFile),
        } : undefined,
    })

    return client
}

function readCAFile(file: string | undefined): string | undefined {
    if (isNil(file)) {
        return undefined
    }
    return fs.readFileSync(file, { encoding: 'utf8' })
}
60
activepieces-fork/packages/server/shared/src/lib/redis/distributed-lock-factory.ts
Normal file
@@ -0,0 +1,60 @@
import { isNil } from '@activepieces/shared'
import { Mutex } from 'async-mutex'
import { FastifyBaseLogger } from 'fastify'
import Redis from 'ioredis'
import RedLock from 'redlock'

export const distributedLockFactory = (
    createRedisConnection: () => Promise<Redis>,
) => {
    const lockMutex = new Mutex()
    let redLock: RedLock | undefined

    const getOrCreateRedLock = async (): Promise<RedLock> => {
        return lockMutex.runExclusive(async () => {
            if (!isNil(redLock)) {
                return redLock
            }
            const redisClient = await createRedisConnection()
            redLock = new RedLock([redisClient], {
                driftFactor: 0.01,
                automaticExtensionThreshold: 1000,
            })
            return redLock
        })
    }

    return (_log: FastifyBaseLogger) => ({
        runExclusive: async <T>({
            key,
            timeoutInSeconds,
            fn,
        }: RunExclusiveParams<T>): Promise<T> => {
            const timeout = timeoutInSeconds * 1000
            const redLockInstance = await getOrCreateRedLock()
            return redLockInstance.using(
                [key],
                timeout,
                {
                    retryCount: Math.ceil(timeout / 200),
                    retryDelay: 200,
                    automaticExtensionThreshold: 2000,
                    driftFactor: 0.01,
                },
                async () => fn(),
            )
        },
        destroy: async (): Promise<void> => {
            if (redLock) {
                await redLock.quit()
                redLock = undefined
            }
        },
    })
}

type RunExclusiveParams<T> = {
    key: string
    timeoutInSeconds: number
    fn: () => Promise<T>
}
116
activepieces-fork/packages/server/shared/src/lib/redis/distributed-store-factory.ts
Normal file
@@ -0,0 +1,116 @@
import { isNil } from '@activepieces/shared'
import Redis from 'ioredis'

export const distributedStoreFactory = (getRedisClient: () => Promise<Redis>) => ({
    async put(key: string, value: unknown, ttlInSeconds?: number): Promise<void> {
        const serializedValue = JSON.stringify(value)
        const redisClient = await getRedisClient()
        if (ttlInSeconds) {
            await redisClient.setex(key, ttlInSeconds, serializedValue)
        }
        else {
            await redisClient.set(key, serializedValue)
        }
    },

    async get<T>(key: string): Promise<T | null> {
        const redisClient = await getRedisClient()
        const value = await redisClient.get(key)
        if (!value) return null

        return JSON.parse(value) as T
    },

    async getAll<T>(keys: string[]): Promise<Record<string, T | null>> {
        const redisClient = await getRedisClient()
        const values = await redisClient.mget(keys)
        return values.reduce<Record<string, T | null>>((result, value, index) => {
            if (value) {
                result[keys[index]] = JSON.parse(value)
            }
            return result
        }, {})
    },

    async delete(key: string): Promise<void> {
        const redisClient = await getRedisClient()
        await redisClient.del(key)
    },

    async putBoolean(key: string, value: boolean): Promise<void> {
        const redisClient = await getRedisClient()
        await redisClient.set(key, value ? '1' : '0')
    },

    async getBoolean(key: string): Promise<boolean | null> {
        const redisClient = await getRedisClient()
        const value = await redisClient.get(key)
        if (isNil(value)) return null
        return value === '1'
    },

    async putBooleanBatch(keyValuePairs: Array<{ key: string, value: boolean }>): Promise<void> {
        if (keyValuePairs.length === 0) return

        const redisClient = await getRedisClient()
        const multi = redisClient.multi()

        for (const { key, value } of keyValuePairs) {
            multi.set(key, value ? '1' : '0')
        }

        await multi.exec()
    },

    async hgetJson<T extends Record<string, unknown>>(key: string): Promise<T | null> {
        const redisClient = await getRedisClient()
        const hashData = await redisClient.hgetall(key)
        if (!hashData || Object.keys(hashData).length === 0) return null
        const result: Record<string, unknown> = {}
        for (const [field, value] of Object.entries(hashData)) {
            const hasValue = !isNil(value) && value.trim().length > 0
            if (!hasValue) {
                continue
            }
            try {
                result[field] = JSON.parse(value)
            }
            catch (error) {
                result[field] = value
            }
        }
        return result as T
    },

    async merge<T extends Record<string, unknown>>(key: string, value: T, ttlInSeconds?: number): Promise<void> {
        const redisClient = await getRedisClient()
        const serializedFields: Record<string, string> = {}

        for (const [field, fieldValue] of Object.entries(value)) {
            if (isNil(fieldValue)) {
                continue
            }
            serializedFields[field] = JSON.stringify(fieldValue)
        }

        await redisClient.hset(key, serializedFields)

        if (ttlInSeconds) {
            await redisClient.expire(key, ttlInSeconds)
        }
    },

    async deleteKeyIfFieldValueMatches(key: string, field: string, expectedValue: unknown): Promise<void> {
        const redisClient = await getRedisClient()
        const lua = `
            local currentValue = redis.call('HGET', KEYS[1], ARGV[1])
            if currentValue and currentValue == ARGV[2] then
                redis.call('DEL', KEYS[1])
            end
        `
        const serializedValue = JSON.stringify(expectedValue)
        await redisClient.eval(lua, 1, key, field, serializedValue)
    },
})

export type DistributedStore = ReturnType<typeof distributedStoreFactory>
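
A usage sketch for the distributed store (key names illustrative). `merge` writes each field as a JSON-serialized hash entry, which is how the runs-metadata queue above accumulates partial run updates; `hgetJson` reads them back:

import Redis from 'ioredis'
import { distributedStoreFactory } from './distributed-store-factory'

const store = distributedStoreFactory(async () => new Redis()) // illustrative factory
await store.merge('runs_metadata:run_1', { status: 'RUNNING', stepsCount: 3 })
await store.merge('runs_metadata:run_1', { status: 'SUCCEEDED' }) // updates only this field
const run = await store.hgetJson<{ status: string, stepsCount: number }>('runs_metadata:run_1')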
68
activepieces-fork/packages/server/shared/src/lib/redis/index.ts
Normal file
@@ -0,0 +1,68 @@
import { isNil } from '@activepieces/shared'
import { Mutex } from 'async-mutex'
import Redis from 'ioredis'
import { createDefaultRedisConnection } from './default-redis'
import { createMemoryRedisConnection } from './memory-redis'
import { createSentinelRedisConnection } from './sentinel-redis'
import { RedisConnectionSettings, RedisType } from './types'

let redisConnectionInstance: Redis | null = null
const mutexLock = new Mutex()

export function redisConnectionFactory(
    settings: () => RedisConnectionSettings,
) {
    const factory = {
        getRedisType(): RedisType {
            return settings().REDIS_TYPE as RedisType
        },
        async create(): Promise<Redis> {
            let redisConnection: Redis
            const redisType = settings().REDIS_TYPE
            switch (redisType) {
                case RedisType.MEMORY:
                    redisConnection = await createMemoryRedisConnection()
                    break
                case RedisType.SENTINEL:
                    redisConnection = await createSentinelRedisConnection(settings())
                    break
                default:
                    redisConnection = await createDefaultRedisConnection(settings())
                    break
            }
            return redisConnection
        },
        async useExisting(): Promise<Redis> {
            if (redisConnectionInstance) {
                return redisConnectionInstance
            }
            return mutexLock.runExclusive(async () => {
                if (!isNil(redisConnectionInstance)) {
                    return redisConnectionInstance
                }
                redisConnectionInstance = await factory.create()
                return redisConnectionInstance
            })
        },
        async destroy(): Promise<void> {
            if (redisConnectionInstance) {
                await redisConnectionInstance.quit()
                redisConnectionInstance = null
            }
        },
    }
    return factory
}

export const redisHelper = {
    scanAll: async (redis: Redis, match: string): Promise<string[]> => {
        const keys: string[] = []
        let cursor = '0'
        do {
            const [newCursor, foundKeys] = await redis.scan(cursor, 'MATCH', match, 'COUNT', 1000)
            cursor = newCursor
            keys.push(...foundKeys)
        } while (cursor !== '0')
        return keys
    },
}
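
A wiring sketch for `redisConnectionFactory` (settings values illustrative): `useExisting()` memoizes one shared connection behind a mutex, while `create()` always opens a fresh one, e.g. for BullMQ queues or pubsub subscribers that need a dedicated client.

import { RedisConnectionSettings, redisConnectionFactory } from './redis/index'

const settings = {
    REDIS_TYPE: 'STANDALONE',
    REDIS_HOST: 'localhost', // illustrative values
    REDIS_PORT: '6379',
} as RedisConnectionSettings // remaining undefined-valued fields omitted for brevity

const factory = redisConnectionFactory(() => settings)
const shared = await factory.useExisting() // memoized singleton
const dedicated = await factory.create()   // fresh connection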
4
activepieces-fork/packages/server/shared/src/lib/redis/keys.ts
Normal file
@@ -0,0 +1,4 @@
import { PlatformId, ProjectId } from '@activepieces/shared'

export const getProjectMaxConcurrentJobsKey = (projectId: ProjectId): string => `project:max-concurrent-jobs:${projectId}`
export const getPlatformPlanNameKey = (platformId: PlatformId): string => `platform_plan:plan:${platformId}`
24
activepieces-fork/packages/server/shared/src/lib/redis/memory-redis.ts
Normal file
@@ -0,0 +1,24 @@
import Redis from 'ioredis'
import { RedisMemoryServer } from 'redis-memory-server'

let redisMemoryServer: RedisMemoryServer | null = null

export async function createMemoryRedisConnection(): Promise<Redis> {
    const memoryServer = getOrCreateRedisMemoryServer()
    const host = await memoryServer.getHost()
    const port = await memoryServer.getPort()
    const client = new Redis({
        maxRetriesPerRequest: null,
        host,
        port,
    })
    return client
}

function getOrCreateRedisMemoryServer(): RedisMemoryServer {
    if (redisMemoryServer) {
        return redisMemoryServer
    }
    redisMemoryServer = new RedisMemoryServer()
    return redisMemoryServer
}
62
activepieces-fork/packages/server/shared/src/lib/redis/sentinel-redis.ts
Normal file
@@ -0,0 +1,62 @@
import fs from 'fs'
import { assertNotNullOrUndefined, isNil } from '@activepieces/shared'
import Redis, { RedisOptions } from 'ioredis'
import { RedisConnectionSettings } from './types'

export async function createSentinelRedisConnection(settings: RedisConnectionSettings): Promise<Redis> {
    const sentinelList = settings.REDIS_SENTINEL_HOSTS
    const sentinelName = settings.REDIS_SENTINEL_NAME
    const sentinelRole = settings.REDIS_SENTINEL_ROLE as 'master' | 'slave'
    const username = settings.REDIS_USER
    const password = settings.REDIS_PASSWORD
    const useSsl = settings.REDIS_USE_SSL ?? false
    const sslCaFile = settings.REDIS_SSL_CA_FILE

    assertNotNullOrUndefined(sentinelList, 'Sentinel list is required')
    assertNotNullOrUndefined(sentinelName, 'Sentinel name is required')

    const sentinels = sentinelList.split(',').map((sentinel) => {
        const [host, port] = sentinel.split(':')
        return { host, port: Number.parseInt(port, 10) }
    })

    const tlsCa = readCAFile(sslCaFile)

    const redisOptions: RedisOptions = {
        maxRetriesPerRequest: null,
        sentinels,
        name: sentinelName,
        username,
        password,
        role: sentinelRole,
        ...getTlsOptionsForSentinel(useSsl, tlsCa),
        lazyConnect: true,
    }

    const client = new Redis(redisOptions)
    return client
}

function getTlsOptionsForSentinel(useSsl: boolean, tlsCa: string | undefined): Partial<RedisOptions> {
    if (!useSsl) {
        return {}
    }
    return {
        enableTLSForSentinelMode: true,
        tls: {
            ca: tlsCa,
        },
        sentinelTLS: {
            ca: tlsCa,
        },
    }
}

function readCAFile(file: string | undefined): string | undefined {
    if (isNil(file)) {
        return undefined
    }
    return fs.readFileSync(file, { encoding: 'utf8' })
}
25
activepieces-fork/packages/server/shared/src/lib/redis/types.ts
Normal file
@@ -0,0 +1,25 @@
import { Static, Type } from '@sinclair/typebox'


export enum RedisType {
    SENTINEL = 'SENTINEL',
    MEMORY = 'MEMORY',
    STANDALONE = 'STANDALONE',
}

export const RedisConnectionSettings = Type.Object({
    REDIS_TYPE: Type.String(),
    REDIS_SSL_CA_FILE: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_DB: Type.Union([Type.Number(), Type.Undefined()]),
    REDIS_HOST: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_PASSWORD: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_PORT: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_URL: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_USER: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_USE_SSL: Type.Union([Type.Boolean(), Type.Undefined()]),
    REDIS_SENTINEL_ROLE: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_SENTINEL_HOSTS: Type.Union([Type.String(), Type.Undefined()]),
    REDIS_SENTINEL_NAME: Type.Union([Type.String(), Type.Undefined()]),
})

export type RedisConnectionSettings = Static<typeof RedisConnectionSettings>
26
activepieces-fork/packages/server/shared/src/lib/semaphore.ts
Normal file
@@ -0,0 +1,26 @@
export class ApSemaphore {
    private maxConcurrent: number
    private queue: (() => void)[]
    private currentConcurrent: number

    constructor(maxConcurrent: number) {
        this.maxConcurrent = maxConcurrent
        this.queue = []
        this.currentConcurrent = 0
    }

    async acquire() {
        if (this.currentConcurrent >= this.maxConcurrent) {
            await new Promise<void>((resolve) => this.queue.push(resolve))
        }
        this.currentConcurrent++
    }

    release() {
        this.currentConcurrent--
        if (this.queue.length > 0) {
            const nextResolver = this.queue.shift()
            nextResolver?.()
        }
    }
}
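
A usage sketch for `ApSemaphore` bounding concurrent work (the limit and task are illustrative):

import { ApSemaphore } from './semaphore'

const semaphore = new ApSemaphore(4) // at most 4 tasks run concurrently

async function guarded(task: () => Promise<void>): Promise<void> {
    await semaphore.acquire()
    try {
        await task()
    }
    finally {
        semaphore.release() // wakes the next queued waiter, if any
    }
}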
184
activepieces-fork/packages/server/shared/src/lib/system-props.ts
Normal file
@@ -0,0 +1,184 @@
import { assertNotNullOrUndefined } from '@activepieces/shared'
import axios from 'axios'
import { environmentMigrations } from './env-migrations'

export const systemConstants = {
    ENGINE_EXECUTABLE_PATH: 'dist/packages/engine/main.js',
}

export type SystemProp = AppSystemProp | WorkerSystemProp

let cachedVersion: string | undefined

export enum AppSystemProp {
    API_KEY = 'API_KEY',
    TEMPLATES_API_KEY = 'TEMPLATES_API_KEY',
    API_RATE_LIMIT_AUTHN_ENABLED = 'API_RATE_LIMIT_AUTHN_ENABLED',
    API_RATE_LIMIT_AUTHN_MAX = 'API_RATE_LIMIT_AUTHN_MAX',
    API_RATE_LIMIT_AUTHN_WINDOW = 'API_RATE_LIMIT_AUTHN_WINDOW',
    APP_WEBHOOK_SECRETS = 'APP_WEBHOOK_SECRETS',
    APPSUMO_TOKEN = 'APPSUMO_TOKEN',
    CLIENT_REAL_IP_HEADER = 'CLIENT_REAL_IP_HEADER',
    CLOUD_AUTH_ENABLED = 'CLOUD_AUTH_ENABLED',
    CLOUDFLARE_API_BASE = 'CLOUDFLARE_API_BASE',
    CLOUDFLARE_API_TOKEN = 'CLOUDFLARE_API_TOKEN',
    CLOUDFLARE_ZONE_ID = 'CLOUDFLARE_ZONE_ID',
    CONFIG_PATH = 'CONFIG_PATH',
    DB_TYPE = 'DB_TYPE',
    DEV_PIECES = 'DEV_PIECES',
    EDITION = 'EDITION',
    ENABLE_FLOW_ON_PUBLISH = 'ENABLE_FLOW_ON_PUBLISH',
    ENCRYPTION_KEY = 'ENCRYPTION_KEY',
    ENVIRONMENT = 'ENVIRONMENT',
    EXECUTION_DATA_RETENTION_DAYS = 'EXECUTION_DATA_RETENTION_DAYS',
    EXECUTION_MODE = 'EXECUTION_MODE',
    FEATUREBASE_API_KEY = 'FEATUREBASE_API_KEY',
    FILE_STORAGE_LOCATION = 'FILE_STORAGE_LOCATION',
    FIREBASE_ADMIN_CREDENTIALS = 'FIREBASE_ADMIN_CREDENTIALS',
    FIREBASE_HASH_PARAMETERS = 'FIREBASE_HASH_PARAMETERS',
    FLOW_TIMEOUT_SECONDS = 'FLOW_TIMEOUT_SECONDS',
    GOOGLE_CLIENT_ID = 'GOOGLE_CLIENT_ID',
    GOOGLE_CLIENT_SECRET = 'GOOGLE_CLIENT_SECRET',
    HYPERDX_TOKEN = 'HYPERDX_TOKEN',
    INTERNAL_URL = 'INTERNAL_URL',
    ISSUE_ARCHIVE_DAYS = 'ISSUE_ARCHIVE_DAYS',
    JWT_SECRET = 'JWT_SECRET',
    LOG_LEVEL = 'LOG_LEVEL',
    LOG_PRETTY = 'LOG_PRETTY',
    LOKI_PASSWORD = 'LOKI_PASSWORD',
    LOKI_URL = 'LOKI_URL',
    LOKI_USERNAME = 'LOKI_USERNAME',
    MAX_CONCURRENT_JOBS_PER_PROJECT = 'MAX_CONCURRENT_JOBS_PER_PROJECT',
    MAX_FIELDS_PER_TABLE = 'MAX_FIELDS_PER_TABLE',
    MAX_FILE_SIZE_MB = 'MAX_FILE_SIZE_MB',
    MAX_RECORDS_PER_TABLE = 'MAX_RECORDS_PER_TABLE',
    OTEL_ENABLED = 'OTEL_ENABLED',
    PAUSED_FLOW_TIMEOUT_DAYS = 'PAUSED_FLOW_TIMEOUT_DAYS',
    PIECES_SYNC_MODE = 'PIECES_SYNC_MODE',
    PM2_ENABLED = 'PM2_ENABLED',
    POSTGRES_DATABASE = 'POSTGRES_DATABASE',
    POSTGRES_HOST = 'POSTGRES_HOST',
    POSTGRES_IDLE_TIMEOUT_MS = 'POSTGRES_IDLE_TIMEOUT_MS',
    POSTGRES_PASSWORD = 'POSTGRES_PASSWORD',
    POSTGRES_POOL_SIZE = 'POSTGRES_POOL_SIZE',
    POSTGRES_PORT = 'POSTGRES_PORT',
    POSTGRES_SSL_CA = 'POSTGRES_SSL_CA',
    POSTGRES_URL = 'POSTGRES_URL',
    POSTGRES_USERNAME = 'POSTGRES_USERNAME',
    POSTGRES_USE_SSL = 'POSTGRES_USE_SSL',
    PROJECT_RATE_LIMITER_ENABLED = 'PROJECT_RATE_LIMITER_ENABLED',
    QUEUE_UI_ENABLED = 'QUEUE_UI_ENABLED',
    QUEUE_UI_PASSWORD = 'QUEUE_UI_PASSWORD',
    QUEUE_UI_USERNAME = 'QUEUE_UI_USERNAME',
    REDIS_DB = 'REDIS_DB',
    REDIS_FAILED_JOB_RETENTION_DAYS = 'REDIS_FAILED_JOB_RETENTION_DAYS',
    REDIS_FAILED_JOB_RETENTION_MAX_COUNT = 'REDIS_FAILED_JOB_RETENTION_MAX_COUNT',
    REDIS_HOST = 'REDIS_HOST',
    REDIS_PASSWORD = 'REDIS_PASSWORD',
    REDIS_PORT = 'REDIS_PORT',
    REDIS_SENTINEL_HOSTS = 'REDIS_SENTINEL_HOSTS',
    REDIS_SENTINEL_NAME = 'REDIS_SENTINEL_NAME',
    REDIS_SENTINEL_ROLE = 'REDIS_SENTINEL_ROLE',
    REDIS_SSL_CA_FILE = 'REDIS_SSL_CA_FILE',
    REDIS_TYPE = 'REDIS_TYPE',
    REDIS_URL = 'REDIS_URL',
    REDIS_USER = 'REDIS_USER',
    REDIS_USE_SSL = 'REDIS_USE_SSL',
    RUNS_METADATA_UPDATE_CONCURRENCY = 'RUNS_METADATA_UPDATE_CONCURRENCY',
    S3_ACCESS_KEY_ID = 'S3_ACCESS_KEY_ID',
    S3_BUCKET = 'S3_BUCKET',
    S3_ENDPOINT = 'S3_ENDPOINT',
    S3_REGION = 'S3_REGION',
    S3_SECRET_ACCESS_KEY = 'S3_SECRET_ACCESS_KEY',
    S3_USE_IRSA = 'S3_USE_IRSA',
    S3_USE_SIGNED_URLS = 'S3_USE_SIGNED_URLS',
    SANDBOX_MEMORY_LIMIT = 'SANDBOX_MEMORY_LIMIT',
    SANDBOX_PROPAGATED_ENV_VARS = 'SANDBOX_PROPAGATED_ENV_VARS',
    SECRET_MANAGER_API_KEY = 'SECRET_MANAGER_API_KEY',
    SENTRY_DSN = 'SENTRY_DSN',
    SKIP_PROJECT_LIMITS_CHECK = 'SKIP_PROJECT_LIMITS_CHECK',
    SMTP_HOST = 'SMTP_HOST',
    SMTP_PASSWORD = 'SMTP_PASSWORD',
    SMTP_PORT = 'SMTP_PORT',
    SMTP_SENDER_EMAIL = 'SMTP_SENDER_EMAIL',
    SMTP_SENDER_NAME = 'SMTP_SENDER_NAME',
    SMTP_USERNAME = 'SMTP_USERNAME',
    STRIPE_SECRET_KEY = 'STRIPE_SECRET_KEY',
    STRIPE_WEBHOOK_SECRET = 'STRIPE_WEBHOOK_SECRET',
    TELEMETRY_ENABLED = 'TELEMETRY_ENABLED',
    TEMPLATES_SOURCE_URL = 'TEMPLATES_SOURCE_URL',
    TRIGGER_DEFAULT_POLL_INTERVAL = 'TRIGGER_DEFAULT_POLL_INTERVAL',
    TRIGGER_HOOKS_TIMEOUT_SECONDS = 'TRIGGER_HOOKS_TIMEOUT_SECONDS',
    TRIGGER_TIMEOUT_SECONDS = 'TRIGGER_TIMEOUT_SECONDS',
    WEBHOOK_TIMEOUT_SECONDS = 'WEBHOOK_TIMEOUT_SECONDS',
    OPENROUTER_PROVISION_KEY = 'OPENROUTER_PROVISION_KEY',
}

export enum ContainerType {
    WORKER = 'WORKER',
    APP = 'APP',
    WORKER_AND_APP = 'WORKER_AND_APP',
}

export enum WorkerSystemProp {
    WORKER_TOKEN = 'WORKER_TOKEN',
    CONTAINER_TYPE = 'CONTAINER_TYPE',
    FRONTEND_URL = 'FRONTEND_URL',

    // Optional
    WORKER_CONCURRENCY = 'WORKER_CONCURRENCY',
    PLATFORM_ID_FOR_DEDICATED_WORKER = 'PLATFORM_ID_FOR_DEDICATED_WORKER',
    PRE_WARM_CACHE = 'PRE_WARM_CACHE',
}


export const environmentVariables = {
    hasAppModules(): boolean {
        const environment = this.getEnvironment(WorkerSystemProp.CONTAINER_TYPE) ?? ContainerType.WORKER_AND_APP
        return [ContainerType.APP, ContainerType.WORKER_AND_APP].includes(environment as ContainerType)
    },
    getNumberEnvironment: (prop: WorkerSystemProp | AppSystemProp): number | undefined => {
        const value = environmentVariables.getEnvironment(prop)
        return value ? parseInt(value) : undefined
    },
    getEnvironment: (prop: WorkerSystemProp | AppSystemProp): string | undefined => {
        const migratedEnv = environmentMigrations.migrate()
        return migratedEnv['AP_' + prop]
    },
    getEnvironmentOrThrow: (prop: WorkerSystemProp | AppSystemProp): string => {
        const value = environmentVariables.getEnvironment(prop)
        assertNotNullOrUndefined(value, `Environment variable ${prop} is not set`)
        return value
    },
}

export const apVersionUtil = {
    async getCurrentRelease(): Promise<string> {
        // eslint-disable-next-line @nx/enforce-module-boundaries
        // eslint-disable-next-line @typescript-eslint/no-var-requires
        const packageJson = require('package.json')
        return packageJson.version
    },
    async getLatestRelease(): Promise<string> {
        try {
            if (cachedVersion) {
                return cachedVersion
            }
            const response = await axios.get<PackageJson>(
                'https://raw.githubusercontent.com/activepieces/activepieces/main/package.json',
                {
                    timeout: 5000,
                },
            )
            cachedVersion = response.data.version
            return response.data.version
        }
        catch (ex) {
            return '0.0.0'
        }
    },
}

type PackageJson = {
    version: string
}
135
activepieces-fork/packages/server/shared/src/lib/system-usage.ts
Normal file
135
activepieces-fork/packages/server/shared/src/lib/system-usage.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { exec } from 'child_process'
|
||||
import fs from 'fs'
|
||||
import os from 'os'
|
||||
import { promisify } from 'util'
|
||||
import { MachineInformation } from '@activepieces/shared'
|
||||
import { fileSystemUtils } from './file-system-utils'
|
||||
|
||||
const execAsync = promisify(exec)
async function calcMemory(memLimitPath: string, memUsagePath: string) {
    try {
        const exists = await fileSystemUtils.fileExists(memLimitPath) && await fileSystemUtils.fileExists(memUsagePath)
        if (!exists) return null
        const memLimit = await fs.promises.readFile(memLimitPath, 'utf8')
        if (memLimit.trim() === 'max') return null
        const memUsage = await fs.promises.readFile(memUsagePath, 'utf8')
        return {
            totalRamInBytes: parseInt(memLimit, 10),
            ramUsage: (parseInt(memUsage, 10) / parseInt(memLimit, 10)) * 100,
        }
    }
    catch {
        return null
    }
}

export const systemUsage = {
    async getContainerMemoryUsage() {
        const memLimitPathV1 = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
        const memUsagePathV1 = '/sys/fs/cgroup/memory/memory.usage_in_bytes'

        const memLimitPathV2 = '/sys/fs/cgroup/memory.max'
        const memUsagePathV2 = '/sys/fs/cgroup/memory.current'

        const memoryV2 = await calcMemory(memLimitPathV2, memUsagePathV2)
        if (memoryV2) return memoryV2

        const memoryV1 = await calcMemory(memLimitPathV1, memUsagePathV1)
        if (memoryV1) return memoryV1

        return {
            totalRamInBytes: os.totalmem(),
            ramUsage: (os.totalmem() - os.freemem()) / os.totalmem() * 100,
        }
    },

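    // Disk totals for the root volume: 'wmic' on Windows (summed across logical disks),
    // 'df -k /' elsewhere; falls back to all zeros if the probe fails.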
    async getDiskInfo(): Promise<MachineInformation['diskInfo']> {
        const platform = os.platform()

        try {
            if (platform === 'win32') {
                const { stdout } = await execAsync('wmic logicaldisk get size,freespace,caption')
                const lines = stdout.trim().split('\n').slice(1)
                let total = 0, free = 0

                for (const line of lines) {
                    const [, freeSpace, size] = line.trim().split(/\s+/)
                    if (freeSpace && size) {
                        total += parseInt(size, 10)
                        free += parseInt(freeSpace, 10)
                    }
                }

                const used = total - free
                return {
                    total,
                    free,
                    used,
                    percentage: (used / total) * 100,
                }
            }
            else {
                const { stdout } = await execAsync('df -k / | tail -1')
                const [, blocks, used, available] = stdout.trim().split(/\s+/)

                const totalBytes = parseInt(blocks, 10) * 1024
                const usedBytes = parseInt(used, 10) * 1024
                const freeBytes = parseInt(available, 10) * 1024

                return {
                    total: totalBytes,
                    free: freeBytes,
                    used: usedBytes,
                    percentage: (usedBytes / totalBytes) * 100,
                }
            }
        }
        catch {
            return {
                total: 0,
                free: 0,
                used: 0,
                percentage: 0,
            }
        }
    },

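    // Average CPU busy fraction since boot across all cores (not an instantaneous sample).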
    getCpuUsage(): number {
        const cpus = os.cpus()
        return cpus.reduce((acc, cpu) => {
            const total = Object.values(cpu.times).reduce((acc, time) => acc + time, 0)
            const idle = cpu.times.idle
            return acc + (1 - idle / total)
        }, 0) / cpus.length * 100
    },

    async getCpuCores(): Promise<number> {
        // cgroups v2 path
        const cgroupV2Path = '/sys/fs/cgroup/cpu.max'
        // cgroups v1 paths
        const quotaPath = '/sys/fs/cgroup/cpu/cpu.cfs_quota_us'
        const periodPath = '/sys/fs/cgroup/cpu/cpu.cfs_period_us'

        try {
            if (await fileSystemUtils.fileExists(cgroupV2Path)) {
                const content = await fs.promises.readFile(cgroupV2Path, 'utf8')
                const [quota, period] = content.trim().split(' ')
                if (quota !== 'max') {
                    return parseInt(quota, 10) / parseInt(period, 10)
                }
            }
            else if (await fileSystemUtils.fileExists(quotaPath) && await fileSystemUtils.fileExists(periodPath)) {
                const quota = parseInt(await fs.promises.readFile(quotaPath, 'utf8'), 10)
                const period = parseInt(await fs.promises.readFile(periodPath, 'utf8'), 10)
                if (quota > 0) {
                    return quota / period
                }
            }
        }
        catch {
            return os.cpus().length
        }
        return os.cpus().length
    },
}
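
// Usage sketch (illustrative): gather a point-in-time machine snapshot.
// const [memory, disk, cores] = await Promise.all([
//     systemUsage.getContainerMemoryUsage(),
//     systemUsage.getDiskInfo(),
//     systemUsage.getCpuCores(),
// ])
// const cpuPercent = systemUsage.getCpuUsage()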

95
activepieces-fork/packages/server/shared/src/lib/trigger-run-stats.ts
Normal file
@@ -0,0 +1,95 @@
import { PlatformId, TriggerRunStatus, TriggerStatusReport } from '@activepieces/shared'
import { FastifyBaseLogger } from 'fastify'
import Redis from 'ioredis'
import { apDayjs, apDayjsDuration } from './dayjs-helper'
import { redisHelper } from './redis'

export const triggerRunStats = (_log: FastifyBaseLogger, redisConnection: Redis) => ({
    async save({ platformId, pieceName, status }: SaveParams): Promise<void> {
        const day = apDayjs().format('YYYY-MM-DD')
        const statusToStore = status === TriggerRunStatus.COMPLETED ? status : TriggerRunStatus.FAILED
        const redisKey = triggerRunRedisKey(platformId, pieceName, day, statusToStore)

        await redisConnection.incr(redisKey)
        await redisConnection.expire(redisKey, apDayjsDuration(14, 'days').asSeconds())
    },

    async getStatusReport(params: GetStatusReportParams): Promise<TriggerStatusReport> {
        const { platformId } = params
        const redisKeys = await redisHelper.scanAll(redisConnection, triggerRunRedisKey(platformId, '*', '*', '*'))
        if (redisKeys.length === 0) {
            return { pieces: {} }
        }
        const values = await redisConnection.mget(redisKeys)
        const parsedRecords = parseRedisRecords(redisKeys, values)
        return aggregateRecords(parsedRecords)
    },
})

export const triggerRunRedisKey = (platformId: PlatformId, pieceName: string, formattedDate: string, status: TriggerRunStatus | '*') => `trigger_run:${platformId}:${pieceName}:${formattedDate}:${status}`
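// Example key (hypothetical values): 'trigger_run:platform-123:@activepieces/piece-slack:2026-01-15:COMPLETED'
// parseRedisRecords below relies on this shape: parts[2] = piece name, parts[3] = day, parts[4] = status.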

type ParsedRedisRecord = {
    pieceName: string
    day: string
    status: TriggerRunStatus
    count: number
}

const parseRedisRecords = (redisKeys: string[], values: (string | null)[]): ParsedRedisRecord[] => {
    return redisKeys.map((key, index) => {
        const parts = key.split(':')
        return {
            pieceName: parts[2],
            day: parts[3],
            status: parts[4] as TriggerRunStatus,
            count: Number(values[index]) || 0,
        }
    })
}

const aggregateRecords = (records: ParsedRedisRecord[]): TriggerStatusReport => {
    const pieceNameToDayToStats = new Map<string, Map<string, { success: number, failure: number }>>()

    for (const record of records) {
        if (!pieceNameToDayToStats.has(record.pieceName)) {
            pieceNameToDayToStats.set(record.pieceName, new Map())
        }
        const dayMap = pieceNameToDayToStats.get(record.pieceName)!
        const dayKey = record.day
        if (!dayMap.has(dayKey)) {
            dayMap.set(dayKey, { success: 0, failure: 0 })
        }
        const dayStats = dayMap.get(dayKey)!
        if (record.status === TriggerRunStatus.COMPLETED) {
            dayStats.success += record.count
        }
        else {
            dayStats.failure += record.count
        }
    }
    const pieces: TriggerStatusReport['pieces'] = {}
    for (const [pieceName, dayMap] of pieceNameToDayToStats) {
        const dailyStats: Record<string, { success: number, failure: number }> = {}
        let totalRuns = 0
        for (const [day, stats] of dayMap) {
            dailyStats[day] = stats
            totalRuns += stats.success + stats.failure
        }
        pieces[pieceName] = {
            dailyStats,
            totalRuns,
        }
    }

    return { pieces }
}

type GetStatusReportParams = {
    platformId: PlatformId
}

type SaveParams = {
    platformId: PlatformId
    pieceName: string
    status: TriggerRunStatus
}
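
// Usage sketch (illustrative; assumes an ioredis connection and a Fastify logger are in scope):
// const stats = triggerRunStats(log, redisConnection)
// await stats.save({ platformId, pieceName: '@activepieces/piece-slack', status: TriggerRunStatus.COMPLETED })
// const report = await stats.getStatusReport({ platformId })  // { pieces: { [pieceName]: { dailyStats, totalRuns } } }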

57
activepieces-fork/packages/server/shared/src/lib/webhook-secrets-util.ts
Normal file
@@ -0,0 +1,57 @@
import {
    assertNotNullOrUndefined,
    FlowVersion,
    isNil,
    parseToJsonIfPossible,
} from '@activepieces/shared'

let webhookSecrets:
    | Record<string, { webhookSecret: string | Record<string, string> }>
    | undefined = undefined

export const webhookSecretsUtils = {
    init,
    getWebhookSecret,
    parseWebhookSecrets,
}

async function init(_webhookSecrets: string) {
    const parsed = parseWebhookSecrets(_webhookSecrets)
    webhookSecrets = parsed
}

function parseWebhookSecrets(webhookSecrets: string): Record<
    string,
    {
        webhookSecret: string | Record<string, string>
    }
> {
    return (
        (parseToJsonIfPossible(webhookSecrets) as
            | Record<
                string,
                {
                    webhookSecret: string | Record<string, string>
                }
            >
            | undefined) ?? {}
    )
}

async function getWebhookSecret(
    flowVersion: FlowVersion,
): Promise<string | Record<string, string> | undefined> {
    const appName = flowVersion.trigger.settings.pieceName
    if (!appName) {
        return undefined
    }
    assertNotNullOrUndefined(
        webhookSecrets,
        'Webhook secrets are not initialized',
    )
    const appConfig = webhookSecrets[appName]
    if (isNil(appConfig)) {
        return undefined
    }
    return appConfig.webhookSecret
}
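
// Usage sketch (illustrative; the JSON shape is an assumption matching the parser above):
// await webhookSecretsUtils.init('{"@activepieces/piece-slack":{"webhookSecret":"example-secret"}}')
// const secret = await webhookSecretsUtils.getWebhookSecret(flowVersion)  // 'example-secret' for a Slack trigger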