Add Activepieces integration for workflow automation

- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
poduck
2025-12-18 22:59:37 -05:00
parent 9848268d34
commit 3aa7199503
16292 changed files with 1284892 additions and 4708 deletions

View File

@@ -0,0 +1,132 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { microsoftPowerBiAuth } from '../../index';
// One column of a Power BI table schema. The dataType union mirrors the
// values offered to the user in the action's JSON default; the run() body
// lower-cases them before sending to the API.
type ColumnDefinition = {
  name: string;
  dataType: 'String' | 'Int64' | 'Double' | 'DateTime' | 'Boolean';
};

// A table entry in the dataset schema supplied via the `tables` JSON prop.
type TableDefinition = {
  name: string;
  columns: ColumnDefinition[];
};

// Dataset creation mode accepted by the Power BI "create dataset" endpoint.
type DatasetMode = 'Push' | 'Streaming' | 'PushStreaming';
/**
 * Activepieces action: create a new Power BI dataset in "My Workspace"
 * with a user-supplied table schema (Push, Streaming, or PushStreaming mode).
 *
 * Fixes over the previous revision:
 * - the request body was built twice (`datasetDefinition` and an identical
 *   `requestBody`); it is now built once and reused,
 * - JSON parse failures now include the underlying error message,
 * - catch variables are typed `unknown` and narrowed.
 */
export const createDatasetAction = createAction({
  auth: microsoftPowerBiAuth,
  name: 'create_dataset',
  displayName: 'Create Dataset',
  description: 'Create a new dataset in Power BI with custom schema (Push, Streaming, or PushStreaming mode).',
  props: {
    dataset_name: Property.ShortText({
      displayName: 'Dataset Name',
      description: 'Name of the dataset to create',
      required: true,
      defaultValue: 'ActivepiecesDataset'
    }),
    mode: Property.StaticDropdown({
      displayName: 'Dataset Mode',
      description: 'The mode of the dataset',
      required: true,
      defaultValue: 'Push',
      options: {
        options: [
          { label: 'Push', value: 'Push' },
          { label: 'Streaming', value: 'Streaming' },
          { label: 'PushStreaming', value: 'PushStreaming' }
        ]
      }
    }),
    tables: Property.Json({
      displayName: 'Tables',
      description: 'Define the tables and their columns for the dataset (JSON format)',
      required: true,
      defaultValue: [
        {
          name: 'Data',
          columns: [
            { name: 'Id', dataType: 'Int64' },
            { name: 'Name', dataType: 'String' },
            { name: 'Value', dataType: 'Double' },
            { name: 'Timestamp', dataType: 'DateTime' }
          ]
        }
      ]
    })
  },
  async run(context) {
    const auth = context.auth;
    const datasetName = context.propsValue.dataset_name;
    const mode = context.propsValue.mode as DatasetMode;

    // The `tables` prop may arrive as a JSON string or as an already-parsed
    // value; accept both, but require an array of table definitions.
    let tables: TableDefinition[];
    try {
      const parsedTables = typeof context.propsValue.tables === 'string'
        ? JSON.parse(context.propsValue.tables)
        : context.propsValue.tables;
      if (!Array.isArray(parsedTables)) {
        throw new Error('Tables must be an array of table definitions');
      }
      tables = parsedTables;
    } catch (e: unknown) {
      console.error('Error parsing tables:', e);
      const reason = e instanceof Error ? e.message : String(e);
      // Surface the parse-failure cause alongside the offending input.
      throw new Error(`Invalid tables JSON format (${reason}). Received value: ${JSON.stringify(context.propsValue.tables)}`);
    }

    // Always use My Workspace URL
    const baseUrl = 'https://api.powerbi.com/v1.0/myorg';

    // Build the dataset schema once; it doubles as the request body.
    // NOTE(review): dataType is lower-cased before sending — presumably to
    // match the Power BI REST API's accepted casing; confirm against the
    // Push Datasets API reference.
    const datasetDefinition = {
      name: datasetName,
      defaultMode: mode,
      tables: tables.map((table) => ({
        name: table.name,
        columns: table.columns.map((column) => ({
          name: column.name,
          dataType: column.dataType.toLowerCase()
        }))
      }))
    };

    try {
      // POST /datasets — create the dataset in "My Workspace".
      const response = await httpClient.sendRequest({
        method: HttpMethod.POST,
        url: `${baseUrl}/datasets`,
        headers: {
          'Authorization': `Bearer ${auth.access_token}`,
          'Content-Type': 'application/json'
        },
        body: datasetDefinition
      });
      // httpClient may resolve non-2xx responses; treat 4xx/5xx as failure.
      if (response.status >= 400) {
        throw new Error(`Failed to create dataset: ${response.status} - ${JSON.stringify(response.body)}`);
      }
      return {
        success: true,
        statusCode: response.status,
        datasetInfo: response.body,
        schema: datasetDefinition
      };
    } catch (error: unknown) {
      console.error('Error creating dataset:', error);
      throw error;
    }
  }
});

View File

@@ -0,0 +1,220 @@
import { createAction, OAuth2PropertyValue, Property } from '@activepieces/pieces-framework';
import { httpClient, HttpMethod } from '@activepieces/pieces-common';
import { microsoftPowerBiAuth } from '../../index';
// A single row pushed to a Power BI table: flat key/value pairs whose keys
// must match the target table's column names. Nested objects are not part
// of this contract.
type PowerBIRow = {
  [key: string]: string | number | boolean | null | undefined;
};
/**
 * Activepieces action: append rows to a table in a Power BI dataset
 * (Push, Streaming, and PushStreaming modes), optionally triggering a
 * dataset refresh afterwards.
 *
 * Fixes over the previous revision:
 * - the table name is now URL-encoded before being interpolated into the
 *   request path (table names with spaces or special characters previously
 *   produced a malformed URL),
 * - the tables response is typed instead of using `(t: any)` casts,
 * - the rows parse failure now reports the underlying cause,
 * - catch variables are typed `unknown` and narrowed.
 */
export const pushRowsToDatasetTableAction = createAction({
  auth: microsoftPowerBiAuth,
  name: 'push_rows_to_dataset_table',
  displayName: 'Push Rows to Dataset Table',
  description: 'Add rows to a table in a Power BI dataset (supports Push, Streaming, and PushStreaming modes)',
  props: {
    dataset_id: Property.Dropdown({
      auth: microsoftPowerBiAuth,
      displayName: 'Dataset',
      description: 'Select a dataset.',
      required: true,
      refreshers: ['auth'],
      options: async (propsValue) => {
        const auth = propsValue['auth'] as OAuth2PropertyValue;
        if (!auth) {
          return {
            disabled: true,
            options: [],
            placeholder: 'Please authenticate first.'
          };
        }
        try {
          // List the user's datasets to populate the dropdown.
          const response = await httpClient.sendRequest<{ value: { name: string; id: string }[] }>({
            method: HttpMethod.GET,
            url: 'https://api.powerbi.com/v1.0/myorg/datasets',
            headers: {
              'Authorization': `Bearer ${auth.access_token}`
            }
          });
          return {
            options: response.body.value.map((dataset) => ({
              label: dataset.name,
              value: dataset.id
            }))
          };
        } catch (error: unknown) {
          console.error('Error fetching datasets:', error);
          return {
            disabled: true,
            options: [],
            placeholder: 'Error loading datasets'
          };
        }
      }
    }),
    table_name: Property.Dropdown({
      auth: microsoftPowerBiAuth,
      displayName: 'Table',
      description: 'Select a table',
      required: true,
      refreshers: ['auth', 'dataset_id'],
      options: async (propsValue) => {
        const auth = propsValue['auth'] as OAuth2PropertyValue;
        const datasetId = propsValue['dataset_id'] as string;
        if (!auth || !datasetId) {
          return {
            disabled: true,
            options: [],
            placeholder: 'Please select a dataset first'
          };
        }
        try {
          // List the tables of the selected dataset.
          const response = await httpClient.sendRequest<{ value: { name: string }[] }>({
            method: HttpMethod.GET,
            url: `https://api.powerbi.com/v1.0/myorg/datasets/${datasetId}/tables`,
            headers: {
              'Authorization': `Bearer ${auth.access_token}`
            }
          });
          return {
            options: response.body.value.map((table) => ({
              label: table.name,
              value: table.name
            }))
          };
        } catch (error: unknown) {
          console.error('Error fetching tables:', error);
          return {
            disabled: true,
            options: [],
            placeholder: 'Error loading tables'
          };
        }
      }
    }),
    rows: Property.Json({
      displayName: 'Rows',
      description: 'JSON object containing the rows to add to the table. Each row must match your table schema.',
      required: true,
      // NOTE(review): this default timestamp is computed once at module load,
      // not per run — confirm that is the intended behavior for the example.
      defaultValue: {
        rows: [
          {
            Id: 1,
            Name: "Example",
            Value: 42.5,
            Timestamp: new Date().toISOString()
          }
        ]
      }
    }),
    skip_refresh: Property.Checkbox({
      displayName: 'Skip Dataset Refresh',
      description: 'Skip refreshing the dataset after pushing data (only applies to Push and PushStreaming modes)',
      required: false,
      defaultValue: false
    })
  },
  async run(context) {
    const auth = context.auth;
    const datasetId = context.propsValue.dataset_id;
    const tableName = context.propsValue.table_name;

    // Accept rows as a JSON string, a bare array, or `{ rows: [...] }`.
    let rows: PowerBIRow[];
    try {
      const rowsInput = context.propsValue.rows;
      const parsedInput = typeof rowsInput === 'string' ? JSON.parse(rowsInput) : rowsInput;
      if (Array.isArray(parsedInput)) {
        rows = parsedInput;
      } else if (parsedInput && Array.isArray(parsedInput.rows)) {
        rows = parsedInput.rows;
      } else {
        throw new Error('Rows must be either an array or an object with a rows array property');
      }
    } catch (e: unknown) {
      console.error('Error parsing rows:', e);
      const reason = e instanceof Error ? e.message : String(e);
      // Include the cause so the user can correct their input.
      throw new Error(`Invalid rows format: ${reason}`);
    }

    const skipRefresh = context.propsValue.skip_refresh;
    const baseUrl = 'https://api.powerbi.com/v1.0/myorg';

    try {
      // 1. Get dataset info (returned to the caller for context).
      const datasetResponse = await httpClient.sendRequest({
        method: HttpMethod.GET,
        url: `${baseUrl}/datasets/${datasetId}`,
        headers: {
          'Authorization': `Bearer ${auth.access_token}`
        }
      });

      // 2. Get table info.
      const tableResponse = await httpClient.sendRequest<{ value: { name: string }[] }>({
        method: HttpMethod.GET,
        url: `${baseUrl}/datasets/${datasetId}/tables`,
        headers: {
          'Authorization': `Bearer ${auth.access_token}`
        }
      });

      // 3. Validate the target table exists in the dataset before pushing.
      const tables = tableResponse.body.value;
      const targetTable = tables.find((t) => t.name === tableName);
      if (!targetTable) {
        throw new Error(`Table '${tableName}' not found in dataset. Available tables: ${tables.map((t) => t.name).join(', ')}`);
      }

      // 4. Push the rows. Encode the table name: it is user-defined and may
      // contain spaces or reserved URL characters.
      const url = `${baseUrl}/datasets/${datasetId}/tables/${encodeURIComponent(tableName)}/rows`;
      const response = await httpClient.sendRequest({
        method: HttpMethod.POST,
        url: url,
        headers: {
          'Authorization': `Bearer ${auth.access_token}`,
          'Content-Type': 'application/json'
        },
        body: {
          rows
        }
      });
      if (response.status >= 400) {
        const errorMessage = response.body?.error?.message || JSON.stringify(response.body);
        throw new Error(`Power BI API Error: ${response.status} - ${errorMessage}`);
      }

      // 5. Refresh dataset unless skipped. A refresh failure is reported in
      // the result (best-effort) rather than failing the whole run.
      let refreshResponse = null;
      if (!skipRefresh) {
        refreshResponse = await httpClient.sendRequest({
          method: HttpMethod.POST,
          url: `${baseUrl}/datasets/${datasetId}/refreshes`,
          headers: {
            'Authorization': `Bearer ${auth.access_token}`
          }
        }).catch((e) => ({ status: e.response?.status, body: e.response?.body }));
      }

      return {
        success: true,
        statusCode: response.status,
        body: response.body,
        url: url,
        sentData: rows,
        datasetInfo: datasetResponse.body,
        tables: tableResponse.body,
        refreshAttempt: refreshResponse?.body
      };
    } catch (error: unknown) {
      console.error('Error pushing data to Power BI:', error);
      throw error;
    }
  }
});