Add Activepieces integration for workflow automation
- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
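None of the embed plumbing appears in this diff, so as an orientation aid only: the sketch below shows one way a frontend page such as Automations could request an embed token and mount the builder in an iframe. The endpoint path, response shape, and query parameter are illustrative assumptions, not the actual SmoothSchedule or Activepieces API.

// Hypothetical frontend helper for the embedded workflow builder.
// The endpoint path and response fields below are assumptions for illustration.
async function mountAutomationsBuilder(container: HTMLElement): Promise<void> {
  const res = await fetch('/api/integrations/activepieces/embed-token/', { credentials: 'include' });
  const { token, instanceUrl } = (await res.json()) as { token: string; instanceUrl: string };

  const iframe = document.createElement('iframe');
  // Passing the JWT in the URL is only one option; the fork may use postMessage or the embed SDK instead.
  iframe.src = `${instanceUrl}/embed?jwt=${encodeURIComponent(token)}`;
  iframe.style.width = '100%';
  iframe.style.height = '100%';
  container.appendChild(iframe);
}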
@@ -0,0 +1,33 @@
{
  "extends": [
    "../../../../.eslintrc.base.json"
  ],
  "ignorePatterns": [
    "!**/*"
  ],
  "overrides": [
    {
      "files": [
        "*.ts",
        "*.tsx",
        "*.js",
        "*.jsx"
      ],
      "rules": {}
    },
    {
      "files": [
        "*.ts",
        "*.tsx"
      ],
      "rules": {}
    },
    {
      "files": [
        "*.js",
        "*.jsx"
      ],
      "rules": {}
    }
  ]
}
@@ -0,0 +1,7 @@
# pieces-contextual-ai

This library was generated with [Nx](https://nx.dev).

## Building

Run `nx build pieces-contextual-ai` to build the library.
@@ -0,0 +1,89 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "@activepieces/piece-contextual-ai",
"dependencies": {
"contextual-client": "^0.10.0",
"tslib": "^2.3.0",
},
},
},
"packages": {
"@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
"@types/node-fetch": ["@types/node-fetch@2.6.13", "", { "dependencies": { "@types/node": "*", "form-data": "^4.0.4" } }, "sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw=="],
"abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="],
"agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="],
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
"contextual-client": ["contextual-client@0.10.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-9hjPD0WdLKvOGh4ajtOG75Jh8IS3edrG4jjilDsnAU1vN6SrBEIxe3ehKf3MqxnMRa+2DNkltLCi6zB2zV+1kg=="],
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
"es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
"event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="],
"form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="],
"form-data-encoder": ["form-data-encoder@1.7.2", "", {}, "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A=="],
"formdata-node": ["formdata-node@4.4.1", "", { "dependencies": { "node-domexception": "1.0.0", "web-streams-polyfill": "4.0.0-beta.3" } }, "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ=="],
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
"has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="],
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
"humanize-ms": ["humanize-ms@1.2.1", "", { "dependencies": { "ms": "^2.0.0" } }, "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ=="],
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
"node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
"tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="],
"webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
"whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
}
}
@@ -0,0 +1,11 @@
{
  "name": "@activepieces/piece-contextual-ai",
  "version": "0.0.1",
  "type": "commonjs",
  "main": "./src/index.js",
  "types": "./src/index.d.ts",
  "dependencies": {
    "contextual-client": "^0.10.0",
    "tslib": "^2.3.0"
  }
}
@@ -0,0 +1,65 @@
{
  "name": "pieces-contextual-ai",
  "$schema": "../../../../node_modules/nx/schemas/project-schema.json",
  "sourceRoot": "packages/pieces/community/contextual-ai/src",
  "projectType": "library",
  "release": {
    "version": {
      "manifestRootsToUpdate": [
        "dist/{projectRoot}"
      ],
      "currentVersionResolver": "git-tag",
      "fallbackCurrentVersionResolver": "disk"
    }
  },
  "tags": [],
  "targets": {
    "build": {
      "executor": "@nx/js:tsc",
      "outputs": [
        "{options.outputPath}"
      ],
      "options": {
        "outputPath": "dist/packages/pieces/community/contextual-ai",
        "tsConfig": "packages/pieces/community/contextual-ai/tsconfig.lib.json",
        "packageJson": "packages/pieces/community/contextual-ai/package.json",
        "main": "packages/pieces/community/contextual-ai/src/index.ts",
        "assets": [
          "packages/pieces/community/contextual-ai/*.md",
          {
            "input": "packages/pieces/community/contextual-ai/src/i18n",
            "output": "./src/i18n",
            "glob": "**/!(i18n.json)"
          }
        ],
        "buildableProjectDepsInPackageJsonType": "dependencies",
        "updateBuildableProjectDepsInPackageJson": true
      },
      "dependsOn": [
        "prebuild",
        "^build"
      ]
    },
    "nx-release-publish": {
      "options": {
        "packageRoot": "dist/{projectRoot}"
      }
    },
    "prebuild": {
      "dependsOn": [
        "^build"
      ],
      "executor": "nx:run-commands",
      "options": {
        "cwd": "packages/pieces/community/contextual-ai",
        "command": "bun install --no-save --silent"
      }
    },
    "lint": {
      "executor": "@nx/eslint:lint",
      "outputs": [
        "{options.outputFile}"
      ]
    }
  }
}
@@ -0,0 +1,90 @@
{
"Integrate with Contextual AI to automate document processing and AI workflows": "Integrate with Contextual AI to automate document processing and AI workflows",
"API Key": "API Key",
"Base URL": "Base URL",
"Your Contextual AI API key": "Your Contextual AI API key",
"API base URL (leave blank for default)": "API base URL (leave blank for default)",
"\n## Contextual AI Connection Setup\n\n### Prerequisites\n- Create a Contextual AI account at [Contextual AI](https://contextual.ai)\n- Generate an API key from your workspace settings\n- You'll receive $25 in free credits (or $50 with work email)\n\n### Authentication Fields\n\n**API Key**: Your Contextual AI API key (required)\n- Sign in to your Contextual AI workspace\n- Navigate to API Keys in the sidebar\n- Click \"Create API Key\" and follow the instructions\n- Copy the generated key and paste it here\n\n**Base URL**: ": "\n## Contextual AI Connection Setup\n\n### Prerequisites\n- Create a Contextual AI account at [Contextual AI](https://contextual.ai)\n- Generate an API key from your workspace settings\n- You'll receive $25 in free credits (or $50 with work email)\n\n### Authentication Fields\n\n**API Key**: Your Contextual AI API key (required)\n- Sign in to your Contextual AI workspace\n- Navigate to API Keys in the sidebar\n- Click \"Create API Key\" and follow the instructions\n- Copy the generated key and paste it here\n\n**Base URL**: The API base URL (optional)\n- Leave blank to use the default: `https://api.contextual.ai/v1`\n- Only change if you have a custom deployment\n",
"Query Agent": "Query Agent",
"Generate Text": "Generate Text",
"Ingest Document": "Ingest Document",
"Parse File": "Parse File",
"Create Agent": "Create Agent",
"Invite Users": "Invite Users",
"Create Datastore": "Create Datastore",
"Send a message to a Contextual AI agent and get a response": "Send a message to a Contextual AI agent and get a response",
"Generate text using Contextual AI's Grounded Language Model": "Generate text using Contextual AI's Grounded Language Model",
"Upload and ingest a document into a Contextual AI datastore": "Upload and ingest a document into a Contextual AI datastore",
"Parse a document file into structured Markdown and/or JSON format": "Parse a document file into structured Markdown and/or JSON format",
"Create a new Contextual AI agent with specified configuration": "Create a new Contextual AI agent with specified configuration",
"Invite new users to the Contextual AI workspace": "Invite new users to the Contextual AI workspace",
"Create a new datastore for organizing documents": "Create a new datastore for organizing documents",
"Agent": "Agent",
"Message": "Message",
"Conversation ID": "Conversation ID",
"Include Retrieval Content": "Include Retrieval Content",
"Prompt": "Prompt",
"Model Version": "Model Version",
"Knowledge Sources": "Knowledge Sources",
"System Prompt": "System Prompt",
"Max Tokens": "Max Tokens",
"Temperature": "Temperature",
"Top P": "Top P",
"Avoid Commentary": "Avoid Commentary",
"Datastore": "Datastore",
"Document File": "Document File",
"Custom Metadata": "Custom Metadata",
"Configuration Override": "Configuration Override",
"Parse Mode": "Parse Mode",
"Page Range": "Page Range",
"Enable Document Hierarchy": "Enable Document Hierarchy",
"Enable Split Tables": "Enable Split Tables",
"Max Split Table Cells": "Max Split Table Cells",
"Figure Caption Mode": "Figure Caption Mode",
"Agent Name": "Agent Name",
"Description": "Description",
"Datastores": "Datastores",
"Filter Prompt": "Filter Prompt",
"Users to Invite": "Users to Invite",
"Tenant Short Name": "Tenant Short Name",
"Datastore Name": "Datastore Name",
"Select the agent to query": "Select the agent to query",
"The message to send to the agent": "The message to send to the agent",
"Optional conversation ID to continue an existing conversation (leave empty for new conversation)": "Optional conversation ID to continue an existing conversation (leave empty for new conversation)",
"Include the text of retrieved contents in the response": "Include the text of retrieved contents in the response",
"The text prompt to generate a response for": "The text prompt to generate a response for",
"The version of Contextual's GLM to use": "The version of Contextual's GLM to use",
"Optional knowledge sources to ground the generation (leave empty for general generation)": "Optional knowledge sources to ground the generation (leave empty for general generation)",
"Optional system instructions for the model": "Optional system instructions for the model",
"Maximum number of tokens to generate (default: 1024)": "Maximum number of tokens to generate (default: 1024)",
"Sampling temperature (0.0 to 1.0, lower = more focused, higher = more creative)": "Sampling temperature (0.0 to 1.0, lower = more focused, higher = more creative)",
"Nucleus sampling parameter (0.0 to 1.0)": "Nucleus sampling parameter (0.0 to 1.0)",
"Avoid providing additional conversational commentary not grounded in context": "Avoid providing additional conversational commentary not grounded in context",
"Select the datastore to upload the document to": "Select the datastore to upload the document to",
"The document file to upload (PDF, HTML, DOC, DOCX, PPT, PPTX)": "The document file to upload (PDF, HTML, DOC, DOCX, PPT, PPTX)",
"Optional custom metadata as key-value pairs (max 15 fields, 2KB total)": "Optional custom metadata as key-value pairs (max 15 fields, 2KB total)",
"Optional configuration override in JSON format for this specific document": "Optional configuration override in JSON format for this specific document",
"The document file to parse (PDF, DOC, DOCX, PPT, PPTX)": "The document file to parse (PDF, DOC, DOCX, PPT, PPTX)",
"Parsing mode - basic for simple text, standard for complex documents": "Parsing mode - basic for simple text, standard for complex documents",
"Optional page range to parse (e.g., \"0,1,2\" or \"0-2,5,6\")": "Optional page range to parse (e.g., \"0,1,2\" or \"0-2,5,6\")",
"Add table of contents with document structure (beta feature)": "Add table of contents with document structure (beta feature)",
"Split large tables into multiple tables with headers": "Split large tables into multiple tables with headers",
"Threshold for splitting large tables (only used when split tables is enabled)": "Threshold for splitting large tables (only used when split tables is enabled)",
"How thorough figure captions should be": "How thorough figure captions should be",
"Name for the new agent": "Name for the new agent",
"Optional description of the agent": "Optional description of the agent",
"Select datastores to associate with this agent (leave empty to create new datastore)": "Select datastores to associate with this agent (leave empty to create new datastore)",
"Optional system prompt for the agent": "Optional system prompt for the agent",
"Optional prompt for filtering retrieved chunks": "Optional prompt for filtering retrieved chunks",
"List of users to invite": "List of users to invite",
"The short name of the tenant/workspace": "The short name of the tenant/workspace",
"Name for the new datastore": "Name for the new datastore",
"GLM v2": "GLM v2",
"GLM v1": "GLM v1",
"Basic (text-only)": "Basic (text-only)",
"Standard (complex documents)": "Standard (complex documents)",
"Concise": "Concise",
"Detailed (beta)": "Detailed (beta)",
"New Agent": "New Agent",
"Triggers when a new Contextual AI agent is created": "Triggers when a new Contextual AI agent is created"
}
@@ -0,0 +1,84 @@
import { PieceAuth, createPiece, Property } from "@activepieces/pieces-framework";
import { PieceCategory } from '@activepieces/shared';
import { ContextualAI } from 'contextual-client';
import { queryAgentAction } from './lib/actions/query-agent';
import { generateAction } from './lib/actions/generate';
import { ingestDocumentAction } from './lib/actions/ingest-document';
import { parseFileAction } from './lib/actions/parse-file';
import { createAgentAction } from './lib/actions/create-agent';
import { inviteUsersAction } from './lib/actions/invite-users';
import { createDatastoreAction } from './lib/actions/create-datastore';
import { newAgentTrigger } from './lib/triggers/new-agent';

const markdown = `
## Contextual AI Connection Setup

### Prerequisites
- Create a Contextual AI account at [Contextual AI](https://contextual.ai)
- Generate an API key from your workspace settings
- You'll receive $25 in free credits (or $50 with work email)

### Authentication Fields

**API Key**: Your Contextual AI API key (required)
- Sign in to your Contextual AI workspace
- Navigate to API Keys in the sidebar
- Click "Create API Key" and follow the instructions
- Copy the generated key and paste it here

**Base URL**: The API base URL (optional)
- Leave blank to use the default: \`https://api.contextual.ai/v1\`
- Only change if you have a custom deployment
`;

export const contextualAiAuth = PieceAuth.CustomAuth({
  required: true,
  description: markdown,
  props: {
    apiKey: PieceAuth.SecretText({
      displayName: 'API Key',
      description: 'Your Contextual AI API key',
      required: true,
    }),
    baseUrl: Property.ShortText({
      displayName: 'Base URL',
      description: 'API base URL (leave blank for default)',
      required: false,
    }),
  },
  validate: async ({ auth }) => {
    try {
      const { apiKey, baseUrl } = auth;

      const client = new ContextualAI({
        apiKey: apiKey,
        baseURL: baseUrl || 'https://api.contextual.ai/v1',
      });

      await client.datastores.list();

      return {
        valid: true,
      };
    } catch (error) {
      return {
        valid: false,
        error: `Authentication failed: ${error instanceof Error ? error.message : 'Unknown error'}. Please verify your API key and base URL.`,
      };
    }
  },
});

export const contextualAi = createPiece({
  displayName: "Contextual AI",
  description: "Integrate with Contextual AI to automate document processing and AI workflows",
  auth: contextualAiAuth,
  minimumSupportedRelease: '0.36.1',
  logoUrl: "https://cdn.activepieces.com/pieces/contextual-ai.png",
  categories: [PieceCategory.ARTIFICIAL_INTELLIGENCE],
  authors: ["onyedikachi-david"],
  actions: [queryAgentAction, generateAction, ingestDocumentAction, parseFileAction, createAgentAction, inviteUsersAction, createDatastoreAction],
  triggers: [newAgentTrigger],
});
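The `validate` callback above doubles as documentation for the minimum working credential set. A standalone sketch of the same connectivity check, using only the calls already shown in this file and assuming a real API key, might look like:

import { ContextualAI } from 'contextual-client';

// Standalone version of the connectivity check used by contextualAiAuth.validate.
async function checkContextualAiCredentials(apiKey: string, baseUrl?: string): Promise<boolean> {
  const client = new ContextualAI({
    apiKey,
    baseURL: baseUrl || 'https://api.contextual.ai/v1',
  });
  try {
    // Listing datastores is a cheap call that fails fast on a bad key or URL.
    await client.datastores.list();
    return true;
  } catch {
    return false;
  }
}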
@@ -0,0 +1,101 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';
import type { Datastore } from 'contextual-client/resources/datastores';

export const createAgentAction = createAction({
  auth: contextualAiAuth,
  name: 'create_agent',
  displayName: 'Create Agent',
  description: 'Create a new Contextual AI agent with specified configuration',
  props: {
    name: Property.ShortText({
      displayName: 'Agent Name',
      description: 'Name for the new agent',
      required: true,
    }),
    description: Property.ShortText({
      displayName: 'Description',
      description: 'Optional description of the agent',
      required: false,
    }),
    datastoreIds: Property.MultiSelectDropdown({
      auth: contextualAiAuth,
      displayName: 'Datastores',
      description: 'Select datastores to associate with this agent (leave empty to create new datastore)',
      required: false,
      refreshers: [],
      options: async ({ auth }) => {
        try {
          if (!auth) {
            return {
              disabled: true,
              options: [],
              placeholder: 'Please connect your account first',
            };
          }
          const { apiKey, baseUrl } = auth.props;
          const client = new ContextualAI({
            apiKey: apiKey,
            baseURL: baseUrl || 'https://api.contextual.ai/v1',
          });

          const allDatastores: Datastore[] = [];
          for await (const datastore of client.datastores.list()) {
            allDatastores.push(datastore);
          }

          return {
            options: allDatastores.map((datastore: Datastore) => ({
              label: datastore.name,
              value: datastore.id,
            })),
          };
        } catch (error) {
          return {
            options: [],
            error: 'Failed to fetch datastores. Please check your API key.',
          };
        }
      },
    }),
    systemPrompt: Property.LongText({
      displayName: 'System Prompt',
      description: 'Optional system prompt for the agent',
      required: false,
    }),
    filterPrompt: Property.LongText({
      displayName: 'Filter Prompt',
      description: 'Optional prompt for filtering retrieved chunks',
      required: false,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const { name, description, datastoreIds, systemPrompt, filterPrompt } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const datastoreIdsArray = datastoreIds || [];

    const agentParams: any = {
      name,
    };

    if (description) agentParams.description = description;
    if (datastoreIdsArray.length > 0) agentParams.datastore_ids = datastoreIdsArray;
    if (systemPrompt) agentParams.multiturn_system_prompt = systemPrompt;
    if (filterPrompt) agentParams.filter_prompt = filterPrompt;

    const response = await client.agents.create(agentParams);

    return {
      agent_id: response.id,
      datastore_ids: response.datastore_ids,
      status: 'Agent created successfully',
    };
  },
});
@@ -0,0 +1,35 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';

export const createDatastoreAction = createAction({
  auth: contextualAiAuth,
  name: 'create_datastore',
  displayName: 'Create Datastore',
  description: 'Create a new datastore for organizing documents',
  props: {
    name: Property.ShortText({
      displayName: 'Datastore Name',
      description: 'Name for the new datastore',
      required: true,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const { name } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const response = await client.datastores.create({
      name,
    });

    return {
      datastore_id: response.id,
      status: 'Datastore created successfully',
    };
  },
});
@@ -0,0 +1,100 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';

export const generateAction = createAction({
  auth: contextualAiAuth,
  name: 'generate',
  displayName: 'Generate Text',
  description: 'Generate text using Contextual AI\'s Grounded Language Model',
  props: {
    prompt: Property.LongText({
      displayName: 'Prompt',
      description: 'The text prompt to generate a response for',
      required: true,
    }),
    model: Property.StaticDropdown({
      displayName: 'Model Version',
      description: 'The version of Contextual\'s GLM to use',
      required: true,
      options: {
        options: [
          { label: 'GLM v2', value: 'v2' },
          { label: 'GLM v1', value: 'v1' },
        ],
      },
      defaultValue: 'v2',
    }),
    knowledge: Property.Array({
      displayName: 'Knowledge Sources',
      description: 'Optional knowledge sources to ground the generation (leave empty for general generation)',
      required: false,
    }),
    systemPrompt: Property.LongText({
      displayName: 'System Prompt',
      description: 'Optional system instructions for the model',
      required: false,
    }),
    maxTokens: Property.Number({
      displayName: 'Max Tokens',
      description: 'Maximum number of tokens to generate (default: 1024)',
      required: false,
      defaultValue: 1024,
    }),
    temperature: Property.Number({
      displayName: 'Temperature',
      description: 'Sampling temperature (0.0 to 1.0, lower = more focused, higher = more creative)',
      required: false,
      defaultValue: 0.7,
    }),
    topP: Property.Number({
      displayName: 'Top P',
      description: 'Nucleus sampling parameter (0.0 to 1.0)',
      required: false,
      defaultValue: 0.9,
    }),
    avoidCommentary: Property.Checkbox({
      displayName: 'Avoid Commentary',
      description: 'Avoid providing additional conversational commentary not grounded in context',
      required: false,
      defaultValue: false,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const {
      prompt,
      model,
      knowledge,
      systemPrompt,
      maxTokens,
      temperature,
      topP,
      avoidCommentary,
    } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const messages: Array<{ role: 'user' | 'assistant'; content: string }> = [
      { role: 'user', content: prompt },
    ];

    const response = await client.generate.create({
      messages,
      model,
      knowledge: (knowledge || []) as string[],
      system_prompt: systemPrompt,
      max_new_tokens: maxTokens,
      temperature,
      top_p: topP,
      avoid_commentary: avoidCommentary,
    });

    return {
      response: response.response,
    };
  },
});
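For reference, the Generate Text action ultimately reduces to a single `client.generate.create` call. This is a minimal standalone sketch of that call with placeholder prompt and knowledge strings, using only parameters the action itself already passes:

import { ContextualAI } from 'contextual-client';

// Standalone sketch of the grounded-generation call wrapped by the Generate Text action.
async function generateGroundedAnswer(apiKey: string): Promise<string> {
  const client = new ContextualAI({ apiKey, baseURL: 'https://api.contextual.ai/v1' });
  const result = await client.generate.create({
    model: 'v2',
    messages: [{ role: 'user', content: 'Summarize our refund policy.' }],
    // Placeholder knowledge snippets; in the action these come from the Knowledge Sources array.
    knowledge: ['Refunds are issued within 14 days of purchase.'],
    avoid_commentary: true,
  });
  return result.response;
}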
@@ -0,0 +1,101 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI, toFile } from 'contextual-client';
import type { Datastore } from 'contextual-client/resources/datastores';

export const ingestDocumentAction = createAction({
  auth: contextualAiAuth,
  name: 'ingest_document',
  displayName: 'Ingest Document',
  description: 'Upload and ingest a document into a Contextual AI datastore',
  props: {
    datastoreId: Property.Dropdown({
      auth: contextualAiAuth,
      displayName: 'Datastore',
      description: 'Select the datastore to upload the document to',
      required: true,
      refreshers: [],
      options: async ({ auth }) => {
        try {
          if (!auth) {
            return {
              disabled: true,
              options: [],
              placeholder: 'Please connect your account first',
            };
          }
          const { apiKey, baseUrl } = auth.props;
          const client = new ContextualAI({
            apiKey: apiKey,
            baseURL: baseUrl || 'https://api.contextual.ai/v1',
          });

          const allDatastores: Datastore[] = [];
          for await (const datastore of client.datastores.list()) {
            allDatastores.push(datastore);
          }

          return {
            options: allDatastores.map((datastore: Datastore) => ({
              label: datastore.name,
              value: datastore.id,
            })),
          };
        } catch (error) {
          return {
            options: [],
            error: 'Failed to fetch datastores. Please check your API key.',
          };
        }
      },
    }),
    file: Property.File({
      displayName: 'Document File',
      description: 'The document file to upload (PDF, HTML, DOC, DOCX, PPT, PPTX)',
      required: true,
    }),
    customMetadata: Property.Object({
      displayName: 'Custom Metadata',
      description: 'Optional custom metadata as key-value pairs (max 15 fields, 2KB total)',
      required: false,
    }),
    configuration: Property.LongText({
      displayName: 'Configuration Override',
      description: 'Optional configuration override in JSON format for this specific document',
      required: false,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const { datastoreId, file, customMetadata, configuration } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    let metadataString: string | undefined;
    if (customMetadata && Object.keys(customMetadata).length > 0) {
      metadataString = JSON.stringify({
        custom_metadata: customMetadata,
      });
    }

    const uploadableFile = await toFile(file.data, file.filename || 'uploaded-file', {
      type: file.extension ? `application/${file.extension}` : 'application/octet-stream',
    });

    const fileData = {
      file: uploadableFile,
      custom_metadata: metadataString,
      configuration: configuration,
    };

    const response = await client.datastores.documents.ingest(datastoreId, fileData);

    return {
      document_id: response.id,
      status: 'Document ingestion started. Use the document ID to check status.',
    };
  },
});
@@ -0,0 +1,55 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';

export const inviteUsersAction = createAction({
  auth: contextualAiAuth,
  name: 'invite_users',
  displayName: 'Invite Users',
  description: 'Invite new users to the Contextual AI workspace',
  props: {
    users: Property.Array({
      displayName: 'Users to Invite',
      description: 'List of users to invite',
      required: true,
      properties: {
        email: Property.ShortText({
          displayName: 'Email',
          description: 'Email address of the user to invite',
          required: true,
        }),
      },
    }),
    tenantShortName: Property.ShortText({
      displayName: 'Tenant Short Name',
      description: 'The short name of the tenant/workspace',
      required: true,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const { users, tenantShortName } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const newUsers = users.map((user: any) => ({
      email: user.email,
      agent_level_roles: ['AGENT_LEVEL_USER' as const],
    }));

    const response = await client.users.invite({
      new_users: newUsers,
      tenant_short_name: tenantShortName,
    });

    return {
      invited_users: response.invited_user_emails,
      errors: response.error_details,
      total_invited: response.invited_user_emails.length,
      total_errors: Object.keys(response.error_details).length,
    };
  },
});
@@ -0,0 +1,98 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';

export const parseFileAction = createAction({
  auth: contextualAiAuth,
  name: 'parse_file',
  displayName: 'Parse File',
  description: 'Parse a document file into structured Markdown and/or JSON format',
  props: {
    file: Property.File({
      displayName: 'Document File',
      description: 'The document file to parse (PDF, DOC, DOCX, PPT, PPTX)',
      required: true,
    }),
    parseMode: Property.StaticDropdown({
      displayName: 'Parse Mode',
      description: 'Parsing mode - basic for simple text, standard for complex documents',
      required: true,
      options: {
        options: [
          { label: 'Basic (text-only)', value: 'basic' },
          { label: 'Standard (complex documents)', value: 'standard' },
        ],
      },
      defaultValue: 'standard',
    }),
    pageRange: Property.ShortText({
      displayName: 'Page Range',
      description: 'Optional page range to parse (e.g., "0,1,2" or "0-2,5,6")',
      required: false,
    }),
    enableDocumentHierarchy: Property.Checkbox({
      displayName: 'Enable Document Hierarchy',
      description: 'Add table of contents with document structure (beta feature)',
      required: false,
      defaultValue: false,
    }),
    enableSplitTables: Property.Checkbox({
      displayName: 'Enable Split Tables',
      description: 'Split large tables into multiple tables with headers',
      required: false,
      defaultValue: false,
    }),
    maxSplitTableCells: Property.Number({
      displayName: 'Max Split Table Cells',
      description: 'Threshold for splitting large tables (only used when split tables is enabled)',
      required: false,
    }),
    figureCaptionMode: Property.StaticDropdown({
      displayName: 'Figure Caption Mode',
      description: 'How thorough figure captions should be',
      required: false,
      options: {
        options: [
          { label: 'Concise', value: 'concise' },
          { label: 'Detailed (beta)', value: 'detailed' },
        ],
      },
      defaultValue: 'concise',
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const {
      file,
      parseMode,
      pageRange,
      enableDocumentHierarchy,
      enableSplitTables,
      maxSplitTableCells,
      figureCaptionMode,
    } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const parseParams: any = {
      raw_file: file.data,
      parse_mode: parseMode,
    };

    if (pageRange) parseParams.page_range = pageRange;
    if (enableDocumentHierarchy !== undefined) parseParams.enable_document_hierarchy = enableDocumentHierarchy;
    if (enableSplitTables !== undefined) parseParams.enable_split_tables = enableSplitTables;
    if (maxSplitTableCells !== undefined) parseParams.max_split_table_cells = maxSplitTableCells;
    if (figureCaptionMode) parseParams.figure_caption_mode = figureCaptionMode;

    const response = await client.parse.create(parseParams);

    return {
      job_id: response.job_id,
      status: 'Parse job started. Use the job ID to check status and get results.',
    };
  },
});
@@ -0,0 +1,96 @@
import { createAction, Property } from "@activepieces/pieces-framework";
import { contextualAiAuth } from "../../index";
import { ContextualAI } from 'contextual-client';
import type { Agent } from 'contextual-client/resources/agents';

export const queryAgentAction = createAction({
  auth: contextualAiAuth,
  name: 'query_agent',
  displayName: 'Query Agent',
  description: 'Send a message to a Contextual AI agent and get a response',
  props: {
    agentId: Property.Dropdown({
      auth: contextualAiAuth,
      displayName: 'Agent',
      description: 'Select the agent to query',
      required: true,
      refreshers: [],
      options: async ({ auth }) => {
        try {
          if (!auth) {
            return {
              disabled: true,
              options: [],
              placeholder: 'Please connect your account first',
            };
          }
          const { apiKey, baseUrl } = auth.props;
          const client = new ContextualAI({
            apiKey: apiKey,
            baseURL: baseUrl || 'https://api.contextual.ai/v1',
          });

          const allAgents: Agent[] = [];
          for await (const agent of client.agents.list()) {
            allAgents.push(agent);
          }

          return {
            options: allAgents.map((agent: Agent) => ({
              label: agent.name,
              value: agent.id,
            })),
          };
        } catch (error) {
          return {
            options: [],
            error: 'Failed to fetch agents. Please check your API key.',
          };
        }
      },
    }),
    message: Property.LongText({
      displayName: 'Message',
      description: 'The message to send to the agent',
      required: true,
    }),
    conversationId: Property.ShortText({
      displayName: 'Conversation ID',
      description: 'Optional conversation ID to continue an existing conversation (leave empty for new conversation)',
      required: false,
    }),
    includeRetrievalContent: Property.Checkbox({
      displayName: 'Include Retrieval Content',
      description: 'Include the text of retrieved contents in the response',
      required: false,
      defaultValue: false,
    }),
  },
  async run({ auth, propsValue }) {
    const { apiKey, baseUrl } = auth.props;
    const { agentId, message, conversationId, includeRetrievalContent } = propsValue;

    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    // When a conversation ID is supplied, only the existing conversation is referenced and an
    // empty message array is sent; otherwise the message becomes the first user turn.
    const messages: Array<{ role: 'user' | 'system' | 'assistant' | 'knowledge'; content: string }> =
      conversationId ? [] : [{ role: 'user' as const, content: message }];

    const response = await client.agents.query.create(agentId, {
      messages: messages,
      conversation_id: conversationId,
      include_retrieval_content_text: includeRetrievalContent,
    });

    return {
      conversation_id: response.conversation_id,
      message: response.message,
      retrieval_contents: response.retrieval_contents,
      attributions: response.attributions,
      groundedness_scores: response.groundedness_scores,
      message_id: response.message_id,
    };
  },
});
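Taken together with Create Agent, this action wraps two contextual-client calls already shown above. A minimal sketch of chaining them directly, with a placeholder agent name and message, looks like:

import { ContextualAI } from 'contextual-client';

// Create an agent, then send it a first message - the same two calls the
// create_agent and query_agent actions wrap.
async function createAndQueryAgent(apiKey: string) {
  const client = new ContextualAI({ apiKey, baseURL: 'https://api.contextual.ai/v1' });

  const agent = await client.agents.create({ name: 'Support Assistant' });

  const reply = await client.agents.query.create(agent.id, {
    messages: [{ role: 'user', content: 'What documents do you have access to?' }],
  });

  return { agentId: agent.id, answer: reply.message };
}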
@@ -0,0 +1,72 @@
import { createTrigger, TriggerStrategy, AppConnectionValueForAuthProperty } from '@activepieces/pieces-framework';
import { DedupeStrategy, Polling, pollingHelper } from '@activepieces/pieces-common';
import { contextualAiAuth } from '../../index';
import { ContextualAI } from 'contextual-client';
import type { Agent } from 'contextual-client/resources/agents';

const polling: Polling<AppConnectionValueForAuthProperty<typeof contextualAiAuth>, Record<string, never>> = {
  strategy: DedupeStrategy.TIMEBASED,
  items: async ({ auth, lastFetchEpochMS }) => {
    const { apiKey, baseUrl } = auth.props;
    const client = new ContextualAI({
      apiKey: apiKey,
      baseURL: baseUrl || 'https://api.contextual.ai/v1',
    });

    const allAgents: Agent[] = [];
    for await (const agent of client.agents.list()) {
      allAgents.push(agent);
    }

    // No per-agent time filtering is applied here: every listed agent passes through,
    // and deduplication is left to the TIMEBASED polling strategy above.
    const newAgents = lastFetchEpochMS
      ? allAgents.filter(agent => {
          return true;
        })
      : allAgents;

    const items = newAgents.map((agent: Agent) => ({
      epochMilliSeconds: Date.now(),
      data: {
        id: agent.id,
        name: agent.name,
        description: agent.description,
      },
    }));

    return items;
  },
};

export const newAgentTrigger = createTrigger({
  auth: contextualAiAuth,
  name: 'new_agent',
  displayName: 'New Agent',
  description: 'Triggers when a new Contextual AI agent is created',
  props: {},
  type: TriggerStrategy.POLLING,
  sampleData: {
    id: 'agent_123',
    name: 'Sample Agent',
    description: 'A sample agent for testing',
  },
  async onEnable(context) {
    await pollingHelper.onEnable(polling, {
      auth: context.auth,
      store: context.store,
      propsValue: context.propsValue,
    });
  },
  async onDisable(context) {
    await pollingHelper.onDisable(polling, {
      auth: context.auth,
      store: context.store,
      propsValue: context.propsValue,
    });
  },
  async test(context) {
    return await pollingHelper.test(polling, context);
  },
  async run(context) {
    return await pollingHelper.poll(polling, context);
  },
});
@@ -0,0 +1,20 @@
{
  "extends": "../../../../tsconfig.base.json",
  "compilerOptions": {
    "module": "commonjs",
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "importHelpers": true,
    "noImplicitOverride": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "noPropertyAccessFromIndexSignature": true
  },
  "files": [],
  "include": [],
  "references": [
    {
      "path": "./tsconfig.lib.json"
    }
  ]
}
@@ -0,0 +1,9 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "outDir": "../../../../dist/out-tsc",
    "declaration": true,
    "types": ["node"]
  },
  "include": ["src/**/*.ts"]
}