Add Activepieces integration for workflow automation

- Add Activepieces fork with SmoothSchedule custom piece
- Create integrations app with Activepieces service layer
- Add embed token endpoint for iframe integration
- Create Automations page with embedded workflow builder
- Add sidebar visibility fix for embed mode
- Add list inactive customers endpoint to Public API
- Include SmoothSchedule triggers: event created/updated/cancelled
- Include SmoothSchedule actions: create/update/cancel events, list resources/services/customers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
poduck
2025-12-18 22:59:37 -05:00
parent 9848268d34
commit 3aa7199503
16292 changed files with 1284892 additions and 4708 deletions

View File

@@ -0,0 +1,33 @@
{
"extends": [
"../../../../.eslintrc.base.json"
],
"ignorePatterns": [
"!**/*"
],
"overrides": [
{
"files": [
"*.ts",
"*.tsx",
"*.js",
"*.jsx"
],
"rules": {}
},
{
"files": [
"*.ts",
"*.tsx"
],
"rules": {}
},
{
"files": [
"*.js",
"*.jsx"
],
"rules": {}
}
]
}

View File

@@ -0,0 +1,7 @@
# pieces-exa
This library was generated with [Nx](https://nx.dev).
## Building
Run `nx build pieces-exa` to build the library.

View File

@@ -0,0 +1,4 @@
{
"name": "@activepieces/piece-exa",
"version": "0.0.6"
}

View File

@@ -0,0 +1,60 @@
{
"name": "pieces-exa",
"$schema": "../../../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "packages/pieces/community/exa/src",
"projectType": "library",
"release": {
"version": {
"currentVersionResolver": "git-tag",
"preserveLocalDependencyProtocols": false,
"manifestRootsToUpdate": [
"dist/{projectRoot}"
]
}
},
"tags": [],
"targets": {
"build": {
"executor": "@nx/js:tsc",
"outputs": [
"{options.outputPath}"
],
"options": {
"outputPath": "dist/packages/pieces/community/exa",
"tsConfig": "packages/pieces/community/exa/tsconfig.lib.json",
"packageJson": "packages/pieces/community/exa/package.json",
"main": "packages/pieces/community/exa/src/index.ts",
"assets": [
"packages/pieces/community/exa/*.md"
],
"buildableProjectDepsInPackageJsonType": "dependencies",
"updateBuildableProjectDepsInPackageJson": true
},
"dependsOn": [
"^build",
"prebuild"
]
},
"nx-release-publish": {
"options": {
"packageRoot": "dist/{projectRoot}"
}
},
"lint": {
"executor": "@nx/eslint:lint",
"outputs": [
"{options.outputFile}"
]
},
"prebuild": {
"executor": "nx:run-commands",
"options": {
"cwd": "packages/pieces/community/exa",
"command": "bun install --no-save --silent"
},
"dependsOn": [
"^build"
]
}
}
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "KI-gestützte Suche und Inhaltsextraktion aus dem Internet.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Erhalten Sie Ihre API-Taste über [Dashboard-Einstellung](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Inhalte abrufen",
"Ask AI": "KI fragen",
"Perform Search": "Suche durchführen",
"Find Similar Links": "Ähnliche Links finden",
"Custom API Call": "Eigener API-Aufruf",
"Retrieve clean HTML content from specified URLs.": "Abrufen von reinen HTML-Inhalten von angegebenen URLs.",
"Provides direct answers to queries by summarizing results.": "Bietet direkte Antworten auf Anfragen durch Zusammenfassung der Ergebnisse.",
"Search the web using semantic or keyword-based search.": "Suchen Sie im Web mit semantischer oder Keyword-basierter Suche.",
"Find pages similar to a given URL.": "Suchen Sie Seiten ähnlich einer bestimmten URL.",
"Make a custom API call to a specific endpoint": "Einen benutzerdefinierten API-Aufruf an einen bestimmten Endpunkt machen",
"URLs": "URLs",
"Return Full Text": "Volltext zurückgeben",
"Livecrawl Option": "Livecrawl-Option",
"Livecrawl Timeout (ms)": "Livecrawl Timeout (ms)",
"Number of Subpages": "Anzahl der Unterseiten",
"Subpage Target": "Subpage Target",
"Query": "Abfrage",
"Include Text Content": "Textinhalt einfügen",
"Model": "Modell",
"Search Type": "Suchtyp",
"Category": "Kategorie",
"Number of Results": "Anzahl der Ergebnisse",
"Include Domains": "Domains einschließen",
"Exclude Domains": "Domains ausschließen",
"Start Crawl Date": "Crawl-Datum starten",
"End Crawl Date": "Crawl-Datum beenden",
"Start Published Date": "Veröffentlichungsdatum starten",
"End Published Date": "Veröffentlichungsdatum beenden",
"Include Text": "Text einfügen",
"Exclude Text": "Text ausschließen",
"URL": "URL",
"Start Crawl Date (ISO)": "Crawl-Datum (ISO) starten",
"End Crawl Date (ISO)": "Crawl-Datum beenden (ISO)",
"Start Published Date (ISO)": "Veröffentlichungsdatum (ISO) starten",
"End Published Date (ISO)": "Veröffentlichungsdatum beenden (ISO)",
"Method": "Methode",
"Headers": "Kopfzeilen",
"Query Parameters": "Abfrageparameter",
"Body": "Körper",
"Response is Binary ?": "Antwort ist binär?",
"No Error on Failure": "Kein Fehler bei Fehler",
"Timeout (in seconds)": "Timeout (in Sekunden)",
"Array of URLs to crawl": "Anordnung der zu crawlenden URLs",
"If true, returns full page text. If false, disables text return.": "Wenn aktiviert, gibt Vollseitentext zurück. Falls falsch, deaktiviert Text zurück.",
"Options for livecrawling pages.": "Optionen für Livekrawling-Seiten.",
"Timeout for livecrawling in milliseconds.": "Timeout für Livekrawling in Millisekunden.",
"Number of subpages to crawl.": "Anzahl der zu crawlenden Unterseiten.",
"Keyword(s) to find specific subpages.": "Keyword(s) um bestimmte Unterseiten zu finden.",
"Ask a question to get summarized answers from the web.": "Stellen Sie eine Frage, um zusammengefasste Antworten aus dem Web zu erhalten.",
"If true, includes full text content from the search results": "Wenn aktiviert, enthält Volltextinhalt aus den Suchergebnissen",
"Choose the Exa model to use for the answer.": "Wählen Sie das Exa-Modell für die Antwort aus.",
"Search query to find related articles and data.": "Suche nach verwandten Artikeln und Daten.",
"Type of search to perform.": "Art der durchzuführenden Suche.",
"Category of data to focus the search on.": "Kategorie der Daten, auf die die Suche fokussiert wird.",
"Number of results to return (max 100).": "Anzahl der zurückzugebenden Ergebnisse (max 100).",
"Limit results to only these domains.": "Limitieren Sie die Ergebnisse nur auf diese Domains.",
"Exclude results from these domains.": "Ergebnisse aus diesen Domains ausschließen.",
"Only include results crawled after this ISO date.": "Nur Ergebnisse enthalten, die nach diesem ISO-Datum durchsucht wurden.",
"Only include results crawled before this ISO date.": "Nur Ergebnisse enthalten, die vor diesem ISO-Datum durchsucht wurden.",
"Only include results published after this ISO date.": "Fügen Sie nur Ergebnisse hinzu, die nach diesem ISO-Datum veröffentlicht wurden.",
"Only include results published before this ISO date.": "Nur Ergebnisse enthalten, die vor diesem ISO-Datum veröffentlicht wurden.",
"Strings that must be present in the text of results.": "Zeichenketten, die im Ergebnistext enthalten sein müssen.",
"Strings that must not be present in the text of results.": "Zeichenketten, die nicht im Ergebnistext enthalten sein dürfen.",
"Reference URL to find semantically similar links.": "Referenz-URL um semantisch ähnliche Links zu finden.",
"List of domains to include in results.": "Liste der Domänen, die in die Ergebnisse aufgenommen werden sollen.",
"List of domains to exclude from results.": "Liste der Domänen, die von den Ergebnissen ausgeschlossen werden.",
"Include links crawled after this date (ISO format).": "Links einbinden, die nach diesem Datum durchsucht werden (ISO-Format).",
"Include links crawled before this date (ISO format).": "Links einbinden, die vor diesem Datum durchsucht werden (ISO-Format).",
"Only include links published after this date (ISO format).": "Links enthalten, die nach diesem Datum veröffentlicht wurden (ISO-Format).",
"Only include links published before this date (ISO format).": "Links enthalten, die vor diesem Datum veröffentlicht wurden (ISO-Format).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Zeichenketten, die im Text der Webseite enthalten sein müssen (max. 1 Zeichenkette von bis zu 5 Wörter).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Zeichenketten, die nicht im Text der Webseite enthalten sein dürfen (max. 1 Zeichenkette von bis zu 5 Wörter).",
"Authorization headers are injected automatically from your connection.": "Autorisierungs-Header werden automatisch von Ihrer Verbindung injiziert.",
"Enable for files like PDFs, images, etc..": "Aktivieren für Dateien wie PDFs, Bilder, etc..",
"Never": "Nie",
"Fallback": "Fallback",
"Always": "Immer",
"Auto": "Auto",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neurral",
"Company": "Firma",
"Research Paper": "Forschungspapier",
"News": "Nachrichten",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Persönliche Seite",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Finanzbericht",
"GET": "ERHALTEN",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "LÖSCHEN",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "Búsqueda y extracción de contenido impulsada por la AIE de la web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtén tu clave API de [Configuración del panel](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Obtener contenido",
"Ask AI": "Preguntar IA",
"Perform Search": "Realizar búsqueda",
"Find Similar Links": "Buscar enlaces similares",
"Custom API Call": "Llamada API personalizada",
"Retrieve clean HTML content from specified URLs.": "Recuperar el contenido HTML limpio de las URL especificadas.",
"Provides direct answers to queries by summarizing results.": "Proporciona respuestas directas a las consultas al resumir los resultados.",
"Search the web using semantic or keyword-based search.": "Busque en la web mediante búsqueda semántica o basada en palabras clave.",
"Find pages similar to a given URL.": "Buscar páginas similares a una URL dada.",
"Make a custom API call to a specific endpoint": "Hacer una llamada API personalizada a un extremo específico",
"URLs": "URLs",
"Return Full Text": "Volver texto completo",
"Livecrawl Option": "Opción de Livecrawl",
"Livecrawl Timeout (ms)": "Timeout de Livecrawl (ms)",
"Number of Subpages": "Número de subpáginas",
"Subpage Target": "Subpage Target",
"Query": "Consulta",
"Include Text Content": "Incluye contenido de texto",
"Model": "Modelo",
"Search Type": "Tipo de búsqueda",
"Category": "Categoría",
"Number of Results": "Número de resultados",
"Include Domains": "Incluye Dominios",
"Exclude Domains": "Excluir dominios",
"Start Crawl Date": "Fecha de inicio del rastreo",
"End Crawl Date": "Fecha de rastreo final",
"Start Published Date": "Fecha de inicio de publicación",
"End Published Date": "Fecha de fin de publicación",
"Include Text": "Incluye texto",
"Exclude Text": "Excluir texto",
"URL": "URL",
"Start Crawl Date (ISO)": "Iniciar fecha de rastreo (ISP)",
"End Crawl Date (ISO)": "Fin de fecha de rastreo (ISP)",
"Start Published Date (ISO)": "Fecha de inicio (ISP)",
"End Published Date (ISO)": "Fin de fecha de publicación (ISP)",
"Method": "Método",
"Headers": "Encabezados",
"Query Parameters": "Parámetros de consulta",
"Body": "Cuerpo",
"Response is Binary ?": "¿Respuesta es binaria?",
"No Error on Failure": "No hay ningún error en fallo",
"Timeout (in seconds)": "Tiempo de espera (en segundos)",
"Array of URLs to crawl": "Matriz de URLs a rascar",
"If true, returns full page text. If false, disables text return.": "Si es verdadero, devuelve el texto de página completa. Si es falso, deshabilita el texto regresar.",
"Options for livecrawling pages.": "Opciones para las páginas de livecrawling.",
"Timeout for livecrawling in milliseconds.": "Tiempo de espera para livecrawling en milisegundos.",
"Number of subpages to crawl.": "Número de subpáginas a rascar.",
"Keyword(s) to find specific subpages.": "Palabra(s) clave para encontrar subpáginas específicas.",
"Ask a question to get summarized answers from the web.": "Haz una pregunta para obtener respuestas resumidas de la web.",
"If true, includes full text content from the search results": "Si es verdadero, incluye contenido de texto completo de los resultados de búsqueda",
"Choose the Exa model to use for the answer.": "Elija el modelo Exa a usar para la respuesta.",
"Search query to find related articles and data.": "Buscar para encontrar artículos y datos relacionados.",
"Type of search to perform.": "Tipo de búsqueda a realizar.",
"Category of data to focus the search on.": "Categoría de datos en la que enfocar la búsqueda.",
"Number of results to return (max 100).": "Número de resultados a devolver (máx. 100).",
"Limit results to only these domains.": "Limita los resultados a estos dominios.",
"Exclude results from these domains.": "Excluir los resultados de estos dominios.",
"Only include results crawled after this ISO date.": "Incluir sólo los resultados después de esta fecha ISO.",
"Only include results crawled before this ISO date.": "Sólo incluya los resultados acumulados antes de esta fecha ISO.",
"Only include results published after this ISO date.": "Incluir sólo los resultados publicados después de esta fecha ISO.",
"Only include results published before this ISO date.": "Sólo incluya los resultados publicados antes de esta fecha ISO.",
"Strings that must be present in the text of results.": "Cadenas que deben estar presentes en el texto de los resultados.",
"Strings that must not be present in the text of results.": "Cadenas que no deben estar presentes en el texto de los resultados.",
"Reference URL to find semantically similar links.": "URL de referencia para encontrar enlaces semánticamente similares.",
"List of domains to include in results.": "Lista de dominios a incluir en los resultados.",
"List of domains to exclude from results.": "Lista de dominios a excluir de los resultados.",
"Include links crawled after this date (ISO format).": "Incluye enlaces cortados después de esta fecha (formato ISO).",
"Include links crawled before this date (ISO format).": "Incluye enlaces cortados antes de esta fecha (formato ISO).",
"Only include links published after this date (ISO format).": "Incluir sólo enlaces publicados después de esta fecha (formato ISO).",
"Only include links published before this date (ISO format).": "Incluir sólo enlaces publicados antes de esta fecha (formato ISO).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Cadenas que deben estar presentes en el texto de la página web (máximo 1 cadena de hasta 5 palabras).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Cadenas que no deben estar presentes en el texto de la página web (máximo 1 cadena de hasta 5 palabras).",
"Authorization headers are injected automatically from your connection.": "Las cabeceras de autorización se inyectan automáticamente desde tu conexión.",
"Enable for files like PDFs, images, etc..": "Activar para archivos como PDFs, imágenes, etc.",
"Never": "Nunca",
"Fallback": "Fallback",
"Always": "Siempre",
"Auto": "Auto",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neural",
"Company": "Empresa",
"Research Paper": "Papel de investigación",
"News": "Noticias",
"PDF": "DF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Sitio Personal",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Informe financiero",
"GET": "RECOGER",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "BORRAR",
"HEAD": "LIMPIO"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "La recherche et l'extraction de contenu à partir du Web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtenez votre clé API dans [Dashboard Setting](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Obtenir du contenu",
"Ask AI": "Demander à l'IA",
"Perform Search": "Effectuer la recherche",
"Find Similar Links": "Trouver des liens similaires",
"Custom API Call": "Appel API personnalisé",
"Retrieve clean HTML content from specified URLs.": "Récupérer le contenu HTML propre à partir d'URL spécifiées.",
"Provides direct answers to queries by summarizing results.": "Fournit des réponses directes aux requêtes en résumant les résultats.",
"Search the web using semantic or keyword-based search.": "Recherchez sur le Web en utilisant une recherche sémantique ou basée sur des mots clés.",
"Find pages similar to a given URL.": "Trouver des pages similaires à une URL donnée.",
"Make a custom API call to a specific endpoint": "Passez un appel API personnalisé à un point de terminaison spécifique",
"URLs": "URLs",
"Return Full Text": "Retourner le texte complet",
"Livecrawl Option": "Option Livecrawl",
"Livecrawl Timeout (ms)": "Délai d'exploration en direct (ms)",
"Number of Subpages": "Nombre de sous-pages",
"Subpage Target": "Subpage Target",
"Query": "Requête",
"Include Text Content": "Inclure le contenu du texte",
"Model": "Modélisation",
"Search Type": "Type de recherche",
"Category": "Catégorie",
"Number of Results": "Nombre de résultats",
"Include Domains": "Inclure les domaines",
"Exclude Domains": "Exclure les domaines",
"Start Crawl Date": "Date de début de la rampe",
"End Crawl Date": "Date de fin de la rampe",
"Start Published Date": "Date de début de publication",
"End Published Date": "Date de fin de publication",
"Include Text": "Inclure le texte",
"Exclude Text": "Exclure le texte",
"URL": "URL",
"Start Crawl Date (ISO)": "Date de début du ramassage (ISO)",
"End Crawl Date (ISO)": "Date de fin du ramassage (ISO)",
"Start Published Date (ISO)": "Date de début de publication (ISO)",
"End Published Date (ISO)": "Date de fin de publication (ISO)",
"Method": "Méthode",
"Headers": "En-têtes",
"Query Parameters": "Paramètres de requête",
"Body": "Corps",
"Response is Binary ?": "La réponse est Binaire ?",
"No Error on Failure": "Aucune erreur en cas d'échec",
"Timeout (in seconds)": "Délai d'attente (en secondes)",
"Array of URLs to crawl": "Tableau d'URLs à explorer",
"If true, returns full page text. If false, disables text return.": "Si vrai, renvoie le texte de la page complète. Si faux, désactive le retour du texte.",
"Options for livecrawling pages.": "Options pour les pages d'exploration en direct.",
"Timeout for livecrawling in milliseconds.": "Délai d'attente pour le livecrawling en millisecondes.",
"Number of subpages to crawl.": "Nombre de sous-pages à explorer.",
"Keyword(s) to find specific subpages.": "Mot(s) clé(s) pour trouver des sous-pages spécifiques.",
"Ask a question to get summarized answers from the web.": "Posez une question pour obtenir des réponses sommaires du Web.",
"If true, includes full text content from the search results": "Si vrai, inclure le contenu en texte intégral des résultats de recherche",
"Choose the Exa model to use for the answer.": "Choisissez le modèle Exa à utiliser pour la réponse.",
"Search query to find related articles and data.": "Requête de recherche pour trouver des articles et des données connexes.",
"Type of search to perform.": "Type de recherche à effectuer.",
"Category of data to focus the search on.": "Catégorie de données sur laquelle se concentrer la recherche.",
"Number of results to return (max 100).": "Nombre de résultats à retourner (max 100).",
"Limit results to only these domains.": "Limiter les résultats à ces domaines uniquement.",
"Exclude results from these domains.": "Exclure les résultats de ces domaines.",
"Only include results crawled after this ISO date.": "N'inclure que les résultats explorés après cette date ISO.",
"Only include results crawled before this ISO date.": "N'inclure que les résultats explorés avant cette date ISO.",
"Only include results published after this ISO date.": "N'inclure que les résultats publiés après cette date ISO.",
"Only include results published before this ISO date.": "Inclure uniquement les résultats publiés avant cette date ISO.",
"Strings that must be present in the text of results.": "Chaînes qui doivent être présentes dans le texte des résultats.",
"Strings that must not be present in the text of results.": "Chaînes qui ne doivent pas être présentes dans le texte des résultats.",
"Reference URL to find semantically similar links.": "URL de référence pour trouver des liens sémantiquement similaires.",
"List of domains to include in results.": "Liste des domaines à inclure dans les résultats.",
"List of domains to exclude from results.": "Liste des domaines à exclure des résultats.",
"Include links crawled after this date (ISO format).": "Inclure les liens explorés après cette date (format ISO).",
"Include links crawled before this date (ISO format).": "Inclure les liens explorés avant cette date (format ISO).",
"Only include links published after this date (ISO format).": "N'inclure que les liens publiés après cette date (format ISO).",
"Only include links published before this date (ISO format).": "Inclure uniquement les liens publiés avant cette date (format ISO).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Chaînes qui doivent être présentes dans le texte de la page Web (max 1 chaîne de caractères pouvant atteindre 5 mots).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Chaînes qui ne doivent pas être présentes dans le texte de la page Web (max 1 chaîne de caractères pouvant atteindre 5 mots).",
"Authorization headers are injected automatically from your connection.": "Les en-têtes d'autorisation sont injectés automatiquement à partir de votre connexion.",
"Enable for files like PDFs, images, etc..": "Activer pour les fichiers comme les PDFs, les images, etc.",
"Never": "Never",
"Fallback": "Fallback",
"Always": "Toujours",
"Auto": "Automatique",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neurale",
"Company": "Entreprise",
"Research Paper": "Papier de recherche",
"News": "Actualités",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweeter",
"Personal Site": "Site personnel",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Rapport financier",
"GET": "OBTENIR",
"POST": "POSTER",
"PATCH": "PATCH",
"PUT": "EFFACER",
"DELETE": "SUPPRIMER",
"HEAD": "TÊTE"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "AIを駆使した検索とWebからのコンテンツ抽出。",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "[ダッシュボード設定](https://dashboard.exa.ai/api-keys)からAPIキーを取得します。",
"Get Contents": "内容を取得",
"Ask AI": "AIに聞く",
"Perform Search": "検索を実行",
"Find Similar Links": "類似のリンクを検索",
"Custom API Call": "カスタムAPI通話",
"Retrieve clean HTML content from specified URLs.": "指定された URL からクリーンな HTML コンテンツを取得します。",
"Provides direct answers to queries by summarizing results.": "結果をまとめることにより、クエリへの直接の回答を提供します。",
"Search the web using semantic or keyword-based search.": "セマンティックまたはキーワードベースの検索を使用してWebを検索します。",
"Find pages similar to a given URL.": "特定の URL に似たページを検索します。",
"Make a custom API call to a specific endpoint": "特定のエンドポイントへのカスタム API コールを実行します。",
"URLs": "URL",
"Return Full Text": "フルテキストを返す",
"Livecrawl Option": "Livecrawl Option",
"Livecrawl Timeout (ms)": "ライブクロールタイムアウト (ms)",
"Number of Subpages": "サブページ数",
"Subpage Target": "Subpage Target",
"Query": "クエリ",
"Include Text Content": "テキストコンテンツを含める",
"Model": "モデル",
"Search Type": "検索タイプ",
"Category": "カテゴリ",
"Number of Results": "結果の数",
"Include Domains": "ドメインを含める",
"Exclude Domains": "除外ドメイン",
"Start Crawl Date": "クロール開始日",
"End Crawl Date": "クロール終了日",
"Start Published Date": "公開開始日",
"End Published Date": "公開終了日",
"Include Text": "テキストを含める",
"Exclude Text": "テキストを除外",
"URL": "URL",
"Start Crawl Date (ISO)": "クロール開始日 (ISO)",
"End Crawl Date (ISO)": "クロール終了日時 (ISO)",
"Start Published Date (ISO)": "公開日(ISO)を開始",
"End Published Date (ISO)": "公開日時 (ISO)",
"Method": "方法",
"Headers": "ヘッダー",
"Query Parameters": "クエリパラメータ",
"Body": "本文",
"Response is Binary ?": "応答はバイナリですか?",
"No Error on Failure": "失敗時にエラーはありません",
"Timeout (in seconds)": "タイムアウト(秒)",
"Array of URLs to crawl": "クロールするURLの配列",
"If true, returns full page text. If false, disables text return.": "true の場合、ページ全体のテキストを返します。false の場合、テキストを返すことはできません。",
"Options for livecrawling pages.": "livecrawling ページのオプション。",
"Timeout for livecrawling in milliseconds.": "livecrawling のタイムアウト(ミリ秒単位)",
"Number of subpages to crawl.": "サブページをクロールする数です。",
"Keyword(s) to find specific subpages.": "特定のサブページを見つけるためのキーワード",
"Ask a question to get summarized answers from the web.": "ウェブから要約された回答を得るために質問をしなさい。",
"If true, includes full text content from the search results": "true の場合、検索結果の全文コンテンツが含まれています",
"Choose the Exa model to use for the answer.": "回答に使用するExaモデルを選択します。",
"Search query to find related articles and data.": "関連記事とデータを検索するクエリを検索します。",
"Type of search to perform.": "実行する検索タイプ.",
"Category of data to focus the search on.": "検索に集中するデータのカテゴリです。",
"Number of results to return (max 100).": "返す結果の数最大100",
"Limit results to only these domains.": "これらのドメインのみに結果を制限します。",
"Exclude results from these domains.": "これらのドメインから結果を除外します",
"Only include results crawled after this ISO date.": "このISO日付以降にクロールされた結果のみが含まれます。",
"Only include results crawled before this ISO date.": "このISO日付以前にクロールされた結果のみが含まれます。",
"Only include results published after this ISO date.": "このISO日付以降に公開された結果のみが含まれます。",
"Only include results published before this ISO date.": "このISO日付以前に公開された結果のみが含まれます。",
"Strings that must be present in the text of results.": "結果のテキストに存在しなければならない文字列。",
"Strings that must not be present in the text of results.": "結果のテキストには存在しない文字列。",
"Reference URL to find semantically similar links.": "参照URLは意味的に類似したリンクを見つけることができます。",
"List of domains to include in results.": "結果に含めるドメインのリストです。",
"List of domains to exclude from results.": "結果から除外するドメインのリストです。",
"Include links crawled after this date (ISO format).": "この日付の後にクロールされたリンクを含める (ISO 形式)。",
"Include links crawled before this date (ISO format).": "この日付の前にクロールされたリンクを含める (ISO 形式)。",
"Only include links published after this date (ISO format).": "この日付以降に公開されたリンクのみが含まれます(ISO形式)。",
"Only include links published before this date (ISO format).": "この日付(ISO形式)以前に公開されたリンクのみを含みます。",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "ウェブページのテキストに存在しなければならない文字列 (最大 5 語までの 1 文字まで)。",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "ウェブページのテキストに存在してはならない文字列 (最大 5 語までの最大1文字)",
"Authorization headers are injected automatically from your connection.": "認証ヘッダは接続から自動的に注入されます。",
"Enable for files like PDFs, images, etc..": "PDF、画像などのファイルを有効にします。",
"Never": "一切なし",
"Fallback": "Fallback",
"Always": "常に表示",
"Auto": "自動",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "ニューラル",
"Company": "会社名",
"Research Paper": "研究用紙",
"News": "ニュース",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "ツイート",
"Personal Site": "個人サイト",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "財務報告",
"GET": "取得",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "削除",
"HEAD": "頭"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "AI-aangedreven zoek- en inhoudsextractie van het web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Verkrijg je API-sleutel van [Dashboard Instellingen](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Inhoud ophalen",
"Ask AI": "Vraag het AI",
"Perform Search": "Zoekopdracht uitvoeren",
"Find Similar Links": "Vind vergelijkbare links",
"Custom API Call": "Custom API Call",
"Retrieve clean HTML content from specified URLs.": "Haal schone HTML-inhoud op van de opgegeven URL's.",
"Provides direct answers to queries by summarizing results.": "Biedt directe antwoorden aan query's door resultaten samen te vatten.",
"Search the web using semantic or keyword-based search.": "Doorzoek het web met behulp van semantische of trefwoorden gebaseerde zoekopdracht.",
"Find pages similar to a given URL.": "Zoek pagina's vergelijkbaar met een opgegeven URL.",
"Make a custom API call to a specific endpoint": "Maak een aangepaste API call naar een specifiek eindpunt",
"URLs": "URL's",
"Return Full Text": "Retourneer volledige tekst",
"Livecrawl Option": "Livecrawl optie",
"Livecrawl Timeout (ms)": "Livecrawl time-out (ms)",
"Number of Subpages": "Aantal subpagina's",
"Subpage Target": "Subpage Target",
"Query": "Zoekopdracht",
"Include Text Content": "Tekstinhoud toevoegen",
"Model": "Model",
"Search Type": "Type zoeken",
"Category": "categorie",
"Number of Results": "Aantal resultaten",
"Include Domains": "Domeinen opnemen",
"Exclude Domains": "Domeinen uitsluiten",
"Start Crawl Date": "Start Crawl datum",
"End Crawl Date": "Einde Crawl Datum",
"Start Published Date": "Startdatum van publicatie",
"End Published Date": "Einde publicatiedatum",
"Include Text": "Tekst toevoegen",
"Exclude Text": "Tekst uitsluiten",
"URL": "URL",
"Start Crawl Date (ISO)": "Start Crawl Datum (ISO)",
"End Crawl Date (ISO)": "Einde Crawl Datum (ISO)",
"Start Published Date (ISO)": "Start publicatiedatum (ISO)",
"End Published Date (ISO)": "Einde publicatiedatum (ISO)",
"Method": "Methode",
"Headers": "Kopteksten",
"Query Parameters": "Query parameters",
"Body": "Lichaam",
"Response is Binary ?": "Antwoord is binair?",
"No Error on Failure": "Geen fout bij fout",
"Timeout (in seconds)": "Time-out (in seconden)",
"Array of URLs to crawl": "Array of URLs to crawl",
"If true, returns full page text. If false, disables text return.": "Indien waar, retourneert de volledige pagina-tekst. Als onjuist, schakelt u de tekstretour uit.",
"Options for livecrawling pages.": "Opties voor livecrawling pagina's.",
"Timeout for livecrawling in milliseconds.": "Time-out voor livecrawling in milliseconden.",
"Number of subpages to crawl.": "Aantal subpagina's om te crawen.",
"Keyword(s) to find specific subpages.": "Trefwoord(en) om specifieke subpagina's te vinden.",
"Ask a question to get summarized answers from the web.": "Stel een vraag om beknopte antwoorden van het web.",
"If true, includes full text content from the search results": "Indien waar, bevat volledige tekst inhoud van de zoekresultaten",
"Choose the Exa model to use for the answer.": "Kies het Exa model om te gebruiken voor het antwoord.",
"Search query to find related articles and data.": "Zoekopdracht zoeken om gerelateerde artikelen en gegevens te vinden.",
"Type of search to perform.": "Type zoekopdracht om uit te voeren.",
"Category of data to focus the search on.": "Gegevenscategorie waarop de zoekopdracht zich moet concentreren.",
"Number of results to return (max 100).": "Aantal resultaten om terug te keren (max 100).",
"Limit results to only these domains.": "Beperk resultaten tot alleen deze domeinen.",
"Exclude results from these domains.": "Uitsluiten van resultaten van deze domeinen.",
"Only include results crawled after this ISO date.": "Inclusief alleen resultaten gecrawled na deze ISO-datum.",
"Only include results crawled before this ISO date.": "Inclusief alleen gecrawled resultaten voor deze ISO datum.",
"Only include results published after this ISO date.": "Inclusief alleen resultaten gepubliceerd na deze ISO-datum.",
"Only include results published before this ISO date.": "Bevat alleen resultaten gepubliceerd voor deze ISO-datum.",
"Strings that must be present in the text of results.": "Tekenreeksen die in de resultaattekst aanwezig moeten zijn.",
"Strings that must not be present in the text of results.": "Tekenreeksen die niet in de resultaattekst aanwezig mogen zijn.",
"Reference URL to find semantically similar links.": "Referentie URL om semantisch vergelijkbare links te vinden.",
"List of domains to include in results.": "Lijst van domeinen om op te nemen in de resultaten.",
"List of domains to exclude from results.": "Lijst van domeinen om uit te sluiten van de resultaten.",
"Include links crawled after this date (ISO format).": "Links gecrawed na deze datum (ISO formaat) ook opnemen.",
"Include links crawled before this date (ISO format).": "Links gecrawt voor deze datum (ISO formaat) ook opnemen.",
"Only include links published after this date (ISO format).": "Voeg alleen links toe die gepubliceerd zijn na deze datum (ISO formaat).",
"Only include links published before this date (ISO format).": "Voeg alleen links toe die zijn gepubliceerd voor deze datum (ISO formaat).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Tekenreeksen die moeten voorkomen in de tekst van de webpagina (max 1 tekenreeks van maximaal 5 woorden).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Tekenreeksen die niet aanwezig moeten zijn in de tekst van de webpagina (max 1 tekenreeks van maximaal 5 woorden).",
"Authorization headers are injected automatically from your connection.": "Autorisatie headers worden automatisch geïnjecteerd vanuit uw verbinding.",
"Enable for files like PDFs, images, etc..": "Inschakelen voor bestanden zoals PDF's, afbeeldingen etc..",
"Never": "Nooit",
"Fallback": "Fallback",
"Always": "altijd",
"Auto": "Automatisch",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neuraal",
"Company": "Bedrijfsnaam",
"Research Paper": "Onderzoek Papier",
"News": "Nieuws",
"PDF": "PDF-bestand",
"GitHub": "GitHub",
"Tweet": "tweet",
"Personal Site": "Persoonlijke site",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Financieel verslag",
"GET": "KRIJG",
"POST": "POSTE",
"PATCH": "BEKIJK",
"PUT": "PUT",
"DELETE": "VERWIJDEREN",
"HEAD": "HOOFD"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "Pesquisa e extração de conteúdo da web com tecnologia de IA.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtenha sua chave de API de [Configuração do Painel](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Obter conteúdo",
"Ask AI": "Perguntar à IA",
"Perform Search": "Executar Pesquisa",
"Find Similar Links": "Localizar Links Similares",
"Custom API Call": "Chamada de API personalizada",
"Retrieve clean HTML content from specified URLs.": "Recuperar o conteúdo HTML limpo de URLs especificadas.",
"Provides direct answers to queries by summarizing results.": "Fornece respostas diretas às consultas, resumindo os resultados.",
"Search the web using semantic or keyword-based search.": "Pesquisar na web usando pesquisa semântica ou baseada em palavras-chave.",
"Find pages similar to a given URL.": "Localizar páginas similares a uma determinada URL.",
"Make a custom API call to a specific endpoint": "Faça uma chamada de API personalizada para um ponto de extremidade específico",
"URLs": "Links",
"Return Full Text": "Retornar texto completo",
"Livecrawl Option": "Opção Livecrawl",
"Livecrawl Timeout (ms)": "Tempo limite de Livecrawl (ms)",
"Number of Subpages": "Número de Subpáginas",
"Subpage Target": "Subpage Target",
"Query": "Requisição",
"Include Text Content": "Incluir Conteúdo do Texto",
"Model": "Modelo",
"Search Type": "Pesquisar Tipo",
"Category": "categoria",
"Number of Results": "Número de Resultados",
"Include Domains": "Incluir domínios",
"Exclude Domains": "Excluir domínios",
"Start Crawl Date": "Data de início do rastreamento",
"End Crawl Date": "Data de término do rastreamento",
"Start Published Date": "Iniciar Data de Publicação",
"End Published Date": "Encerrar data de publicação",
"Include Text": "Incluir texto",
"Exclude Text": "Excluir Texto",
"URL": "URL",
"Start Crawl Date (ISO)": "Data de início do rastreamento (ISO)",
"End Crawl Date (ISO)": "Data de término do rastreamento (ISO)",
"Start Published Date (ISO)": "Iniciar Data de Publicação (ISO)",
"End Published Date (ISO)": "Encerrar data de publicação (ISO)",
"Method": "Método",
"Headers": "Cabeçalhos",
"Query Parameters": "Parâmetros da consulta",
"Body": "Conteúdo",
"Response is Binary ?": "A resposta é binária ?",
"No Error on Failure": "Nenhum erro no Failure",
"Timeout (in seconds)": "Tempo limite (em segundos)",
"Array of URLs to crawl": "Array de URLs para crawl",
"If true, returns full page text. If false, disables text return.": "Se verdadeiro, retorna texto completo da página. Se falso, desativa a devolução de texto.",
"Options for livecrawling pages.": "Opções para páginas de livecrawing.",
"Timeout for livecrawling in milliseconds.": "Tempo limite para o livecrawling em milissegundos.",
"Number of subpages to crawl.": "Número de subpáginas para rastrear.",
"Keyword(s) to find specific subpages.": "Palavra-chave para encontrar subpáginas específicas.",
"Ask a question to get summarized answers from the web.": "Faça uma pergunta para obter respostas resumidas na web.",
"If true, includes full text content from the search results": "Se verdadeiro, inclui o conteúdo de texto completo dos resultados de pesquisa",
"Choose the Exa model to use for the answer.": "Escolha o modelo da Exa a ser usado para a resposta.",
"Search query to find related articles and data.": "Pesquisar consulta para encontrar artigos e dados relacionados.",
"Type of search to perform.": "Tipo de pesquisa a ser executada.",
"Category of data to focus the search on.": "Categoria de dados para focar a busca.",
"Number of results to return (max 100).": "Número de resultados a retornar (máx. 100).",
"Limit results to only these domains.": "Limitar resultados apenas para estes domínios.",
"Exclude results from these domains.": "Excluir resultados destes domínios.",
"Only include results crawled after this ISO date.": "Só incluir resultados analisados após esta data ISO.",
"Only include results crawled before this ISO date.": "Só incluir os resultados analisados antes desta data ISO.",
"Only include results published after this ISO date.": "Só incluir resultados publicados após esta data ISO.",
"Only include results published before this ISO date.": "Só incluir resultados publicados antes desta data ISO.",
"Strings that must be present in the text of results.": "Frases que devem estar presentes no texto dos resultados.",
"Strings that must not be present in the text of results.": "Frases que não devem estar presentes no texto dos resultados.",
"Reference URL to find semantically similar links.": "URL de referência para encontrar links semânticos semelhantes.",
"List of domains to include in results.": "Lista de domínios a incluir nos resultados.",
"List of domains to exclude from results.": "Lista de domínios para excluir dos resultados.",
"Include links crawled after this date (ISO format).": "Incluir links arrastados após esta data (formato ISO).",
"Include links crawled before this date (ISO format).": "Incluir links arrastados antes desta data (formato ISO).",
"Only include links published after this date (ISO format).": "Inclua somente links publicados após esta data (formato ISO).",
"Only include links published before this date (ISO format).": "Inclua somente links publicados antes desta data (formato ISO).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Frases que devem estar presentes no texto da página da Web (máximo 1 sequência de caracteres de até 5 palavras).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Frases que não devem estar presentes no texto da página da Web (número máximo 1 de sequência de caracteres de até 5 palavras).",
"Authorization headers are injected automatically from your connection.": "Os cabeçalhos de autorização são inseridos automaticamente a partir da sua conexão.",
"Enable for files like PDFs, images, etc..": "Habilitar para arquivos como PDFs, imagens, etc..",
"Never": "Nunca",
"Fallback": "Fallback",
"Always": "Sempre",
"Auto": "Automático",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Palavra-chave",
"Neural": "Neural",
"Company": "Empresas",
"Research Paper": "Artigo de pesquisa",
"News": "Notícias",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Site pessoal",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Relatório Financeiro",
"GET": "GET",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "DELETE",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,99 @@
{
"Exa": "Exa",
"AI-powered search and content extraction from the web.": "Поиск и извлечение контента с помощью AI из Интернета.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Получите ключ API из [Настройки панели](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Получить содержимое",
"Ask AI": "Ask AI",
"Perform Search": "Выполнить поиск",
"Find Similar Links": "Найти похожие ссылки",
"Custom API Call": "Пользовательский вызов API",
"Retrieve clean HTML content from specified URLs.": "Получить чистый HTML-контент с указанных URL.",
"Provides direct answers to queries by summarizing results.": "Предоставляет прямые ответы на запросы, суммируя результаты.",
"Search the web using semantic or keyword-based search.": "Искать в Интернете с помощью семантического или ключевого поиска.",
"Find pages similar to a given URL.": "Найти страницы, похожие на заданный URL.",
"Make a custom API call to a specific endpoint": "Сделать пользовательский API вызов к определенной конечной точке",
"URLs": "Адреса",
"Return Full Text": "Вернуть полный текст",
"Livecrawl Option": "Опция Livecrawl",
"Livecrawl Timeout (ms)": "Тайм-аут Livecrawl (мс)",
"Number of Subpages": "Количество подстраниц",
"Subpage Target": "Subpage Target",
"Query": "Запрос",
"Include Text Content": "Включить текстовый контент",
"Model": "Модель",
"Search Type": "Тип поиска",
"Category": "Категория",
"Number of Results": "Количество результатов",
"Include Domains": "Включить домены",
"Exclude Domains": "Исключить домены",
"Start Crawl Date": "Дата начала сканирования",
"End Crawl Date": "Дата окончания сканирования",
"Start Published Date": "Дата начала публикации",
"End Published Date": "Дата окончания публикации",
"Include Text": "Включить текст",
"Exclude Text": "Исключить текст",
"URL": "URL",
"Start Crawl Date (ISO)": "Дата начала сканирования (ISO)",
"End Crawl Date (ISO)": "Дата окончания сканирования (ISO)",
"Start Published Date (ISO)": "Дата начала публикации (ISO)",
"End Published Date (ISO)": "Дата окончания публикации (ISO)",
"Method": "Метод",
"Headers": "Заголовки",
"Query Parameters": "Параметры запроса",
"Body": "Тело",
"No Error on Failure": "Нет ошибок при ошибке",
"Timeout (in seconds)": "Таймаут (в секундах)",
"Array of URLs to crawl": "Массив URL-адресов для сканирования",
"If true, returns full page text. If false, disables text return.": "Если установлено значение true, возвращает полный текст страницы. Если значение false, отключит возврат текста.",
"Options for livecrawling pages.": "Параметры живого сканирования (livecrawl) страниц.",
"Timeout for livecrawling in milliseconds.": "Тайм-аут живого сканирования (livecrawl) в миллисекундах.",
"Number of subpages to crawl.": "Количество подстраниц для сканирования.",
"Keyword(s) to find specific subpages.": "Ключевое слово (ключевые слова) для поиска определенных подстраниц.",
"Ask a question to get summarized answers from the web.": "Задайте вопрос для получения кратких ответов из Интернета.",
"If true, includes full text content from the search results": "Если установлено значение \"true\", включает в себя весь текст из результатов поиска",
"Choose the Exa model to use for the answer.": "Выберите модель Exa, которая будет использоваться для ответа.",
"Search query to find related articles and data.": "Поисковый запрос для поиска связанных статей и данных.",
"Type of search to perform.": "Тип поиска для выполнения.",
"Category of data to focus the search on.": "Категория данных для фокусировки поиска.",
"Number of results to return (max 100).": "Количество результатов для возврата (не более 100).",
"Limit results to only these domains.": "Ограничить результаты только этими доменами.",
"Exclude results from these domains.": "Исключить результаты из этих доменов.",
"Only include results crawled after this ISO date.": "Включать результаты сканирования только после этой даты ISO.",
"Only include results crawled before this ISO date.": "Включать только результаты сканирования до этой даты ISO.",
"Only include results published after this ISO date.": "Включать результаты только после этой даты ISO.",
"Only include results published before this ISO date.": "Включать результаты только до этой даты ISO.",
"Strings that must be present in the text of results.": "Строки, которые должны присутствовать в тексте результатов.",
"Strings that must not be present in the text of results.": "Строки, которые не должны присутствовать в тексте результатов.",
"Reference URL to find semantically similar links.": "Ссылка на URL, чтобы найти семантически похожие ссылки.",
"List of domains to include in results.": "Список доменов для включения в результаты.",
"List of domains to exclude from results.": "Список доменов для исключения из результатов.",
"Include links crawled after this date (ISO format).": "Включите ссылки, сканированные после этой даты (формат ISO).",
"Include links crawled before this date (ISO format).": "Включите ссылки, пересканированные до этой даты (формат ISO).",
"Only include links published after this date (ISO format).": "Включать только ссылки, опубликованные после этой даты (формат ISO).",
"Only include links published before this date (ISO format).": "Включать только ссылки, опубликованные до этой даты (формат ISO).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Строки, которые должны присутствовать в тексте веб-страницы (не более 1 строки до 5 слов).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Строки, которые не должны присутствовать в тексте веб-страницы (максимум 1 строка до 5 слов).",
"Authorization headers are injected automatically from your connection.": "Заголовки авторизации включаются автоматически из вашего соединения.",
"Never": "Никогда",
"Fallback": "Fallback",
"Always": "Всегда",
"Auto": "Авто",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Нейронный",
"Company": "Компания",
"Research Paper": "Научная статья",
"News": "Новости",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Твит",
"Personal Site": "Персональный сайт",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Финансовый отчет",
"GET": "GET",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "DELETE",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "AI-powered search and content extraction from the web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Get Contents",
"Ask AI": "Ask AI",
"Perform Search": "Perform Search",
"Find Similar Links": "Find Similar Links",
"Custom API Call": "Custom API Call",
"Retrieve clean HTML content from specified URLs.": "Retrieve clean HTML content from specified URLs.",
"Provides direct answers to queries by summarizing results.": "Provides direct answers to queries by summarizing results.",
"Search the web using semantic or keyword-based search.": "Search the web using semantic or keyword-based search.",
"Find pages similar to a given URL.": "Find pages similar to a given URL.",
"Make a custom API call to a specific endpoint": "Make a custom API call to a specific endpoint",
"URLs": "URLs",
"Return Full Text": "Return Full Text",
"Livecrawl Option": "Livecrawl Option",
"Livecrawl Timeout (ms)": "Livecrawl Timeout (ms)",
"Number of Subpages": "Number of Subpages",
"Subpage Target": "Subpage Target",
"Query": "Query",
"Include Text Content": "Include Text Content",
"Model": "Model",
"Search Type": "Search Type",
"Category": "Category",
"Number of Results": "Number of Results",
"Include Domains": "Include Domains",
"Exclude Domains": "Exclude Domains",
"Start Crawl Date": "Start Crawl Date",
"End Crawl Date": "End Crawl Date",
"Start Published Date": "Start Published Date",
"End Published Date": "End Published Date",
"Include Text": "Include Text",
"Exclude Text": "Exclude Text",
"URL": "URL",
"Start Crawl Date (ISO)": "Start Crawl Date (ISO)",
"End Crawl Date (ISO)": "End Crawl Date (ISO)",
"Start Published Date (ISO)": "Start Published Date (ISO)",
"End Published Date (ISO)": "End Published Date (ISO)",
"Method": "Method",
"Headers": "Headers",
"Query Parameters": "Query Parameters",
"Body": "Body",
"Response is Binary ?": "Response is Binary ?",
"No Error on Failure": "No Error on Failure",
"Timeout (in seconds)": "Timeout (in seconds)",
"Array of URLs to crawl": "Array of URLs to crawl",
"If true, returns full page text. If false, disables text return.": "If true, returns full page text. If false, disables text return.",
"Options for livecrawling pages.": "Options for livecrawling pages.",
"Timeout for livecrawling in milliseconds.": "Timeout for livecrawling in milliseconds.",
"Number of subpages to crawl.": "Number of subpages to crawl.",
"Keyword(s) to find specific subpages.": "Keyword(s) to find specific subpages.",
"Ask a question to get summarized answers from the web.": "Ask a question to get summarized answers from the web.",
"If true, includes full text content from the search results": "If true, includes full text content from the search results",
"Choose the Exa model to use for the answer.": "Choose the Exa model to use for the answer.",
"Search query to find related articles and data.": "Search query to find related articles and data.",
"Type of search to perform.": "Type of search to perform.",
"Category of data to focus the search on.": "Category of data to focus the search on.",
"Number of results to return (max 100).": "Number of results to return (max 100).",
"Limit results to only these domains.": "Limit results to only these domains.",
"Exclude results from these domains.": "Exclude results from these domains.",
"Only include results crawled after this ISO date.": "Only include results crawled after this ISO date.",
"Only include results crawled before this ISO date.": "Only include results crawled before this ISO date.",
"Only include results published after this ISO date.": "Only include results published after this ISO date.",
"Only include results published before this ISO date.": "Only include results published before this ISO date.",
"Strings that must be present in the text of results.": "Strings that must be present in the text of results.",
"Strings that must not be present in the text of results.": "Strings that must not be present in the text of results.",
"Reference URL to find semantically similar links.": "Reference URL to find semantically similar links.",
"List of domains to include in results.": "List of domains to include in results.",
"List of domains to exclude from results.": "List of domains to exclude from results.",
"Include links crawled after this date (ISO format).": "Include links crawled after this date (ISO format).",
"Include links crawled before this date (ISO format).": "Include links crawled before this date (ISO format).",
"Only include links published after this date (ISO format).": "Only include links published after this date (ISO format).",
"Only include links published before this date (ISO format).": "Only include links published before this date (ISO format).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Strings that must be present in the webpage text (max 1 string of up to 5 words).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Strings that must not be present in the webpage text (max 1 string of up to 5 words).",
"Authorization headers are injected automatically from your connection.": "Authorization headers are injected automatically from your connection.",
"Enable for files like PDFs, images, etc..": "Enable for files like PDFs, images, etc..",
"Never": "Never",
"Fallback": "Fallback",
"Always": "Always",
"Auto": "Auto",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neural",
"Company": "Company",
"Research Paper": "Research Paper",
"News": "News",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Personal Site",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Financial Report",
"GET": "GET",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "DELETE",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,99 @@
{
"Exa": "Exa",
"AI-powered search and content extraction from the web.": "AI-powered search and content extraction from the web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Get Contents",
"Ask AI": "Ask AI",
"Perform Search": "Perform Search",
"Find Similar Links": "Find Similar Links",
"Custom API Call": "Custom API Call",
"Retrieve clean HTML content from specified URLs.": "Retrieve clean HTML content from specified URLs.",
"Provides direct answers to queries by summarizing results.": "Provides direct answers to queries by summarizing results.",
"Search the web using semantic or keyword-based search.": "Search the web using semantic or keyword-based search.",
"Find pages similar to a given URL.": "Find pages similar to a given URL.",
"Make a custom API call to a specific endpoint": "Make a custom API call to a specific endpoint",
"URLs": "URLs",
"Return Full Text": "Return Full Text",
"Livecrawl Option": "Livecrawl Option",
"Livecrawl Timeout (ms)": "Livecrawl Timeout (ms)",
"Number of Subpages": "Number of Subpages",
"Subpage Target": "Subpage Target",
"Query": "Query",
"Include Text Content": "Include Text Content",
"Model": "Model",
"Search Type": "Search Type",
"Category": "Category",
"Number of Results": "Number of Results",
"Include Domains": "Include Domains",
"Exclude Domains": "Exclude Domains",
"Start Crawl Date": "Start Crawl Date",
"End Crawl Date": "End Crawl Date",
"Start Published Date": "Start Published Date",
"End Published Date": "End Published Date",
"Include Text": "Include Text",
"Exclude Text": "Exclude Text",
"URL": "URL",
"Start Crawl Date (ISO)": "Start Crawl Date (ISO)",
"End Crawl Date (ISO)": "End Crawl Date (ISO)",
"Start Published Date (ISO)": "Start Published Date (ISO)",
"End Published Date (ISO)": "End Published Date (ISO)",
"Method": "Method",
"Headers": "Headers",
"Query Parameters": "Query Parameters",
"Body": "Body",
"No Error on Failure": "No Error on Failure",
"Timeout (in seconds)": "Timeout (in seconds)",
"Array of URLs to crawl": "Array of URLs to crawl",
"If true, returns full page text. If false, disables text return.": "If true, returns full page text. If false, disables text return.",
"Options for livecrawling pages.": "Options for livecrawling pages.",
"Timeout for livecrawling in milliseconds.": "Timeout for livecrawling in milliseconds.",
"Number of subpages to crawl.": "Number of subpages to crawl.",
"Keyword(s) to find specific subpages.": "Keyword(s) to find specific subpages.",
"Ask a question to get summarized answers from the web.": "Ask a question to get summarized answers from the web.",
"If true, includes full text content from the search results": "If true, includes full text content from the search results",
"Choose the Exa model to use for the answer.": "Choose the Exa model to use for the answer.",
"Search query to find related articles and data.": "Search query to find related articles and data.",
"Type of search to perform.": "Type of search to perform.",
"Category of data to focus the search on.": "Category of data to focus the search on.",
"Number of results to return (max 100).": "Number of results to return (max 100).",
"Limit results to only these domains.": "Limit results to only these domains.",
"Exclude results from these domains.": "Exclude results from these domains.",
"Only include results crawled after this ISO date.": "Only include results crawled after this ISO date.",
"Only include results crawled before this ISO date.": "Only include results crawled before this ISO date.",
"Only include results published after this ISO date.": "Only include results published after this ISO date.",
"Only include results published before this ISO date.": "Only include results published before this ISO date.",
"Strings that must be present in the text of results.": "Strings that must be present in the text of results.",
"Strings that must not be present in the text of results.": "Strings that must not be present in the text of results.",
"Reference URL to find semantically similar links.": "Reference URL to find semantically similar links.",
"List of domains to include in results.": "List of domains to include in results.",
"List of domains to exclude from results.": "List of domains to exclude from results.",
"Include links crawled after this date (ISO format).": "Include links crawled after this date (ISO format).",
"Include links crawled before this date (ISO format).": "Include links crawled before this date (ISO format).",
"Only include links published after this date (ISO format).": "Only include links published after this date (ISO format).",
"Only include links published before this date (ISO format).": "Only include links published before this date (ISO format).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Strings that must be present in the webpage text (max 1 string of up to 5 words).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Strings that must not be present in the webpage text (max 1 string of up to 5 words).",
"Authorization headers are injected automatically from your connection.": "Authorization headers are injected automatically from your connection.",
"Never": "Never",
"Fallback": "Fallback",
"Always": "Always",
"Auto": "Auto",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neural",
"Company": "Company",
"Research Paper": "Research Paper",
"News": "News",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Personal Site",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Financial Report",
"GET": "GET",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "DELETE",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,101 @@
{
"AI-powered search and content extraction from the web.": "AI-powered search and content extraction from the web.",
"Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).": "Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).",
"Get Contents": "Get Contents",
"Ask AI": "询问AI",
"Perform Search": "Perform Search",
"Find Similar Links": "Find Similar Links",
"Custom API Call": "自定义 API 呼叫",
"Retrieve clean HTML content from specified URLs.": "Retrieve clean HTML content from specified URLs.",
"Provides direct answers to queries by summarizing results.": "Provides direct answers to queries by summarizing results.",
"Search the web using semantic or keyword-based search.": "Search the web using semantic or keyword-based search.",
"Find pages similar to a given URL.": "Find pages similar to a given URL.",
"Make a custom API call to a specific endpoint": "将一个自定义 API 调用到一个特定的终点",
"URLs": "URLs",
"Return Full Text": "Return Full Text",
"Livecrawl Option": "Livecrawl Option",
"Livecrawl Timeout (ms)": "Livecrawl Timeout (ms)",
"Number of Subpages": "Number of Subpages",
"Subpage Target": "Subpage Target",
"Query": "Query",
"Include Text Content": "Include Text Content",
"Model": "Model",
"Search Type": "Search Type",
"Category": "Category",
"Number of Results": "Number of Results",
"Include Domains": "Include Domains",
"Exclude Domains": "Exclude Domains",
"Start Crawl Date": "Start Crawl Date",
"End Crawl Date": "End Crawl Date",
"Start Published Date": "Start Published Date",
"End Published Date": "End Published Date",
"Include Text": "Include Text",
"Exclude Text": "Exclude Text",
"URL": "URL",
"Start Crawl Date (ISO)": "Start Crawl Date (ISO)",
"End Crawl Date (ISO)": "End Crawl Date (ISO)",
"Start Published Date (ISO)": "Start Published Date (ISO)",
"End Published Date (ISO)": "End Published Date (ISO)",
"Method": "方法",
"Headers": "信头",
"Query Parameters": "查询参数",
"Body": "正文内容",
"Response is Binary ?": "Response is Binary ?",
"No Error on Failure": "失败时没有错误",
"Timeout (in seconds)": "超时(秒)",
"Array of URLs to crawl": "Array of URLs to crawl",
"If true, returns full page text. If false, disables text return.": "If true, returns full page text. If false, disables text return.",
"Options for livecrawling pages.": "Options for livecrawling pages.",
"Timeout for livecrawling in milliseconds.": "Timeout for livecrawling in milliseconds.",
"Number of subpages to crawl.": "Number of subpages to crawl.",
"Keyword(s) to find specific subpages.": "Keyword(s) to find specific subpages.",
"Ask a question to get summarized answers from the web.": "Ask a question to get summarized answers from the web.",
"If true, includes full text content from the search results": "If true, includes full text content from the search results",
"Choose the Exa model to use for the answer.": "Choose the Exa model to use for the answer.",
"Search query to find related articles and data.": "Search query to find related articles and data.",
"Type of search to perform.": "Type of search to perform.",
"Category of data to focus the search on.": "Category of data to focus the search on.",
"Number of results to return (max 100).": "Number of results to return (max 100).",
"Limit results to only these domains.": "Limit results to only these domains.",
"Exclude results from these domains.": "Exclude results from these domains.",
"Only include results crawled after this ISO date.": "Only include results crawled after this ISO date.",
"Only include results crawled before this ISO date.": "Only include results crawled before this ISO date.",
"Only include results published after this ISO date.": "Only include results published after this ISO date.",
"Only include results published before this ISO date.": "Only include results published before this ISO date.",
"Strings that must be present in the text of results.": "Strings that must be present in the text of results.",
"Strings that must not be present in the text of results.": "Strings that must not be present in the text of results.",
"Reference URL to find semantically similar links.": "Reference URL to find semantically similar links.",
"List of domains to include in results.": "List of domains to include in results.",
"List of domains to exclude from results.": "List of domains to exclude from results.",
"Include links crawled after this date (ISO format).": "Include links crawled after this date (ISO format).",
"Include links crawled before this date (ISO format).": "Include links crawled before this date (ISO format).",
"Only include links published after this date (ISO format).": "Only include links published after this date (ISO format).",
"Only include links published before this date (ISO format).": "Only include links published before this date (ISO format).",
"Strings that must be present in the webpage text (max 1 string of up to 5 words).": "Strings that must be present in the webpage text (max 1 string of up to 5 words).",
"Strings that must not be present in the webpage text (max 1 string of up to 5 words).": "Strings that must not be present in the webpage text (max 1 string of up to 5 words).",
"Authorization headers are injected automatically from your connection.": "授权头自动从您的连接中注入。",
"Enable for files like PDFs, images, etc..": "Enable for files like PDFs, images, etc..",
"Never": "从不使用",
"Fallback": "Fallback",
"Always": "Always",
"Auto": "Auto",
"Exa": "Exa",
"Exa Pro": "Exa Pro",
"Keyword": "Keyword",
"Neural": "Neural",
"Company": "Company",
"Research Paper": "Research Paper",
"News": "News",
"PDF": "PDF",
"GitHub": "GitHub",
"Tweet": "Tweet",
"Personal Site": "Personal Site",
"LinkedIn Profile": "LinkedIn Profile",
"Financial Report": "Financial Report",
"GET": "GET",
"POST": "POST",
"PATCH": "PATCH",
"PUT": "PUT",
"DELETE": "DELETE",
"HEAD": "HEAD"
}

View File

@@ -0,0 +1,59 @@
import { createPiece, PieceAuth } from '@activepieces/pieces-framework';
import { PieceCategory } from '@activepieces/shared';
import { getContentsAction } from './lib/actions/get-contents';
import { generateAnswerAction } from './lib/actions/generate-answer';
import { performSearchAction } from './lib/actions/perform-search';
import { findSimilarLinksAction } from './lib/actions/find-similar-links';
import { createCustomApiCallAction, HttpMethod } from '@activepieces/pieces-common';
import { makeRequest } from './lib/common';

// Shown to the user when they configure the Exa connection.
const markdownDescription = `Obtain your API key from [Dashboard Setting](https://dashboard.exa.ai/api-keys).`;

/**
 * Secret-text authentication for the Exa API.
 * The key is validated by issuing a minimal POST /search request; any
 * successful response means the key is accepted by the API.
 */
export const exaAuth = PieceAuth.SecretText({
  displayName: 'API Key',
  description: markdownDescription,
  required: true,
  validate: async ({ auth }) => {
    try {
      await makeRequest(auth, HttpMethod.POST, '/search', { query: 'Activepieces' });
      return { valid: true };
    } catch {
      // The request failed, so report the key as invalid without surfacing details.
      return {
        valid: false,
        error: 'Invalid API Key.',
      };
    }
  },
});

/** Exa piece definition: AI-powered web search and content extraction actions. */
export const exa = createPiece({
  displayName: 'Exa',
  description: 'AI-powered search and content extraction from the web.',
  auth: exaAuth,
  minimumSupportedRelease: '0.36.1',
  logoUrl: 'https://cdn.activepieces.com/pieces/exa.png',
  categories: [PieceCategory.ARTIFICIAL_INTELLIGENCE, PieceCategory.PRODUCTIVITY],
  authors: ['krushnarout', 'kishanprmr'],
  actions: [
    getContentsAction,
    generateAnswerAction,
    performSearchAction,
    findSimilarLinksAction,
    // Escape hatch for arbitrary authenticated calls against the Exa REST API.
    createCustomApiCallAction({
      auth: exaAuth,
      baseUrl: () => 'https://api.exa.ai',
      authMapping: async (auth) => ({
        'x-api-key': `${auth}`,
      }),
    }),
  ],
  triggers: [],
});

View File

@@ -0,0 +1,96 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { HttpMethod } from '@activepieces/pieces-common';
import { makeRequest } from '../common';
import { exaAuth } from '../../index';
/**
 * Find Similar Links — wraps Exa `POST /findSimilar`, returning pages that
 * are semantically similar to a reference URL. Only filters the user set
 * are forwarded to the API.
 */
export const findSimilarLinksAction = createAction({
  name: 'find_similar_links',
  displayName: 'Find Similar Links',
  description: 'Find pages similar to a given URL.',
  auth: exaAuth,
  props: {
    url: Property.ShortText({
      displayName: 'URL',
      description: 'Reference URL to find semantically similar links.',
      required: true,
    }),
    numResults: Property.Number({
      displayName: 'Number of Results',
      description: 'Number of results to return (max 100).',
      required: false,
    }),
    includeDomains: Property.Array({
      displayName: 'Include Domains',
      description: 'List of domains to include in results.',
      required: false,
    }),
    excludeDomains: Property.Array({
      displayName: 'Exclude Domains',
      description: 'List of domains to exclude from results.',
      required: false,
    }),
    startCrawlDate: Property.DateTime({
      displayName: 'Start Crawl Date (ISO)',
      description: 'Include links crawled after this date (ISO format).',
      required: false,
    }),
    endCrawlDate: Property.DateTime({
      displayName: 'End Crawl Date (ISO)',
      description: 'Include links crawled before this date (ISO format).',
      required: false,
    }),
    startPublishedDate: Property.DateTime({
      displayName: 'Start Published Date (ISO)',
      description: 'Only include links published after this date (ISO format).',
      required: false,
    }),
    endPublishedDate: Property.DateTime({
      displayName: 'End Published Date (ISO)',
      description: 'Only include links published before this date (ISO format).',
      required: false,
    }),
    includeText: Property.Array({
      displayName: 'Include Text',
      description: 'Strings that must be present in the webpage text (max 1 string of up to 5 words).',
      required: false,
    }),
    excludeText: Property.Array({
      displayName: 'Exclude Text',
      description: 'Strings that must not be present in the webpage text (max 1 string of up to 5 words).',
      required: false,
    }),
  },
  async run(context) {
    const apiKey = context.auth.secret_text;
    const props = context.propsValue;

    const payload: Record<string, unknown> = { url: props.url };

    // numResults is a number, so 0 must be distinguished from "unset".
    if (props.numResults !== undefined) {
      payload['numResults'] = props.numResults;
    }

    // The remaining filters are arrays/strings; skip them when falsy.
    const optionalFilters = {
      includeDomains: props.includeDomains,
      excludeDomains: props.excludeDomains,
      startCrawlDate: props.startCrawlDate,
      endCrawlDate: props.endCrawlDate,
      startPublishedDate: props.startPublishedDate,
      endPublishedDate: props.endPublishedDate,
      includeText: props.includeText,
      excludeText: props.excludeText,
    };
    for (const [key, value] of Object.entries(optionalFilters)) {
      if (value) {
        payload[key] = value;
      }
    }

    const response = (await makeRequest(apiKey, HttpMethod.POST, '/findSimilar', payload)) as {
      results: Record<string, any>[];
    };
    return response.results;
  },
});

View File

@@ -0,0 +1,56 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { HttpMethod } from '@activepieces/pieces-common';
import { makeRequest } from '../common';
import { exaAuth } from '../../index';
/**
 * Ask AI — wraps Exa `POST /answer`, which answers a natural-language
 * query by summarizing web results. Returns only the `answer` field of
 * the API response.
 */
export const generateAnswerAction = createAction({
  name: 'generate_answer',
  displayName: 'Ask AI',
  description: 'Provides direct answers to queries by summarizing results.',
  auth: exaAuth,
  props: {
    query: Property.ShortText({
      displayName: 'Query',
      description: 'Ask a question to get summarized answers from the web.',
      required: true,
    }),
    text: Property.Checkbox({
      displayName: 'Include Text Content',
      description: 'If true, includes full text content from the search results',
      required: false,
      defaultValue: false,
    }),
    model: Property.StaticDropdown({
      displayName: 'Model',
      description: 'Choose the Exa model to use for the answer.',
      required: true,
      options: {
        options: [
          { label: 'Exa', value: 'exa' },
          { label: 'Exa Pro', value: 'exa-pro' },
        ],
      },
      defaultValue: 'exa',
    }),
  },
  async run(context) {
    const apiKey = context.auth.secret_text;
    const { query, text, model } = context.propsValue;

    // All three props are always forwarded (text/model have defaults).
    const response = await makeRequest(apiKey, HttpMethod.POST, '/answer', {
      query,
      text,
      model,
    });
    return response.answer;
  },
});

View File

@@ -0,0 +1,69 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { HttpMethod } from '@activepieces/pieces-common';
import { makeRequest } from '../common';
import { exaAuth } from '../../index';
/**
 * Get Contents — wraps Exa `POST /contents`, fetching parsed page content
 * for a list of URLs. Optional crawl controls are forwarded only when set.
 */
export const getContentsAction = createAction({
  name: 'get_contents',
  displayName: 'Get Contents',
  description: 'Retrieve clean HTML content from specified URLs.',
  auth: exaAuth,
  props: {
    urls: Property.Array({
      displayName: 'URLs',
      required: true,
      description: 'Array of URLs to crawl',
    }),
    text: Property.Checkbox({
      displayName: 'Return Full Text',
      description: 'If true, returns full page text. If false, disables text return.',
      required: false,
      defaultValue: true,
    }),
    livecrawl: Property.StaticDropdown({
      displayName: 'Livecrawl Option',
      description: 'Options for livecrawling pages.',
      required: false,
      options: {
        options: [
          { label: 'Never', value: 'never' },
          { label: 'Fallback', value: 'fallback' },
          { label: 'Always', value: 'always' },
          { label: 'Auto', value: 'auto' },
        ],
      },
    }),
    livecrawlTimeout: Property.Number({
      displayName: 'Livecrawl Timeout (ms)',
      description: 'Timeout for livecrawling in milliseconds.',
      required: false,
    }),
    subpages: Property.Number({
      displayName: 'Number of Subpages',
      description: 'Number of subpages to crawl.',
      required: false,
    }),
    subpageTarget: Property.ShortText({
      displayName: 'Subpage Target',
      description: 'Keyword(s) to find specific subpages.',
      required: false,
    }),
  },
  async run(context) {
    const apiKey = context.auth.secret_text;
    const { urls, text, livecrawl, livecrawlTimeout, subpages, subpageTarget } =
      context.propsValue;

    const payload: Record<string, unknown> = { urls };
    // Booleans/numbers: compare against undefined so false/0 still get sent.
    if (text !== undefined) {
      payload['text'] = text;
    }
    if (livecrawlTimeout !== undefined) {
      payload['livecrawlTimeout'] = livecrawlTimeout;
    }
    if (subpages !== undefined) {
      payload['subpages'] = subpages;
    }
    // Strings: skip when empty/unset.
    if (livecrawl) {
      payload['livecrawl'] = livecrawl;
    }
    if (subpageTarget) {
      payload['subpageTarget'] = subpageTarget;
    }

    const response = (await makeRequest(apiKey, HttpMethod.POST, '/contents', payload)) as {
      results: Record<string, any>[];
    };
    return response.results;
  },
});

View File

@@ -0,0 +1,121 @@
import { createAction, Property } from '@activepieces/pieces-framework';
import { HttpMethod } from '@activepieces/pieces-common';
import { makeRequest } from '../common';
import { exaAuth } from '../../index';
/**
 * Perform Search — wraps Exa `POST /search` with full-text contents
 * enabled. Every optional filter is forwarded only when it has a
 * meaningful value (not undefined/null/empty string).
 */
export const performSearchAction = createAction({
  name: 'perform_search',
  displayName: 'Perform Search',
  description: "Search the web using semantic or keyword-based search.",
  auth: exaAuth,
  props: {
    query: Property.ShortText({
      displayName: 'Query',
      description: 'Search query to find related articles and data.',
      required: true,
    }),
    type: Property.StaticDropdown({
      displayName: 'Search Type',
      description: 'Type of search to perform.',
      required: false,
      defaultValue: 'auto',
      options: {
        options: [
          { label: 'Auto', value: 'auto' },
          { label: 'Keyword', value: 'keyword' },
          { label: 'Neural', value: 'neural' },
        ],
      },
    }),
    category: Property.StaticDropdown({
      displayName: 'Category',
      description: 'Category of data to focus the search on.',
      required: false,
      options: {
        options: [
          { label: 'Company', value: 'company' },
          { label: 'Research Paper', value: 'research paper' },
          { label: 'News', value: 'news' },
          { label: 'PDF', value: 'pdf' },
          { label: 'GitHub', value: 'github' },
          { label: 'Tweet', value: 'tweet' },
          { label: 'Personal Site', value: 'personal site' },
          { label: 'LinkedIn Profile', value: 'linkedin profile' },
          { label: 'Financial Report', value: 'financial report' },
        ],
      },
    }),
    numResults: Property.Number({
      displayName: 'Number of Results',
      description: 'Number of results to return (max 100).',
      required: false,
      defaultValue: 10,
    }),
    includeDomains: Property.Array({
      displayName: 'Include Domains',
      description: 'Limit results to only these domains.',
      required: false,
    }),
    excludeDomains: Property.Array({
      displayName: 'Exclude Domains',
      description: 'Exclude results from these domains.',
      required: false,
    }),
    startCrawlDate: Property.DateTime({
      displayName: 'Start Crawl Date',
      description: 'Only include results crawled after this ISO date.',
      required: false,
    }),
    endCrawlDate: Property.DateTime({
      displayName: 'End Crawl Date',
      description: 'Only include results crawled before this ISO date.',
      required: false,
    }),
    startPublishedDate: Property.DateTime({
      displayName: 'Start Published Date',
      description: 'Only include results published after this ISO date.',
      required: false,
    }),
    endPublishedDate: Property.DateTime({
      displayName: 'End Published Date',
      description: 'Only include results published before this ISO date.',
      required: false,
    }),
    includeText: Property.Array({
      displayName: 'Include Text',
      description: 'Strings that must be present in the text of results.',
      required: false,
    }),
    excludeText: Property.Array({
      displayName: 'Exclude Text',
      description: 'Strings that must not be present in the text of results.',
      required: false,
    }),
  },
  async run(context) {
    const apiKey = context.auth.secret_text;
    const p = context.propsValue;

    // Always request full text contents alongside the hits.
    const body: Record<string, unknown> = {
      query: p.query,
      contents: {
        text: true,
      },
    };

    // Explicit candidate map instead of stringly-typed key lookups; a
    // value is forwarded unless it is undefined, null, or ''.
    const candidates: Record<string, unknown> = {
      type: p.type,
      category: p.category,
      numResults: p.numResults,
      includeDomains: p.includeDomains,
      excludeDomains: p.excludeDomains,
      startCrawlDate: p.startCrawlDate,
      endCrawlDate: p.endCrawlDate,
      startPublishedDate: p.startPublishedDate,
      endPublishedDate: p.endPublishedDate,
      includeText: p.includeText,
      excludeText: p.excludeText,
    };
    for (const [key, value] of Object.entries(candidates)) {
      if (value !== undefined && value !== null && value !== '') {
        body[key] = value;
      }
    }

    const response = (await makeRequest(apiKey, HttpMethod.POST, '/search', body)) as {
      results: Record<string, any>[];
    };
    return response.results;
  },
});

View File

@@ -0,0 +1,17 @@
import { HttpMethod, httpClient } from '@activepieces/pieces-common';
/** Root endpoint for all Exa REST calls. */
export const BASE_URL = 'https://api.exa.ai';

/**
 * Thin HTTP helper for the Exa API.
 *
 * @param auth - Exa API key, sent via the `x-api-key` header.
 * @param method - HTTP method to use.
 * @param path - Path relative to {@link BASE_URL}, e.g. `/search`.
 * @param body - Optional JSON request body.
 * @returns The parsed response body; errors from httpClient propagate to the caller.
 */
export async function makeRequest(auth: string, method: HttpMethod, path: string, body?: unknown) {
  const request = {
    method,
    url: `${BASE_URL}${path}`,
    headers: {
      'x-api-key': `${auth}`,
      'Content-Type': 'application/json',
    },
    body,
  };
  const response = await httpClient.sendRequest(request);
  return response.body;
}

View File

@@ -0,0 +1,19 @@
{
"extends": "../../../../tsconfig.base.json",
"compilerOptions": {
"module": "commonjs",
"forceConsistentCasingInFileNames": true,
"strict": true,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"noPropertyAccessFromIndexSignature": true
},
"files": [],
"include": [],
"references": [
{
"path": "./tsconfig.lib.json"
}
]
}

View File

@@ -0,0 +1,11 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "commonjs",
"outDir": "../../../../dist/out-tsc",
"declaration": true,
"types": ["node"]
},
"exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"],
"include": ["src/**/*.ts"]
}