diff --git a/packages/components/credentials/ApifyApi.ts b/packages/components/credentials/ApifyApi.credential.ts
similarity index 85%
rename from packages/components/credentials/ApifyApi.ts
rename to packages/components/credentials/ApifyApi.credential.ts
index c7e7322a..c961fd38 100644
--- a/packages/components/credentials/ApifyApi.ts
+++ b/packages/components/credentials/ApifyApi.credential.ts
@@ -1,6 +1,6 @@
 import { INodeParams, INodeCredential } from '../src/Interface'
 
-class ApifyApi implements INodeCredential {
+class ApifyApiCredential implements INodeCredential {
     label: string
     name: string
     version: number
@@ -23,4 +23,4 @@ class ApifyApi implements INodeCredential {
     }
 }
 
-module.exports = { credClass: ApifyApi }
+module.exports = { credClass: ApifyApiCredential }
diff --git a/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/ApifyWebsiteContentCrawler.ts b/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/ApifyWebsiteContentCrawler.ts
index 9fd0764c..a5e6a6e0 100644
--- a/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/ApifyWebsiteContentCrawler.ts
+++ b/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/ApifyWebsiteContentCrawler.ts
@@ -103,8 +103,8 @@ class ApifyWebsiteContentCrawler_DocumentLoaders implements INode {
         // Get input options and merge with additional input
         const urls = nodeData.inputs?.urls as string
         const crawlerType = nodeData.inputs?.crawlerType as string
-        const maxCrawlDepth = nodeData.inputs?.maxCrawlDepth as number
-        const maxCrawlPages = nodeData.inputs?.maxCrawlPages as number
+        const maxCrawlDepth = nodeData.inputs?.maxCrawlDepth as string
+        const maxCrawlPages = nodeData.inputs?.maxCrawlPages as string
         const additionalInput = typeof nodeData.inputs?.additionalInput === 'object' ?
             nodeData.inputs?.additionalInput
@@ -112,8 +112,8 @@ class ApifyWebsiteContentCrawler_DocumentLoaders implements INode {
         const input = {
             startUrls: urls.split(',').map((url) => ({ url: url.trim() })),
             crawlerType,
-            maxCrawlDepth,
-            maxCrawlPages,
+            maxCrawlDepth: parseInt(maxCrawlDepth, 10),
+            maxCrawlPages: parseInt(maxCrawlPages, 10),
             ...additionalInput
         }