diff --git a/.github/workflows/github_pages.yml b/.github/workflows/github_pages.yml
index 4c652ae8..fb116287 100644
--- a/.github/workflows/github_pages.yml
+++ b/.github/workflows/github_pages.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Github-pages-MAIN => Algolia crawler creation and recrawl (Push on Main branch)
diff --git a/.github/workflows/netlify.yml b/.github/workflows/netlify.yml
index 1bb87f73..30e0a039 100644
--- a/.github/workflows/netlify.yml
+++ b/.github/workflows/netlify.yml
@@ -18,7 +18,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Netlify-PR => Algolia crawler creation and recrawl on preview (Pull Request)
diff --git a/.github/workflows/vercel_pr.yml b/.github/workflows/vercel_pr.yml
index 5e0ac9c4..41e6b564 100644
--- a/.github/workflows/vercel_pr.yml
+++ b/.github/workflows/vercel_pr.yml
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-PR => Algolia crawler creation and recrawl on preview (Pull Request)
         uses: ./
         id: crawler_pr
diff --git a/.github/workflows/vercel_push.yml b/.github/workflows/vercel_push.yml
index 3849aaf2..3b23068c 100644
--- a/.github/workflows/vercel_push.yml
+++ b/.github/workflows/vercel_push.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-MAIN => Algolia crawler creation and recrawl on preview (Push on Main branch)
         uses: ./
         id: crawler_push
diff --git a/src/crawler-api-client.ts b/src/crawler-api-client.ts
index f071f662..054766ce 100644
--- a/src/crawler-api-client.ts
+++ b/src/crawler-api-client.ts
@@ -149,8 +149,11 @@ class CrawlerApiClient {
   /**
    * List all Crawlers.
    *
-   * @param itemsPerPage - The number of crawlers to return per page.
-   * @param page - The page to fetch.
+   * @param p - Params.
+   * @param p.itemsPerPage - The number of crawlers to return per page.
+   * @param p.page - The page to fetch.
+   * @param p.name - Name of the crawler to get.
+   * @param p.appId - Application of the crawlers to get.
    * @returns A promise that will resolve with an object looking like:
    * {
    *    items: [{ id: 'crawler_1_id', name: 'crawler_1_name' }, { id: 'crawler_2_id, ... }],
    *    itemsPerPage: 20,
    *    page: 1,
    *    total: 5
    * }
    * .
    */
-  async getCrawlers(
-    itemsPerPage: number,
-    page: number
-  ): Promise<GetCrawlersResponseBody | undefined> {
+  async getCrawlers({
+    itemsPerPage,
+    page,
+    name,
+    appId,
+  }: {
+    itemsPerPage?: number;
+    page?: number;
+    name?: string;
+    appId?: string;
+  }): Promise<GetCrawlersResponseBody | undefined> {
     const searchParams: SearchParams = {};
     if (itemsPerPage) searchParams.itemsPerPage = itemsPerPage;
     if (page) searchParams.page = page;
+    if (name) searchParams.name = name;
+    if (appId) searchParams.appId = appId;
     const qs = Object.keys(searchParams)
       .map(
         (k) => `${encodeURIComponent(k)}=${encodeURIComponent(searchParams[k])}`
diff --git a/src/index.ts b/src/index.ts
index 7f405676..d1f6202a 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -55,31 +55,19 @@ function getRecordExtractorSource(): string {
 }
 
 async function crawlerReindex(): Promise<void> {
-  const filteredCrawlers = [];
   let crawlerId = '';
-  let currentPage = 1;
-  let nbFetchedCrawlers = 0;
-  let crawlers: GetCrawlersResponseBody | undefined;
 
-  // Searching for the crawler, based on the name
-  do {
-    crawlers = await client.getCrawlers(100, currentPage++);
+  // Searching for the crawler, based on the name and application ID
+  const crawlers: GetCrawlersResponseBody | undefined =
+    await client.getCrawlers({ name: CRAWLER_NAME, appId: ALGOLIA_APP_ID });
 
-    if (typeof crawlers === 'undefined') {
-      break;
-    }
-
-    nbFetchedCrawlers += crawlers.items.length;
-    filteredCrawlers.push(
-      ...crawlers.items.filter(({ name }) => {
-        return name.indexOf(CRAWLER_NAME) === 0;
-      })
-    );
-  } while (crawlers.total > nbFetchedCrawlers);
+  if (typeof crawlers === 'undefined') {
+    return;
+  }
 
-  if (filteredCrawlers.length !== 0) {
+  if (crawlers.items.length !== 0) {
     // If the crawler exists : update it
-    crawlerId = filteredCrawlers[0].id;
+    crawlerId = crawlers.items[0].id;
     if (OVERRIDE_CONFIG) {
       const config = getConfig();
       await client.updateConfig(crawlerId, config);
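
For reviewers, a minimal TypeScript sketch of how the reworked getCrawlers signature is meant to be consumed. It is illustrative only and not part of the diff: the findCrawlerId helper and the structural types are hypothetical stand-ins for the real CrawlerApiClient in src/crawler-api-client.ts, and the only call it relies on is the new object-style client.getCrawlers({ name, appId }) shown above.

// Minimal structural view of what the caller needs from the client.
type GetCrawlersResult =
  | { items: Array<{ id: string; name: string }> }
  | undefined;

interface HasGetCrawlers {
  getCrawlers(p: { name?: string; appId?: string }): Promise<GetCrawlersResult>;
}

// Hypothetical helper (not part of this PR): resolve a crawler id by name and
// application ID, replacing the old paginated getCrawlers(100, page) loop.
async function findCrawlerId(
  client: HasGetCrawlers,
  name: string,
  appId: string
): Promise<string | undefined> {
  const crawlers = await client.getCrawlers({ name, appId });
  if (typeof crawlers === 'undefined' || crawlers.items.length === 0) {
    return undefined;
  }
  return crawlers.items[0].id;
}

With the server-side filter, crawlerReindex no longer pages through every crawler on the account: an undefined response aborts the run, and an empty items array presumably falls through to the creation path, as in the updated src/index.ts above.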