From e8fbcd4312b21e74cde79f6e235284532bc27ae1 Mon Sep 17 00:00:00 2001
From: Damien Couchez
Date: Thu, 1 Jul 2021 17:23:10 +0200
Subject: [PATCH 1/2] feat: use new crawler parameters name and appId

---
 .github/workflows/github_pages.yml |  2 +-
 .github/workflows/netlify.yml      |  2 +-
 .github/workflows/vercel_pr.yml    |  2 +-
 .github/workflows/vercel_push.yml  |  2 +-
 src/crawler-api-client.ts          |  8 +++++++-
 src/index.ts                       | 28 ++++++++--------------------
 6 files changed, 19 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/github_pages.yml b/.github/workflows/github_pages.yml
index 4c652ae8..fb116287 100644
--- a/.github/workflows/github_pages.yml
+++ b/.github/workflows/github_pages.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Github-pages-MAIN => Algolia crawler creation and recrawl (Push on Main branch)
diff --git a/.github/workflows/netlify.yml b/.github/workflows/netlify.yml
index 1bb87f73..30e0a039 100644
--- a/.github/workflows/netlify.yml
+++ b/.github/workflows/netlify.yml
@@ -18,7 +18,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Netlify-PR => Algolia crawler creation and recrawl on preview (Pull Request)
diff --git a/.github/workflows/vercel_pr.yml b/.github/workflows/vercel_pr.yml
index 5e0ac9c4..41e6b564 100644
--- a/.github/workflows/vercel_pr.yml
+++ b/.github/workflows/vercel_pr.yml
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-PR => Algolia crawler creation and recrawl on preview (Pull Request)
         uses: ./
         id: crawler_pr
diff --git a/.github/workflows/vercel_push.yml b/.github/workflows/vercel_push.yml
index 3849aaf2..3b23068c 100644
--- a/.github/workflows/vercel_push.yml
+++ b/.github/workflows/vercel_push.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-MAIN => Algolia crawler creation and recrawl on preview (Push on Main branch)
         uses: ./
         id: crawler_push
diff --git a/src/crawler-api-client.ts b/src/crawler-api-client.ts
index f071f662..aba6073a 100644
--- a/src/crawler-api-client.ts
+++ b/src/crawler-api-client.ts
@@ -151,6 +151,8 @@ class CrawlerApiClient {
    *
    * @param itemsPerPage - The number of crawlers to return per page.
    * @param page - The page to fetch.
+   * @param name - Name of the crawler to get.
+   * @param appId - Application of the crawlers to get.
    * @returns A promise that will resolve with an object looking like:
    * {
    *   items: [{ id: 'crawler_1_id', name: 'crawler_1_name' }, { id: 'crawler_2_id, ... }],
@@ -162,11 +164,15 @@
    */
   async getCrawlers(
     itemsPerPage: number,
-    page: number
+    page: number,
+    name?: string,
+    appId?: string
   ): Promise<GetCrawlersResponseBody> {
     const searchParams: SearchParams = {};
     if (itemsPerPage) searchParams.itemsPerPage = itemsPerPage;
     if (page) searchParams.page = page;
+    if (name) searchParams.name = name;
+    if (appId) searchParams.appId = appId;
     const qs = Object.keys(searchParams)
       .map(
         (k) => `${encodeURIComponent(k)}=${encodeURIComponent(searchParams[k])}`
diff --git a/src/index.ts b/src/index.ts
index 7f405676..24c43045 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -55,31 +55,19 @@ function getRecordExtractorSource(): string {
 }
 
 async function crawlerReindex(): Promise<void> {
-  const filteredCrawlers = [];
   let crawlerId = '';
-  let currentPage = 1;
-  let nbFetchedCrawlers = 0;
-  let crawlers: GetCrawlersResponseBody | undefined;
 
-  // Searching for the crawler, based on the name
-  do {
-    crawlers = await client.getCrawlers(100, currentPage++);
+  // Searching for the crawler, based on the name and application ID
+  const crawlers: GetCrawlersResponseBody | undefined =
+    await client.getCrawlers(100, 1, CRAWLER_NAME, ALGOLIA_APP_ID);
 
-    if (typeof crawlers === 'undefined') {
-      break;
-    }
-
-    nbFetchedCrawlers += crawlers.items.length;
-    filteredCrawlers.push(
-      ...crawlers.items.filter(({ name }) => {
-        return name.indexOf(CRAWLER_NAME) === 0;
-      })
-    );
-  } while (crawlers.total > nbFetchedCrawlers);
+  if (typeof crawlers === 'undefined') {
+    return;
+  }
 
-  if (filteredCrawlers.length !== 0) {
+  if (crawlers.items.length !== 0) {
     // If the crawler exists : update it
-    crawlerId = filteredCrawlers[0].id;
+    crawlerId = crawlers.items[0].id;
     if (OVERRIDE_CONFIG) {
       const config = getConfig();
       await client.updateConfig(crawlerId, config);

From 6dc9f3b0156297a375c93f5675f93134ce7cea6c Mon Sep 17 00:00:00 2001
From: Damien Couchez
Date: Fri, 2 Jul 2021 14:36:07 +0200
Subject: [PATCH 2/2] use optional params

---
 src/crawler-api-client.ts | 26 ++++++++++++++++----------
 src/index.ts              |  2 +-
 2 files changed, 17 insertions(+), 11 deletions(-)

diff --git a/src/crawler-api-client.ts b/src/crawler-api-client.ts
index aba6073a..054766ce 100644
--- a/src/crawler-api-client.ts
+++ b/src/crawler-api-client.ts
@@ -149,10 +149,11 @@ class CrawlerApiClient {
   /**
    * List all Crawlers.
    *
-   * @param itemsPerPage - The number of crawlers to return per page.
-   * @param page - The page to fetch.
-   * @param name - Name of the crawler to get.
-   * @param appId - Application of the crawlers to get.
+   * @param p - Params.
+   * @param p.itemsPerPage - The number of crawlers to return per page.
+   * @param p.page - The page to fetch.
+   * @param p.name - Name of the crawler to get.
+   * @param p.appId - Application of the crawlers to get.
    * @returns A promise that will resolve with an object looking like:
    * {
    *   items: [{ id: 'crawler_1_id', name: 'crawler_1_name' }, { id: 'crawler_2_id, ... }],
@@ -159,15 +160,20 @@
    *   itemsPerPage: 20,
    *   page: 1,
    *   total: 5
    * }
    * .
*/ - async getCrawlers( - itemsPerPage: number, - page: number, - name?: string, - appId?: string - ): Promise { + async getCrawlers({ + itemsPerPage, + page, + name, + appId, + }: { + itemsPerPage?: number; + page?: number; + name?: string; + appId?: string; + }): Promise { const searchParams: SearchParams = {}; if (itemsPerPage) searchParams.itemsPerPage = itemsPerPage; if (page) searchParams.page = page; diff --git a/src/index.ts b/src/index.ts index 24c43045..d1f6202a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -59,7 +59,7 @@ async function crawlerReindex(): Promise { // Searching for the crawler, based on the name and application ID const crawlers: GetCrawlersResponseBody | undefined = - await client.getCrawlers(100, 1, CRAWLER_NAME, ALGOLIA_APP_ID); + await client.getCrawlers({ name: CRAWLER_NAME, appId: ALGOLIA_APP_ID }); if (typeof crawlers === 'undefined') { return;