
Commit

feat: use new crawler parameters name and appId (#19)
damcou committed Jul 2, 2021
1 parent 6e4103f commit 7b5c1a3
Showing 6 changed files with 30 additions and 30 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/github_pages.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Github-pages-MAIN => Algolia crawler creation and recrawl (Push on Main branch)
2 changes: 1 addition & 1 deletion .github/workflows/netlify.yml
@@ -18,7 +18,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Sleep for 30s
         run: sleep 30
       - name: Netlify-PR => Algolia crawler creation and recrawl on preview (Pull Request)
2 changes: 1 addition & 1 deletion .github/workflows/vercel_pr.yml
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-PR => Algolia crawler creation and recrawl on preview (Pull Request)
         uses: ./
         id: crawler_pr
2 changes: 1 addition & 1 deletion .github/workflows/vercel_push.yml
@@ -16,7 +16,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: algolia/algoliasearch-crawler-github-actions
-          ref: v0.4.2
+          ref: v0.5.0
       - name: Vercel-MAIN => Algolia crawler creation and recrawl on preview (Push on Main branch)
         uses: ./
         id: crawler_push
24 changes: 18 additions & 6 deletions src/crawler-api-client.ts
@@ -149,8 +149,11 @@ class CrawlerApiClient {
   /**
    * List all Crawlers.
    *
-   * @param itemsPerPage - The number of crawlers to return per page.
-   * @param page - The page to fetch.
+   * @param p - Params.
+   * @param p.itemsPerPage - The number of crawlers to return per page.
+   * @param p.page - The page to fetch.
+   * @param p.name - Name of the crawler to get.
+   * @param p.appId - Application of the crawlers to get.
    * @returns A promise that will resolve with an object looking like:
    * {
    *   items: [{ id: 'crawler_1_id', name: 'crawler_1_name' }, { id: 'crawler_2_id', ... }],
@@ -160,13 +163,22 @@ class CrawlerApiClient {
    * }
    * .
    */
-  async getCrawlers(
-    itemsPerPage: number,
-    page: number
-  ): Promise<GetCrawlersResponseBody> {
+  async getCrawlers({
+    itemsPerPage,
+    page,
+    name,
+    appId,
+  }: {
+    itemsPerPage?: number;
+    page?: number;
+    name?: string;
+    appId?: string;
+  }): Promise<GetCrawlersResponseBody> {
     const searchParams: SearchParams = {};
     if (itemsPerPage) searchParams.itemsPerPage = itemsPerPage;
     if (page) searchParams.page = page;
+    if (name) searchParams.name = name;
+    if (appId) searchParams.appId = appId;
     const qs = Object.keys(searchParams)
       .map(
         (k) => `${encodeURIComponent(k)}=${encodeURIComponent(searchParams[k])}`
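Taken together, the two hunks replace getCrawlers' positional (itemsPerPage, page) arguments with a single destructured parameter object whose keys are all optional, and forward the two new filters, name and appId, to the API as query-string parameters. Below is a minimal, self-contained sketch of that call-and-encoding pattern, in the spirit of the diff rather than copied from it: the base URL and the fetch-based transport are illustrative assumptions, and the real client keeps its own request plumbing and response types.

// Sketch of the new parameter object and query-string encoding. The
// CRAWLER_API_BASE_URL constant and the use of fetch are assumptions
// for illustration only.
type GetCrawlersParams = {
  itemsPerPage?: number;
  page?: number;
  name?: string;
  appId?: string;
};

const CRAWLER_API_BASE_URL = 'https://crawler.example.com/api/1'; // hypothetical

async function getCrawlersSketch(p: GetCrawlersParams): Promise<unknown> {
  // Only the parameters actually passed end up in the query string,
  // mirroring the if-guards in the diff above.
  const searchParams: Record<string, string | number> = {};
  if (p.itemsPerPage) searchParams.itemsPerPage = p.itemsPerPage;
  if (p.page) searchParams.page = p.page;
  if (p.name) searchParams.name = p.name;
  if (p.appId) searchParams.appId = p.appId;

  const qs = Object.keys(searchParams)
    .map((k) => `${encodeURIComponent(k)}=${encodeURIComponent(searchParams[k])}`)
    .join('&');

  // e.g. GET .../crawlers?name=my-crawler&appId=MY_APP_ID
  const res = await fetch(`${CRAWLER_API_BASE_URL}/crawlers${qs ? `?${qs}` : ''}`);
  return res.json();
}

A side effect of the object form is that every parameter becomes optional at the type level, whereas the old positional signature required both itemsPerPage and page.
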
28 changes: 8 additions & 20 deletions src/index.ts
@@ -55,31 +55,19 @@ function getRecordExtractorSource(): string {
 }
 
 async function crawlerReindex(): Promise<void> {
-  const filteredCrawlers = [];
   let crawlerId = '';
-  let currentPage = 1;
-  let nbFetchedCrawlers = 0;
-  let crawlers: GetCrawlersResponseBody | undefined;
 
-  // Searching for the crawler, based on the name
-  do {
-    crawlers = await client.getCrawlers(100, currentPage++);
+  // Searching for the crawler, based on the name and application ID
+  const crawlers: GetCrawlersResponseBody | undefined =
+    await client.getCrawlers({ name: CRAWLER_NAME, appId: ALGOLIA_APP_ID });
 
-    if (typeof crawlers === 'undefined') {
-      break;
-    }
-
-    nbFetchedCrawlers += crawlers.items.length;
-    filteredCrawlers.push(
-      ...crawlers.items.filter(({ name }) => {
-        return name.indexOf(CRAWLER_NAME) === 0;
-      })
-    );
-  } while (crawlers.total > nbFetchedCrawlers);
+  if (typeof crawlers === 'undefined') {
+    return;
+  }
 
-  if (filteredCrawlers.length !== 0) {
+  if (crawlers.items.length !== 0) {
     // If the crawler exists : update it
-    crawlerId = filteredCrawlers[0].id;
+    crawlerId = crawlers.items[0].id;
     if (OVERRIDE_CONFIG) {
       const config = getConfig();
       await client.updateConfig(crawlerId, config);
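The index.ts change removes the pagination machinery entirely: instead of walking every page of crawlers 100 at a time and keeping those whose name starts with CRAWLER_NAME, the action now issues one request and lets the API filter by name and appId. The sketch below restates the resulting lookup under the assumption that the endpoint returns only matching crawlers; findCrawlerId and its injected client parameter are hypothetical, since the real code uses a module-level client and inlines this logic in crawlerReindex.

// Hypothetical helper showing the single-request lookup; types are
// trimmed to the fields used here.
type CrawlerItem = { id: string; name: string };
type GetCrawlersResponse = { items: CrawlerItem[]; total: number };
type CrawlerClient = {
  getCrawlers(p: {
    name?: string;
    appId?: string;
  }): Promise<GetCrawlersResponse | undefined>;
};

async function findCrawlerId(
  client: CrawlerClient,
  crawlerName: string,
  appId: string
): Promise<string | undefined> {
  // One call replaces the old do/while loop over every page of crawlers.
  const crawlers = await client.getCrawlers({ name: crawlerName, appId });
  if (typeof crawlers === 'undefined' || crawlers.items.length === 0) {
    // As in the diff, no match means the caller can fall back to creating
    // a new crawler (the create branch sits below the shown hunk).
    return undefined;
  }
  return crawlers.items[0].id;
}

Note the subtle semantic shift: the old loop kept any crawler whose name merely started with CRAWLER_NAME, while the new code passes name to the API, so matching is now whatever the endpoint implements, presumably an exact match. In both versions only the first hit is reindexed.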
