Skip to content

Commit

Permalink
feat: Add comment on PR (#20)
Browse files Browse the repository at this point in the history
  • Loading branch information
damcou committed Jul 13, 2021
1 parent de1ee6e commit 6ed9d2c
Show file tree
Hide file tree
Showing 7 changed files with 40 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/github_pages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: algolia/algoliasearch-crawler-github-actions
ref: v0.5.0
ref: v0.6.0
- name: Sleep for 30s
run: sleep 30
- name: Github-pages-MAIN => Algolia crawler creation and recrawl (Push on Main branch)
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/netlify.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: algolia/algoliasearch-crawler-github-actions
ref: v0.5.0
ref: v0.6.0
- name: Sleep for 30s
run: sleep 30
- name: Netlify-PR => Algolia crawler creation and recrawl on preview (Pull Request)
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/vercel_pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: algolia/algoliasearch-crawler-github-actions
ref: v0.5.0
ref: v0.6.0
- name: Vercel-PR => Algolia crawler creation and recrawl on preview (Pull Request)
uses: ./
id: crawler_pr
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/vercel_push.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: algolia/algoliasearch-crawler-github-actions
ref: v0.5.0
ref: v0.6.0
- name: Vercel-MAIN => Algolia crawler creation and recrawl on preview (Push on Main branch)
uses: ./
id: crawler_push
Expand Down
4 changes: 4 additions & 0 deletions action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@ inputs:
description: 'Crawler API URL'
required: false
default: 'https://crawler.algolia.com/api/1/'
github-token:
description: 'Github token'
required: true
default: ${{ github.token }}

# CRAWLER CONFIGURATION
crawler-name:
Expand Down
31 changes: 31 additions & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
/* eslint-disable no-console */
import * as core from '@actions/core';
import * as github from '@actions/github';

import { CrawlerApiClient } from './crawler-api-client';
import type { ConfigJson } from './types/configJson';
Expand All @@ -9,6 +10,7 @@ import type { GetCrawlersResponseBody } from './types/publicApiJsonResponses';
const CRAWLER_USER_ID = core.getInput('crawler-user-id');
const CRAWLER_API_KEY = core.getInput('crawler-api-key');
const CRAWLER_API_BASE_URL = core.getInput('crawler-api-base-url');
const GITHUB_TOKEN = core.getInput('github-token');

// CRAWLER CONFIGURATION
const CRAWLER_NAME = core.getInput('crawler-name').replace(/\//g, '-');
Expand Down Expand Up @@ -54,6 +56,34 @@ function getRecordExtractorSource(): string {
}`;
}

/**
 * Posts a comment on the current pull request linking to the created
 * Crawler (dashboard overview page) and the resulting Algolia index.
 *
 * No-op (with an info log) when the workflow run was not triggered by a
 * pull request. Any failure is reported through `core.setFailed`.
 *
 * @param crawlerId - Identifier of the crawler that was (re)created.
 * @returns Resolves once the comment has been posted (or skipped).
 */
async function addComment(crawlerId: string): Promise<void> {
  try {
    // Derive the dashboard origin (e.g. https://crawler.algolia.com)
    // from the API base URL via the WHATWG URL API instead of
    // hand-splitting the string on '/'.
    const baseUrl = new URL(CRAWLER_API_BASE_URL).origin;

    const message = `<p>Check your created <a href="${baseUrl}/admin/crawlers/${crawlerId}/overview" target="_blank">Crawler</a></p>
<p>Check your created index on your <a href="https://www.algolia.com/apps/${ALGOLIA_APP_ID}/explorer/browse/${CRAWLER_NAME}" target="_blank">Algolia Application</a></p>`;

    const context = github.context;
    if (context.payload.pull_request === undefined) {
      // Push / schedule / manual runs have no PR to comment on.
      core.info('No pull request found.');
      return;
    }
    const prNumber = context.payload.pull_request.number;

    const octokit = github.getOctokit(GITHUB_TOKEN);
    // Await the API call so a failed request is caught below instead of
    // rejecting as an unhandled floating promise.
    await octokit.rest.issues.createComment({
      ...context.repo,
      issue_number: prNumber,
      body: message,
    });
  } catch (error) {
    // `error` is `unknown` under strict TS; narrow before reading .message.
    core.setFailed(error instanceof Error ? error.message : String(error));
  }
}

async function crawlerReindex(): Promise<void> {
let crawlerId = '';

Expand All @@ -80,6 +110,7 @@ async function crawlerReindex(): Promise<void> {

console.log(`---------- Reindexing crawler ${crawlerId} ----------`);
await client.reindex(crawlerId);
addComment(crawlerId);
}

console.log('---------CRAWLER CONFIG---------');
Expand Down
2 changes: 1 addition & 1 deletion yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

"@actions/github@5.0.0":
version "5.0.0"
resolved "https://registry.npmjs.org/@actions/github/-/github-5.0.0.tgz"
resolved "https://registry.yarnpkg.com/@actions/github/-/github-5.0.0.tgz#1754127976c50bd88b2e905f10d204d76d1472f8"
integrity sha512-QvE9eAAfEsS+yOOk0cylLBIO/d6WyWIOvsxxzdrPFaud39G6BOkUwScXZn1iBzQzHyu9SBkkLSWlohDWdsasAQ==
dependencies:
"@actions/http-client" "^1.0.11"
Expand Down

0 comments on commit 6ed9d2c

Please sign in to comment.