diff --git a/.github/workflows/github_pages.yml b/.github/workflows/github_pages.yml index fb116287..d8358ef9 100644 --- a/.github/workflows/github_pages.yml +++ b/.github/workflows/github_pages.yml @@ -16,7 +16,7 @@ jobs: uses: actions/checkout@v2 with: repository: algolia/algoliasearch-crawler-github-actions - ref: v0.5.0 + ref: v0.6.0 - name: Sleep for 30s run: sleep 30 - name: Github-pages-MAIN => Algolia crawler creation and recrawl (Push on Main branch) diff --git a/.github/workflows/netlify.yml b/.github/workflows/netlify.yml index 30e0a039..fc08320b 100644 --- a/.github/workflows/netlify.yml +++ b/.github/workflows/netlify.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v2 with: repository: algolia/algoliasearch-crawler-github-actions - ref: v0.5.0 + ref: v0.6.0 - name: Sleep for 30s run: sleep 30 - name: Netlify-PR => Algolia crawler creation and recrawl on preview (Pull Request) diff --git a/.github/workflows/vercel_pr.yml b/.github/workflows/vercel_pr.yml index 41e6b564..08d3bd31 100644 --- a/.github/workflows/vercel_pr.yml +++ b/.github/workflows/vercel_pr.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v2 with: repository: algolia/algoliasearch-crawler-github-actions - ref: v0.5.0 + ref: v0.6.0 - name: Vercel-PR => Algolia crawler creation and recrawl on preview (Pull Request) uses: ./ id: crawler_pr diff --git a/.github/workflows/vercel_push.yml b/.github/workflows/vercel_push.yml index 3b23068c..3ceba6d5 100644 --- a/.github/workflows/vercel_push.yml +++ b/.github/workflows/vercel_push.yml @@ -16,7 +16,7 @@ jobs: uses: actions/checkout@v2 with: repository: algolia/algoliasearch-crawler-github-actions - ref: v0.5.0 + ref: v0.6.0 - name: Vercel-MAIN => Algolia crawler creation and recrawl on preview (Push on Main branch) uses: ./ id: crawler_push diff --git a/action.yml b/action.yml index 0312dd22..52724c9f 100644 --- a/action.yml +++ b/action.yml @@ -12,6 +12,10 @@ inputs: description: 'Crawler API URL' required: false default: 
'https://crawler.algolia.com/api/1/' + github-token: + description: 'Github token' + required: true + default: ${{ github.token }} # CRAWLER CONFIGURATION crawler-name: diff --git a/src/index.ts b/src/index.ts index d1f6202a..74d6b64d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,5 +1,6 @@ /* eslint-disable no-console */ import * as core from '@actions/core'; +import * as github from '@actions/github'; import { CrawlerApiClient } from './crawler-api-client'; import type { ConfigJson } from './types/configJson'; @@ -9,6 +10,7 @@ import type { GetCrawlersResponseBody } from './types/publicApiJsonResponses'; const CRAWLER_USER_ID = core.getInput('crawler-user-id'); const CRAWLER_API_KEY = core.getInput('crawler-api-key'); const CRAWLER_API_BASE_URL = core.getInput('crawler-api-base-url'); +const GITHUB_TOKEN = core.getInput('github-token'); // CRAWLER CONFIGURATION const CRAWLER_NAME = core.getInput('crawler-name').replace(/\//g, '-'); @@ -54,6 +56,34 @@ function getRecordExtractorSource(): string { }`; } +function addComment(crawlerId: string): void { + try { + const pathArray = CRAWLER_API_BASE_URL.split('/'); + const protocol = pathArray[0]; + const host = pathArray[2]; + const baseUrl = `${protocol}//${host}`; + + const message = `
+Check your created Crawler
+Check your created index on your Algolia Application
`; + + const context = github.context; + if (context.payload.pull_request === undefined) { + core.info('No pull request found.'); + return; + } + const prNumber = context.payload.pull_request.number; + + const octokit = github.getOctokit(GITHUB_TOKEN); + octokit.rest.issues.createComment({ + ...context.repo, + issue_number: prNumber, + body: message, + }); + } catch (error) { + core.setFailed(error.message); + } +} + async function crawlerReindex(): Promise