Merge branch 'develop' into pixie-ga-2
mmfred committed Jul 15, 2021
2 parents c2b5bc2 + c901307 commit f5d076e
Showing 497 changed files with 7,381 additions and 9,600 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/get-slugs-to-translate.yml
@@ -37,6 +37,8 @@ jobs:
run: yarn install --frozen-lockfile

- name: Get slugs and save
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
URL="https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }}/files"
yarn get-translated-files $URL
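(Note, not part of the diff: this step now exposes GITHUB_TOKEN to the script, presumably because get-translated-files fetches the PR's changed files from the GitHub API and, like the other scripts touched in this commit, authenticates those requests with process.env.GITHUB_TOKEN.)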
16 changes: 10 additions & 6 deletions .github/workflows/release.yml
@@ -20,6 +20,7 @@ jobs:
uses: actions/checkout@v2
with:
fetch-depth: '0'

- name: Setup python
uses: actions/setup-python@v2

@@ -28,23 +29,26 @@
run: |
echo LAST_RELEASE=$(git describe --tags --abbrev=0) >> $GITHUB_ENV
echo $LAST_RELEASE
- name: Install Python Dependencies
run: |
python -m pip install --upgrade pip
pip install PyGithub
pip install invoke
- name: Create Updates
id: py-get-updates
run: |
python ./scripts/actions/release-notes.py
- name: Create release timestamp
run: |
echo RELEASE_TIMESTAMP=$(TZ=America/Los_Angeles date '+%m.%d.%Y-%H.%M') >> $GITHUB_ENV
- name: Create new release tag
id: create-new-tag
run: |
LAST_TAG_NUM=$(echo ${{ env.LAST_RELEASE }} | sed 's/release\-//')
echo NEW_TAG=$(echo "release-$((LAST_TAG_NUM + 1))") >> $GITHUB_ENV
echo NEW_TAG=$(echo "release-${{ env.RELEASE_TIMESTAMP }}") >> $GITHUB_ENV
- name: Create release
run: gh release create ${{ env.NEW_TAG }} -t ${{ env.NEW_TAG }} -n "${{ env.RESULT }}"
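(Not part of the diff: the release tag is now derived from the workflow's start time in America/Los_Angeles rather than by incrementing the previous release number; the %m.%d.%Y-%H.%M format yields tags along the lines of release-07.15.2021-13.45, where the exact time is illustrative.)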
3 changes: 3 additions & 0 deletions .github/workflows/update-whats-new-ids.yml
@@ -2,6 +2,8 @@ name: whats-new

on:
pull_request:
types:
- opened
branches:
- main

@@ -18,6 +20,7 @@ jobs:
uses: actions/checkout@v2
with:
ref: develop
persist-credentials: false

- name: Setup node.js
uses: actions/setup-node@v1
1 change: 1 addition & 0 deletions .prettierrc.js
@@ -4,4 +4,5 @@ module.exports = {
tabWidth: 2,
semi: true,
singleQuote: true,
arrowParens: 'always',
};
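For context (not part of the diff): Prettier's arrowParens option controls whether single-parameter arrow functions keep their parentheses. A minimal illustration:

// arrowParens: 'always'
const double = (x) => x * 2;

// arrowParens: 'avoid' would instead format this as:
// const double = x => x * 2;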
34 changes: 17 additions & 17 deletions package.json
@@ -9,30 +9,30 @@
"@github-docs/frontmatter": "^1.3.1",
"@mdx-js/mdx": "^2.0.0-next.8",
"@mdx-js/react": "^2.0.0-next.8",
"@newrelic/gatsby-theme-newrelic": "^2.4.4",
"@newrelic/gatsby-theme-newrelic": "^2.4.5",
"@splitsoftware/splitio-react": "^1.2.4",
"babel-jest": "^26.3.0",
"common-tags": "^1.8.0",
"date-fns": "^2.17.0",
"front-matter": "^4.0.2",
"gatsby": "^3.5.1",
"gatsby-image": "^3.3.0",
"gatsby-plugin-gatsby-cloud": "^2.5.0",
"gatsby": "^3.8.1",
"gatsby-image": "^3.8.0",
"gatsby-plugin-gatsby-cloud": "^2.8.1",
"gatsby-plugin-json-output": "^1.2.0",
"gatsby-plugin-manifest": "^3.3.0",
"gatsby-plugin-mdx": "^2.3.0",
"gatsby-plugin-manifest": "^3.8.0",
"gatsby-plugin-mdx": "^2.8.0",
"gatsby-plugin-meta-redirect": "^1.1.1",
"gatsby-plugin-offline": "^4.3.0",
"gatsby-plugin-react-helmet": "^4.3.0",
"gatsby-plugin-sharp": "^3.3.1",
"gatsby-remark-autolink-headers": "^4.0.0",
"gatsby-remark-copy-linked-files": "^4.0.0",
"gatsby-remark-images": "^5.0.0",
"gatsby-source-filesystem": "^3.3.0",
"gatsby-transformer-json": "^3.3.0",
"gatsby-transformer-remark": "^4.0.0",
"gatsby-transformer-sharp": "^3.3.0",
"gatsby-transformer-yaml": "^3.3.0",
"gatsby-plugin-offline": "^4.8.0",
"gatsby-plugin-react-helmet": "^4.8.0",
"gatsby-plugin-sharp": "^3.8.0",
"gatsby-remark-autolink-headers": "^4.5.0",
"gatsby-remark-copy-linked-files": "^4.5.0",
"gatsby-remark-images": "^5.5.0",
"gatsby-source-filesystem": "^3.8.0",
"gatsby-transformer-json": "^3.8.0",
"gatsby-transformer-remark": "^4.5.0",
"gatsby-transformer-sharp": "^3.8.0",
"gatsby-transformer-yaml": "^3.8.0",
"github-slugger": "^1.3.0",
"hast-util-from-dom": "^3.0.0",
"hast-util-has-property": "^1.0.4",
@@ -0,0 +1,23 @@
const fs = require('fs');

const { getContent } = require('../send-and-update-translation-queue');

jest.mock('../serialize-mdx');
jest.mock('fs');

test('getContent skips over files that dont exist', async () => {
fs.existsSync
.mockReturnValueOnce(false)
.mockReturnValueOnce(false)
.mockReturnValueOnce(false)
.mockReturnValue(true);

const mockInput = {
jp: ['slug1', 'slug2', 'slug3', 'slug4', 'slug5'],
};

const fileContent = getContent(mockInput);
const pages = await fileContent['jp'];

expect(pages.length).toBe(2); // 5 submitted slugs - 3 false mock returns = 2 files not skipped
});
5 changes: 2 additions & 3 deletions scripts/actions/add-files-to-translation-queue.js
@@ -1,8 +1,8 @@
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const frontmatter = require('@github-docs/frontmatter');

const { fetchPaginatedGHResults } = require('./utils/github-api-helpers');
const { saveToTranslationQueue } = require('./utils/save-to-db');
const loadFromDB = require('./utils/load-from-db');
const checkArgs = require('./utils/check-args');
@@ -15,8 +15,7 @@ const { prop } = require('../utils/functional');
*/
const getUpdatedQueue = async (url, queue) => {
try {
const resp = await fetch(url);
const files = await resp.json();
const files = await fetchPaginatedGHResults(url, process.env.GITHUB_TOKEN);

const mdxFiles = files
? files.filter((file) => path.extname(file.filename) === '.mdx')
34 changes: 2 additions & 32 deletions scripts/actions/check-for-outdated-translations.js
@@ -1,9 +1,8 @@
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const frontmatter = require('@github-docs/frontmatter');
const parseLinkHeader = require('parse-link-header');

const { fetchPaginatedGHResults } = require('./utils/github-api-helpers');
const checkArgs = require('./utils/check-args');
const { prop } = require('../utils/functional');
const { ADDITIONAL_LOCALES } = require('../utils/constants');
@@ -21,40 +20,11 @@ const doI18nFilesExist = (fileName, locales) => {
.filter(Boolean);
};

const fetchFilesFromGH = async (url) => {
let files = [];
let nextPageLink = url;

while (nextPageLink) {
const resp = await fetch(nextPageLink, {
headers: { authorization: `token ${process.env.GITHUB_TOKEN}` },
});
if (!resp.ok) {
throw new Error(
`Github API returned status ${resp.code} - ${resp.message}`
);
}
const page = await resp.json();
nextPageLink = getNextLink(resp.headers.get('Link'));
files = [...files, ...page];
}

return files;
};

const getNextLink = (linkHeader) => {
const parsedLinkHeader = parseLinkHeader(linkHeader);
if (parsedLinkHeader && parsedLinkHeader.next) {
return parsedLinkHeader.next.url || null;
}
return null;
};

/**
* @param {string} url The API url that is used to fetch files.
*/
const checkOutdatedTranslations = async (url) => {
const files = await fetchFilesFromGH(url);
const files = await fetchPaginatedGHResults(url, process.env.GITHUB_TOKEN);
const mdxFiles = files
? files.filter((file) => path.extname(file.filename) === '.mdx')
: [];
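The shared helper introduced by this commit, scripts/actions/utils/github-api-helpers.js, is not itself shown in the diff. Based on the inline fetchFilesFromGH/getNextLink implementation removed above and the tests added further down, a rough sketch of what it presumably exports (the exact error message and internal structure are assumptions):

// Sketch of scripts/actions/utils/github-api-helpers.js -- not part of this diff.
const fetch = require('node-fetch');
const parseLinkHeader = require('parse-link-header');

// Returns the URL of the next page from a GitHub Link header, or null.
const getNextLink = (linkHeader) => {
  const parsedLinkHeader = parseLinkHeader(linkHeader);
  if (parsedLinkHeader && parsedLinkHeader.next) {
    return parsedLinkHeader.next.url || null;
  }
  return null;
};

// Walks every page of a paginated GitHub API endpoint and concatenates the results.
const fetchPaginatedGHResults = async (url, token) => {
  let files = [];
  let nextPageLink = url;

  while (nextPageLink) {
    const resp = await fetch(nextPageLink, {
      headers: { authorization: `token ${token}` },
    });
    if (!resp.ok) {
      throw new Error(`Github API returned status ${resp.status}`);
    }
    nextPageLink = getNextLink(resp.headers.get('Link'));
    const page = await resp.json();
    files = [...files, ...page];
  }

  return files;
};

module.exports = { getNextLink, fetchPaginatedGHResults };

The callers changed in this commit invoke it as fetchPaginatedGHResults(url, process.env.GITHUB_TOKEN), as seen in the hunks above and below.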
43 changes: 33 additions & 10 deletions scripts/actions/send-and-update-translation-queue.js
@@ -28,19 +28,31 @@ const DOCS_SITE_URL = 'https://docs.newrelic.com';
* @param {Object<string, string[]>} locales The queue of slugs to be translated.
* @returns {Object<string, Promise<Page[]>>}
*/
const getContent = (locales) =>
Object.entries(locales).reduce((acc, [locale, slugs]) => {
const getContent = (locales) => {
return Object.entries(locales).reduce((acc, [locale, slugs]) => {
return {
...acc,
[locale]: Promise.all(
slugs.map(async (slug) => {
const mdx = fs.readFileSync(path.join(process.cwd(), slug));
const html = await serializeMDX(mdx);
return { file: slug, html };
})
slugs
.filter((slug) => {
/**
* If a doc doesn't exist, it must have been renamed or deleted. In
* that case, it is safe to ignore. If we skip including a doc in
* this step, it won't become a failed upload, and will then be
* cleaned up from the queue.
*/
const exists = fs.existsSync(path.join(process.cwd(), slug));
if (!exists) {
  console.log(`Skipping over -- ${slug} -- since it no longer exists.`);
}
return exists;
})
.map(async (slug) => {
const mdx = fs.readFileSync(path.join(process.cwd(), slug));
const html = await serializeMDX(mdx);
return { file: slug, html };
})
),
};
}, {});
};

/**
* @param {string} locale The locale that this file should be translated to.
@@ -80,7 +92,9 @@ const uploadFile = (locale, batchUid, accessToken) => async (page) => {
console.log(`[*] Successfully uploaded ${page.file}.`);
await sendPageContext(page.file, accessToken);
} else {
console.error(`[!] Unable to upload ${page.file}.`);
console.error(
`[!] Unable to upload ${page.file}. Code was ${code}. Response status: ${resp.status} -- ${resp.statusText}`
);
}

return { code, locale, slug: page.file };
@@ -225,7 +239,7 @@ const saveFailedUploads = async (failedUploads) => {
const updatedLocales = failedUploads.reduce(
(acc, page) => ({
...acc,
[page.locale]: [...acc[page.locale], page.slug],
[page.locale]: [...(acc[page.locale] || []), page.slug],
}),
{}
);
@@ -270,4 +284,13 @@ const main = async () => {
}
};

main();
/**
* This allows us to check if the script was invoked directly from the command line, i.e. 'node validate_packs.js', or if it was imported.
* This would be true if this was used in one of our GitHub workflows, but false when imported for use in a test.
* See here: https://nodejs.org/docs/latest/api/modules.html#modules_accessing_the_main_module
*/
if (require.main === module) {
main();
}

module.exports = { main, getContent };
102 changes: 102 additions & 0 deletions scripts/actions/utils/__tests__/github-api-helpers.test.js
@@ -0,0 +1,102 @@
'use strict';

const {
getNextLink,
fetchPaginatedGHResults,
} = require('../github-api-helpers');

jest.mock('node-fetch');

const fetch = require('node-fetch');

describe('Github API Helpers', () => {
afterEach(() => {
jest.resetAllMocks();
});

describe('getNextLink', () => {
test('returns null for malformed header', () => {
expect(getNextLink('askdflaksjdhf2421dfdfs')).toBe(null);
});

test('returns null when `next` is not present', () => {
expect(
getNextLink('<https://fakesite.com/files?page=1>; rel="last"')
).toBe(null);
});

test('returns url for next page', () => {
expect(
getNextLink(
'<https://fakesite.com/files?page=36>; rel="last", <https://fakesite.com/files?page=2>; rel="next"'
)
).toEqual('https://fakesite.com/files?page=2');
});
});

describe('fetchPaginatedGHResults', () => {
test('throws error when response is not ok', async () => {
fetch.mockResolvedValueOnce({ ok: false });
await expect(fetchPaginatedGHResults('test', 'test')).rejects.toThrow();
});

test('properly sets API URL and token', async () => {
fetch.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve([]),
headers: {
get: () => null,
},
});

await fetchPaginatedGHResults('testurl', 'testtoken');
expect(fetch.mock.calls[0]).toEqual([
'testurl',
{ headers: { authorization: 'token testtoken' } },
]);
});

test('returns 1 page of results', async () => {
const files = [{ file: 1 }, { file: 2 }, { file: 3 }];
fetch.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(files),
headers: {
get: () => null,
},
});

const page = await fetchPaginatedGHResults('test', 'test');
expect(page).toEqual(files);
});

test('returns more than 1 page of results', async () => {
const files = [{ file: 1 }, { file: 2 }, { file: 3 }];
fetch
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(files.slice(0, 1)),
headers: {
get: () => '<https://fakesite.com/files?page=2>; rel="next"',
},
})
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(files.slice(1, 2)),
headers: {
get: () => '<https://fakesite.com/files?page=3>; rel="next"',
},
})
.mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(files.slice(2)),
headers: {
get: () => '',
},
});

const pages = await fetchPaginatedGHResults('test', 'test');
expect(pages).toEqual(files);
});
});
});
