Skip to content

Commit

Permalink
Merge branch 'develop' into campfire/translate-deletions
Browse files Browse the repository at this point in the history
  • Loading branch information
LizBaker committed Jun 21, 2021
2 parents 3b11885 + 81d7cff commit 1ee9f8b
Show file tree
Hide file tree
Showing 93 changed files with 1,949 additions and 326 deletions.
1 change: 1 addition & 0 deletions .github/workflows/check-translations-and-deserialize.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ env:
TRANSLATION_VENDOR_SECRET: ${{ secrets.TRANSLATION_VENDOR_SECRET }}
BOT_NAME: nr-opensource-bot
BOT_EMAIL: opensource+bot@newrelic.com
CI: true

jobs:
fetch-content:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/fetch-swiftype-content.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ jobs:
branch: 'main',
required_status_checks: {
strict: false,
contexts: ['Gatsby Build', 'Unpaired translations removed']
contexts: ['build the docs site', 'Unpaired translations removed']
},
restrictions: {
users: [],
Expand Down
7 changes: 3 additions & 4 deletions .github/workflows/pr-triage.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Auto-triage PRs to project boards
name: pr triage

on:
pull_request_target:
Expand All @@ -10,11 +10,11 @@ env:

jobs:
content_project:
name: pr added to the docs github board (optional)
runs-on: ubuntu-latest
env:
PROJECT_ID: 11873277
PROJECT_NAME: Docs PRs and Issues
name: Triage to Content Project
steps:
- name: Triages NEW pull requests to the Content Project
if: github.event.action == 'opened'
Expand All @@ -25,11 +25,11 @@ jobs:
gh api -H "$HEADER" -X POST projects/columns/$COLUMN/cards -f content_type='PullRequest' -F content_id=$PR_ID
localization_project:
name: pr added to the localization github board (optional)
runs-on: ubuntu-latest
env:
PROJECT_ID: 12004783
PROJECT_NAME: Docs Site Localization PRs and Issues
name: Triage to Localization Project
steps:
- name: Triages NEW pull requests to the Localization Project
if: |
Expand All @@ -40,4 +40,3 @@ jobs:
PR_ID=${{ github.event.pull_request.id }}
COLUMN=$(gh api -H "$HEADER" projects/$PROJECT_ID/columns --jq ".[] | select(.name == \"$COLUMN_NAME\").id")
gh api -H "$HEADER" -X POST projects/columns/$COLUMN/cards -f content_type='PullRequest' -F content_id=$PR_ID
16 changes: 8 additions & 8 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Release
name: github release

on:
pull_request:
Expand All @@ -12,39 +12,39 @@ env:

jobs:
create-release:
name: Create Github release
name: generate docs-website github release notes (optional)
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
fetch-depth: '0'
- name: Setup python
uses: actions/setup-python@v2

- name: Get last release tag
id: get-last-tag
run: |
echo LAST_RELEASE=$(git describe --tags --abbrev=0) >> $GITHUB_ENV
echo $LAST_RELEASE
- name: Install Python Dependencies
run: |
python -m pip install --upgrade pip
pip install PyGithub
pip install invoke
- name: Create Updates
id: py-get-updates
run: |
python ./scripts/actions/release-notes.py
- name: Create new release tag
id: create-new-tag
run: |
LAST_TAG_NUM=$(echo ${{ env.LAST_RELEASE }} | sed 's/release\-//')
echo NEW_TAG=$(echo "release-$((LAST_TAG_NUM + 1))") >> $GITHUB_ENV
- name: Create release
run: gh release create ${{ env.NEW_TAG }} -t ${{ env.NEW_TAG }} -n "${{ env.RESULT }}"
run: gh release create ${{ env.NEW_TAG }} -t ${{ env.NEW_TAG }} -n "${{ env.RESULT }}"
5 changes: 3 additions & 2 deletions .github/workflows/update-whats-new-ids.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Update whats-new-ids
name: whats-new

on:
pull_request:
Expand All @@ -11,6 +11,7 @@ env:

jobs:
update-whats-new-ids:
name: generates in-product announcements (optional)
runs-on: ubuntu-latest
steps:
- name: Checkout repo
Expand All @@ -37,7 +38,7 @@ jobs:
git add ./src/data/whats-new-ids.json
git diff-index --quiet HEAD ./src/data/whats-new-ids.json || git commit -m 'chore(whats-new-ids): updated ids'
echo "::set-output name=commit::true"
- name: Temporarily disable branch protection
id: disable-branch-protection
uses: actions/github-script@v1
Expand Down
8 changes: 4 additions & 4 deletions .github/workflows/validate-pr.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Validate Pull Request
name: validate pull request

on:
pull_request:
Expand All @@ -12,7 +12,7 @@ env:

jobs:
build:
name: Gatsby Build
name: build the docs site
runs-on: ubuntu-latest
steps:
- name: Checkout repository
Expand Down Expand Up @@ -51,7 +51,7 @@ jobs:
CI: true

test:
name: Run Tests
name: run tests
runs-on: ubuntu-latest
steps:
- name: Checkout repository
Expand All @@ -77,7 +77,7 @@ jobs:
run: yarn test --passWithNoTests

lint:
name: Run Eslint
name: run linter
runs-on: ubuntu-latest
steps:
- name: Checkout repository
Expand Down
50 changes: 50 additions & 0 deletions scripts/actions/__tests__/fetch-and-deserialize.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import fetchAndDeserialize from '../fetch-and-deserialize';
const fse = require('fs-extra');
const vfile = require('vfile');
const { writeSync } = require('to-vfile');

jest.mock('fs-extra');
jest.mock('to-vfile');

describe('writeFilesSync', () => {
  // Three fake translated files: two share fake_path_1, one lives in fake_path_2,
  // so a correct implementation copies the images directory exactly twice.
  const testFiles = [
    vfile({
      contents: 'fake_content',
      path: 'src/i18n/content/jp/docs/fake_path_1/fake_file_1.mdx',
      extname: '.mdx',
    }),
    vfile({
      contents: 'fake_content',
      path: 'src/i18n/content/jp/docs/fake_path_1/fake_file_2.mdx',
      extname: '.mdx',
    }),
    vfile({
      contents: 'fake_content',
      path: 'src/i18n/content/jp/docs/fake_path_2/fake_file_3.mdx',
      extname: '.mdx',
    }),
  ];

  // Run the shared setup in a lifecycle hook rather than directly in the
  // describe body: code in a describe body executes during Jest's collection
  // phase, before any hooks run, which makes the suite order-dependent and
  // breaks under the `clearMocks`/`resetMocks` config options.
  beforeAll(() => {
    fse.existsSync.mockClear();
    fse.existsSync.mockReturnValue(true);
    fetchAndDeserialize.writeFilesSync(testFiles);
  });

  it('should call writeSync for each file', () => {
    expect(writeSync).toHaveBeenCalledTimes(testFiles.length);
  });

  it('should call copySync for each unique directory path', () => {
    expect(fse.copySync).toHaveBeenCalledTimes(2);
    expect(fse.copySync).toHaveBeenNthCalledWith(1, 'src/content/docs/fake_path_1/images', 'src/i18n/content/jp/docs/fake_path_1/images', { overwrite: true });
    expect(fse.copySync).toHaveBeenNthCalledWith(2, 'src/content/docs/fake_path_2/images', 'src/i18n/content/jp/docs/fake_path_2/images', { overwrite: true });
  });

  it('should not copy directories that dont have images', () => {
    // Re-run with existsSync reporting no images directory; no copies expected.
    fse.existsSync.mockClear();
    fse.existsSync.mockReturnValue(false);
    fse.copySync.mockClear();
    fetchAndDeserialize.writeFilesSync(testFiles);

    expect(fse.copySync).toHaveBeenCalledTimes(0);
  });
});
2 changes: 1 addition & 1 deletion scripts/actions/check-job-progress.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
const loadFromDB = require('./utils/load-from-db');

const { getAccessToken, vendorRequest } = require('./utils/vendor-request');
const fetchAndDeserialize = require('./fetch-and-deserialize');
const { fetchAndDeserialize } = require('./fetch-and-deserialize');

const PROJECT_ID = process.env.TRANSLATION_VENDOR_PROJECT;

Expand Down
47 changes: 43 additions & 4 deletions scripts/actions/fetch-and-deserialize.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
const AdmZip = require('adm-zip');
const vfile = require('vfile');
const { writeSync } = require('to-vfile');
const path = require('path');
const fse = require('fs-extra');

const fetch = require('node-fetch');

Expand All @@ -13,6 +15,42 @@ const localesMap = {

const projectId = process.env.TRANSLATION_VENDOR_PROJECT;

/**
 * Writes translated content to the 'src/i18n/content' path and mirrors the
 * images directory of each translated file from the English source tree.
 * @param {vfile.VFile[]} vfiles
 */
const writeFilesSync = (vfiles) => {
  // Image directories (keyed by their path under /docs/) already mirrored,
  // so sibling files in the same directory don't trigger duplicate copies.
  const syncedImageDirs = new Set();

  for (const file of vfiles) {
    writeSync(file, 'utf-8');

    // Path of the file relative to the locale root, e.g. '/docs/.../file.mdx'.
    // NOTE(review): assumes every path contains '/docs/' — confirm upstream.
    const docsRelativePath = file.path.substring(file.path.indexOf('/docs/'));
    const imageDirectory = `${path.dirname(docsRelativePath)}/images`;
    const sourceImages = path.join('src/content/', imageDirectory);

    // Skip when this directory was already handled for an earlier file, or
    // when the English source has no images directory to copy from.
    if (syncedImageDirs.has(imageDirectory) || !fse.existsSync(sourceImages)) {
      continue;
    }

    // mirror 'src/content/docs/.../images' into 'src/i18n/content/.../docs/.../images'
    fse.copySync(sourceImages, path.join(path.dirname(file.path), '/images'), {
      overwrite: true,
    });
    syncedImageDirs.add(imageDirectory);
  }
};

const fetchTranslatedFilesZip = async (fileUris, locale, accessToken) => {
const fileUriStr = fileUris.reduce((str, uri) => {
return str.concat(`&fileUris[]=${encodeURIComponent(uri)}`);
Expand Down Expand Up @@ -69,10 +107,11 @@ const fetchAndDeserialize = (accessToken) => async ({ locale, fileUris }) => {
);

createDirectories(files);

files.forEach((file) => writeSync(file, 'utf-8'));
writeFilesSync(files);
};

fetchAndDeserialize();
if (process.env.CI) {
fetchAndDeserialize();
}

module.exports = fetchAndDeserialize;
module.exports = { writeFilesSync, fetchAndDeserialize };
9 changes: 8 additions & 1 deletion scripts/actions/utils/handlers.js
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,18 @@ module.exports = {
frontmatter: {
deserialize: (h, node) => {
const data = deserializeJSValue(node.properties.dataValue);
const frontMatterAtt = node.children.reduce((acc, child) => {
const key = child.properties.dataKey;
const value = child.children[0].value;
return { ...acc, [key]: value };
}, {});

return h(
node,
'yaml',
yaml.safeDump(data, { lineWidth: Infinity }).trim()
yaml
.safeDump({ ...data, ...frontMatterAtt }, { lineWidth: Infinity })
.trim()
);
},
serialize: (h, node) => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@ Starting July 30, 2020, all of our new customers are on a pricing plan that we c

For New Relic One pricing, billing is based on these factors:

* The pricing edition (Standard, Pro, Enterprise). Higher editions give access to more account-related admin features, more support, longer data retention, and other features ([learn more](https://newrelic.com/pricing)).
* The amount of data ingested. 100 GBs per month is free. $0.25 per GB ingested above that.
* The total number of provisioned [full users](/docs/accounts/accounts-billing/new-relic-one-pricing-users/users-roles#user-type) (users with access to everything available at that edition). [Basic users](/docs/accounts/accounts-billing/new-relic-one-pricing-users/users-roles#user-type) are free. Standard edition includes one full user for free, and a max of five.
* The amount of data [ingested](/docs/telemetry-data-platform/get-started/introduction-new-relic-data-ingest-apis-sdks/). 100 GB per month is free. $0.25 per GB ingested above that.
* The number of provisioned [full users](/docs/accounts/accounts-billing/new-relic-one-pricing-users/users-roles#user-type), defined as users with access to [Full Stack Observability features](/docs/accounts/accounts-billing/new-relic-one-user-management/new-relic-one-user-model-understand-user-structure/#user-capabilities). [Basic users](/docs/accounts/accounts-billing/new-relic-one-pricing-users/users-roles#user-type) are free.
* The cost of each full user depends on your edition: [Standard, Pro, or Enterprise](https://newrelic.com/pricing). Standard edition includes one full user for free, and a max of five. Pro and Enterprise give access to more account and user management features, more support, longer data retention, and [other features](https://newrelic.com/pricing).
* For [Applied Intelligence](https://newrelic.com/pricing), our intelligent alert/detection system: the number of incident events above the free 1000 per month. (Note that our alerting functionality is available for free and doesn't count towards this limit.)

For a summary of what's included for free, see [Free edition](#free).
Expand Down

0 comments on commit 1ee9f8b

Please sign in to comment.