Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
\.DS_Store
scripts/node_modules
scripts/learningpath
scripts/.env
7 changes: 6 additions & 1 deletion scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,11 @@ node substitute_article_urls.js isc

A node script to generate markdown files required for hosting Learning Path on innersourcecommons.org.

This script requires a [GitHub access token](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token), as it uses the GitHub API to get Learning Path contributors. Your token does not require any scopes, as the Learning Path is Open Source. To provide this, create a `.env` file in this directory in the following format:
```
TOKEN=<your_github_token>
```

### Usage:
```
npm ci
Expand All @@ -36,7 +41,7 @@ If so, then do both of the following:
* update the ["sections" config](https://github.com/InnerSourceCommons/InnerSourceLearningPath/blob/master/scripts/generate_learning_path_markdown.js#L37) with the language code of the articles for the appropriate section.
Open a pull request for the change.
* update the [Learning Path landing page](https://github.com/InnerSourceCommons/innersourcecommons.org/blob/master/resources/learningpath/index.md) with a link to your new language pages.

3. Run **generate_learning_path_markdown.js** as described above.
3. `cp -r learningpath/* <path-to-innersourcecommons.org-repo>/resources/learningpath/`.
3. Open a pull request with the modified files in the [InnerSourceCommons/innersourcecommons.org](https://github.com/InnerSourceCommons/innersourcecommons.org) repo.
Expand Down
217 changes: 112 additions & 105 deletions scripts/generate_learning_path_markdown.js
Original file line number Diff line number Diff line change
@@ -1,120 +1,127 @@
const fs = require('fs')
const YAML = require('yaml')
const { EOL } = require('os')
const { join } = require('path')
(async() => {
const fs = require('fs')
const YAML = require('yaml')
const { EOL } = require('os')
const { join } = require('path')
const getContributors = require('./get_contributors')

const mkdirSync = (dir) => {
try {
fs.mkdirSync(dir)
} catch (e) {
if (e.code !== 'EEXIST') {
console.log(e)
// Create `dir` if it does not already exist, logging (not throwing) on
// failure so one bad directory does not abort the whole generation run.
// `recursive: true` makes the call idempotent — no error when the directory
// already exists — and also creates missing parent directories.
const mkdirSync = (dir) => {
  try {
    fs.mkdirSync(dir, { recursive: true })
  } catch (e) {
    console.log(e)
  }
}
}

// Collect the numbered article files from `path` (those with two consecutive
// digits somewhere in the path, e.g. 01-xxx.asciidoc), skipping the
// '-script.asciidoc' video-script companions.
// Returns an array of { filePath, asciiDoc } with the file contents loaded.
// NOTE: the original had `${path}/$(unknown)` — a scrape artifact; `filename`
// is the reduce parameter and is clearly the intended interpolation.
const getArticleFiles = (path) => {
  return fs.readdirSync(path).reduce((articles, filename) => {
    const filePath = `${path}/${filename}`
    if (filePath.match(/\d\d/) && !filePath.includes('-script.asciidoc')) {
      return [...articles, {
        filePath,
        asciiDoc: fs.readFileSync(filePath, 'utf-8')
      }]
    } else {
      return articles
    }
  }, [])
}
// Collect the numbered article files from `path` (those with two consecutive
// digits somewhere in the path, e.g. 01-xxx.asciidoc), skipping the
// '-script.asciidoc' video-script companions.
// Returns an array of { filePath, asciiDoc } with the file contents loaded.
// NOTE: the original had `${path}/$(unknown)` — a scrape artifact; `filename`
// is the reduce parameter and is clearly the intended interpolation.
const getArticleFiles = (path) => {
  return fs.readdirSync(path).reduce((articles, filename) => {
    const filePath = `${path}/${filename}`
    if (filePath.match(/\d\d/) && !filePath.includes('-script.asciidoc')) {
      return [...articles, {
        filePath,
        asciiDoc: fs.readFileSync(filePath, 'utf-8')
      }]
    } else {
      return articles
    }
  }, [])
}

// Write a markdown stub to `filePath`: the front-matter object serialised as
// YAML between '---' fences, followed by a provenance comment pointing back
// at this generator script. The stub has no body content of its own.
const writeMarkdownFile = (filePath, frontMatter) => {
const frontMatterTerminator = '---'
const originStatement = '<!--- This file autogenerated from https://github.com/InnerSourceCommons/InnerSourceLearningPath/blob/master/scripts/generate_learning_path_markdown.js -->'
const output = [frontMatterTerminator, YAML.stringify(frontMatter).trim(), frontMatterTerminator, originStatement].join(EOL)
fs.writeFileSync(filePath, output)
}
// Write a markdown stub to `filePath`: the front matter serialised as YAML
// between '---' fences, then an autogeneration notice. No other body content.
const writeMarkdownFile = (filePath, frontMatter) => {
  const fence = '---'
  const originStatement = '<!--- This file autogenerated from https://github.com/InnerSourceCommons/InnerSourceLearningPath/blob/master/scripts/generate_learning_path_markdown.js -->'
  const yamlBody = YAML.stringify(frontMatter).trim()
  fs.writeFileSync(filePath, [fence, yamlBody, fence, originStatement].join(EOL))
}

// One entry per Learning Path section. `dirName` names both the source
// directory (read as ../<dirName>) and the output directory
// (./learningpath/<dirName>); `workbook` is the file under ../workbook/;
// `translations` lists language codes with translated articles (the English
// original '' is appended at render time); `renderArticles` controls whether
// the learning_path_article front-matter field is emitted.
const sections = [
{
learning_path_group: 'Introduction',
dirName: 'introduction',
workbook: '01-introduction.asciidoc',
translations: ['de', 'it', 'ja', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Trusted Committer',
dirName: 'trusted-committer',
workbook: '02-trusted-committer.asciidoc',
translations: ['de', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Contributor',
dirName: 'contributor',
workbook: '04-contributor.asciidoc',
translations: ['ja', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Product Owner',
dirName: 'product-owner',
workbook: '03-product-owner.asciidoc',
translations: ['zh'],
renderArticles: true
},
]
// One entry per Learning Path section. `dirName` names both the source
// directory (read as ../<dirName>) and the output directory
// (./learningpath/<dirName>); `workbook` is the file under ../workbook/;
// `translations` lists language codes with translated articles (the English
// original '' is appended at render time); `renderArticles` controls whether
// the learning_path_article front-matter field is emitted.
const sections = [
{
learning_path_group: 'Introduction',
dirName: 'introduction',
workbook: '01-introduction.asciidoc',
translations: ['de', 'it', 'ja', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Trusted Committer',
dirName: 'trusted-committer',
workbook: '02-trusted-committer.asciidoc',
translations: ['de', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Contributor',
dirName: 'contributor',
workbook: '04-contributor.asciidoc',
translations: ['ja', 'zh'],
renderArticles: true
},
{
learning_path_group: 'Product Owner',
dirName: 'product-owner',
workbook: '03-product-owner.asciidoc',
translations: ['zh'],
renderArticles: true
},
]

mkdirSync('./learningpath')

sections.forEach(({ learning_path_group, dirName, workbook, translations, renderArticles }) => {
const baseReadPath = `../${dirName}`
const baseWritePath = `./learningpath/${dirName}`
mkdirSync(baseWritePath)
sections.forEach(({ learning_path_group, dirName, workbook, translations, renderArticles }) => {
const baseReadPath = `../${dirName}`
const baseWritePath = `./learningpath/${dirName}`
mkdirSync(baseWritePath)

translations.concat('' /* The English original */).forEach((translation) => {
const isTranslation = translation !== ''
const writePath = join(baseWritePath, translation)
mkdirSync(writePath)
translations.concat('' /* The English original */).forEach(async (translation) => {
const isTranslation = translation !== ''
const writePath = join(baseWritePath, translation)
mkdirSync(writePath)

const readPath = join(baseReadPath, translation)
const articles = getArticleFiles(readPath)
articles.forEach((article) => {
const articleTitle = article.asciiDoc.match(/== (.*)/)[1]
const articleNumber = article.filePath.split('/').pop().split('-')[0]
const fileName = articleNumber === '01' ? `${writePath}/index.md` : `${writePath}/${articleNumber}.md`
const frontMatter = {
layout: 'learning-path-page',
show_meta: false,
title: `Learning Path - ${learning_path_group} - ${articleTitle}`,
learning_path_article: renderArticles ? article.filePath.replace('../', '') : undefined,
learning_path_group,
learning_path_menu_title: `${articleNumber} - ${articleTitle}`,
learning_path_position: parseInt(articleNumber),
learning_path_translation: translation,
no_video: isTranslation // Videos not available translated.
}
const readPath = join(baseReadPath, translation)
const articles = getArticleFiles(readPath)
articles.forEach(async (article) => {
const articleTitle = article.asciiDoc.match(/== (.*)/)[1]
const articleNumber = article.filePath.split('/').pop().split('-')[0]
const fileName = articleNumber === '01' ? `${writePath}/index.md` : `${writePath}/${articleNumber}.md`
const contributors = await getContributors(article.filePath.replace('../', ''))
const frontMatter = {
layout: 'learning-path-page',
show_meta: false,
title: `Learning Path - ${learning_path_group} - ${articleTitle}`,
learning_path_article: renderArticles ? article.filePath.replace('../', '') : undefined,
learning_path_group,
learning_path_menu_title: `${articleNumber} - ${articleTitle}`,
learning_path_position: parseInt(articleNumber),
learning_path_translation: translation,
no_video: isTranslation, // Videos not available translated.
contributors
}

writeMarkdownFile(fileName, frontMatter)
})
writeMarkdownFile(fileName, frontMatter)
})

// Workbooks not translated.
if (!isTranslation) {
const workbookFileName = `${writePath}/workbook.md`
console.log('workbookFileName', workbookFileName)
const workbookFrontMatter = {
layout: 'learning-path-page',
show_meta: false,
title: `Learning Path - ${learning_path_group} - Workbook`,
learning_path_article: `workbook/${workbook}`,
learning_path_group,
learning_path_menu_title: `${learning_path_group} Workbook`,
learning_path_position: articles.length - articles.filter(Array.isArray).length + 1,
learning_path_translation: translation,
no_video: true
}
// Workbooks not translated.
if (!isTranslation) {
const workbookFileName = `${writePath}/workbook.md`
const contributors = await getContributors(`workbook/${workbook}`)
console.log('workbookFileName', workbookFileName)
const workbookFrontMatter = {
layout: 'learning-path-page',
show_meta: false,
title: `Learning Path - ${learning_path_group} - Workbook`,
learning_path_article: `workbook/${workbook}`,
learning_path_group,
learning_path_menu_title: `${learning_path_group} Workbook`,
learning_path_position: articles.length - articles.filter(Array.isArray).length + 1,
learning_path_translation: translation,
no_video: true,
contributors
}

writeMarkdownFile(workbookFileName, workbookFrontMatter)
}
writeMarkdownFile(workbookFileName, workbookFrontMatter)
}
})
})
})
})()
48 changes: 48 additions & 0 deletions scripts/get_contributors.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
require('dotenv').config()
const { graphql } = require("@octokit/graphql")

const graphqlWithAuth = graphql.defaults({
headers: {
authorization: `token ${process.env.TOKEN}`
}
})

module.exports = async function (filepath) {
const contributors = await graphqlWithAuth(
`{
repository(owner: "InnerSourceCommons", name: "InnerSourceLearningPath") {
object(expression: "master") {
... on Commit {
history(first: 100, path: "${filepath}") {
totalCount
nodes {
author {
name
user {
name
url
}
}
}
}
}
}
}
}`
)

const history = contributors.repository.object.history

if (history.totalCount > 100) {
throw Error('This script needs updating to handle >100 commits')
}

return Object.values(history.nodes.reduce((acc, { author }) => {
const name = (author.user && author.user.name) || author.name
acc[name] = {
name,
url: author.user && author.user.url
}
return acc
}, {}))
}
Loading