From 3dcbf8a6cc26392ec013f5794d1fb123bc4f2235 Mon Sep 17 00:00:00 2001 From: Sarah Schneider Date: Tue, 21 Oct 2025 10:21:03 -0400 Subject: [PATCH 1/4] Simplify the ai-tools CLI (#58066) --- package.json | 2 +- .../prompts/versioning-editor.prompt.yml | 31 --- src/ai-editors/scripts/ai-edit.ts | 195 ------------------ src/{ai-editors => ai-tools}/README.md | 21 +- .../lib/call-models-api.ts | 0 src/ai-tools/prompts/prompt-template.yml | 9 + src/ai-tools/prompts/versioning.md | 35 ++++ src/ai-tools/scripts/ai-tools.ts | 139 +++++++++++++ 8 files changed, 198 insertions(+), 234 deletions(-) delete mode 100644 src/ai-editors/prompts/versioning-editor.prompt.yml delete mode 100644 src/ai-editors/scripts/ai-edit.ts rename src/{ai-editors => ai-tools}/README.md (52%) rename src/{ai-editors => ai-tools}/lib/call-models-api.ts (100%) create mode 100644 src/ai-tools/prompts/prompt-template.yml create mode 100644 src/ai-tools/prompts/versioning.md create mode 100644 src/ai-tools/scripts/ai-tools.ts diff --git a/package.json b/package.json index 80ebbadd8de9..dab4d80be5b8 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "exports": "./src/frame/server.ts", "scripts": { "add-content-type": "tsx src/content-render/scripts/add-content-type.ts", - "ai-edit": "tsx src/ai-editors/scripts/ai-edit.ts", + "ai-tools": "tsx src/ai-tools/scripts/ai-tools.ts", "all-documents": "tsx src/content-render/scripts/all-documents/cli.ts", "analyze-text": "tsx src/search/scripts/analyze-text.ts", "analyze-comment": "tsx src/events/scripts/analyze-comment-cli.ts", diff --git a/src/ai-editors/prompts/versioning-editor.prompt.yml b/src/ai-editors/prompts/versioning-editor.prompt.yml deleted file mode 100644 index 2c1a2c0f384c..000000000000 --- a/src/ai-editors/prompts/versioning-editor.prompt.yml +++ /dev/null @@ -1,31 +0,0 @@ -messages: - - role: system - content: >- - Your task is to remove the conditional markup from content files that - looks like {% ifversion fpt or ghec %}Foo{% endif %}. You need to first try - to write the content without any versioning at all, so it still makes sense - to customers without causing confusion. If you need to explain versioning - differences, do so using prose. Here are the prose guidelines to follow: - * For versioning at the article level: - - When the feature is only available in certain products, use the "Who can - use this feature?" box to convey the content of this article applies only - to XYZ products. - - When an article only exists before the functionality is in older versions - of GHES (and not dotcom and newer versions of GHES), just remove that article. - (This is akin to declining to document a feature.) - * For versioning at the heading level: - - Use prose similar to the "Who can use this feature?" to convey that the - content of this section applies only to XYZ products. - * For versioning the paragraph or sentence level: - - Use one of the following content strategies: - - If you're briefly introducing a feature and then linking to an article, - there's no need to specify versioning. Let folks learn availability when - they follow the link, via the "Who can use this feature?" box. - - When necessary, start sentences with "With GitHub Enterprise Cloud...", - "On GitHub.com", "With GitHub Enterprise Server 3.15+..." etc. - - End list items with "(GitHub Enterprise Cloud only)", "(GitHub.com only)", etc. - - role: user - content: >- - Review this content according to the new prose versioning guidelines. 
{{responseTypeInstruction}} - {{input}} -model: openai/gpt-4.1-mini diff --git a/src/ai-editors/scripts/ai-edit.ts b/src/ai-editors/scripts/ai-edit.ts deleted file mode 100644 index 7af63bcc35df..000000000000 --- a/src/ai-editors/scripts/ai-edit.ts +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env node - -import { fileURLToPath } from 'url' -import { Command } from 'commander' -import fs from 'fs' -import yaml from 'js-yaml' -import path from 'path' -import ora from 'ora' -import { callModelsApi } from '@/ai-editors/lib/call-models-api' -import dotenv from 'dotenv' -dotenv.config({ quiet: true }) - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const promptDir = path.join(__dirname, '../prompts') - -if (!process.env.GITHUB_TOKEN) { - throw new Error('Error! You must have a GITHUB_TOKEN set in an .env file to run this script.') -} - -interface ResponseTypes { - rewrite: string - list: string - json: string -} - -const responseTypes: ResponseTypes = { - rewrite: 'Edit the versioning only. Return the edited content.', - list: `Do NOT rewrite the content. Report your edits in numbered list format.`, - json: `Do NOT rewrite the content. Report your edits as a JSON list, with the format { lineNumber, currentText, suggestion }.`, -} - -const validResponseTypes = Object.keys(responseTypes) as Array - -interface EditorType { - promptFile: string - description: string -} - -interface EditorTypes { - versioning: EditorType - // TODO - // scannability: EditorType - // readability: EditorType - // technical: EditorType - // styleguide: EditorType - // contentModels: EditorType -} - -const editorTypes: EditorTypes = { - versioning: { - promptFile: 'versioning-editor.prompt.yml', - description: 'Review against simplifying versioning guidelines.', - }, - // TODO - // scannability: { - // promptFile: 'scannability-editor.prompt.yml', - // description: 'Review against scannability guidelines.', - // }, - // readability: { - // promptFile: 'readability-editor.prompt.yml', - // description: - // 'Review against readability criteria like Gunning Fog index, Hemingway, word count, sentence length, etc.', - // }, - // technical: { - // promptFile: 'technical-editor.prompt.yml', - // description: 'Review against provided product information for technical accuracy.', - // }, - // styleguide: { - // promptFile: 'styleguide-editor.prompt.yml', - // description: 'Review against the GitHub Docs style guide.', - // }, - // contentModels: { - // promptFile: 'content-models-editor.prompt.yml', - // description: 'Review against the GitHub Docs content models.', - // }, - // Add more here... 
-} - -const editorDescriptions = (): string => { - let str = '\n\n' - Object.entries(editorTypes).forEach(([ed, edObj]) => { - str += `\t${ed}\n\t\t\t${edObj.description}\n\n` - }) - return str -} - -interface CliOptions { - verbose?: boolean - editor?: Array - response?: keyof ResponseTypes - files: string[] -} - -const program = new Command() - -program - .name('ai-edit') - .description('Edit content files using AI') - .option('-v, --verbose', 'Enable verbose output') - .option( - '-e, --editor ', - `Specify one or more editor type: ${editorDescriptions().trimEnd()}\n`, - ) - .option( - '-r, --response ', - `Specify response type: ${validResponseTypes.join(', ')} (default: rewrite)`, - ) - .requiredOption( - '-f, --files ', - 'One or more content file paths in the content directory', - ) - .action((options: CliOptions) => { - ;(async () => { - const spinner = ora('Starting AI review...').start() - - const files = options.files - const editors = options.editor || ['versioning'] - const response = options.response || 'rewrite' - - let responseTypeInstruction: string - if (validResponseTypes.includes(response)) { - responseTypeInstruction = responseTypes[response] - } else { - console.error( - `Invalid response type: ${response}. Must be one of: ${validResponseTypes.join(', ')}`, - ) - process.exit(1) - } - - for (const file of files) { - const filePath = path.resolve(process.cwd(), file) - spinner.text = `Checking file: ${file}` - - if (!fs.existsSync(filePath)) { - spinner.fail(`File not found: ${filePath}`) - process.exitCode = 1 - continue - } - - try { - spinner.text = `Reading file: ${file}` - const content = fs.readFileSync(filePath, 'utf8') - - for (const editorType of editors) { - spinner.text = `Running the AI-powered ${editorType} editor...` - const answer = await callEditor(editorType, responseTypeInstruction, content) - - if (response === 'rewrite') { - fs.writeFileSync(file, answer, 'utf-8') - spinner.succeed(`Processed file: ${file}`) - console.log(`To see changes, run "git diff" on the file.`) - } else { - spinner.succeed(`Processed file: ${file}`) - console.log(answer) - } - } - } catch (err) { - const error = err as Error - spinner.fail(`Error processing file ${file}: ${error.message}`) - process.exitCode = 1 - } - } - })() - }) - -program.parse(process.argv) - -interface PromptMessage { - content: string - role: string -} - -interface PromptData { - messages: PromptMessage[] - model?: string - temperature?: number - max_tokens?: number -} - -async function callEditor( - editorType: keyof EditorTypes, - responseTypeInstruction: string, - content: string, -): Promise { - const promptName = editorTypes[editorType].promptFile - const promptPath = path.join(promptDir, promptName) - const prompt = yaml.load(fs.readFileSync(promptPath, 'utf8')) as PromptData - - prompt.messages.forEach((msg) => { - msg.content = msg.content.replace('{{responseTypeInstruction}}', responseTypeInstruction) - msg.content = msg.content.replace('{{input}}', content) - }) - - return callModelsApi(prompt) -} diff --git a/src/ai-editors/README.md b/src/ai-tools/README.md similarity index 52% rename from src/ai-editors/README.md rename to src/ai-tools/README.md index eff9519842a8..f5382845b7f3 100644 --- a/src/ai-editors/README.md +++ b/src/ai-tools/README.md @@ -1,25 +1,32 @@ -# AI-powered editors +# AI-powered tools A CLI tool for using AI to edit documentation according to defined prompts. -This tool runs an AI review of content files based on an (extensible) set of prompt-driven guidelines. 
The default is versioning. In the future we might add: scannability, readability, style, technical accuracy. +This tool refines content files using AI based on an (extensible) set of prompt-driven guidelines. The default is versioning refinement. In the future we might add: scannability, readability, style, technical accuracy. This script calls the [Models API](https://docs.github.com/en/rest/models/inference?apiVersion=2022-11-28#run-an-inference-request). It requires a personal access token with Models scopes in your `.env` file. ## Usage ```sh -tsx src/ai-editors/scripts/ai-edit.js --editor --response --files +# Direct command +tsx src/ai-tools/scripts/ai-tools.ts --refine --files + +# Or via npm script +npm run ai-tools -- --refine --files ``` * `--files, -f`: One or more content file paths to process (required). -* `--response, -r`: Specify the AI response format. Options: `rewrite` (default), `list`, `json`. -* `--editor, -e`: Specify one or more editor types (default: `versioning`). +* `--refine, -r`: Specify one or more refinement types (default: `versioning`). -**Example:** +**Examples:** ```sh -tsx src/ai-editors/scripts/ai-edit.js --files content/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo.md --editor versioning --response list +# Direct command +tsx src/ai-tools/scripts/ai-tools.ts --files content/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo.md --refine versioning + +# Via npm script +npm run ai-tools -- --files content/copilot/tutorials/coding-agent/get-the-best-results.md --refine intro ``` ## Requirements diff --git a/src/ai-editors/lib/call-models-api.ts b/src/ai-tools/lib/call-models-api.ts similarity index 100% rename from src/ai-editors/lib/call-models-api.ts rename to src/ai-tools/lib/call-models-api.ts diff --git a/src/ai-tools/prompts/prompt-template.yml b/src/ai-tools/prompts/prompt-template.yml new file mode 100644 index 000000000000..dab8d13adfa9 --- /dev/null +++ b/src/ai-tools/prompts/prompt-template.yml @@ -0,0 +1,9 @@ +messages: + - role: system + content: >- + {{markdownPrompt}} + - role: user + content: >- + Review this content file according to the provided system prompt. + {{input}} +model: openai/gpt-5 diff --git a/src/ai-tools/prompts/versioning.md b/src/ai-tools/prompts/versioning.md new file mode 100644 index 000000000000..a17720bcd304 --- /dev/null +++ b/src/ai-tools/prompts/versioning.md @@ -0,0 +1,35 @@ +Your task is to remove the conditional markup from content files that looks like {% ifversion fpt or ghec %}Foo{% endif %}. + +You need to first try to write the content without any versioning at all, so it still makes sense to customers without causing confusion. If you need to explain versioning differences, do so using prose. + +Here are the prose guidelines to follow: + +* For versioning at the article level: + - When the feature is only available in certain products, use the "Who can + use this feature?" box to convey the content of this article applies only + to XYZ products. + - When an article only exists before the functionality is in older versions + of GHES (and not dotcom and newer versions of GHES), just remove that article. + (This is akin to declining to document a feature.) + +* For versioning at the heading level: + - Use prose similar to the "Who can use this feature?" to convey that the + content of this section applies only to XYZ products. 
+ +* For versioning the paragraph or sentence level: + - Use one of the following content strategies: + - If you're briefly introducing a feature and then linking to an article, + there's no need to specify versioning. Let folks learn availability when + they follow the link, via the "Who can use this feature?" box. + - When necessary, start sentences with "With GitHub Enterprise Cloud...", + "On GitHub.com", "With GitHub Enterprise Server 3.15+..." etc. + - End list items with "(GitHub Enterprise Cloud only)", "(GitHub.com only)", etc. + +Review this content according to the versioning guideline above. Edit the versioning only. + +**IMPORTANT OUTPUT FORMAT:** +- Return the complete, updated file content (including frontmatter) +- Make ONLY the versioning changes specified above +- Do NOT add explanatory text before or after the content +- Do NOT wrap the output in markdown code blocks +- Output should be ready to write directly to the original file diff --git a/src/ai-tools/scripts/ai-tools.ts b/src/ai-tools/scripts/ai-tools.ts new file mode 100644 index 000000000000..3debdcdf47cd --- /dev/null +++ b/src/ai-tools/scripts/ai-tools.ts @@ -0,0 +1,139 @@ +#!/usr/bin/env node + +import { fileURLToPath } from 'url' +import { Command } from 'commander' +import fs from 'fs' +import yaml from 'js-yaml' +import path from 'path' +import ora from 'ora' +import { callModelsApi } from '@/ai-tools/lib/call-models-api' +import dotenv from 'dotenv' +dotenv.config({ quiet: true }) + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +const promptDir = path.join(__dirname, '../prompts') +const promptTemplatePath = path.join(promptDir, 'prompt-template.yml') + +if (!process.env.GITHUB_TOKEN) { + throw new Error('Error! You must have a GITHUB_TOKEN set in an .env file to run this script.') +} + +interface EditorType { + description: string +} + +interface EditorTypes { + versioning: EditorType +} + +const editorTypes: EditorTypes = { + versioning: { + description: 'Refine versioning according to simplification guidance.', + }, +} + +const refinementDescriptions = (): string => { + let str = '\n\n' + Object.entries(editorTypes).forEach(([ed, edObj]) => { + str += ` ${ed.padEnd(12)} ${edObj.description}\n` + }) + return str +} + +interface CliOptions { + verbose?: boolean + refine: Array + files: string[] + write?: boolean +} + +const program = new Command() + +program + .name('ai-tools') + .description('AI-powered content tools for editing and analysis') + .option('-v, --verbose', 'Enable verbose output') + .option( + '-w, --write', + 'Write changes back to the original files (default: output to console only)', + ) + .requiredOption( + '-r, --refine ', + `Specify one or more refinement type: ${refinementDescriptions().trimEnd()}\n`, + ) + .requiredOption( + '-f, --files ', + 'One or more content file paths in the content directory', + ) + .action((options: CliOptions) => { + ;(async () => { + const spinner = ora('Starting AI review...').start() + + const files = options.files + const editors = options.refine + + for (const file of files) { + const filePath = path.resolve(process.cwd(), file) + spinner.text = `Checking file: ${file}` + + if (!fs.existsSync(filePath)) { + spinner.fail(`File not found: ${filePath}`) + process.exitCode = 1 + continue + } + + try { + spinner.text = `Reading file: ${file}` + const content = fs.readFileSync(filePath, 'utf8') + + for (const editorType of editors) { + spinner.text = `Running the AI-powered ${editorType} refinement...` + const answer = await 
callEditor(editorType, content) + spinner.stop() + + if (options.write) { + // Write the result back to the original file + fs.writeFileSync(filePath, answer, 'utf8') + console.log(`✅ Updated: ${file}`) + } else { + // Just output to console (current behavior) + console.log(answer) + } + } + } catch (err) { + const error = err as Error + spinner.fail(`Error processing file ${file}: ${error.message}`) + process.exitCode = 1 + } + } + + spinner.stop() + })() + }) + +program.parse(process.argv) + +interface PromptMessage { + content: string + role: string +} + +interface PromptData { + messages: PromptMessage[] + model?: string + temperature?: number + max_tokens?: number +} + +async function callEditor(editorType: keyof EditorTypes, content: string): Promise { + const markdownPromptPath = path.join(promptDir, `${editorType}.md`) + const markdownPrompt = fs.readFileSync(markdownPromptPath, 'utf8') + const prompt = yaml.load(fs.readFileSync(promptTemplatePath, 'utf8')) as PromptData + + prompt.messages.forEach((msg) => { + msg.content = msg.content.replace('{{markdownPrompt}}', markdownPrompt) + msg.content = msg.content.replace('{{input}}', content) + }) + + return callModelsApi(prompt) +} From cd5ca152ad2175a85f6dd53f53ff7d8cc50c778f Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Tue, 21 Oct 2025 07:49:26 -0700 Subject: [PATCH 2/4] Convert sync.js to TypeScript (#58009) --- src/github-apps/scripts/{sync.js => sync.ts} | 253 ++++++++++++++----- 1 file changed, 196 insertions(+), 57 deletions(-) rename src/github-apps/scripts/{sync.js => sync.ts} (72%) diff --git a/src/github-apps/scripts/sync.js b/src/github-apps/scripts/sync.ts similarity index 72% rename from src/github-apps/scripts/sync.js rename to src/github-apps/scripts/sync.ts index 088a11d11a56..d52b5092f0ca 100755 --- a/src/github-apps/scripts/sync.js +++ b/src/github-apps/scripts/sync.ts @@ -15,30 +15,124 @@ const ENABLED_APPS_DIR = 'src/github-apps/data' const CONFIG_FILE = 'src/github-apps/lib/config.json' // Actor type mapping from generic names to actual YAML values -export const actorTypeMap = { +export const actorTypeMap: Record = { fine_grained_pat: 'fine_grained_personal_access_token', server_to_server: 'github_app', user_to_server: 'user_access_token', } -// Also need to handle the actual values that come from the source data -// UserProgrammaticAccess maps to fine_grained_pat functionality -const sourceDataActorMap = { - UserProgrammaticAccess: 'fine_grained_pat', +interface AppDataOperation { + slug: string + subcategory: string + verb: string + requestPath: string } -export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAccessSource) { +interface AppDataOperationWithCategory extends AppDataOperation { + category: string +} + +interface PermissionData { + title: string + displayTitle: string + permissions: Array< + AppDataOperationWithCategory & { + access: string + 'user-to-server'?: boolean + 'server-to-server'?: boolean + 'additional-permissions'?: boolean + } + > +} + +interface GitHubAppsData { + [pageType: string]: { + [category: string]: AppDataOperation[] | PermissionData + } +} + +interface ProgAccessOperationData { + userToServerRest: boolean + serverToServer: boolean + fineGrainedPat: boolean + permissions: Array> + allowPermissionlessAccess?: boolean + allowsPublicRead?: boolean + basicAuth?: boolean + disabledForPatV2?: boolean +} + +interface ProgAccessData { + [operationId: string]: ProgAccessOperationData +} + +interface ProgActorResource { + title?: string + resource_group?: string + 
visibility?: string + excluded_actors?: string[] +} + +interface ProgActorResources { + [key: string]: ProgActorResource +} + +interface OpenApiOperation { + operationId: string + summary: string + 'x-github': { + category: string + subcategory: string + } +} + +interface OpenApiData { + paths: { + [path: string]: { + [verb: string]: OpenApiOperation + } + } +} + +interface AppsDataConfig { + pages: { + [pageType: string]: unknown + } +} + +interface ProgAccessRawOperation { + operation_ids: string + user_to_server: { + enabled: boolean + } + server_to_server: { + enabled: boolean + } + disabled_for_patv2?: boolean + permission_sets?: Array> + allows_permissionless_access?: boolean + allows_public_read?: boolean + basic_auth?: boolean +} + +export async function syncGitHubAppsData( + openApiSource: string, + sourceSchemas: string[], + progAccessSource: string, +): Promise { console.log( `Generating GitHub Apps data from ${openApiSource}, ${sourceSchemas} and ${progAccessSource}`, ) const { progAccessData, progActorResources } = await getProgAccessData(progAccessSource) for (const schemaName of sourceSchemas) { - const data = JSON.parse(await readFile(path.join(openApiSource, schemaName), 'utf8')) - const appsDataConfig = JSON.parse(await readFile(CONFIG_FILE, 'utf8')) + const data = JSON.parse( + await readFile(path.join(openApiSource, schemaName), 'utf8'), + ) as OpenApiData + const appsDataConfig = JSON.parse(await readFile(CONFIG_FILE, 'utf8')) as AppsDataConfig // Initialize the data structure with keys for each page type - const githubAppsData = {} + const githubAppsData: GitHubAppsData = {} for (const pageType of Object.keys(appsDataConfig.pages)) { githubAppsData[pageType] = {} } @@ -54,13 +148,16 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces const isFineGrainedPat = isUserAccessToken && !progAccessData[operation.operationId].disabledForPatV2 const { category, subcategory } = operation['x-github'] - const appDataOperation = { + const appDataOperation: AppDataOperation = { slug: slug(operation.summary), subcategory, verb, requestPath, } - const appDataOperationWithCategory = Object.assign({ category }, appDataOperation) + const appDataOperationWithCategory: AppDataOperationWithCategory = Object.assign( + { category }, + appDataOperation, + ) // server-to-server if (isInstallationAccessToken) { addAppData(githubAppsData['server-to-server-rest'], category, appDataOperation) @@ -85,11 +182,6 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces ), ) - // Debug logging for checks-related operations - const hasChecksPermission = progAccessData[operation.operationId].permissions.some( - (permissionSet) => permissionSet.checks, - ) - if (!allPermissionSetsExcluded) { addAppData(githubAppsData['fine-grained-pat'], category, appDataOperation) } @@ -99,9 +191,9 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces for (const permissionSet of progAccessData[operation.operationId].permissions) { for (const [permissionName, readOrWrite] of Object.entries(permissionSet)) { const { title, displayTitle } = getDisplayTitle(permissionName, progActorResources) - if (progActorResources[permissionName]['visibility'] === 'private') continue + if (progActorResources[permissionName]?.['visibility'] === 'private') continue - const excludedActors = progActorResources[permissionName]['excluded_actors'] + const excludedActors = progActorResources[permissionName]?.['excluded_actors'] const additionalPermissions = 
calculateAdditionalPermissions( progAccessData[operation.operationId].permissions, @@ -143,7 +235,9 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces ), 'additional-permissions': additionalPermissions, } - serverToServerPermissions[permissionName].permissions.push( + const permissionsArray = (serverToServerPermissions[permissionName] as PermissionData) + .permissions + permissionsArray.push( Object.assign( {}, appDataOperationWithCategory, @@ -174,7 +268,9 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces } } - findGrainedPatPermissions[permissionName].permissions.push( + const permissionsArray = (findGrainedPatPermissions[permissionName] as PermissionData) + .permissions + permissionsArray.push( Object.assign({}, appDataOperationWithCategory, { 'additional-permissions': additionalPermissions, access: readOrWrite, @@ -214,7 +310,10 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces } } -export async function getProgAccessData(progAccessSource, isRest = false) { +export async function getProgAccessData( + progAccessSource: string, + isRest = false, +): Promise<{ progAccessData: ProgAccessData; progActorResources: ProgActorResources }> { const useRemoteGitHubFiles = progAccessSource === 'rest-api-description' // check for required PAT if (useRemoteGitHubFiles && !process.env.GITHUB_TOKEN) { @@ -223,8 +322,8 @@ export async function getProgAccessData(progAccessSource, isRest = false) { ) } - let progAccessDataRaw - let progActorResources + let progAccessDataRaw: ProgAccessRawOperation[] + let progActorResources: ProgActorResources const progAccessFilepath = 'config/access_control/programmatic_access.yaml' const progActorDirectory = 'config/access_control/fine_grained_permissions/programmatic_actor_fine_grained_resources' @@ -232,14 +331,14 @@ export async function getProgAccessData(progAccessSource, isRest = false) { if (!useRemoteGitHubFiles) { progAccessDataRaw = yaml.load( await readFile(path.join(progAccessSource, progAccessFilepath), 'utf8'), - ) + ) as ProgAccessRawOperation[] progActorResources = await getProgActorResourceContent({ gitHubSourceDirectory: path.join(progAccessSource, progActorDirectory), }) } else { progAccessDataRaw = yaml.load( await getContents('github', 'github', 'master', progAccessFilepath), - ) + ) as ProgAccessRawOperation[] progActorResources = await getProgActorResourceContent({ owner: 'github', repo: 'github', @@ -248,9 +347,9 @@ export async function getProgAccessData(progAccessSource, isRest = false) { }) } - const progAccessData = {} + const progAccessData: ProgAccessData = {} for (const operation of progAccessDataRaw) { - const operationData = { + const operationData: ProgAccessOperationData = { userToServerRest: operation.user_to_server.enabled, serverToServer: operation.server_to_server.enabled, fineGrainedPat: operation.user_to_server.enabled && !operation.disabled_for_patv2, @@ -260,6 +359,7 @@ export async function getProgAccessData(progAccessSource, isRest = false) { allowPermissionlessAccess: operation.allows_permissionless_access, allowsPublicRead: operation.allows_public_read, basicAuth: operation.basic_auth, + disabledForPatV2: operation.disabled_for_patv2, } // Handle comma-separated operation IDs @@ -272,9 +372,12 @@ export async function getProgAccessData(progAccessSource, isRest = false) { return { progAccessData, progActorResources } } -function getDisplayPermissions(permissionSets, progActorResources) { +function getDisplayPermissions( + 
permissionSets: Array>, + progActorResources: ProgActorResources, +): Array> { const displayPermissions = permissionSets.map((permissionSet) => { - const displayPermissionSet = {} + const displayPermissionSet: Record = {} Object.entries(permissionSet).forEach(([key, value]) => { const { displayTitle } = getDisplayTitle(key, progActorResources, true) displayPermissionSet[displayTitle] = value @@ -286,33 +389,45 @@ function getDisplayPermissions(permissionSets, progActorResources) { return displayPermissions } -function sortObjectByKeys(obj) { +function sortObjectByKeys(obj: Record): Record { return Object.keys(obj) .sort() - .reduce((acc, key) => { - acc[key] = obj[key] - return acc - }, {}) + .reduce( + (acc, key) => { + acc[key] = obj[key] + return acc + }, + {} as Record, + ) } -function sortObjectByTitle(obj) { +function sortObjectByTitle(obj: Record): Record { return Object.keys(obj) .sort((a, b) => { - if (obj[a].displayTitle > obj[b].displayTitle) { + const aData = obj[a] as PermissionData + const bData = obj[b] as PermissionData + if (aData.displayTitle > bData.displayTitle) { return 1 } - if (obj[a].displayTitle < obj[b].displayTitle) { + if (aData.displayTitle < bData.displayTitle) { return -1 } return 0 }) - .reduce((acc, key) => { - acc[key] = obj[key] - return acc - }, {}) + .reduce( + (acc, key) => { + acc[key] = obj[key] + return acc + }, + {} as Record, + ) } -function getDisplayTitle(permissionName, progActorResources, isRest = false) { +function getDisplayTitle( + permissionName: string, + progActorResources: ProgActorResources, + isRest = false, +): { title: string; displayTitle: string } { const tempTitle = permissionName.replace(/_/g, ' ') const permissionNameExists = progActorResources[permissionName] if (!permissionNameExists) { @@ -328,7 +443,7 @@ function getDisplayTitle(permissionName, progActorResources, isRest = false) { if (!title) { console.warn(`No title found for title ${title} resource group ${resourceGroup}`) - return '' + return { title: '', displayTitle: '' } } const displayTitle = isRest @@ -342,14 +457,16 @@ function getDisplayTitle(permissionName, progActorResources, isRest = false) { return { title, displayTitle } } -function sentenceCase(str) { +function sentenceCase(str: string): string { return str.charAt(0).toUpperCase() + str.slice(1) } /** * Calculates whether an operation has additional permissions beyond a single permission. */ -export function calculateAdditionalPermissions(permissionSets) { +export function calculateAdditionalPermissions( + permissionSets: Array>, +): boolean { return ( permissionSets.length > 1 || permissionSets.some((permissionSet) => Object.keys(permissionSet).length > 1) @@ -360,7 +477,10 @@ export function calculateAdditionalPermissions(permissionSets) { * Determines whether a metadata permission should be filtered out when it has additional permissions. * Prevents misleading documentation where mutating operations appear to only need metadata access. 
*/ -export function shouldFilterMetadataPermission(permissionName, permissionSets) { +export function shouldFilterMetadataPermission( + permissionName: string, + permissionSets: Array>, +): boolean { if (permissionName !== 'metadata') { return false } @@ -368,7 +488,11 @@ export function shouldFilterMetadataPermission(permissionName, permissionSets) { return calculateAdditionalPermissions(permissionSets) } -export function isActorExcluded(excludedActors, actorType, actorTypeMap = {}) { +export function isActorExcluded( + excludedActors: string[] | undefined | null | unknown, + actorType: string, + actorTypeMap: Record = {}, +): boolean { if (!excludedActors || !Array.isArray(excludedActors)) { return false } @@ -394,14 +518,21 @@ export function isActorExcluded(excludedActors, actorType, actorTypeMap = {}) { return false } -function addAppData(storage, category, data) { +function addAppData( + storage: Record, + category: string, + data: AppDataOperation, +): void { if (!storage[category]) { storage[category] = [] } - storage[category].push(data) + ;(storage[category] as AppDataOperation[]).push(data) } -async function validateAppData(data, pageType) { +async function validateAppData( + data: Record, + pageType: string, +): Promise { if (pageType.includes('permissions')) { for (const value of Object.values(data)) { const { isValid, errors } = validateJson(permissionSchema, value) @@ -412,7 +543,7 @@ async function validateAppData(data, pageType) { } } else { for (const arrayItems of Object.values(data)) { - for (const item of arrayItems) { + for (const item of arrayItems as AppDataOperation[]) { const { isValid, errors } = validateJson(enabledSchema, item) if (!isValid) { console.error(JSON.stringify(errors, null, 2)) @@ -423,6 +554,14 @@ async function validateAppData(data, pageType) { } } +interface ProgActorResourceContentOptions { + owner?: string + repo?: string + branch?: string + path?: string + gitHubSourceDirectory?: string | null +} + // When getting files from the GitHub repo locally (or in a Codespace) // you can pass the full or relative path to the `github` repository // directory on disk. @@ -434,21 +573,21 @@ async function getProgActorResourceContent({ branch, path, gitHubSourceDirectory = null, -}) { +}: ProgActorResourceContentOptions): Promise { // Get files either locally from disk or from the GitHub remote repo - let files + let files: string[] if (gitHubSourceDirectory) { files = await getProgActorContentFromDisk(gitHubSourceDirectory) } else { - files = await getDirectoryContents(owner, repo, branch, path) + files = await getDirectoryContents(owner!, repo!, branch!, path!) } // We need to format the file content into a single object. Each file // contains a single key and a single value that needs to be added // to the object. - const progActorResources = {} + const progActorResources: ProgActorResources = {} for (const file of files) { - const fileContent = yaml.load(file) + const fileContent = yaml.load(file) as Record // Each file should only contain a single key and value. 
if (Object.keys(fileContent).length !== 1) { throw new Error(`Error: The file ${JSON.stringify(fileContent)} must only have one key.`) @@ -460,7 +599,7 @@ async function getProgActorResourceContent({ return progActorResources } -async function getProgActorContentFromDisk(directory) { +async function getProgActorContentFromDisk(directory: string): Promise { const files = walk(directory, { includeBasePath: true, directories: false, From 2e1fef3b8ccc3dd0ad7f8b7b6153776efedcbe27 Mon Sep 17 00:00:00 2001 From: Felicity Chapman Date: Tue, 21 Oct 2025 15:53:49 +0100 Subject: [PATCH 3/4] Update primer/octicons to 19.19.0 from 19.15.5 to add MCP icon and other new icons (#58084) Co-authored-by: mc <42146119+mchammer01@users.noreply.github.com> --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index e50ab3411789..d3f93f38b24f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,7 +17,7 @@ "@primer/behaviors": "^1.7.2", "@primer/css": "^21.3.1", "@primer/live-region-element": "^0.7.2", - "@primer/octicons": "^19.15.5", + "@primer/octicons": "^19.19.0", "@primer/octicons-react": "^19.14.0", "@primer/react": "^37.31.0", "accept-language-parser": "^1.5.0", @@ -3379,9 +3379,9 @@ } }, "node_modules/@primer/octicons": { - "version": "19.15.5", - "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-19.15.5.tgz", - "integrity": "sha384-BXOX4YiO3A43qIuOXqghQLeFwQxRTQLGVGJMEC7lRTd4XHwsgIWSHErWN6dIHXiW", + "version": "19.19.0", + "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-19.19.0.tgz", + "integrity": "sha512-LBbL8nOl6FWMDy7riKB5ppHLtffY7loRq+CDGj0D5G1Xdo2mKlSOQy3rWy2RVE8SxxPFL+mj46C1nG+smKBEZA==", "license": "MIT", "dependencies": { "object-assign": "^4.1.1" diff --git a/package.json b/package.json index dab4d80be5b8..a2e3b945928b 100644 --- a/package.json +++ b/package.json @@ -159,7 +159,7 @@ "@primer/behaviors": "^1.7.2", "@primer/css": "^21.3.1", "@primer/live-region-element": "^0.7.2", - "@primer/octicons": "^19.15.5", + "@primer/octicons": "^19.19.0", "@primer/octicons-react": "^19.14.0", "@primer/react": "^37.31.0", "accept-language-parser": "^1.5.0", From e8be8376a5f33909ed0e8b6be0a9de525a0e06c7 Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Tue, 21 Oct 2025 08:00:13 -0700 Subject: [PATCH 4/4] Update js to ts references (#58029) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/instructions/code.instructions.md | 2 +- .github/instructions/content.instructions.md | 2 +- .github/workflows/codeql.yml | 2 - .../count-translation-corruptions.yml | 2 +- .github/workflows/enterprise-dates.yml | 4 +- .github/workflows/move-content.yml | 2 +- .github/workflows/orphaned-files-check.yml | 4 +- .../workflows/reviewers-docs-engineering.yml | 1 - Dockerfile | 2 - Dockerfile.openapi_decorator | 2 +- content/README.md | 10 ++--- .../troubleshooting-your-environment.md | 2 +- ...sing-markdown-and-liquid-in-github-docs.md | 2 +- .../using-yaml-frontmatter.md | 6 +-- .../versioning-documentation.md | 2 +- contributing/development.md | 2 +- data/features/README.md | 2 +- data/learning-tracks/README.md | 4 +- data/product-examples/README.md | 2 +- .../release-notes/enterprise-server/README.md | 6 +-- data/tables/README.md | 4 +- src/README.md | 2 +- src/ai-tools/README.md | 12 ++++++ src/archives/lib/old-versions-utils.ts | 2 +- .../middleware/archived-asset-redirects.ts | 2 +- .../archived-enterprise-versions-assets.ts | 2 +- 
.../archived-enterprise-versions.ts | 2 +- src/article-api/middleware/validation.ts | 2 +- src/audit-logs/README.md | 2 +- src/audit-logs/tests/fields.ts | 2 +- src/automated-pipelines/README.md | 6 +-- src/codeql-cli/README.md | 8 ++-- src/content-linter/README.md | 38 +++++++++---------- .../liquid-quoted-conditional-arg.ts | 2 +- .../lib/linting-rules/octicon-aria-labels.ts | 2 +- src/content-linter/scripts/disable-rules.ts | 2 +- src/content-linter/tests/category-pages.ts | 2 +- .../tests/lint-frontmatter-links.ts | 2 +- .../unit/code-annotation-comment-spacing.ts | 2 +- .../tests/unit/rule-filtering.ts | 2 +- src/content-render/README.md | 4 +- src/content-render/scripts/move-content.ts | 2 +- .../scripts/render-content-markdown.ts | 4 +- .../scripts/update-filepaths.ts | 2 +- src/content-render/tests/data.ts | 2 +- src/content-render/tests/liquid-helpers.ts | 2 +- .../tests/render-changed-and-deleted-files.ts | 2 +- src/content-render/unified/annotate.ts | 2 +- src/content-render/unified/code-header.ts | 2 +- .../unified/rewrite-asset-img-tags.ts | 2 +- .../unified/rewrite-local-links.ts | 2 +- src/data-directory/lib/data-schemas/index.ts | 10 ++--- src/data-directory/lib/get-data.ts | 12 +++--- src/data-directory/tests/orphaned-features.ts | 2 +- src/events/lib/schema.ts | 4 +- src/fixtures/README.md | 6 +-- .../content/get-started/liquid/ifversion.md | 2 +- .../liquid-tags/minimal-conditional.md | 2 +- .../page-with-invalid-product-version.md | 2 +- .../components/context/ArticleContext.tsx | 2 +- src/frame/components/context/MainContext.tsx | 2 +- .../components/page-header/Breadcrumbs.tsx | 2 +- .../ui/MarkdownContent/stylesheets/table.scss | 2 +- src/frame/lib/path-utils.ts | 2 +- src/frame/middleware/context/context.ts | 2 +- src/frame/middleware/reload-tree.ts | 2 +- src/frame/middleware/robots.ts | 2 +- src/frame/tests/server.ts | 2 +- src/ghes-releases/lib/deprecation-steps.md | 2 +- .../scripts/create-enterprise-issue.ts | 2 +- .../scripts/deprecate/archive-version.ts | 2 +- .../deprecate/update-automated-pipelines.ts | 4 +- src/ghes-releases/scripts/release-banner.ts | 2 +- src/github-apps/tests/metadata-permissions.js | 2 +- src/github-apps/tests/sync.ts | 2 +- src/graphql/README.md | 6 +-- src/graphql/lib/validator.ts | 2 +- src/graphql/scripts/README.md | 2 +- src/landings/tests/featured-links.ts | 2 +- src/languages/README-translation-comments.md | 4 +- src/languages/README.md | 8 ++-- src/languages/lib/languages.ts | 2 +- src/languages/lib/render-with-fallback.ts | 2 +- src/learning-track/tests/lint-data.ts | 2 +- src/links/lib/excluded-links.ts | 2 +- .../scripts/check-github-github-links.ts | 2 +- src/metrics/scripts/README.md | 14 +++---- src/pages/_error.tsx | 2 +- src/products/lib/all-products.ts | 2 +- src/redirects/README.md | 12 +++--- src/redirects/lib/permalinks.ts | 2 +- src/redirects/lib/precompile.ts | 6 +-- .../lib/static/redirect-exceptions.txt | 4 +- src/redirects/middleware/handle-redirects.ts | 2 +- .../tests/routing/versionless-redirects.ts | 2 +- src/rest/README.md | 2 +- src/rest/scripts/update-files.ts | 4 +- src/search/README.md | 2 +- .../data/public-docs-schema.ts | 2 +- src/types.ts | 2 +- src/versions/lib/all-versions.ts | 4 +- src/versions/lib/get-applicable-versions.ts | 2 +- src/versions/middleware/short-versions.ts | 2 +- 103 files changed, 182 insertions(+), 177 deletions(-) diff --git a/.github/instructions/code.instructions.md b/.github/instructions/code.instructions.md index ab4381066c01..35558ade0cc3 100644 --- 
a/.github/instructions/code.instructions.md +++ b/.github/instructions/code.instructions.md @@ -15,7 +15,7 @@ For code reviews, follow guidelines, tests, and validate instructions. For creat - Be careful fetching full HTML pages off the internet. Prefer to use gh cli whenever possible for github.com. Limit the number of tokens when grabbing HTML. - Avoid pull requests with over 300 lines of code changed. When significantly larger, offer to split up into smaller pull requests if possible. - All new code should be written in TypeScript and not JavaScript. -- We use absolute imports, relative to the `src` directory, using the `@` symbol. For example, `getRedirect` which lives in `src/redirects/lib/get-redirect.js` can be imported with `import getRedirect from '@/redirects/lib/get-redirect'`. The same rule applies for TypeScript (`.ts`) imports, e.g. `import type { GeneralSearchHit } from '@/search/types'` +- We use absolute imports, relative to the `src` directory, using the `@` symbol. For example, `getRedirect` which lives in `src/redirects/lib/get-redirect.ts` can be imported with `import getRedirect from '@/redirects/lib/get-redirect'`. The same rule applies for TypeScript (`.ts`) imports, e.g. `import type { GeneralSearchHit } from '@/search/types'` ## Tests diff --git a/.github/instructions/content.instructions.md b/.github/instructions/content.instructions.md index 85dad30268e3..17cf1a39c911 100644 --- a/.github/instructions/content.instructions.md +++ b/.github/instructions/content.instructions.md @@ -14,7 +14,7 @@ Before committing content changes, always: 1. **Use the content linter** to validate content: `npm run lint-content -- --paths ` 2. **Check for proper variable usage** in your content 3. **Verify [AUTOTITLE] links** point to existing articles -4. **Run tests** on changed content: `npm run test -- src/content-render/tests/render-changed-and-deleted-files.js` +4. 
**Run tests** on changed content: `npm run test -- src/content-render/tests/render-changed-and-deleted-files.ts` ## Bullet lists diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index b761b7d6d659..b5484c2f8c01 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -9,9 +9,7 @@ on: branches: - main paths: - - '**/*.js' - '**/*.ts' - - '**/*.jsx' - '**/*.tsx' - '.github/workflows/codeql.yml' # This is so that when CodeQL runs on a pull request, it can compare diff --git a/.github/workflows/count-translation-corruptions.yml b/.github/workflows/count-translation-corruptions.yml index 9b6b134a666b..8549c30fe247 100644 --- a/.github/workflows/count-translation-corruptions.yml +++ b/.github/workflows/count-translation-corruptions.yml @@ -9,7 +9,7 @@ on: pull_request: paths: - src/languages/scripts/count-translation-corruptions.ts - - src/languages/lib/correct-translation-content.js + - src/languages/lib/correct-translation-content.ts - .github/workflows/count-translation-corruptions.yml - .github/actions/node-npm-setup/action.yml - .github/actions/clone-translations/action.yml diff --git a/.github/workflows/enterprise-dates.yml b/.github/workflows/enterprise-dates.yml index d7e9e3f6d793..280f516dddb1 100644 --- a/.github/workflows/enterprise-dates.yml +++ b/.github/workflows/enterprise-dates.yml @@ -28,7 +28,7 @@ jobs: - uses: ./.github/actions/node-npm-setup - - name: Run src/ghes-releases/scripts/update-enterprise-dates.js + - name: Run src/ghes-releases/scripts/update-enterprise-dates.ts run: npm run update-enterprise-dates env: GITHUB_TOKEN: ${{ secrets.DOCS_BOT_PAT_BASE }} @@ -42,7 +42,7 @@ jobs: with: # need to use a token with repo and workflow scopes for this step token: ${{ secrets.DOCS_BOT_PAT_BASE }} - commit-message: '🤖 ran src/ghes-releases/scripts/update-enterprise-dates.js' + commit-message: '🤖 ran src/ghes-releases/scripts/update-enterprise-dates.ts' title: 🤖 src/ghes-releases/lib/enterprise-dates.json update body: "Hello! 
The GitHub Enterprise Server release dates have changed.\n\n diff --git a/.github/workflows/move-content.yml b/.github/workflows/move-content.yml index 8b8e1fbf4591..2737da134d57 100644 --- a/.github/workflows/move-content.yml +++ b/.github/workflows/move-content.yml @@ -7,7 +7,7 @@ name: Move content script test on: pull_request: paths: - - src/content-render/scripts/move-content.js + - src/content-render/scripts/move-content.ts - src/content-render/scripts/test-move-content.ts - 'src/frame/lib/**/*.js' - .github/workflows/move-content.yml diff --git a/.github/workflows/orphaned-files-check.yml b/.github/workflows/orphaned-files-check.yml index ee8073aa4eda..ba2e398c28e4 100644 --- a/.github/workflows/orphaned-files-check.yml +++ b/.github/workflows/orphaned-files-check.yml @@ -14,10 +14,10 @@ on: - .github/workflows/orphaned-files-check.yml # In case any of the dependencies affect the script - 'package*.json' - - src/assets/scripts/find-orphaned-assets.js + - src/assets/scripts/find-orphaned-assets.ts - src/content-render/scripts/reusables-cli/find/unused.ts - src/workflows/walk-files.ts - - src/languages/lib/languages.js + - src/languages/lib/languages.ts - .github/actions/clone-translations/action.yml - .github/actions/node-npm-setup/action.yml diff --git a/.github/workflows/reviewers-docs-engineering.yml b/.github/workflows/reviewers-docs-engineering.yml index b08adfc95739..8746c50cdbf4 100644 --- a/.github/workflows/reviewers-docs-engineering.yml +++ b/.github/workflows/reviewers-docs-engineering.yml @@ -15,7 +15,6 @@ on: - reopened - synchronize paths: - - '**.js' - '**.ts' - '**.tsx' - '**.scss' diff --git a/Dockerfile b/Dockerfile index a539cf735871..21435e93f877 100644 --- a/Dockerfile +++ b/Dockerfile @@ -152,6 +152,4 @@ ARG BUILD_SHA ENV BUILD_SHA=$BUILD_SHA # Entrypoint to start the server -# Note: Currently we have to use tsx because -# we have a mix of `.ts` and `.js` files with multiple import patterns CMD ["node_modules/.bin/tsx", "src/frame/server.ts"] diff --git a/Dockerfile.openapi_decorator b/Dockerfile.openapi_decorator index 7cc9fcfbdaa5..ddda5d8bf94d 100644 --- a/Dockerfile.openapi_decorator +++ b/Dockerfile.openapi_decorator @@ -16,4 +16,4 @@ ADD --chown=node:node data /openapi-check/data RUN npm ci -D -ENTRYPOINT ["node", "/openapi-check/src/rest/scripts/openapi-check.js"] +ENTRYPOINT ["node", "/openapi-check/src/rest/scripts/openapi-check.ts"] diff --git a/content/README.md b/content/README.md index c60ebd71e9f2..c0806e63ae49 100644 --- a/content/README.md +++ b/content/README.md @@ -51,13 +51,13 @@ It is a block of key-value content that lives at the top of every Markdown file. The following frontmatter values have special meanings and requirements for this site. There's also a schema that's used by the test suite to validate every page's frontmatter. -See [`lib/frontmatter.js`](/src/frame/lib/frontmatter.js). +See [`lib/frontmatter.ts`](/src/frame/lib/frontmatter.ts). ### `versions` -- Purpose: Indicates the [versions](/src/versions/lib/all-versions.js) to which a page applies. +- Purpose: Indicates the [versions](/src/versions/lib/all-versions.ts) to which a page applies. See [Versioning](#versioning) for more info. -- Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.js`](/src/frame/lib/frontmatter.js). +- Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.ts`](/src/frame/lib/frontmatter.ts). 
- This frontmatter value is currently **required** for all pages. - The `*` is used to denote all releases for the version. @@ -197,7 +197,7 @@ featuredLinks: ### `allowTitleToDifferFromFilename` -- Purpose: Indicates whether a page is allowed to have a title that differs from its filename. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.js`. Use this value if a file's `title` frontmatter includes Liquid or punctuation that cannot be part of the filename. For example, the article [About Enterprise Managed Users](https://docs.github.com/en/enterprise-cloud@latest/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users) uses a Liquid reusable in its title, `'About {% data variables.product.prodname_emus %}'`, which cannot be in the filename, `about-enterprise-managed-users.md`, so the `allowTitleToDifferFromFilename` frontmatter is set to `true`. +- Purpose: Indicates whether a page is allowed to have a title that differs from its filename. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.ts`. Use this value if a file's `title` frontmatter includes Liquid or punctuation that cannot be part of the filename. For example, the article [About Enterprise Managed Users](https://docs.github.com/en/enterprise-cloud@latest/admin/identity-and-access-management/using-enterprise-managed-users-for-iam/about-enterprise-managed-users) uses a Liquid reusable in its title, `'About {% data variables.product.prodname_emus %}'`, which cannot be in the filename, `about-enterprise-managed-users.md`, so the `allowTitleToDifferFromFilename` frontmatter is set to `true`. - Type: `Boolean`. Default is `false`. - Optional. @@ -327,7 +327,7 @@ A content file can have **two** types of versioning: - Liquid statements in content (**optional**) - Conditionally render content depending on the current version being viewed. See [Versioning documentation](https://docs.github.com/en/contributing/writing-for-github-docs/versioning-documentation#versioning-with-liquid-conditional-operators) for more info. Note Liquid conditionals can also appear in `data` and `include` files. -**Note**: As of early 2021, the `free-pro-team@latest` version is not included URLs. A helper function called `src/versions/lib/remove-fpt-from-path.js` removes the version from URLs. +**Note**: As of early 2021, the `free-pro-team@latest` version is not included URLs. A helper function called `src/versions/lib/remove-fpt-from-path.ts` removes the version from URLs. ## Filenames diff --git a/content/contributing/setting-up-your-environment-to-work-on-github-docs/troubleshooting-your-environment.md b/content/contributing/setting-up-your-environment-to-work-on-github-docs/troubleshooting-your-environment.md index 0c8370dc7f0e..b2020611c9b7 100644 --- a/content/contributing/setting-up-your-environment-to-work-on-github-docs/troubleshooting-your-environment.md +++ b/content/contributing/setting-up-your-environment-to-work-on-github-docs/troubleshooting-your-environment.md @@ -9,7 +9,7 @@ versions: ## Troubleshooting tests that fail locally but pass in CI -If you run tests locally and get failures in `tests/rendering/server.js` around static assets, stylesheets, or the client-side JavaScript bundle, but the same tests pass in CI on a PR, run the command `npm run build`. 
This is a one-time command that creates static assets locally. +If you run tests locally and get failures in `tests/rendering/server.ts` around static assets, stylesheets, or the client-side JavaScript bundle, but the same tests pass in CI on a PR, run the command `npm run build`. This is a one-time command that creates static assets locally. For more information, see [AUTOTITLE](/contributing/setting-up-your-environment-to-work-on-github-docs/creating-a-local-environment). diff --git a/content/contributing/writing-for-github-docs/using-markdown-and-liquid-in-github-docs.md b/content/contributing/writing-for-github-docs/using-markdown-and-liquid-in-github-docs.md index 1df97a512bf1..5188d08a5c62 100644 --- a/content/contributing/writing-for-github-docs/using-markdown-and-liquid-in-github-docs.md +++ b/content/contributing/writing-for-github-docs/using-markdown-and-liquid-in-github-docs.md @@ -469,7 +469,7 @@ For more information about links, see [AUTOTITLE](/contributing/style-guide-and- Because the site is dynamic, it does not build HTML files for each different version of an article. Instead it generates a "permalink" for every version of the article. It does this based on the article's [`versions` frontmatter](/contributing/syntax-and-versioning-for-github-docs/using-yaml-frontmatter#versions). > [!NOTE] -> As of early 2021, the `free-pro-team@latest` version is not included in URLs. A helper function called `lib/remove-fpt-from-path.js` removes the version from URLs. +> As of early 2021, the `free-pro-team@latest` version is not included in URLs. A helper function called `lib/remove-fpt-from-path.ts` removes the version from URLs. For example, an article that is available in currently supported versions will have permalink URLs like the following: diff --git a/content/contributing/writing-for-github-docs/using-yaml-frontmatter.md b/content/contributing/writing-for-github-docs/using-yaml-frontmatter.md index 28750a68d63a..d66fafb3d19e 100644 --- a/content/contributing/writing-for-github-docs/using-yaml-frontmatter.md +++ b/content/contributing/writing-for-github-docs/using-yaml-frontmatter.md @@ -19,7 +19,7 @@ It is a block of key-value content that lives at the top of every Markdown file The following frontmatter values have special meanings and requirements for {% data variables.product.prodname_docs %}. There's also a schema that's used by the test suite to validate every page's frontmatter. -For more information, see [`lib/frontmatter.js`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.js). +For more information, see [`lib/frontmatter.ts`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.ts). * [`versions`](#versions) * [`redirect_from`](#redirect_from) @@ -49,7 +49,7 @@ For more information, see [`lib/frontmatter.js`](https://github.com/github/docs/ * Purpose: Indicates the [versions](https://github.com/github/docs/blob/main/src/versions/lib/all-versions.ts) to which a page applies. For more information about the different types of versioning, see [Versioning documentation](/contributing/syntax-and-versioning-for-github-docs/versioning-documentation). -* Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.js`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.js). +* Type: `Object`. Allowable keys map to product names and can be found in the `versions` object in [`lib/frontmatter.ts`](https://github.com/github/docs/blob/main/src/frame/lib/frontmatter.ts). 
* This frontmatter value is currently **required** for all pages. * The `*` is used to denote all releases for the version. * Must be present for all `index.md` files, but actual value is computed at runtime based on the children. @@ -191,7 +191,7 @@ featuredLinks: ### `allowTitleToDifferFromFilename` -* Purpose: Indicates whether a page is allowed to have a title that differs from its filename. For example, `content/rest/reference/orgs.md` has a title of `Organizations` instead of `Orgs`. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.js`. +* Purpose: Indicates whether a page is allowed to have a title that differs from its filename. For example, `content/rest/reference/orgs.md` has a title of `Organizations` instead of `Orgs`. Pages with this frontmatter set to `true` will not be flagged in tests or updated by `src/content-render/scripts/reconcile-filenames-with-ids.ts`. * Type: `Boolean`. Default is `false`. * Optional. diff --git a/content/contributing/writing-for-github-docs/versioning-documentation.md b/content/contributing/writing-for-github-docs/versioning-documentation.md index caad2df40745..20c0d459df0e 100644 --- a/content/contributing/writing-for-github-docs/versioning-documentation.md +++ b/content/contributing/writing-for-github-docs/versioning-documentation.md @@ -68,7 +68,7 @@ For {% data variables.product.prodname_ghe_cloud %}, use `enterprise-cloud@lates ### {% data variables.product.prodname_ghe_server %} -Documentation for {% data variables.product.prodname_ghe_server %} has multiple versions and can be divided into two types: documentation for _supported releases_ (we support four at any one time), and documentation for _{% data variables.release-phases.closing_down %} releases_ (we do not link to these on the Docs site but we support a "frozen" snapshot of these docs in perpetuity, so they can still be accessed if you know the URLs). See [`lib/enterprise-server-releases.js`](https://github.com/github/docs/blob/main/src/versions/lib/enterprise-server-releases.js) for a list. +Documentation for {% data variables.product.prodname_ghe_server %} has multiple versions and can be divided into two types: documentation for _supported releases_ (we support four at any one time), and documentation for _{% data variables.release-phases.closing_down %} releases_ (we do not link to these on the Docs site but we support a "frozen" snapshot of these docs in perpetuity, so they can still be accessed if you know the URLs). See [`lib/enterprise-server-releases.ts`](https://github.com/github/docs/blob/main/src/versions/lib/enterprise-server-releases.ts) for a list. The versions are named `enterprise-server@`. The short name is `ghes`. In Liquid conditionals, we can specify ranges, like `ghes > 3.0`. For more information, see [Versioning with Liquid conditional operators](#versioning-with-liquid-conditional-operators). diff --git a/contributing/development.md b/contributing/development.md index 86a51865de83..b7b181740bc4 100644 --- a/contributing/development.md +++ b/contributing/development.md @@ -42,7 +42,7 @@ In a matter of minutes, you will be ready to edit, review and test your changes By default the local server won't run with all supported languages enabled. If you need to run the server with a particular language, you can temporarily edit the `start` script in `package.json` and update the `ENABLED_LANGUAGES` variable. 
For example, to enable Japanese and Portuguese, you can set it to `ENABLED_LANGUAGES='en,ja,pt'` and then you need to restart the server for the change to take effect. -The supported language codes are defined in [lib/languages.js](../src/languages/lib/languages.js). +The supported language codes are defined in [lib/languages.ts](../src/languages/lib/languages.ts). ## Site structure diff --git a/data/features/README.md b/data/features/README.md index e57fb62e1bd0..314d085980c5 100644 --- a/data/features/README.md +++ b/data/features/README.md @@ -39,7 +39,7 @@ You cannot use `feature:` to specify multiple concurrent versions, as this is no ## Schema enforcement -The schema for validating the feature versioning lives in [`src/data-directory/lib/data-schemas/features.js`](../../src/data-directory/lib/data-schemas/features.js). +The schema for validating the feature versioning lives in [`src/data-directory/lib/data-schemas/features.ts`](../../src/data-directory/lib/data-schemas/features.ts). ## Script to remove feature tags diff --git a/data/learning-tracks/README.md b/data/learning-tracks/README.md index 7bb8339b55a1..83b6b2bee1fb 100644 --- a/data/learning-tracks/README.md +++ b/data/learning-tracks/README.md @@ -25,7 +25,7 @@ Learning track data for a product is defined in two places: ## Versioning -Versioning for learning tracks is processed at page render time. The code lives in [`lib/learning-tracks.js`](lib/learning-tracks.js), which is called by `page.render()`. The processed learning tracks are then rendered by `components/guides`. +Versioning for learning tracks is processed at page render time. The code lives in [`lib/learning-tracks.ts`](lib/learning-tracks.ts), which is called by `page.render()`. The processed learning tracks are then rendered by `components/guides`. Liquid conditionals do **not** have to be used for versioning in the YAML file for guides. Only the learning track guides that apply to the current version will be rendered automatically. If there aren't any tracks with guides that belong to the current version, the learning tracks section will not render at all. @@ -48,4 +48,4 @@ If the `versions` property is not included, it's assumed the track is available ## Schema enforcement -The schema for validating the learning track YAML lives in [`src/content-linter/lib/learning-tracks-schema.js`](src/content-linter/lib/learning-tracks-schema.js) and is exercised by [`tests/content/lint-files.js`](tests/content/lint-files.js). +The schema for validating the learning track YAML lives in [`src/content-linter/lib/learning-tracks-schema.ts`](src/content-linter/lib/learning-tracks-schema.ts) and is exercised by [`tests/content/lint-files.ts`](tests/content/lint-files.ts). diff --git a/data/product-examples/README.md b/data/product-examples/README.md index 4791fb3312c8..c9bb346b96f8 100644 --- a/data/product-examples/README.md +++ b/data/product-examples/README.md @@ -35,7 +35,7 @@ where the syntax for `versions` is the same as the [frontmatter `versions` prope ## Rendering -The product example data is added to the `context` object in `src/frame/middleware/context/product-examples.js`. +The product example data is added to the `context` object in `src/frame/middleware/context/product-examples.ts`. The data is then rendered by `components/landing`. 
diff --git a/data/release-notes/enterprise-server/README.md b/data/release-notes/enterprise-server/README.md index 8add320023cf..c72a3fa09893 100644 --- a/data/release-notes/enterprise-server/README.md +++ b/data/release-notes/enterprise-server/README.md @@ -25,7 +25,7 @@ The directories are named by GHES release number (with a hyphen instead of a per The YAML files in each directory are named by patch number. Some patch filenames may end with `-rc.yml`, which means it's a release candidate. A release candidate file also requires `release_candidate: true` in the YAML data. -Release notes of deprecated GHES versions (see `lib/enterprise-server-releases.js`) are **not** removed from the site and will always be displayed alongside currently supported versions. +Release notes of deprecated GHES versions (see `lib/enterprise-server-releases.ts`) are **not** removed from the site and will always be displayed alongside currently supported versions. Note that patch files can be deprecated individually (i.e., hidden on the docs site) by an optional `deprecated: true` property. @@ -41,6 +41,6 @@ The release notes page has a custom design with CSS in `stylesheets/release-note ### Schema -The schema that validates the YAML data lives in `src/content-linter/lib/release-notes-schema.js`. See the schema file to find out the required and optional properties. +The schema that validates the YAML data lives in `src/content-linter/lib/release-notes-schema.ts`. See the schema file to find out the required and optional properties. -The schema is exercised by a test in `src/content-linter/tests/lint-files.js`. The test will fail if the data does not pass validation. +The schema is exercised by a test in `src/content-linter/tests/lint-files.ts`. The test will fail if the data does not pass validation. diff --git a/data/tables/README.md b/data/tables/README.md index 277669049288..b6c0e00efa9d 100644 --- a/data/tables/README.md +++ b/data/tables/README.md @@ -86,7 +86,7 @@ After creating all three files: 1. **Build the site**: Run `npm run build` 2. **Test schemas**: Run `npm test -- src/data-directory/tests` -3. **Fix any errors**: If you get failures in `src/data-directory/tests/data-schemas.js`: +3. **Fix any errors**: If you get failures in `src/data-directory/tests/data-schemas.ts`: - Copy the error message - In VS Code Copilot Chat, type: "When I ran the schema test, I got this error:" and paste the error - Update your schema file based on Copilot's suggestions @@ -96,4 +96,4 @@ After creating all three files: Once your table is working and tests pass, create a pull request for review. -The `docs-engineering` team must review and approve your implementation. \ No newline at end of file +The `docs-engineering` team must review and approve your implementation. diff --git a/src/README.md b/src/README.md index 4cb74fb47748..4746402940f1 100644 --- a/src/README.md +++ b/src/README.md @@ -55,7 +55,7 @@ Most subject folders have their own mention in `.github/workflows/test.yml`. Open the file to see the beginning of it. It's manually maintained but it's important to point out two things: -1. It's manually entered so creating a `src/foo/tests/*.js` doesn't +1. It's manually entered so creating a `src/foo/tests/*.ts` doesn't automatically start running those tests. 1. 
When you add an entry to `.github/workflows/test.yml`, and it's gone into `main`, don't forget to add it to the branch protection's
diff --git a/src/ai-tools/README.md b/src/ai-tools/README.md
index f5382845b7f3..b527174726c5 100644
--- a/src/ai-tools/README.md
+++ b/src/ai-tools/README.md
@@ -9,11 +9,11 @@ This script calls the [Models API](https://docs.github.com/en/rest/models/infere
 ## Usage
 
 ```sh
 # Direct command
 tsx src/ai-tools/scripts/ai-tools.ts --refine --files
 
 # Or via npm script
 npm run ai-tools -- --refine --files
 ```
 
 * `--files, -f`: One or more content file paths to process (required).
@@ -22,11 +22,11 @@ npm run ai-tools -- --refine --files
 **Examples:**
 
 ```sh
 # Direct command
 tsx src/ai-tools/scripts/ai-tools.ts --files content/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo.md --refine versioning
 
 # Via npm script
 npm run ai-tools -- --files content/copilot/tutorials/coding-agent/get-the-best-results.md --refine intro
 ```
 
 ## Requirements
diff --git a/src/archives/lib/old-versions-utils.ts b/src/archives/lib/old-versions-utils.ts
index 055583ef31e6..6804f8551d3d 100644
--- a/src/archives/lib/old-versions-utils.ts
+++ b/src/archives/lib/old-versions-utils.ts
@@ -8,7 +8,7 @@ const oldVersions = ['dotcom'].concat(supported)
 const newVersions = Object.keys(allVersions)
 
 // Utility functions for converting between old version paths and new version paths.
-// See lib/path-utils.js for utility functions based on new paths.
+// See lib/path-utils.ts for utility functions based on new paths.
 // Examples:
 // OLD /github/category/article to NEW /free-pro-team@latest/github/category/article
 // OLD /enterprise/2.21/user/github/category/article to NEW /enterprise-server@2.21/github/category/article
diff --git a/src/archives/middleware/archived-asset-redirects.ts b/src/archives/middleware/archived-asset-redirects.ts
index 0f5c14b80a2d..9ee3a72946f0 100644
--- a/src/archives/middleware/archived-asset-redirects.ts
+++ b/src/archives/middleware/archived-asset-redirects.ts
@@ -14,7 +14,7 @@ import type { ExtendedRequest } from '@/types'
 // we strive to make the files in the repo only files that we actually
 // use and refer to in the non-archived content.
 
-// Note that, we also have `archived-enterprise-versions-assets.js`
+// Note that, we also have `archived-enterprise-versions-assets.ts`
 // but that one assumes the whole path refers to a prefix which is
 // considered archived. E.g.
/en/enterprise-server@2.9/foo/bar.css diff --git a/src/archives/middleware/archived-enterprise-versions-assets.ts b/src/archives/middleware/archived-enterprise-versions-assets.ts index 91432e883afe..5bfb86b16f06 100644 --- a/src/archives/middleware/archived-enterprise-versions-assets.ts +++ b/src/archives/middleware/archived-enterprise-versions-assets.ts @@ -10,7 +10,7 @@ import type { ExtendedRequest } from '@/types' // This module handles requests for the CSS and JS assets for // deprecated GitHub Enterprise versions by routing them to static content in // one of the docs-ghes- repos. -// See also ./archived-enterprise-versions.js for non-CSS/JS paths +// See also ./archived-enterprise-versions.ts for non-CSS/JS paths export default async function archivedEnterpriseVersionsAssets( req: ExtendedRequest, diff --git a/src/archives/middleware/archived-enterprise-versions.ts b/src/archives/middleware/archived-enterprise-versions.ts index fd304e2e5a4b..35f65e00e68d 100644 --- a/src/archives/middleware/archived-enterprise-versions.ts +++ b/src/archives/middleware/archived-enterprise-versions.ts @@ -72,7 +72,7 @@ const cacheAggressively = (res: Response) => { // 3. ~4000ms // // ...if the limit we set is 3. -// Our own timeout, in @/frame/middleware/timeout.js defaults to 10 seconds. +// Our own timeout, in @/frame/middleware/timeout.ts defaults to 10 seconds. // So there's no point in trying more attempts than 3 because it would // just timeout on the 10s. (i.e. 1000 + 2000 + 4000 + 8000 > 10,000) const retryConfiguration = { limit: 3 } diff --git a/src/article-api/middleware/validation.ts b/src/article-api/middleware/validation.ts index b2cd10920e7a..1a92afff647b 100644 --- a/src/article-api/middleware/validation.ts +++ b/src/article-api/middleware/validation.ts @@ -83,7 +83,7 @@ export const pageValidationMiddleware = ( const redirectsContext = { pages: req.context.pages, redirects: req.context.redirects } - // Similar to how the `handle-redirects.js` middleware works, let's first + // Similar to how the `handle-redirects.ts` middleware works, let's first // check if the URL is just having a trailing slash. 
while (pathname.endsWith('/') && pathname.length > 1) { pathname = pathname.slice(0, -1) diff --git a/src/audit-logs/README.md b/src/audit-logs/README.md index 0f056fdde705..b9320daa1d42 100644 --- a/src/audit-logs/README.md +++ b/src/audit-logs/README.md @@ -8,7 +8,7 @@ The audit log event pipeline generates the event data for 3 audit log pages; the flowchart TD Start([Start])--> RunScript["Run: - src/auditlog/scripts/sync.js"]--> + src/auditlog/scripts/sync.ts"]--> GetContents["getContents() schema.json from github/audit-log-allowlists repo"]--> SchemaFiles["audit log schema file\n diff --git a/src/audit-logs/tests/fields.ts b/src/audit-logs/tests/fields.ts index d155375bf53f..5ee4eed74440 100644 --- a/src/audit-logs/tests/fields.ts +++ b/src/audit-logs/tests/fields.ts @@ -1,5 +1,5 @@ import { describe, expect, test } from 'vitest' -import { getAuditLogEvents, getCategorizedAuditLogEvents } from '../lib/index.js' +import { getAuditLogEvents, getCategorizedAuditLogEvents } from '../lib/index' import type { AuditLogEventT } from '../types' describe('Audit log fields functionality', () => { diff --git a/src/automated-pipelines/README.md b/src/automated-pipelines/README.md index 5ee2b556973a..1021cb35ca4d 100644 --- a/src/automated-pipelines/README.md +++ b/src/automated-pipelines/README.md @@ -41,10 +41,10 @@ When creating a new pipeline, the source data that is being consumed may not hav - Create a new directory in the `src` directory with the name of the pipeline. For example, `src/codeql-cli`. - Add a README.md file that describes the pipeline and how to use it. This should include any dependencies, how to run the pipeline, and any other information that is needed to use the pipeline. It's strongly recommended to include a diagram showing the overall flow of the pipeline. -- Each pipeline typically requires a workflow to allow scheduling or manually running the pipeline. The workflow should be placed in the `.github/workflows` directory and named `sync-.js`. Each workflow typically requires adding a manual run option and an input parameter to specify the source repo's branch to use. -- Each pipeline will need a `scripts` directory with (at minimum) a `scripts/sync.js` file to run the pipeline. +- Each pipeline typically requires a workflow to allow scheduling or manually running the pipeline. The workflow should be placed in the `.github/workflows` directory and named `sync-.ts`. Each workflow typically requires adding a manual run option and an input parameter to specify the source repo's branch to use. +- Each pipeline will need a `scripts` directory with (at minimum) a `scripts/sync.ts` file to run the pipeline. - If the pipeline will contain structured data, you will need to add a `src//data` directory. The files inside the `data` directory are typically organized by version (e.g., `src/webhooks/data/fpt/*`). -- Pipelines typically have tests specific to the pipeline that are placed in the `src//tests` directory. There is no need to add tests that render the page because all autogenerated pages are tested in `src/automated-pipelines/tests/rendering.js`. +- Pipelines typically have tests specific to the pipeline that are placed in the `src//tests` directory. There is no need to add tests that render the page because all autogenerated pages are tested in `src/automated-pipelines/tests/rendering.ts`. - If the pipeline uses a Next.js page component (e.g., `pages/**/*.tsx`), ensure there is a test that fails if that page component is moved or deleted. 
## How to get help
diff --git a/src/codeql-cli/README.md b/src/codeql-cli/README.md
index d30bbb3c8ec1..2129efd84d65 100644
--- a/src/codeql-cli/README.md
+++ b/src/codeql-cli/README.md
@@ -10,7 +10,7 @@ The pipeline is used to generate Markdown files that create article pages on the
 A [workflow](https://github.com/github/docs-internal/blob/main/.github/workflows/sync-codeql-cli.yml) is used to trigger the automation of the CodeQL CLI documentation. The workflow is manually triggered by a member of the GitHub Docs team approximately every two weeks to align to releases of the CodeQL CLI. The workflow takes an input parameter that specifies the branch to pull the source files from in the semmle-code repo. If the branch input is omitted, the workflow will default to the `main` branch.
 
-The workflow runs the `src/codeql-cli/scripts/sync.js` script, which generates Markdown files under `content/code-security/codeql-cli/codeql-cli-manual`.
+The workflow runs the `src/codeql-cli/scripts/sync.ts` script, which generates Markdown files under `content/code-security/codeql-cli/codeql-cli-manual`.
 
 The workflow automatically creates a new pull request with the changes and the label `codeql-cli-pipeline`.
 
@@ -20,17 +20,17 @@ To run the CodeQL CLI pipeline locally:
 
 1. Clone the `semmle-code` repository inside your local `docs-internal` repository.
 2. [Install Pandoc](https://pandoc.org/installing.html). You can `brew install pandoc` on macOS.
-3. Run `src/codeql-cli/scripts/sync.js`.
+3. Run `src/codeql-cli/scripts/sync.ts`.
 
 ## About this directory
 
 - `src/codeql-cli/lib/config.json` - A configuration file used to specify metadata about the CodeQL CLI pipeline.
 - `src/codeql-cli/scripts` - The scripts and source code used run the CodeQL CLI pipeline.
-  - `src/codeql-cli/scripts/sync.js` - The entrypoint script that runs the CodeQL CLI pipeline.
+  - `src/codeql-cli/scripts/sync.ts` - The entrypoint script that runs the CodeQL CLI pipeline.
 
 ## Content team
 
-The content writers can manually update parts of the autogenerated Markdown files in `content/code-security/codeql-cli/codeql-cli-manual`. When new Markdown files are added they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/codeql-cli/lib/config.js`.
+The content writers can manually update parts of the autogenerated Markdown files in `content/code-security/codeql-cli/codeql-cli-manual`. When new Markdown files are added, they will get all of the frontmatter properties defined in the `defaultFrontmatter` property in `src/codeql-cli/lib/config.json`.
 
 When a new Markdown file is created, a writer can manually change any of the frontmatter. The pipeline will not overwrite the frontmatter on subsequent runs.
 
diff --git a/src/content-linter/README.md b/src/content-linter/README.md
index b4fcea1f8bef..f68f5d074a73 100644
--- a/src/content-linter/README.md
+++ b/src/content-linter/README.md
@@ -9,15 +9,15 @@ This README shows you how to contribute to the content linter code by adding new
 At a high-level, there are four steps to create a new rule:
 
 1. Adding a new rule file to [`src/content-linter/lib/linting-rules`](/src/content-linter/lib/linting-rules)
-1. Importing the new rule and adding it to the custom rules array in [`src/content-linter/lib/linting-rules/index.js`](/src/content-linter/lib/linting-rules/index.js)
-1. Adding the config for the new rule to [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js)
+1.
Importing the new rule and adding it to the custom rules array in [`src/content-linter/lib/linting-rules/index.ts`](/src/content-linter/lib/linting-rules/index.ts)
+1. Adding the config for the new rule to [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts)
 1. Adding a unit test for the new rule in [`src/content-linter/tests/unit`](/src/content-linter/tests/unit)
 
 Rules are located in the `src/content-linter/lib/linting-rules` directory. Each rule is a separate file that exports an object with metadata and a function. The function is the core logic that implements the rule. In some cases a single file contains more than one rule when colocating them makes more sense. Rules that are very specific can return more than one error type.
 
 ## Creating a new rule
 
-Create a new file in the `src/content-linter/lib/linting-rules` directory. The file name should be the same as the rule name. For example, if the rule name is `no-whitespace`, the file name should be `no-whitespace.js`. Avoid using the rule ID name for the file name. There is more information about the ID in [names](#names).
+Create a new file in the `src/content-linter/lib/linting-rules` directory. The file name should be the same as the rule name. For example, if the rule name is `no-whitespace`, the file name should be `no-whitespace.ts`. Avoid using the rule ID name for the file name. There is more information about the ID in [names](#names).
 
 Before creating a new rule, check that the rule does not already exist in [Markdownlint](https://github.com/DavidAnson/markdownlint/#rules--aliases). There are also many [open-source plugins](https://www.npmjs.com/search?q=keywords:markdownlint-rule) that may be used.
 
@@ -51,12 +51,12 @@ See the [custom rules](https://github.com/DavidAnson/markdownlint/blob/main/doc/
 ### Helper utilities
 
-Markdownlint provides several helper functions. Take a look at the many exports in [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js). Note, this is unsupported and may stop being published to in the future.
+Markdownlint provides several helper functions. Take a look at the many exports in [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js). Note that this package is unsupported and may stop being published in the future.
 
 We've also written a few of our own:
 
-- [`utils`](/src/content-linter/lib/helpers/utils.js)
-- [`liquid-utils`](/src/content-linter/lib/helpers/liquid-utils.js)
+- [`utils`](/src/content-linter/lib/helpers/utils.ts)
+- [`liquid-utils`](/src/content-linter/lib/helpers/liquid-utils.ts)
 
 ### Setting errors
 
@@ -66,7 +66,7 @@ When setting errors for a rule, there are a few different functions to choose fr
 - `addErrorContext` - when error detail is not needed but a specific range of context (Markdown snippet being checked) is needed
 - `addErrorDetailIf` - when the error detail just needs to be the expected and actual results
 
 See [markdownlint-rule-helpers](https://github.com/DavidAnson/markdownlint/blob/main/helpers/helpers.js) for more details.
 
 ### Async rules
 
@@ -96,7 +96,7 @@ See the [Markdownlint async documentation](https://github.com/DavidAnson/markdow
 ### Reading the data directory
 
-When you need to read files in the data directory, you can use the `getDataByLanguage` or `getDeepDataByLanguage` export in [`lib/get-data.js`](/lib/get-data.js).
This allows you to write unit tests that read data fixtures rather than real content. For an example of using `getDataByLanguage` or `getDeepDataByLanguage`, see the [`liquid-data-tags.js`](/src/content-linter/lib/linting-rules/liquid-data-tags.js) or [`liquid-versioning.js`](/src/content-linter/lib/linting-rules/liquid-versioning.js) rules. +When you need to read files in the data directory, you can use the `getDataByLanguage` or `getDeepDataByLanguage` export in [`lib/get-data.ts`](/lib/get-data.ts). This allows you to write unit tests that read data fixtures rather than real content. For an example of using `getDataByLanguage` or `getDeepDataByLanguage`, see the [`liquid-data-tags.ts`](/src/content-linter/lib/linting-rules/liquid-data-tags.ts) or [`liquid-versioning.ts`](/src/content-linter/lib/linting-rules/liquid-versioning.ts) rules. ### `names` @@ -137,13 +137,13 @@ Tags are used to categorize rules. Choose one or more tags from the list below. ## Adding the rule to the custom rules array -To add the new rule to the list of custom rules that are run against GitHub Docs content, import the rule and add it to the `rules` array in [`src/content-linter/lib/linting-rules/index.js`](/src/content-linter/lib/linting-rules/index.js). The `rules` array defines all the custom rules that we add to the Markdownlint configuration [`options.customRules`](https://github.com/DavidAnson/markdownlint#optionscustomrules). Custom rules include the rules we write in this project and any open-source rules we use. +To add the new rule to the list of custom rules that are run against GitHub Docs content, import the rule and add it to the `rules` array in [`src/content-linter/lib/linting-rules/index.ts`](/src/content-linter/lib/linting-rules/index.ts). The `rules` array defines all the custom rules that we add to the Markdownlint configuration [`options.customRules`](https://github.com/DavidAnson/markdownlint#optionscustomrules). Custom rules include the rules we write in this project and any open-source rules we use. ## Configuring a new rule -Each rule that we configure for GitHub Docs has a corresponding entry in either `src/content-linter/style/base.js` or `src/content-linter/style/github-docs.js`. The `base.js` file contains rules that are available in the [Markdownlint](https://github.com/DavidAnson/markdownlint) project. The `github-docs.js` file contains open-source plugins (including [markdownlint-github](https://github.com/github/markdownlint-github/tree/main)) and the custom rules that we develop that are specific to GitHub Docs. +Each rule that we configure for GitHub Docs has a corresponding entry in either `src/content-linter/style/base.ts` or `src/content-linter/style/github-docs.ts`. The `base.ts` file contains rules that are available in the [Markdownlint](https://github.com/DavidAnson/markdownlint) project. The `github-docs.ts` file contains open-source plugins (including [markdownlint-github](https://github.com/github/markdownlint-github/tree/main)) and the custom rules that we develop that are specific to GitHub Docs. -Inside [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js), there are a few different sections: +Inside [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts), there are a few different sections: - `githubDocsConfig` - Primary area that new rules are added to. The rules in this section configure Markdownlint to separate frontmatter from Markdown automatically. 
Both the frontmatter and Markdown are available to read from the rule logic, but you cannot leave an error on a line that contains frontmatter. Frontmatter is not sent through the Markdown parser by Markdownlint. - `githubDocsFrontmatterConfig` - Contains rules that check frontmatter properties _and_ need to leave errors on frontmatter line numbers. @@ -182,23 +182,23 @@ Once a rule is written, added to the custom rules array, and configured, you can npm run lint-content -- --paths --rules ``` -Each custom rule must add a unit test in the `src/content-linter/tests/unit` directory. The unit test should be named the same as the rule file name. For example, if the rule file name is `no-whitespace.js`, the unit test file name should be `no-whitespace.js`. +Each custom rule must add a unit test in the `src/content-linter/tests/unit` directory. The unit test should be named the same as the rule file name. For example, if the rule file name is `no-whitespace.ts`, the unit test file name should be `no-whitespace.ts`. Unit tests must test auto-fixes if the rule allows them. The unit test should also test the line number and range. Include positive and negative tests. -If the test requires checking the file path, you can provide a fixture. For an example, see [`early-access-references.js`](/src/content-linter/tests/unit/early-access-references.js). Most tests pass Markdown strings to the rule directly. +If the test requires checking the file path, you can provide a fixture. For an example, see [`early-access-references.ts`](/src/content-linter/tests/unit/early-access-references.ts). Most tests pass Markdown strings to the rule directly. ## Content linter scripts -- [`lint-content.js`](/src/content-linter/scripts/lint-content.js) - The primary script used to run rules against content. We have a fairly customized implementation of Markdownlint, which prevented us from using [Markdownlint CLI2](https://github.com/DavidAnson/markdownlint-cli2). For example, we run Markdownlint more than once to allow different configurations for the `content` directory and `data` directory. We also run Markdownlint again to allow checking frontmatter properties. To view the options of the script, run `npm run lint-content -- --help`. -- [`disable-rules.js`](/src/content-linter/scripts/disable-rules.js) - This script is used to automatically add disable comments to the end of a line that violates a rule. This allows us to have violations in the content while also setting the rule's severity to `error`. -- [`pretty-print-results.js`](/src/content-linter/scripts/pretty-print-results.js) - This script simplifies and makes the results printed to the console easier to read. +- [`lint-content.ts`](/src/content-linter/scripts/lint-content.ts) - The primary script used to run rules against content. We have a fairly customized implementation of Markdownlint, which prevented us from using [Markdownlint CLI2](https://github.com/DavidAnson/markdownlint-cli2). For example, we run Markdownlint more than once to allow different configurations for the `content` directory and `data` directory. We also run Markdownlint again to allow checking frontmatter properties. To view the options of the script, run `npm run lint-content -- --help`. +- [`disable-rules.ts`](/src/content-linter/scripts/disable-rules.ts) - This script is used to automatically add disable comments to the end of a line that violates a rule. This allows us to have violations in the content while also setting the rule's severity to `error`. 
+- [`pretty-print-results.ts`](/src/content-linter/scripts/pretty-print-results.ts) - This script simplifies the results printed to the console and makes them easier to read.
 
 ## Updating content to adhere to a new rule
 
 Introducing a new rule with a severity of `error` can be difficult when many violations of that rule exist in content. If the rule implements an autofix by setting the `fixInfo` property in the error object, you can use the rule to autofix content before shipping the rule.
 
-If the new rule doesn't have a possible autofix, you can use `disable-rules.js` to automatically add disable comments to the end of each Markdown line that contains a violation. This is not always possible since some lines are within code blocks and cannot be disabled.
+If the new rule doesn't have a possible autofix, you can use `disable-rules.ts` to automatically add disable comments to the end of each Markdown line that contains a violation. This is not always possible since some lines are within code blocks and cannot be disabled.
 
 The last option is to manually fix the violations. This is the most time-consuming option, but it's the only option when the rule cannot be autofixed and the line cannot be disabled.
 
@@ -206,7 +206,7 @@ A rule with too many violations to fix can be set to a severity of `warning`.
 
 ## Using the search-replace plugin
 
-Because the search-replace rule consists of many search terms, it essentially performs one or more rule checks. Each rule is defined in the [`src/content-linter/style/github-docs.js`](/src/content-linter/style/github-docs.js) config under `searchReplaceConfig`.
+Because the search-replace rule consists of many search terms, it essentially performs one or more rule checks. Each rule is defined in the [`src/content-linter/style/github-docs.ts`](/src/content-linter/style/github-docs.ts) config under `searchReplaceConfig`.
 
 You can add a new `search-replace` rule using any search term or regex by adding it to the `rules` array. This is an easy way to perform checks if the check is just looking for a string or simple regex.
 
@@ -222,7 +222,7 @@ docs.github.com
 
 ## Adding context to a base rule's error message
 
-If you want to add context to a base rule's error message, go to[`base.js`](/src/content-linter/style/base.js), and add the `context` property to the base rule's object. For e.g. if you wanted to add `context` to `MD040` (the `fenced-code-language` base rule), the object would look like this:
+If you want to add context to a base rule's error message, go to [`base.ts`](/src/content-linter/style/base.ts), and add the `context` property to the base rule's object. For example,
if you wanted to add `context` to `MD040` (the `fenced-code-language` base rule), the object would look like this: ```javascript 'fenced-code-language': { diff --git a/src/content-linter/lib/linting-rules/liquid-quoted-conditional-arg.ts b/src/content-linter/lib/linting-rules/liquid-quoted-conditional-arg.ts index 45e25552a7a5..3549a7eb93bd 100644 --- a/src/content-linter/lib/linting-rules/liquid-quoted-conditional-arg.ts +++ b/src/content-linter/lib/linting-rules/liquid-quoted-conditional-arg.ts @@ -20,7 +20,7 @@ export const liquidQuotedConditionalArg: Rule = { tags: ['liquid', 'format'], function: (params: RuleParams, onError: RuleErrorCallback) => { const content = params.lines.join('\n') - // Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.js which lacks type definitions + // Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.ts which lacks type definitions const tokens = getLiquidTokens(content) .filter((token: any) => token.kind === TokenKind.Tag) .filter((token: any) => conditionalTags.includes(token.name)) diff --git a/src/content-linter/lib/linting-rules/octicon-aria-labels.ts b/src/content-linter/lib/linting-rules/octicon-aria-labels.ts index 8d42e6a590dd..a3eb531395ad 100644 --- a/src/content-linter/lib/linting-rules/octicon-aria-labels.ts +++ b/src/content-linter/lib/linting-rules/octicon-aria-labels.ts @@ -21,7 +21,7 @@ export const octiconAriaLabels: Rule = { parser: 'markdownit', function: (params: RuleParams, onError: RuleErrorCallback) => { const content = params.lines.join('\n') - // Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.js which lacks type definitions + // Using 'any' type for tokens as getLiquidTokens returns tokens from liquid-utils.ts which lacks type definitions const tokens = getLiquidTokens(content) .filter((token: any) => token.kind === TokenKind.Tag) .filter((token: any) => token.name === 'octicon') diff --git a/src/content-linter/scripts/disable-rules.ts b/src/content-linter/scripts/disable-rules.ts index cf0225f5c2e5..e03f09b764ef 100755 --- a/src/content-linter/scripts/disable-rules.ts +++ b/src/content-linter/scripts/disable-rules.ts @@ -4,7 +4,7 @@ // // Usage: // -// src/content-linter/scripts/disable-rules.js no-generic-link-text +// src/content-linter/scripts/disable-rules.ts no-generic-link-text import fs from 'fs' import { spawn } from 'child_process' diff --git a/src/content-linter/tests/category-pages.ts b/src/content-linter/tests/category-pages.ts index dc3773960cb0..6366e3d66f6a 100644 --- a/src/content-linter/tests/category-pages.ts +++ b/src/content-linter/tests/category-pages.ts @@ -223,7 +223,7 @@ describe.skip('category pages', () => { }` const expectedSlug = expectedSlugs.at(-1) as string const newCategoryDirPath = path.join(path.dirname(categoryDirPath), expectedSlug) - customMessage += `\nTo resolve this consider running:\n ./src/content-render/scripts/move-content.js ${categoryDirPath} ${newCategoryDirPath}\n` + customMessage += `\nTo resolve this consider running:\n ./src/content-render/scripts/move-content.ts ${categoryDirPath} ${newCategoryDirPath}\n` // Check if the directory name matches the expected slug expect(expectedSlugs.includes(categoryDirName), customMessage).toBeTruthy() }) diff --git a/src/content-linter/tests/lint-frontmatter-links.ts b/src/content-linter/tests/lint-frontmatter-links.ts index c0e5d9fbc45e..5e5985dc9fb5 100644 --- a/src/content-linter/tests/lint-frontmatter-links.ts +++ 
b/src/content-linter/tests/lint-frontmatter-links.ts @@ -35,7 +35,7 @@ describe('front matter', () => { // Using any type because trouble array contains objects with varying error properties if (trouble.find((t: any) => t.redirects)) { customErrorMessage += `\n\nNOTE! To automatically fix the redirects run this command:\n` - customErrorMessage += `\n\t./src/links/scripts/update-internal-links.js content/${page.relativePath}\n\n` + customErrorMessage += `\n\t./src/links/scripts/update-internal-links.ts content/${page.relativePath}\n\n` } } return customErrorMessage diff --git a/src/content-linter/tests/unit/code-annotation-comment-spacing.ts b/src/content-linter/tests/unit/code-annotation-comment-spacing.ts index b042715498c9..37f76e5fe8fc 100644 --- a/src/content-linter/tests/unit/code-annotation-comment-spacing.ts +++ b/src/content-linter/tests/unit/code-annotation-comment-spacing.ts @@ -232,7 +232,7 @@ describe(codeAnnotationCommentSpacing.names.join(' - '), () => { 'echo "Hello"', '', '// This JS-style comment is fine', - 'node script.js', + 'node script.ts', '', '--This SQL comment needs space', 'psql -c "SELECT 1;"', diff --git a/src/content-linter/tests/unit/rule-filtering.ts b/src/content-linter/tests/unit/rule-filtering.ts index 4ee5a27020bf..606ee95e7d19 100644 --- a/src/content-linter/tests/unit/rule-filtering.ts +++ b/src/content-linter/tests/unit/rule-filtering.ts @@ -1,5 +1,5 @@ import { describe, test, expect, vi } from 'vitest' -import { shouldIncludeRule } from '../../scripts/lint-content.js' +import { shouldIncludeRule } from '../../scripts/lint-content' // Mock the get-rules module to provide test data for rule definitions vi.mock('../../lib/helpers/get-rules', () => ({ diff --git a/src/content-render/README.md b/src/content-render/README.md index ab013400f2dc..2d134219cede 100644 --- a/src/content-render/README.md +++ b/src/content-render/README.md @@ -85,9 +85,9 @@ Each custom tag has the following: - a JavaScript class in `lib/liquid-tags/` - an HTML template in `includes/liquid-tags/` -The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.js` and `includes/liquid-tags/my-tag.html` +The class and the template should have corresponding names, like `lib/liquid-tags/my-tag.ts` and `includes/liquid-tags/my-tag.html` -You must also register the new tag in `src/content-render/liquid/engine.js` with a line like this: +You must also register the new tag in `src/content-render/liquid/engine.ts` with a line like this: ``` engine.registerTag('my_tag', require('./liquid-tags/my-tag')) diff --git a/src/content-render/scripts/move-content.ts b/src/content-render/scripts/move-content.ts index 094bd2d300e9..018b98b57dcf 100755 --- a/src/content-render/scripts/move-content.ts +++ b/src/content-render/scripts/move-content.ts @@ -3,7 +3,7 @@ // // Use this script to help you move or rename a single file or a folder. The script will move or rename the file or folder for you, update relevant `children` in the index.md file(s), and add a `redirect_from` to frontmatter in the renamed file(s). Note: You will still need to manually update the `title` if necessary. // -// By default, the `move-content.js` script will commit the changes it makes. If you don't want the script to run any git commands for you, run it with the `--no-git` flag. Note: In most cases it will be easier and safer to let the script run the git commands for you, since git can get confused when a file is both renamed and edited. 
+// By default, the `move-content.ts` script will commit the changes it makes. If you don't want the script to run any git commands for you, run it with the `--no-git` flag. Note: In most cases it will be easier and safer to let the script run the git commands for you, since git can get confused when a file is both renamed and edited. // // To learn more about the script, you can run `npm run move-content --help`. // diff --git a/src/content-render/scripts/render-content-markdown.ts b/src/content-render/scripts/render-content-markdown.ts index a9cabc626775..a7d3d8cb296e 100755 --- a/src/content-render/scripts/render-content-markdown.ts +++ b/src/content-render/scripts/render-content-markdown.ts @@ -6,8 +6,8 @@ import { renderLiquid } from '@/content-render/liquid/index' import shortVersionsMiddleware from '@/versions/middleware/short-versions' import type { ExtendedRequest } from '@/types' -const { loadPages } = await import('@/frame/lib/page-data.js') -const { allVersions } = await import('@/versions/lib/all-versions.js') +const { loadPages } = await import('@/frame/lib/page-data') +const { allVersions } = await import('@/versions/lib/all-versions') const contentCopilotDir = path.join(process.cwd(), 'content-copilot') diff --git a/src/content-render/scripts/update-filepaths.ts b/src/content-render/scripts/update-filepaths.ts index c2acef0ba330..9ee8a696b897 100755 --- a/src/content-render/scripts/update-filepaths.ts +++ b/src/content-render/scripts/update-filepaths.ts @@ -165,7 +165,7 @@ function moveFile(result: string[], options: ScriptOptions): void { const stdout = execFileSync( 'tsx', [ - 'src/content-render/scripts/move-content.js', + 'src/content-render/scripts/move-content.ts', '--no-git', '--verbose', contentPath, diff --git a/src/content-render/tests/data.ts b/src/content-render/tests/data.ts index 322509b2ba63..b55f997d0269 100644 --- a/src/content-render/tests/data.ts +++ b/src/content-render/tests/data.ts @@ -6,7 +6,7 @@ import nonEnterpriseDefaultVersion from '@/versions/lib/non-enterprise-default-v import { DataDirectory } from '@/tests/helpers/data-directory' describe('data tag', () => { - // Using 'any' type as DataDirectory is from data-directory.js which lacks type definitions + // Using 'any' type as DataDirectory is from data-directory.ts which lacks type definitions let dd: any const enDirBefore = languages.en.dir diff --git a/src/content-render/tests/liquid-helpers.ts b/src/content-render/tests/liquid-helpers.ts index 34042034ecaf..57a9a863b489 100644 --- a/src/content-render/tests/liquid-helpers.ts +++ b/src/content-render/tests/liquid-helpers.ts @@ -9,7 +9,7 @@ describe('liquid helper tags', () => { // Using 'any' type as context is a test fixture with dynamic properties set in beforeAll const context: any = {} - // Using 'any' type as DataDirectory is from data-directory.js which lacks type definitions + // Using 'any' type as DataDirectory is from data-directory.ts which lacks type definitions let dd: any const enDirBefore = languages.en.dir diff --git a/src/content-render/tests/render-changed-and-deleted-files.ts b/src/content-render/tests/render-changed-and-deleted-files.ts index 404e7bf7da1f..c9177b975559 100644 --- a/src/content-render/tests/render-changed-and-deleted-files.ts +++ b/src/content-render/tests/render-changed-and-deleted-files.ts @@ -26,7 +26,7 @@ * * export DELETED_FILES=`git diff --name-only --diff-filter=D main...` * export CHANGED_FILES=`git diff --name-only --diff-filter=M main...` - * npm run test -- 
src/content-render/tests/render-changed-and-deleted-files.js + * npm run test -- src/content-render/tests/render-changed-and-deleted-files.ts */ import path from 'path' diff --git a/src/content-render/unified/annotate.ts b/src/content-render/unified/annotate.ts index e006abf317b4..02f6a9331d64 100644 --- a/src/content-render/unified/annotate.ts +++ b/src/content-render/unified/annotate.ts @@ -24,7 +24,7 @@ Contributing rules: - You must start the code section with a single line comment, otherwise the two will be flipped. - For HTML style, you can include a line after your annotations such as `` to maintain syntax highlighting; this will not impact what renders. -`parse-info-string.js` plugin is required for this to work, and must come before `remark-rehype`. +`parse-info-string.ts` plugin is required for this to work, and must come before `remark-rehype`. `annotate` must come before the `highlight` plugin. */ diff --git a/src/content-render/unified/code-header.ts b/src/content-render/unified/code-header.ts index 1e4c7d28b808..958c01b21595 100644 --- a/src/content-render/unified/code-header.ts +++ b/src/content-render/unified/code-header.ts @@ -51,7 +51,7 @@ function wrapCodeExample(node: any, tree: any): Element { const lang: string = node.children[0].properties.className?.[0].replace('language-', '') const code: string = node.children[0].children[0].value - const subnav = null // getSubnav() lives in annotate.js, not needed for normal code blocks + const subnav = null // getSubnav() lives in annotate.ts, not needed for normal code blocks const prompt = getPrompt(node, tree, code) // returns null if there's no prompt const hasCopy: boolean = Boolean(getPreMeta(node).copy) // defaults to true diff --git a/src/content-render/unified/rewrite-asset-img-tags.ts b/src/content-render/unified/rewrite-asset-img-tags.ts index c0f92d69bebb..d4c8f8481461 100644 --- a/src/content-render/unified/rewrite-asset-img-tags.ts +++ b/src/content-render/unified/rewrite-asset-img-tags.ts @@ -81,7 +81,7 @@ export default function rewriteAssetImgTags() { */ function injectMaxWidth(pathname: string, maxWidth: number): string { const split = pathname.split('/') - // This prefix needs to match what's possibly expected in dynamic-assets.js + // This prefix needs to match what's possibly expected in dynamic-assets.ts const inject = `mw-${maxWidth}` if (split.includes(inject)) { throw new Error(`pathname already includes '${inject}'`) diff --git a/src/content-render/unified/rewrite-local-links.ts b/src/content-render/unified/rewrite-local-links.ts index 2e942c355659..c33e7e512be4 100644 --- a/src/content-render/unified/rewrite-local-links.ts +++ b/src/content-render/unified/rewrite-local-links.ts @@ -274,7 +274,7 @@ Look for an internal link that starts with '${url}'. 
newHref = newHref.replace('/enterprise-server@latest/', `/enterprise-server@${latest}/`) if (newHref === url) { - // start clean with no language (TOC pages already include the lang codes via lib/liquid-tags/link.js) + // start clean with no language (TOC pages already include the lang codes via lib/liquid-tags/link.ts) const hrefWithoutLang = getPathWithoutLanguage(url) // normalize any legacy links so they conform to new link structure diff --git a/src/data-directory/lib/data-schemas/index.ts b/src/data-directory/lib/data-schemas/index.ts index 12a957e39c99..c51d866da498 100644 --- a/src/data-directory/lib/data-schemas/index.ts +++ b/src/data-directory/lib/data-schemas/index.ts @@ -32,13 +32,13 @@ function loadTableSchemas(): DataSchemas { // Manual schema registrations for non-table data const manualSchemas: DataSchemas = { - 'data/features': '@/data-directory/lib/data-schemas/features.js', + 'data/features': '@/data-directory/lib/data-schemas/features', 'data/variables': '@/data-directory/lib/data-schemas/variables', - 'data/learning-tracks': '@/data-directory/lib/data-schemas/learning-tracks.js', - 'data/release-notes': '@/data-directory/lib/data-schemas/release-notes.js', + 'data/learning-tracks': '@/data-directory/lib/data-schemas/learning-tracks', + 'data/release-notes': '@/data-directory/lib/data-schemas/release-notes', 'data/code-languages.yml': '@/data-directory/lib/data-schemas/code-languages', - 'data/glossaries/candidates.yml': '@/data-directory/lib/data-schemas/glossaries-candidates.js', - 'data/glossaries/external.yml': '@/data-directory/lib/data-schemas/glossaries-external.js', + 'data/glossaries/candidates.yml': '@/data-directory/lib/data-schemas/glossaries-candidates', + 'data/glossaries/external.yml': '@/data-directory/lib/data-schemas/glossaries-external', } // Combine manual registrations with auto-discovered table schemas diff --git a/src/data-directory/lib/get-data.ts b/src/data-directory/lib/get-data.ts index 8565df9b38d5..6478c8f3ec3f 100644 --- a/src/data-directory/lib/get-data.ts +++ b/src/data-directory/lib/get-data.ts @@ -38,15 +38,13 @@ export const getDeepDataByLanguage = memoize( } // The `dir` argument is only used for testing purposes. - // For example, our unit tests that depend on using a fixtures - // root. + // For example, our unit tests that depend on using a fixtures root. // If we don't allow those tests to override the `dir` argument, - // it'll be stuck from the first time `languages.js` was imported. - let actualDir = dir - if (actualDir === null) { - actualDir = languages[langCode].dir + // it'll be stuck from the first time `languages.ts` was imported. 
+ if (dir === null) { + dir = languages[langCode].dir } - return getDeepDataByDir(dottedPath, actualDir) + return getDeepDataByDir(dottedPath, dir) }, ) diff --git a/src/data-directory/tests/orphaned-features.ts b/src/data-directory/tests/orphaned-features.ts index 6bb15c35704c..0ba25185a3a1 100644 --- a/src/data-directory/tests/orphaned-features.ts +++ b/src/data-directory/tests/orphaned-features.ts @@ -9,7 +9,7 @@ const fixturesDir = path.join(__dirname, 'orphaned-features', 'fixtures') // Import the actual helper functions from the orphaned features script const { getVariableFiles, getReusableFiles } = await import( - '@/data-directory/scripts/find-orphaned-features/find.js' + '@/data-directory/scripts/find-orphaned-features/find' ) describe('orphaned features detection', () => { diff --git a/src/events/lib/schema.ts b/src/events/lib/schema.ts index e16260bb5b8d..4583ae621f44 100644 --- a/src/events/lib/schema.ts +++ b/src/events/lib/schema.ts @@ -93,12 +93,12 @@ const context = { page_document_type: { type: 'string', description: 'The generic page document type based on URL path.', - enum: ['homepage', 'early-access', 'product', 'category', 'subcategory', 'article'], // get-document-type.js + enum: ['homepage', 'early-access', 'product', 'category', 'subcategory', 'article'], // get-document-type.ts }, page_type: { type: 'string', description: 'Optional page type from the content frontmatter.', - enum: ['overview', 'quick_start', 'tutorial', 'how_to', 'reference', 'rai'], // frontmatter.js + enum: ['overview', 'quick_start', 'tutorial', 'how_to', 'reference', 'rai'], // frontmatter.ts }, status: { type: 'number', diff --git a/src/fixtures/README.md b/src/fixtures/README.md index df63faa2990d..49a354472cf4 100644 --- a/src/fixtures/README.md +++ b/src/fixtures/README.md @@ -46,8 +46,8 @@ action. Feel free to create sub-directories or new files. For example, if it's about end-to-end testing a new custom Liquid tag called -`lib/liquid-tags/snacks.js` you create a new test called -`src/fixtures/tests/snack.js`. (And equally, you might want to create +`lib/liquid-tags/snacks.ts` you create a new test called +`src/fixtures/tests/snack.ts`. (And equally, you might want to create `src/fixtures/fixtures/content/get-started/foo/snacking.md`) To run the tests use: @@ -73,7 +73,7 @@ There's a script you can always run that makes sure all and any of these files are up to do: ```shell -./src/tests/scripts/copy-fixture-data.js +./src/tests/scripts/copy-fixture-data.ts ``` It's safe to run any time. And it might be necessary to run so that diff --git a/src/fixtures/fixtures/content/get-started/liquid/ifversion.md b/src/fixtures/fixtures/content/get-started/liquid/ifversion.md index f7eace4b031f..520dbd4c7271 100644 --- a/src/fixtures/fixtures/content/get-started/liquid/ifversion.md +++ b/src/fixtures/fixtures/content/get-started/liquid/ifversion.md @@ -36,7 +36,7 @@ condition-e too tied to the past. You can type "3.9" if you want, but that version is only working right now/today and will eventually break tests as the values in - `enterprise-server-releases.js` change over time. + `enterprise-server-releases.ts` change over time. 
-->
 
 {% ifversion ghes > __GHES_DEPRECATED__[0] %}
diff --git a/src/fixtures/fixtures/liquid-tags/minimal-conditional.md b/src/fixtures/fixtures/liquid-tags/minimal-conditional.md
index ffb4e46bc2e4..822d9a66f756 100644
--- a/src/fixtures/fixtures/liquid-tags/minimal-conditional.md
+++ b/src/fixtures/fixtures/liquid-tags/minimal-conditional.md
@@ -1,5 +1,5 @@
 ---
-front: matter used in tests/unit/liquid-tags/tokens-test.js
+front: matter used in tests/unit/liquid-tags/tokens-test.ts
 ---
 
 - One {% if product.title == "Awesome Shoes" %}
diff --git a/src/fixtures/fixtures/page-with-invalid-product-version.md b/src/fixtures/fixtures/page-with-invalid-product-version.md
index 43632dc65e31..17c23d4b740a 100644
--- a/src/fixtures/fixtures/page-with-invalid-product-version.md
+++ b/src/fixtures/fixtures/page-with-invalid-product-version.md
@@ -3,5 +3,5 @@ title: This is an article
 intro: I have invalid versions frontmatter
 versions:
   fpt: '*'
-  ghec: 'issue-1234' # Only semver is allowed, per lib/all-versions.js
+  ghec: 'issue-1234' # Only semver is allowed, per lib/all-versions.ts
 ---
diff --git a/src/frame/components/context/ArticleContext.tsx b/src/frame/components/context/ArticleContext.tsx
index a067bc1cb910..3f35eebd5430 100644
--- a/src/frame/components/context/ArticleContext.tsx
+++ b/src/frame/components/context/ArticleContext.tsx
@@ -103,7 +103,7 @@ export const getArticleContextFromRequest = (req: any): ArticleContextT => {
     currentJourneyTrack: req.context.currentJourneyTrack,
     detectedPlatforms: page.detectedPlatforms || [],
     detectedTools: page.detectedTools || [],
-    allTools: page.allToolsParsed || [], // this is set at the page level, see lib/page.js
+    allTools: page.allToolsParsed || [], // this is set at the page level, see lib/page.ts
     supportPortalVaIframeProps,
     currentLayout: req.context.currentLayoutName,
   }
diff --git a/src/frame/components/context/MainContext.tsx b/src/frame/components/context/MainContext.tsx
index 346f92adbcd6..714dadb3a256 100644
--- a/src/frame/components/context/MainContext.tsx
+++ b/src/frame/components/context/MainContext.tsx
@@ -22,7 +22,7 @@ export type VersionItem = {
   latestApiVersion: string
 }
 
-// This reflects what gets exported from `all-versions.js` in the
+// This reflects what gets exported from `all-versions.ts` in the
 // `allVersions` object.
 // It's necessary for TypeScript, but we don't need to write down
 // every possible key that might be present because we don't need it
diff --git a/src/frame/components/page-header/Breadcrumbs.tsx b/src/frame/components/page-header/Breadcrumbs.tsx
index aca31cadf4a8..e3dbac632635 100644
--- a/src/frame/components/page-header/Breadcrumbs.tsx
+++ b/src/frame/components/page-header/Breadcrumbs.tsx
@@ -22,7 +22,7 @@ export const Breadcrumbs = ({ inHeader }: Props) => {
     NOTE: The breadcrumbs class and the nav tag are used by the Lunr search scripts. The a tag generated by the Link is also used. If these change, please also change
-      updating src/search/scripts/parse-page-sections-into-records.js.
+      src/search/scripts/parse-page-sections-into-records.ts.
   */