// forked from DefinitelyTyped/dt-mergebot
// pr-info.ts
import { ColumnName, PopularityLevel } from "./basic";
import { PR_repository_pullRequest,
PR_repository_pullRequest_commits_nodes_commit_checkSuites,
PR_repository_pullRequest_timelineItems,
PR_repository_pullRequest_comments_nodes,
PR_repository_pullRequest_commits_nodes_commit_checkSuites_nodes,
} from "./queries/schema/PR";
import { getMonthlyDownloadCount } from "./util/npm";
import { fetchFile as defaultFetchFile } from "./util/fetchFile";
import { noNullish, someLast, sameUser, authorNotBot, max, abbrOid } from "./util/util";
import { TOO_MANY_FILES } from "./queries/pr-query";
import * as comment from "./util/comment";
import * as urls from "./urls";
import * as OldHeaderParser from "@definitelytyped/old-header-parser";
import * as jsonDiff from "fast-json-patch";
import { isDeepStrictEqual } from "util";
import { isDeclarationPath } from "@definitelytyped/utils";
// Monthly-download cutoffs used to bucket packages into a PopularityLevel
// (see downloadsToPopularityLevel below).
const CriticalPopularityThreshold = 5_000_000;
const NormalPopularityThreshold = 200_000;

// Some error found, will be passed to `process` to report in a comment
interface BotError {
    readonly type: "error";
    readonly now: Date;
    readonly message: string;
    // PR author's login, when one exists (a PR can lack an author)
    readonly author: string | undefined;
}

// Tells the caller the PR should be taken off the project board
// (produced for draft and non-open PRs)
interface BotEnsureRemovedFromProject {
    readonly type: "remove";
    readonly now: Date;
    readonly message: string;
    readonly isDraft: boolean;
}

// Per-package summary of what a PR touches
export type PackageInfo = {
    name: string | null; // null => not in a package (= infra files)
    kind: "edit" | "add" | "delete";
    files: FileInfo[];
    owners: string[]; // existing owners on master
    addedOwners: string[];
    deletedOwners: string[];
    popularityLevel: PopularityLevel;
    // Only set for the infra entry (name === null): true when the edit is a
    // benign attw.json change (see isAllowedAttwEdit)
    isSafeInfrastructureEdit?: boolean;
};

// Classification of a changed file, driving how carefully it is reviewed
type FileKind = "test" | "definition" | "markdown" | "package-meta" | "package-meta-ok"| "infrastructure";

export type FileInfo = {
    path: string,
    kind: FileKind,
    suspect?: string // reason for a file being "package-meta" rather than "package-meta-ok"
};

// One reviewer's most recent review, discriminated by its effect on the PR
export type ReviewInfo = {
    type: string,
    reviewer: string,
    date: Date
} & (
    | { type: "approved", isMaintainer: boolean }
    | { type: "changereq" }
    | { type: "stale", abbrOid: string } // reviewed an older commit than head
);
// Aggregate CI status for the head commit; "action_required" means a
// maintainer must approve the workflow run before it executes (see getCIResult)
export type CIResult = "unknown" | "pass" | "fail" | "missing" | "action_required";

// Everything the bot derives about an active PR from the GraphQL response
export interface PrInfo {
    readonly type: "info";
    /** ISO8601 date string for the time the PR info was created at */
    readonly now: Date;
    readonly pr_number: number;
    /**
     * The head commit of this PR (full format)
     */
    readonly headCommitOid: string;
    /**
     * merge-base-like commit for config comparisons (see getBaseId() below)
     */
    readonly mergeBaseOid: string;
    /**
     * The GitHub login of the PR author
     */
    readonly author: string;
    /**
     * The CI status of the head commit
     */
    readonly ciResult: CIResult;
    /**
     * A link to the log for the failing CI if it exists
     */
    readonly ciUrl?: string;
    /**
     * An ID for a check suite which could need re-running
     */
    readonly reRunCheckSuiteIDs?: number[];
    /**
     * True if the PR has a merge conflict
     */
    readonly hasMergeConflict: boolean;
    /**
     * The date the latest commit was pushed to GitHub
     */
    readonly lastPushDate: Date;
    /**
     * The date of the last activity, including non-bot comments
     */
    readonly lastActivityDate: Date;
    /**
     * Name of column used if a maintainer blessed this PR
     */
    readonly maintainerBlessed?: ColumnName;
    /**
     * The time we posted a merge offer, if any (required for merge request in addition to passing CI and a review)
     */
    readonly mergeOfferDate?: Date;
    /*
     * Time of a "ready to merge" request and the requestor
     */
    readonly mergeRequestDate?: Date;
    readonly mergeRequestUser?: string;
    readonly isFirstContribution: boolean;
    /*
     * True if there are more files than we can fetch from the initial query (or no files)
     */
    readonly tooManyFiles: boolean;
    /*
     * True for PRs with over 5k line changes (top ~3%)
     */
    readonly hugeChange: boolean;
    readonly popularityLevel: PopularityLevel;
    readonly pkgInfo: readonly PackageInfo[];
    readonly reviews: readonly ReviewInfo[];
    // The ID of the main comment so that it can be linked to by other comments
    readonly mainBotCommentID?: number;
}

// Every outcome deriveStateForPR can produce
export type BotResult =
    | PrInfo
    | BotError
    | BotEnsureRemovedFromProject;
// Find the commit node whose oid matches the PR's head ref, if it was fetched.
function getHeadCommit(pr: PR_repository_pullRequest) {
    const headOid = pr.headRefOid;
    const headNode = pr.commits.nodes?.find(node => node?.commit.oid === headOid);
    return headNode?.commit;
}
function getBaseId(pr: PR_repository_pullRequest): string | undefined {
    // Approximates `git merge-base` cheaply: walk the PR's commits from newest
    // to oldest and return the first parent oid that is not itself one of the
    // PR's commits. Assumes linear history on master and sane merges at most;
    // not too reliable, but better than always comparing against "master".
    const nodes = pr.commitIds.nodes;
    if (!nodes) return undefined;
    const oidsInPr = noNullish(nodes.map(n => n?.commit.oid));
    if (oidsInPr.length === 0) return undefined;
    for (const node of [...nodes].reverse()) {
        for (const parent of node?.commit.parents.nodes ?? []) {
            const parentOid = parent?.oid;
            if (parentOid && !oidsInPr.includes(parentOid)) return parentOid;
        }
    }
    return undefined;
}
// The GQL response => Useful data for us
export async function deriveStateForPR(
    prInfo: PR_repository_pullRequest,
    fetchFile = defaultFetchFile,
    getDownloads = getMonthlyDownloadCount,
    now = new Date(),
): Promise<BotResult> {
    // Bail out early for PRs the bot should not process at all
    if (prInfo.author == null) return botError("PR author does not exist");
    if (prInfo.isDraft) return botEnsureRemovedFromProject("PR is a draft");
    if (prInfo.state !== "OPEN") return botEnsureRemovedFromProject("PR is not active");
    const headCommit = getHeadCommit(prInfo);
    if (headCommit == null) return botError("No head commit found");
    // Revision config files are diffed against (falls back to master)
    const baseId = getBaseId(prInfo) || "master";
    const author = prInfo.author.login;
    const isFirstContribution = prInfo.authorAssociation === "FIRST_TIME_CONTRIBUTOR";
    const createdDate = new Date(prInfo.createdAt);
    // apparently `headCommit.pushedDate` can be null in some cases (see #48708), use the PR creation time for that
    // (it would be bad to use `committedDate`/`authoredDate`, since these can be set to arbitrary values)
    const lastPushDate = new Date(headCommit.pushedDate || prInfo.createdAt);
    const lastCommentDate = getLastCommentishActivityDate(prInfo);
    // Only blessings newer than the last push count
    const blessing = getLastMaintainerBlessing(lastPushDate, prInfo.timelineItems);
    const reopenedDate = getReopenedDate(prInfo.timelineItems);
    // we should generally have all files (except for draft PRs)
    const fileCount = prInfo.changedFiles;
    // we fetch all files so this shouldn't happen, but GH has a limit of 3k files even with
    // pagination (docs.github.com/en/rest/reference/pulls#list-pull-requests-files) and in
    // that case `files.totalCount` would be 3k so it'd fit the count but `changedFiles` would
    // be correct; so to be safe: check it, and warn if there are many files (or zero)
    const tooManyFiles = !fileCount // should never happen, make it look fishy if it does
        || fileCount > TOO_MANY_FILES // suspiciously many files
        || fileCount !== prInfo.files?.nodes?.length; // didn't get all files (probably too many)
    const hugeChange = prInfo.additions + prInfo.deletions > 5000;
    const paths = noNullish(prInfo.files?.nodes).map(f => f.path).sort();
    if (paths.length > TOO_MANY_FILES) paths.length = TOO_MANY_FILES; // redundant, but just in case
    const pkgInfoEtc = await getPackageInfosEtc(
        paths, prInfo.headRefOid, baseId,
        fetchFile, async name => await getDownloads(name, lastPushDate));
    if (pkgInfoEtc instanceof Error) return botError(pkgInfoEtc.message);
    const { pkgInfo, popularityLevel } = pkgInfoEtc;
    const reviews = getReviews(prInfo);
    const latestReview = max(reviews.map(r => r.date));
    const comments = noNullish(prInfo.comments.nodes);
    const mergeOfferDate = getMergeOfferDate(comments, prInfo.headRefOid);
    // "ready to merge" is accepted from the author, plus the package owners
    // when exactly one package is touched; requests older than the last
    // push/open are ignored
    const mergeRequest = getMergeRequest(comments,
        pkgInfo.filter(p => p.name).length === 1 ? [author, ...pkgInfo.find(p => p.name)!.owners] : [author],
        max([createdDate, reopenedDate, lastPushDate]));
    const lastActivityDate = max([createdDate, lastPushDate, lastCommentDate, blessing?.date, reopenedDate, latestReview]);
    const mainBotCommentID = getMainCommentID(comments);
    return {
        type: "info",
        now,
        pr_number: prInfo.number,
        author,
        headCommitOid: prInfo.headRefOid,
        mergeBaseOid: baseId, // not needed, kept for debugging
        lastPushDate, lastActivityDate,
        maintainerBlessed: blessing?.column,
        mergeOfferDate, mergeRequestDate: mergeRequest?.date, mergeRequestUser: mergeRequest?.user,
        hasMergeConflict: prInfo.mergeable === "CONFLICTING",
        isFirstContribution,
        tooManyFiles,
        hugeChange,
        popularityLevel,
        pkgInfo,
        reviews,
        mainBotCommentID,
        ...getCIResult(headCommit.checkSuites),
    };

    // Local helpers close over `now` and `prInfo` to build non-info results
    function botError(message: string): BotError {
        return { type: "error", now, message, author: prInfo.author?.login };
    }
    function botEnsureRemovedFromProject(message: string): BotEnsureRemovedFromProject {
        return { type: "remove", now, message, isDraft: prInfo.isDraft };
    }
}
/** Either: when the PR was last opened, or switched to ready from draft */
function getReopenedDate(timelineItems: PR_repository_pullRequest_timelineItems) {
    const reopened = someLast(timelineItems.nodes, item => {
        const opensPr = item.__typename === "ReopenedEvent" || item.__typename === "ReadyForReviewEvent";
        return opensPr && new Date(item.createdAt);
    });
    return reopened || undefined;
}
// Locate the bot's welcome ("main") comment via its embedded HTML marker,
// returning its database ID so other comments can link to it.
function getMainCommentID(comments: PR_repository_pullRequest_comments_nodes[]) {
    for (const c of comments) {
        if (authorNotBot(c)) continue;
        if (!c.body.includes("<!--typescript_bot_welcome-->")) continue;
        return c.databaseId!;
    }
    return undefined;
}
// Most recent human comment activity: top-level issue comments by non-bots,
// plus the newest inline comment of each review.
function getLastCommentishActivityDate(prInfo: PR_repository_pullRequest) {
    const toDate = (c: { createdAt: string }) => new Date(c.createdAt);
    const issueCommentDates = noNullish(prInfo.comments.nodes)
        .filter(authorNotBot)
        .map(toDate);
    const reviewCommentDates = noNullish(prInfo.reviews?.nodes)
        .map(review => max(noNullish(review.comments.nodes).map(toDate)));
    return max(issueCommentDates.concat(reviewCommentDates));
}
// Latest human-initiated project-column move that happened strictly after
// `after` (i.e. after the last push, so stale blessings don't count).
function getLastMaintainerBlessing(after: Date, timelineItems: PR_repository_pullRequest_timelineItems) {
    const blessing = someLast(timelineItems.nodes, item => {
        if (item.__typename !== "MovedColumnsInProjectEvent") return undefined;
        if (!authorNotBot(item)) return undefined;
        const when = new Date(item.createdAt);
        if (when <= after) return undefined;
        return { date: when, column: item.projectColumnName as ColumnName };
    });
    return blessing || undefined;
}
/**
 * Group the changed paths by package and compute per-package metadata
 * (file kinds, owner diffs between base and head, popularity), plus the
 * overall popularity level (max over all touched packages). Returns an
 * Error when owners at the base revision cannot be parsed.
 */
async function getPackageInfosEtc(
    paths: string[], headId: string, baseId: string, fetchFile: typeof defaultFetchFile, getDownloads: typeof getMonthlyDownloadCount
): Promise<{pkgInfo: PackageInfo[], popularityLevel: PopularityLevel} | Error> {
    // package name (null = infrastructure) -> categorized files in that package
    const infos = new Map<string|null, FileInfo[]>();
    for (const path of paths) {
        const [pkg, fileInfo] = await categorizeFile(path, headId, baseId, fetchFile);
        if (!infos.has(pkg)) infos.set(pkg, []);
        infos.get(pkg)!.push(fileInfo);
    }
    const result: PackageInfo[] = [];
    let maxDownloads = 0;
    for (const [name, files] of infos) {
        // owners can be string[] (parsed), null (file missing), or Error (unparsable)
        const oldOwners = !name ? null : await getOwnersOfPackage(name, baseId, fetchFile);
        if (oldOwners instanceof Error) return oldOwners;
        const newOwners0 = !name ? null : await getOwnersOfPackage(name, headId, fetchFile);
        // A header error is still an add/edit whereas a missing file is
        // delete, hence newOwners0 here
        const kind = !name ? "edit" : !oldOwners ? "add" : !newOwners0 ? "delete" : "edit";
        // treats a header error as a missing file, the CI will fail anyway
        // (maybe add a way to pass the error in the info so people don't need to read the CI?)
        const newOwners = newOwners0 instanceof Error ? null : newOwners0;
        const owners = oldOwners || [];
        const addedOwners = newOwners === null ? []
            : oldOwners === null ? newOwners
            : newOwners.filter(o => !oldOwners.includes(o));
        const deletedOwners = oldOwners === null ? []
            : newOwners === null ? []
            : oldOwners.filter(o => !newOwners.includes(o));
        // null name => infra => ensure critical (even though it's unused atm)
        const downloads = name ? await getDownloads(name) : Infinity;
        if (name && downloads > maxDownloads) maxDownloads = downloads;
        // keep the popularity level and not the downloads since that can change often
        const popularityLevel = downloadsToPopularityLevel(downloads);
        // only infra edits can be flagged safe, and only a lone attw.json edit
        const isSafeInfrastructureEdit = name === null
            ? kind === "edit" && files.length === 1 && files[0]?.path === "attw.json" && await isAllowedAttwEdit(headId, baseId, fetchFile)
            : undefined;
        result.push({ name, kind, files, owners, addedOwners, deletedOwners, popularityLevel, isSafeInfrastructureEdit });
    }
    return { pkgInfo: result, popularityLevel: downloadsToPopularityLevel(maxDownloads) };
}
// Assign a changed file to its package (null for infra) and classify it;
// package metadata additionally gets a "suspect" reason when the edit
// doesn't match the expected form.
async function categorizeFile(path: string, newId: string, oldId: string,
                              fetchFile: typeof defaultFetchFile): Promise<[string|null, FileInfo]> {
    const match = /^types\/(.*?)\/.*$/.exec(path);
    const pkg = match?.[1];
    if (!pkg) return [null, { path, kind: "infrastructure" }];
    if (isDeclarationPath(path)) return [pkg, { path, kind: "definition" }];
    if (/\.(?:[cm]?ts|tsx)$/.test(path)) return [pkg, { path, kind: "test" }];
    if (path.endsWith(".md")) return [pkg, { path, kind: "markdown" }];
    // Anything else is package metadata; lazily fetch both revisions for the check.
    const contentAt = (oid: string) => async () => fetchFile(`${oid}:${path}`);
    const suspect = await configSuspicious(path, contentAt(newId), contentAt(oldId));
    const kind = suspect ? "package-meta" : "package-meta-ok";
    return [pkg, { path, kind, suspect }];
}
// An attw.json edit is "safe" when the only difference is that the
// failingPackages list strictly shrank (entries removed, none added) and
// everything else in the file is identical. Any parse/fetch failure => false.
async function isAllowedAttwEdit(headId: string, baseId: string, fetchFile: typeof defaultFetchFile): Promise<boolean> {
    try {
        const attwAt = async (oid: string) => JSON.parse((await fetchFile(`${oid}:attw.json`))!);
        const { failingPackages: newFailing, ...newRest } = await attwAt(headId);
        const { failingPackages: oldFailing, ...oldRest } = await attwAt(baseId);
        if (!isDeepStrictEqual(newRest, oldRest)) return false;
        if (!(newFailing.length < oldFailing.length)) return false;
        return newFailing.every((p: string) => oldFailing.includes(p));
    } catch {
        return false;
    }
}
// A callable that decides whether a config-file edit is suspicious, returning
// a human-readable reason string or undefined when the edit looks fine.
// Doubles as a lookup table of per-basename checkers; a basename without an
// entry is always reported as "edited".
interface ConfigSuspicious {
    (path: string, getNew: () => Promise<string | undefined>, getOld: () => Promise<string | undefined>): Promise<string | undefined>;
    [basename: string]: (text: string, oldText?: string) => string | undefined;
}
const configSuspicious = <ConfigSuspicious>(async (path, newContents, oldContents) => {
    let basename = path.replace(/.*\//, "");
    // every tsconfig.*.json variant shares the tsconfig.json checker
    if (basename.startsWith("tsconfig.") && basename.endsWith(".json")) {
        basename = "tsconfig.json";
    }
    const checker = configSuspicious[basename];
    if (!checker) return `edited`;
    const text = await newContents();
    // Removing tslint.json, tsconfig.json, package.json and
    // OTHER_FILES.txt is checked by the CI. Specifics are in my commit
    // message.
    if (text === undefined) return undefined;
    const oldText = await oldContents();
    return checker(text, oldText);
});
// These files are validated elsewhere (CI/dtslint), so any edit is acceptable
configSuspicious["package.json"] = () => undefined;
configSuspicious[".npmignore"] = () => undefined;
// tsconfig edits must match (or move toward) this expected form
configSuspicious["tsconfig.json"] = makeChecker(
    {
        compilerOptions: {
            lib: ["es6"],
            noImplicitAny: true,
            noImplicitThis: true,
            strictFunctionTypes: true,
            strictNullChecks: true,
            types: [],
            noEmit: true,
            forceConsistentCasingInFileNames: true,
        }
    },
    urls.tsconfigJson,
    // strip settings packages may legitimately customize before diffing
    { ignore: data => {
        if (Array.isArray(data.compilerOptions?.lib)) {
            data.compilerOptions.lib = data.compilerOptions.lib.filter((value: unknown) =>
                !(typeof value === "string" && value.toLowerCase() === "dom"));
        }
        ["baseUrl", "typeRoots", "paths", "jsx", "module"].forEach(k => delete data.compilerOptions[k]);
        if (typeof data.compilerOptions?.target === "string" && data.compilerOptions.target.toLowerCase() === "es6") {
            delete data.compilerOptions.target;
        }
        delete data.files;
    } }
);
// helper for file checkers: allow either a given "expectedForm", or any edits that get closer
// to it, ignoring some keys. The ignored properties are in most cases checked
// elsewhere (dtslint), and in some cases they are irrelevant.
function makeChecker(expectedForm: any, expectedFormUrl: string, options?: { parse: (text: string) => unknown } | { ignore: (data: any) => void }) {
    // JSON-patch diff from expectedForm to the given text, or an error string
    // when the text can't be parsed/diffed
    const diffFromExpected = (text: string) => {
        let data: any;
        if (options && "parse" in options) {
            data = options.parse(text);
        } else {
            try { data = JSON.parse(text); } catch (e) { return "couldn't parse json"; }
        }
        if (options && "ignore" in options) options.ignore(data);
        try { return jsonDiff.compare(expectedForm, data); } catch (e) { return "couldn't diff json"; }
    };
    return (contents: string, oldText?: string) => {
        const theExpectedForm = `[the expected form](${expectedFormUrl})`;
        const newDiff = diffFromExpected(contents);
        if (typeof newDiff === "string") return newDiff;
        // no differences => exactly the expected form
        if (newDiff.length === 0) return undefined;
        // patch paths that are pure array indices aren't worth calling out
        const diffDescription = newDiff.every(d => /^\/[0-9]+($|\/)/.test(d.path)) ? ""
            : ` (check: ${newDiff.map(d => `\`${d.path.slice(1).replace(/\//g, ".")}\``).join(", ")})`;
        if (!oldText) return `not ${theExpectedForm}${diffDescription}`;
        const oldDiff = diffFromExpected(oldText);
        if (typeof oldDiff === "string") return oldDiff;
        // if turning oldDiff into newDiff only removes entries, the edit moved
        // strictly closer to the expected form => acceptable
        if (jsonDiff.compare(oldDiff, newDiff).every(({ op }) => op === "remove")) return undefined;
        return `not ${theExpectedForm} and not moving towards it${diffDescription}`;
    };
}
// Newest comment by creation time (undefined for an empty list).
function latestComment(comments: PR_repository_pullRequest_comments_nodes[]) {
    const byCreation = (a: PR_repository_pullRequest_comments_nodes, b: PR_repository_pullRequest_comments_nodes) =>
        Date.parse(a.createdAt) - Date.parse(b.createdAt);
    return max(comments, byCreation);
}
// When the bot last offered to merge this exact head commit, if it did.
// Offers mentioning an older oid are ignored (the push invalidated them).
function getMergeOfferDate(comments: PR_repository_pullRequest_comments_nodes[], headOid: string) {
    const isOfferForHead = (c: PR_repository_pullRequest_comments_nodes) =>
        sameUser("typescript-bot", c.author?.login || "-")
        && comment.parse(c.body)?.tag === "merge-offer"
        && c.body.includes(`(at ${abbrOid(headOid)})`);
    const offer = latestComment(comments.filter(isOfferForHead));
    return offer && new Date(offer.createdAt);
}
// Newest "ready to merge" comment from one of `users`, honored only when it
// was written after `sinceDate` (the last push/open — older requests are stale).
function getMergeRequest(comments: PR_repository_pullRequest_comments_nodes[], users: string[], sinceDate: Date) {
    const isMergeRequest = (c: PR_repository_pullRequest_comments_nodes) =>
        users.some(u => c.author && sameUser(u, c.author.login))
        && c.body.split("\n").some(line => line.trim().toLowerCase().startsWith("ready to merge"));
    const request = latestComment(comments.filter(isMergeRequest));
    if (!request) return request;
    const date = new Date(request.createdAt);
    if (!(date > sinceDate)) return undefined;
    return { date, user: request.author!.login };
}
// Classify each person's most recent review as approved/changereq/stale.
function getReviews(prInfo: PR_repository_pullRequest) {
    if (!prInfo.reviews?.nodes) return [];
    const headOid: string = prInfo.headRefOid;
    const prAuthor = prInfo.author!.login;
    const collected: ReviewInfo[] = [];
    // Walk newest-to-oldest so the first review kept per person is their
    // latest, which lets staleness be judged against the head commit.
    for (const review of noNullish(prInfo.reviews.nodes).reverse()) {
        const reviewer = review.author?.login;
        const date = new Date(review.submittedAt);
        if (!review.commit || !reviewer) continue;   // skip nulls
        if (reviewer === prAuthor) continue;         // skip self-reviews
        // only the most recent review per person (pending/commented ignored below)
        if (collected.some(prev => sameUser(prev.reviewer, reviewer))) continue;
        if (review.commit.oid !== headOid) {
            // reviewed an older commit than the current head
            collected.push({ type: "stale", reviewer, date, abbrOid: abbrOid(review.commit.oid) });
        } else if (review.state === "CHANGES_REQUESTED") {
            collected.push({ type: "changereq", reviewer, date });
        } else if (review.state === "APPROVED") {
            const isMaintainer = review.authorAssociation === "MEMBER"
                || review.authorAssociation === "OWNER";
            collected.push({ type: "approved", reviewer, date, isMaintainer });
        }
    }
    return collected;
}
// Summarize the head commit's GitHub Actions check suites into one CIResult,
// plus a failure URL and the suite IDs awaiting manual approval when relevant.
function getCIResult(checkSuites: PR_repository_pullRequest_commits_nodes_commit_checkSuites | null): { ciResult: CIResult, ciUrl?: string, reRunCheckSuiteIDs?: number[] } {
    const ghActionsChecks = checkSuites?.nodes?.filter(check => check?.app?.name.includes("GitHub Actions"));

    // Freakin' crypto miners ruined GitHub Actions, and now we need to manually confirm new folks can run CI
    const actionRequiredIDs = noNullish(ghActionsChecks?.map(check =>
        check?.conclusion === "ACTION_REQUIRED" ? check.databaseId : null));
    if (actionRequiredIDs.length > 0)
        return { ciResult: "action_required", reRunCheckSuiteIDs: actionRequiredIDs };

    const latestChecks = [];
    const checksByWorkflowPath = new Map<string, PR_repository_pullRequest_commits_nodes_commit_checkSuites_nodes>();
    // Attempt to use only the latest run for a given workflow on a given commit.
    // This may still be wrong if we _remove_ a workflow, but it's better than always
    // taking the first one.
    for (const check of ghActionsChecks || []) {
        if (!check) {
            continue;
        }
        const workflowPath = check.workflowRun?.file?.path;
        if (!workflowPath) {
            // no workflow info => can't dedupe, keep the check as-is
            latestChecks.push(check);
            continue;
        }
        const existingCheck = checksByWorkflowPath.get(workflowPath);
        // createdAt is an ISO8601 string, so we can safely just compare.
        if (!existingCheck || existingCheck.createdAt < check.createdAt) {
            checksByWorkflowPath.set(workflowPath, check);
        }
    }
    latestChecks.push(...checksByWorkflowPath.values());
    if (latestChecks.length === 0) {
        return { ciResult: "missing", ciUrl: undefined };
    }
    // Pass only if every latest check succeeded; a not-yet-concluded or
    // unrecognized conclusion makes the whole result "unknown"
    for (const check of latestChecks) {
        switch (check.conclusion) {
            case "SUCCESS":
                continue;
            case "FAILURE":
            case "SKIPPED":
            case "TIMED_OUT":
                return { ciResult: "fail", ciUrl: check.url };
            default:
                return { ciResult: "unknown" };
        }
    }
    return { ciResult: "pass" };
}
// Bucket raw monthly npm downloads into the three popularity tiers.
function downloadsToPopularityLevel(monthlyDownloads: number): PopularityLevel {
    if (monthlyDownloads > CriticalPopularityThreshold) return "Critical";
    if (monthlyDownloads > NormalPopularityThreshold) return "Popular";
    return "Well-liked by everyone";
}
/**
 * Read the owner list for `types/<packageName>` at revision `oid`.
 *
 * Post-pnpm packages declare owners in package.json; older packages list
 * contributors in the index.d.ts header. Returns a string[] of GitHub
 * usernames on success, null when the package doesn't exist at that
 * revision, or an Error when a file exists but can't be parsed.
 *
 * Fix: the previous catch blocks only produced an Error for `e instanceof
 * Error`; a non-Error throw from the header parser left `parsed` undefined
 * and crashed on the `parsed!.contributors` dereference (and a non-Error
 * throw from JSON.parse silently fell through as if package.json were
 * missing). All thrown values are now reported as an Error result.
 */
export async function getOwnersOfPackage(packageName: string, oid: string, fetchFile: typeof defaultFetchFile): Promise<string[] | null | Error> {
    const packageJson = `${oid}:types/${packageName}/package.json`;
    const packageJsonContent = await fetchFile(packageJson, 10240); // grab at most 10k
    let packageJsonObj;
    if (packageJsonContent !== undefined) {
        try {
            packageJsonObj = JSON.parse(packageJsonContent);
        } catch (e) {
            // report any thrown value, not just Error instances
            return new Error(`error parsing owners from package.json: ${e instanceof Error ? e.message : String(e)}`);
        }
    }
    if (!packageJsonObj || !(packageJsonObj.name && packageJsonObj.version && packageJsonObj.owners)) {
        // If we see that we're not in a post-pnpm world, try to get the owners from the index.d.ts.
        const indexDts = `${oid}:types/${packageName}/index.d.ts`;
        const indexDtsContent = await fetchFile(indexDts, 10240); // grab at most 10k
        if (indexDtsContent === undefined) return null;
        let parsed: OldHeaderParser.Header;
        try {
            parsed = OldHeaderParser.parseHeaderOrFail(indexDts, indexDtsContent);
        } catch (e) {
            // always return here so `parsed` can't be dereferenced unassigned
            return new Error(`error parsing owners: ${e instanceof Error ? e.message : String(e)}`);
        }
        return noNullish(parsed!.contributors.map(c => c.githubUsername));
    }
    return noNullish(packageJsonObj.owners?.map((c: any) => c?.githubUsername));
}