Commit

fix(plugin-system): Resolved various linting issues with workspace
sullivanpj committed Jan 29, 2024
1 parent 0c85d8a commit 71d1541
Showing 14 changed files with 281 additions and 324 deletions.
350 changes: 175 additions & 175 deletions packages/cli/src/program/error-report.ts
@@ -1,175 +1,175 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import archiver from "archiver";
import * as checkpoint from "checkpoint-client";
import globby from "globby";
import stripAnsi from "strip-ansi";
import tmp from "tmp";
import { P, match } from "ts-pattern";

import {
  type CreateErrorReportInput,
  createErrorReport,
  makeErrorReportCompleted,
  uploadZip
} from "./errorReporting";
import type { MigrateTypes } from "./migrateTypes";
import type { RustPanic } from "./panic";
import { ErrorArea } from "./panic";
import { mapScalarValues, maskSchema } from "./utils/maskSchema";

// cleanup the temporary files even when an uncaught exception occurs
tmp.setGracefulCleanup();

type SendPanic = {
  error: RustPanic;
  cliVersion: string;
  enginesVersion: string;

  // retrieve the database version for the given schema or url, without throwing any error
  getDatabaseVersionSafe: (
    args: MigrateTypes.GetDatabaseVersionParams
  ) => Promise<string | undefined>;
};
export async function sendPanic({
  error,
  cliVersion,
  enginesVersion,
  getDatabaseVersionSafe
}: SendPanic): Promise<number> {
  const schema: string | undefined = match(error)
    .with({ schemaPath: P.not(P.nullish) }, (err) => {
      return fs.readFileSync(err.schemaPath, "utf-8");
    })
    .with({ schema: P.not(P.nullish) }, (err) => err.schema)
    .otherwise(() => undefined);

  const maskedSchema: string | undefined = schema ? maskSchema(schema) : undefined;

  let dbVersion: string | undefined;
  if (error.area === ErrorArea.LIFT_CLI) {
    // For a SQLite datasource like `url = "file:dev.db"` only schema will be defined
    const getDatabaseVersionParams: MigrateTypes.GetDatabaseVersionParams | undefined = match({
      schema,
      introspectionUrl: error.introspectionUrl
    })
      .with({ schema: P.not(undefined) }, ({ schema }) => {
        return {
          datasource: {
            tag: "SchemaString",
            schema
          }
        } as const;
      })
      .with({ introspectionUrl: P.not(undefined) }, ({ introspectionUrl }) => {
        return {
          datasource: {
            tag: "ConnectionString",
            url: introspectionUrl
          }
        } as const;
      })
      .otherwise(() => undefined);

    dbVersion = await getDatabaseVersionSafe(getDatabaseVersionParams);
  }

  const migrateRequest = error.request
    ? JSON.stringify(
        mapScalarValues(error.request, (value) => {
          if (typeof value === "string") {
            return maskSchema(value);
          }
          return value;
        })
      )
    : undefined;

  const params: CreateErrorReportInput = {
    area: error.area,
    kind: "PANIC",
    cliVersion,
    binaryVersion: enginesVersion,
    command: process.argv.slice(2).join(" "),
    jsStackTrace: stripAnsi(error.stack || error.message),
    rustStackTrace: error.rustStack,
    operatingSystem: `${os.arch()} ${os.platform()} ${os.release()}`,
    platform: process.platform,
    liftRequest: migrateRequest,
    schemaFile: maskedSchema,
    fingerprint: await checkpoint.getSignature(),
    sqlDump: undefined,
    dbVersion: dbVersion
  };

  // Get an AWS S3 signed URL from the server, so we can upload a zip file
  const signedUrl = await createErrorReport(params);

  // Create & upload the zip file
  // only log if something fails
  try {
    if (error.schemaPath) {
      const zip = await makeErrorZip(error);
      await uploadZip(zip, signedUrl);
    }
  } catch (zipUploadError) {
    console.error(`Error uploading zip file: ${zipUploadError.message}`);
  }

  // Mark the error report as completed
  const id = await makeErrorReportCompleted(signedUrl);

  return id;
}

async function makeErrorZip(error: RustPanic): Promise<Buffer> {
  if (!error.schemaPath) {
    throw new Error(`Can't make zip without schema path`);
  }
  const schemaDir = path.dirname(error.schemaPath);
  const tmpFileObj = tmp.fileSync();
  const outputFile = fs.createWriteStream(tmpFileObj.name);
  const zip = archiver("zip", { zlib: { level: 9 } });

  zip.pipe(outputFile);

  // add schema file
  // Note: the following reads `error.schemaPath` for the second time, we could just re-use
  // `maskedSchema` from the `sendPanic` function's scope.
  const schemaFile = maskSchema(fs.readFileSync(error.schemaPath, "utf-8"));
  zip.append(schemaFile, { name: path.basename(error.schemaPath) });

  if (fs.existsSync(schemaDir)) {
    const filePaths = await globby("migrations/**/*", {
      // globby doesn't have it in its types but it's part of mrmlnc/fast-glob
      // @ts-ignore
      cwd: schemaDir
    });

    for (const filePath of filePaths) {
      let file = fs.readFileSync(path.resolve(schemaDir, filePath), "utf-8");
      if (
        filePath.endsWith("schema.prisma") ||
        filePath.endsWith(path.basename(error.schemaPath))
      ) {
        // Remove credentials from schema datasource url
        file = maskSchema(file);
      }
      zip.append(file, { name: path.basename(filePath) });
    }
  }

  zip.finalize();

  return new Promise((resolve, reject) => {
    outputFile.on("close", () => {
      const buffer = fs.readFileSync(tmpFileObj.name);
      resolve(buffer);
    });

    zip.on("error", (err) => {
      reject(err);
    });
  });
}
// import fs from "node:fs";
// import os from "node:os";
// import path from "node:path";
// import archiver from "archiver";
// import * as checkpoint from "checkpoint-client";
// import globby from "globby";
// import stripAnsi from "strip-ansi";
// import tmp from "tmp";
// import { P, match } from "ts-pattern";

// import {
//   type CreateErrorReportInput,
//   createErrorReport,
//   makeErrorReportCompleted,
//   uploadZip
// } from "./errorReporting";
// import type { MigrateTypes } from "./migrateTypes";
// import type { RustPanic } from "./panic";
// import { ErrorArea } from "./panic";
// import { mapScalarValues, maskSchema } from "./utils/maskSchema";

// // cleanup the temporary files even when an uncaught exception occurs
// tmp.setGracefulCleanup();

// type SendPanic = {
//   error: RustPanic;
//   cliVersion: string;
//   enginesVersion: string;

//   // retrieve the database version for the given schema or url, without throwing any error
//   getDatabaseVersionSafe: (
//     args: MigrateTypes.GetDatabaseVersionParams
//   ) => Promise<string | undefined>;
// };
// export async function sendPanic({
//   error,
//   cliVersion,
//   enginesVersion,
//   getDatabaseVersionSafe
// }: SendPanic): Promise<number> {
//   const schema: string | undefined = match(error)
//     .with({ schemaPath: P.not(P.nullish) }, (err) => {
//       return fs.readFileSync(err.schemaPath, "utf-8");
//     })
//     .with({ schema: P.not(P.nullish) }, (err) => err.schema)
//     .otherwise(() => undefined);

//   const maskedSchema: string | undefined = schema ? maskSchema(schema) : undefined;

//   let dbVersion: string | undefined;
//   if (error.area === ErrorArea.LIFT_CLI) {
//     // For a SQLite datasource like `url = "file:dev.db"` only schema will be defined
//     const getDatabaseVersionParams: MigrateTypes.GetDatabaseVersionParams | undefined = match({
//       schema,
//       introspectionUrl: error.introspectionUrl
//     })
//       .with({ schema: P.not(undefined) }, ({ schema }) => {
//         return {
//           datasource: {
//             tag: "SchemaString",
//             schema
//           }
//         } as const;
//       })
//       .with({ introspectionUrl: P.not(undefined) }, ({ introspectionUrl }) => {
//         return {
//           datasource: {
//             tag: "ConnectionString",
//             url: introspectionUrl
//           }
//         } as const;
//       })
//       .otherwise(() => undefined);

//     dbVersion = await getDatabaseVersionSafe(getDatabaseVersionParams);
//   }

//   const migrateRequest = error.request
//     ? JSON.stringify(
//         mapScalarValues(error.request, (value) => {
//           if (typeof value === "string") {
//             return maskSchema(value);
//           }
//           return value;
//         })
//       )
//     : undefined;

//   const params: CreateErrorReportInput = {
//     area: error.area,
//     kind: "PANIC",
//     cliVersion,
//     binaryVersion: enginesVersion,
//     command: process.argv.slice(2).join(" "),
//     jsStackTrace: stripAnsi(error.stack || error.message),
//     rustStackTrace: error.rustStack,
//     operatingSystem: `${os.arch()} ${os.platform()} ${os.release()}`,
//     platform: process.platform,
//     liftRequest: migrateRequest,
//     schemaFile: maskedSchema,
//     fingerprint: await checkpoint.getSignature(),
//     sqlDump: undefined,
//     dbVersion: dbVersion
//   };

//   // Get an AWS S3 signed URL from the server, so we can upload a zip file
//   const signedUrl = await createErrorReport(params);

//   // Create & upload the zip file
//   // only log if something fails
//   try {
//     if (error.schemaPath) {
//       const zip = await makeErrorZip(error);
//       await uploadZip(zip, signedUrl);
//     }
//   } catch (zipUploadError) {
//     console.error(`Error uploading zip file: ${zipUploadError.message}`);
//   }

//   // Mark the error report as completed
//   const id = await makeErrorReportCompleted(signedUrl);

//   return id;
// }

// async function makeErrorZip(error: RustPanic): Promise<Buffer> {
//   if (!error.schemaPath) {
//     throw new Error(`Can't make zip without schema path`);
//   }
//   const schemaDir = path.dirname(error.schemaPath);
//   const tmpFileObj = tmp.fileSync();
//   const outputFile = fs.createWriteStream(tmpFileObj.name);
//   const zip = archiver("zip", { zlib: { level: 9 } });

//   zip.pipe(outputFile);

//   // add schema file
//   // Note: the following reads `error.schemaPath` for the second time, we could just re-use
//   // `maskedSchema` from the `sendPanic` function's scope.
//   const schemaFile = maskSchema(fs.readFileSync(error.schemaPath, "utf-8"));
//   zip.append(schemaFile, { name: path.basename(error.schemaPath) });

//   if (fs.existsSync(schemaDir)) {
//     const filePaths = await globby("migrations/**/*", {
//       // globby doesn't have it in its types but it's part of mrmlnc/fast-glob
//       // @ts-ignore
//       cwd: schemaDir
//     });

//     for (const filePath of filePaths) {
//       let file = fs.readFileSync(path.resolve(schemaDir, filePath), "utf-8");
//       if (
//         filePath.endsWith("schema.prisma") ||
//         filePath.endsWith(path.basename(error.schemaPath))
//       ) {
//         // Remove credentials from schema datasource url
//         file = maskSchema(file);
//       }
//       zip.append(file, { name: path.basename(filePath) });
//     }
//   }

//   zip.finalize();

//   return new Promise((resolve, reject) => {
//     outputFile.on("close", () => {
//       const buffer = fs.readFileSync(tmpFileObj.name);
//       resolve(buffer);
//     });

//     zip.on("error", (err) => {
//       reject(err);
//     });
//   });
// }
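
For context, the sketch below shows one way a caller inside packages/cli might wire up the exported sendPanic function. It is illustrative only and not part of this commit: the reportPanic wrapper, the relative import paths, and the stubbed getDatabaseVersionSafe lookup are assumptions made for the example, not code from this repository.

// Hypothetical usage sketch (not part of this commit): wiring sendPanic into a CLI error handler.
// Assumes the caller lives in packages/cli/src, next to the program/ directory.
import type { MigrateTypes } from "./program/migrateTypes";
import type { RustPanic } from "./program/panic";
import { sendPanic } from "./program/error-report";

export async function reportPanic(
  error: RustPanic,
  cliVersion: string,
  enginesVersion: string
): Promise<void> {
  // Stubbed lookup: resolve to undefined instead of throwing, so reporting stays best-effort.
  const getDatabaseVersionSafe = async (
    _args: MigrateTypes.GetDatabaseVersionParams
  ): Promise<string | undefined> => undefined;

  try {
    const reportId = await sendPanic({ error, cliVersion, enginesVersion, getDatabaseVersionSafe });
    console.error(`Panic report submitted (id: ${reportId}).`);
  } catch (reportError) {
    // Never let a reporting failure mask the original panic.
    console.error("Could not submit panic report:", reportError);
  }
}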
