bench.js
import * as process from "process";
import * as path from "path";
import * as os from "os";
import { createReadStream, existsSync } from "node:fs";
import * as readline from "node:readline";
import { mkdtemp, writeFile } from "node:fs/promises";
import esDirname from "es-dirname";
import { sql } from "slonik";
import getLogger from "pino";
import autocannon from "autocannon";
import { default as printResult } from "autocannon/lib/printResult.js";
import isURL from "validator/lib/isURL.js";
import esMain from "es-main";
import { generateUsers } from "./generate/users.js";
import { generatePosts } from "./generate/posts.js";
import { buildAPI } from "./api.js";
const DEFAULT_TEST_RECORD_SEEN_BY_COUNT = 1000;
const DEFAULT_TEST_GET_SEEN_BY_COUNT = 2000;
const DEFAULT_TEST_WORKER_COUNT = 2;
const DEFAULT_TEST_TARGET_BASE_URL = "http://localhost:5001";
const DEFAULT_TEST_SERVER_PORT = 5001;
const DEFAULT_TEST_DURATION_SECONDS = 30;
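// Example invocation (values are illustrative; the env vars are the ones read below):
//
//   TEST_TARGET_BASE_URL=http://localhost:5001 \
//   TEST_DURATION_SECONDS=60 \
//   TEST_WORKER_COUNT=4 \
//   node bench.js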
export default async function runBenchmark() {
const logger = getLogger();
// Ensure the target base URL is valid
let targetBaseURL = process.env.TEST_TARGET_BASE_URL ?? DEFAULT_TEST_TARGET_BASE_URL;
if (!targetBaseURL || !isURL(targetBaseURL, { require_tld: false })) {
throw new Error(`Missing/invalid target base URL [${targetBaseURL}]`);
}
const targetURL = new URL(targetBaseURL);
// Start the server
const api = await buildAPI();
logger.debug("starting API server...");
await api.listen({ port: targetURL.port });
// Temporary directory used for data generation
const tmpdir = await mkdtemp(path.join(os.tmpdir(), "supabase-seen-by-"));
// Clear out existing tables ahead of time
// TODO: more efficient reset -- posts/users don't need to be reset if already loaded
await api.db.query(sql`TRUNCATE posts, users, posts_seen_by_users;`);
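// One hypothetical shape for the TODO above (untested sketch): skip the full
// reset when seed data already exists and only clear the join table, e.g.
//
//   const userCount = await api.db.oneFirst(sql`SELECT count(*) FROM users`);
//   if (Number(userCount) > 0) {
//     await api.db.query(sql`TRUNCATE posts_seen_by_users;`);
//   }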
///////////////////////////
// Generate & load users //
///////////////////////////
// Generate users, if necessary
// FUTURE: reduce to PG-only w/ generate_series
let usersJSONPath = process.env.TEST_USERS_JSON_PATH;
if (!usersJSONPath || !existsSync(usersJSONPath)) {
if (!usersJSONPath) { usersJSONPath = path.join(tmpdir, "users.json"); }
logger.info(`TEST_USERS_JSON_PATH not specified or file missing, generating user seed list @ [${usersJSONPath}]`);
await generateUsers({
count: process.env.TEST_USER_COUNT ? parseInt(process.env.TEST_USER_COUNT, 10) : undefined,
outputFilePath: usersJSONPath,
});
}
// Load all user JSON (Ideally we'd use COPY here but slonik's copyFromBinary doesn't seem to work...)
// @ 100k users this takes ~5m 30s
logger.info(`inserting users from JSON file @ [${usersJSONPath}]`);
const userJSONFileStream = createReadStream(usersJSONPath);
const userJSONLines = readline.createInterface({
input: userJSONFileStream,
crlfDelay: Infinity
});
for await (const line of userJSONLines) {
try {
const user = JSON.parse(line);
await api.db.query(
sql`INSERT INTO users (id, email, name, about_html) VALUES (${user.id}, ${user.email}, ${user.name}, ${user.about_html})`,
);
} catch (err) {
logger.error({ err, data: { line } }, "failed to parse or insert user line");
}
}
logger.info(`finished inserting users from JSON file @ [${usersJSONPath}]`);
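// A possible speedup for the row-at-a-time loop above (hedged sketch, untested:
// assumes slonik's sql.unnest helper and guesses the column types):
//
//   const rows = userLines.map((line) => {
//     const u = JSON.parse(line);
//     return [u.id, u.email, u.name, u.about_html];
//   });
//   await api.db.query(sql`
//     INSERT INTO users (id, email, name, about_html)
//     SELECT * FROM ${sql.unnest(rows, ["uuid", "text", "text", "text"])}
//   `);
//
// `userLines` here is a hypothetical in-memory array of the JSON lines.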
///////////////////////////
// Generate & load posts //
///////////////////////////
// Generate posts, if necessary
// FUTURE: reduce to PG-only w/ generate_series
let postsJSONPath = process.env.TEST_POSTS_JSON_PATH;
if (!postsJSONPath || !existsSync(postsJSONPath)) {
if (!postsJSONPath) { postsJSONPath = path.join(tmpdir, "posts.json"); }
logger.info(`TEST_POSTS_JSON_PATH not specified or file missing, generating post seed list @ [${postsJSONPath}]`);
await generatePosts({
userCount: process.env.TEST_USER_COUNT ? parseInt(process.env.TEST_USER_COUNT, 10) : undefined,
postCount: process.env.TEST_POST_COUNT ? parseInt(process.env.TEST_POST_COUNT, 10) : undefined,
outputFilePath: postsJSONPath,
});
}
// Load all post JSON (Ideally we'd use COPY here but slonik's copyFromBinary doesn't seem to work...)
// @ 100k posts this takes ~5m 30s
logger.info(`inserting posts from JSON file @ [${postsJSONPath}]`);
const postJSONFileStream = createReadStream(postsJSONPath);
const postJSONLines = readline.createInterface({
input: postJSONFileStream,
crlfDelay: Infinity
});
for await (const line of postJSONLines) {
try {
const post = JSON.parse(line);
await api.db.query(sql`
INSERT INTO posts
(id, title, content, main_image_src, main_link_src, created_by)
VALUES
(${post.id}, ${post.title}, ${post.content}, ${post.main_image_src}, ${post.main_link_src}, ${post.created_by})
`);
} catch (err) {
logger.error({ err, data: { line } }, "failed to parse or insert post line");
}
}
logger.info(`finished inserting posts from JSON file @ [${postsJSONPath}]`);
//////////////
// Run Test //
//////////////
// Parse test configuration
const duration = parseInt(
process.env.TEST_DURATION_SECONDS ?? `${DEFAULT_TEST_DURATION_SECONDS}`,
10,
);
const currentDir = path.resolve(esDirname());
const setupRequestScriptPath = path.join(currentDir, "setup-request.cjs");
logger.info(`Using setup request script @ [${setupRequestScriptPath}]`);
// Workers are optional: when TEST_WORKER_COUNT is unset, autocannon runs in-process
let workers;
if (process.env.TEST_WORKER_COUNT) {
workers = parseInt(process.env.TEST_WORKER_COUNT, 10);
}
// Execute the API calls with autocannon
logger.info(`starting autocannon executions against [${targetBaseURL}]...`);
const results = await autocannon({
workers,
url: targetBaseURL,
bailout: 3,
duration,
connections: 1,
requests: [
{
// If workers are being used, setup should be a path to require-able file,
// not the function actually contained therein.
// data sharing (ex. userCount) is done via ENV
setupRequest: workers ? setupRequestScriptPath : (await import(setupRequestScriptPath)).default,
},
],
});
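// For reference, an autocannon setupRequest hook receives the request object
// and returns the (possibly mutated) request; a minimal sketch of what
// setup-request.cjs might look like (the real file is not shown here):
//
//   module.exports = (request) => {
//     // e.g. rotate the path or headers between iterations
//     return request;
//   };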
// Write JSON results to tmpdir
const resultsOutputPath = path.join(tmpdir, "results.json");
await writeFile(resultsOutputPath, JSON.stringify(results, null, 2));
logger.info(`JSON stats output @ [${resultsOutputPath}]`);
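// The file above uses autocannon's result shape (fields like `requests`,
// `latency`, `throughput`, and `errors`), so it can be post-processed
// directly; an illustrative one-liner (path is a placeholder):
//
//   node -e 'console.log(require("./results.json").requests.average)'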
// Pretty print the results
const prettyPrintedResults = printResult(
results,
{
outputStream: process.stdout,
renderResultsTable: true,
renderLatencyTable: true,
},
);
console.log(prettyPrintedResults);
// Stop the server
logger.debug("stopping API server...");
await api.close();
}
if (esMain(import.meta)) {
runBenchmark().catch((err) => {
console.error(err);
process.exit(1);
});
}