Skip to content

Commit

Permalink
perf: a number of performance optimisations (#458)
Browse files Browse the repository at this point in the history
* Precompute for performance

* Use prepared statements

* Cache context

* More efficient 'beforeLocks'

* Fast loop

* Debug branching

* Cache hOP

* Marginally faster checks

* Faster loops

* Memo of size one

* FieldNode always has a name

* Make prepared statement cache configurable

* graphql-parse-resolve-info turbo

* One fewer closure

* Upgrade pg-sql2

* Avoid ||=

* Turbo entry point
  • Loading branch information
benjie committed May 7, 2019
1 parent 400dc75 commit 5bca05b
Show file tree
Hide file tree
Showing 20 changed files with 337 additions and 158 deletions.
1 change: 1 addition & 0 deletions .eslintignore
@@ -1,5 +1,6 @@
node_modules
node7minus
node8plus
build-turbo
examples
dist
51 changes: 30 additions & 21 deletions babel.config.js
@@ -1,23 +1,32 @@
module.exports = {
"plugins": ["@babel/plugin-transform-modules-commonjs", "@babel/plugin-syntax-object-rest-spread"],
"presets": [
["@babel/env", {
"targets": {
"node": "8.6"
}
}],
"@babel/flow"
plugins: [
"@babel/plugin-transform-modules-commonjs",
"@babel/plugin-syntax-object-rest-spread",
],
"env": {
"test": {
"presets": [
["@babel/env", {
"targets": {
"node": "current"
}
}],
"@babel/flow"
]
}
}
}
presets: [
[
"@babel/env",
{
targets: {
node: "8.6",
},
},
],
"@babel/flow",
],
env: {
test: {
presets: [
[
"@babel/env",
{
targets: {
node: "current",
},
},
],
"@babel/flow",
],
},
},
};
2 changes: 1 addition & 1 deletion packages/graphile-build-pg/package.json
Expand Up @@ -44,7 +44,7 @@
"jsonwebtoken": "^8.5.1",
"lodash": ">=4 <5",
"lru-cache": ">=4 <5",
"pg-sql2": "2.2.2",
"pg-sql2": "^2.2.3",
"postgres-interval": "^1.2.0"
},
"peerDependencies": {
Expand Down
39 changes: 18 additions & 21 deletions packages/graphile-build-pg/src/QueryBuilder.js
Expand Up @@ -100,6 +100,9 @@ class QueryBuilder {
last: ?number,
cursorComparator: ?CursorComparator,
};
lockContext: {
queryBuilder: QueryBuilder,
};

constructor(
options: QueryBuilderOptions = {},
Expand Down Expand Up @@ -182,6 +185,9 @@ class QueryBuilder {
this.lock("limit");
this.lock("offset");
});
this.lockContext = Object.freeze({
queryBuilder: this,
});
}

// ----------------------------------------
Expand Down Expand Up @@ -702,14 +708,13 @@ class QueryBuilder {
}
lock(type: string) {
if (this.locks[type]) return;
const getContext = () => ({
queryBuilder: this,
});
const beforeLocks = this.data.beforeLock[type];
if (beforeLocks && beforeLocks.length) {
this.data.beforeLock[type] = null;
for (const fn of beforeLocks) {
fn();
const context = this.lockContext;
const { beforeLock } = this.data;
let locks = beforeLock[type];
if (locks) {
beforeLock[type] = [];
for (let i = 0, l = locks.length; i < l; i++) {
locks[i]();
}
}
if (type !== "select") {
Expand All @@ -720,7 +725,6 @@ class QueryBuilder {
this.compiledData[type] = this.data[type];
} else if (type === "whereBound") {
// Handle properties separately
const context = getContext();
this.compiledData[type].lower = callIfNecessaryArray(
this.data[type].lower,
context
Expand All @@ -739,7 +743,6 @@ class QueryBuilder {
// Assume that duplicate fields must be identical, don't output the same
// key multiple times
const seenFields = {};
const context = getContext();
const data = [];
const selects = this.data[type];

Expand All @@ -751,19 +754,18 @@ class QueryBuilder {
// $FlowFixMe
seenFields[columnName] = true;
data.push([callIfNecessary(valueOrGenerator, context), columnName]);
const newBeforeLocks = this.data.beforeLock[type];
if (newBeforeLocks && newBeforeLocks.length) {
this.data.beforeLock[type] = null;
for (const fn of newBeforeLocks) {
fn();
locks = beforeLock[type];
if (locks) {
beforeLock[type] = [];
for (let i = 0, l = locks.length; i < l; i++) {
locks[i]();
}
}
}
}
this.locks[type] = isDev ? new Error("Initally locked here").stack : true;
this.compiledData[type] = data;
} else if (type === "orderBy") {
const context = getContext();
this.compiledData[type] = this.data[type].map(([a, b, c]) => [
callIfNecessary(a, context),
b,
Expand All @@ -772,24 +774,19 @@ class QueryBuilder {
} else if (type === "from") {
if (this.data.from) {
const f = this.data.from;
const context = getContext();
this.compiledData.from = [callIfNecessary(f[0], context), f[1]];
}
} else if (type === "join" || type === "where") {
const context = getContext();
this.compiledData[type] = callIfNecessaryArray(this.data[type], context);
} else if (type === "selectCursor") {
const context = getContext();
this.compiledData[type] = callIfNecessary(this.data[type], context);
} else if (type === "cursorPrefix") {
this.compiledData[type] = this.data[type];
} else if (type === "orderIsUnique") {
this.compiledData[type] = this.data[type];
} else if (type === "limit") {
const context = getContext();
this.compiledData[type] = callIfNecessary(this.data[type], context);
} else if (type === "offset") {
const context = getContext();
this.compiledData[type] = callIfNecessary(this.data[type], context);
} else if (type === "first") {
this.compiledData[type] = this.data[type];
Expand Down
65 changes: 65 additions & 0 deletions packages/graphile-build-pg/src/pgPrepareAndRun.js
@@ -0,0 +1,65 @@
//@flow
import { createHash } from "crypto";
import LRU from "lru-cache";
import type { PoolClient } from "pg";

const POSTGRAPHILE_PREPARED_STATEMENT_CACHE_SIZE =
parseInt(process.env.POSTGRAPHILE_PREPARED_STATEMENT_CACHE_SIZE, 10) || 100;

let lastString: string;
let lastHash: string;
const hash = (str: string): string => {
if (str !== lastString) {
lastString = str;
lastHash = createHash("sha1")
.update(str)
.digest("base64");
}
return lastHash;
};

/**
 * Executes `text` with `values` on `pgClient`, using a named prepared
 * statement (keyed by a SHA1 hash of the query text) so that repeated
 * executions of the same SQL can skip the server-side parse step.
 *
 * Falls back to an ordinary one-shot query when there are no bind values,
 * when the prepared-statement cache is disabled (size < 1), or when the
 * client does not expose the internal `connection.parsedStatements` map.
 */
export default function pgPrepareAndRun(
  pgClient: PoolClient,
  text: string,
  // eslint-disable-next-line flowtype/no-weak-types
  values: any
) {
  // NOTE(review): `pgClient.connection` and `connection.parsedStatements`
  // are undocumented internals of the `pg` driver — presumably the map of
  // statements already parsed on this physical connection; confirm against
  // the installed `pg` version.
  const connection = pgClient.connection;
  if (
    !values ||
    POSTGRAPHILE_PREPARED_STATEMENT_CACHE_SIZE < 1 ||
    !connection ||
    !connection.parsedStatements
  ) {
    // Prepared statements unavailable or disabled: run a plain query.
    return pgClient.query(text, values);
  } else {
    // Stable statement name derived from the query text (memoized SHA1).
    const name = hash(text);
    if (!connection._graphilePreparedStatementCache) {
      // Lazily attach a per-connection LRU. Evicted entries are
      // DEALLOCATEd on the server so prepared statements don't leak.
      connection._graphilePreparedStatementCache = LRU({
        max: POSTGRAPHILE_PREPARED_STATEMENT_CACHE_SIZE,
        dispose(key) {
          if (connection.parsedStatements[key]) {
            pgClient
              .query(`deallocate ${pgClient.escapeIdentifier(key)}`)
              .then(() => {
                delete connection.parsedStatements[key];
              })
              .catch(e => {
                // Best-effort cleanup: log and carry on rather than crash.
                // eslint-disable-next-line no-console
                console.error("Error releasing prepared query", e);
              });
          }
        },
      });
    }
    if (!connection._graphilePreparedStatementCache.get(name)) {
      // We're relying on dispose to clear out the old ones.
      // NOTE(review): dispose's DEALLOCATE is asynchronous; re-preparing
      // the same name while a deallocation is still in flight relies on
      // the driver queueing queries in order — confirm.
      connection._graphilePreparedStatementCache.set(name, true);
    }
    // Named query config: `pg` prepares the statement on first use of
    // `name` on this connection and re-executes it thereafter.
    return pgClient.query({
      name,
      text,
      values,
    });
  }
}
7 changes: 6 additions & 1 deletion packages/graphile-build-pg/src/plugins/PgAllRows.js
Expand Up @@ -22,6 +22,7 @@ export default (async function PgAllRows(
pgQueryFromResolveData: queryFromResolveData,
pgAddStartEndCursor: addStartEndCursor,
pgOmit: omit,
pgPrepareAndRun,
} = build;
const {
fieldWithHooks,
Expand Down Expand Up @@ -163,7 +164,11 @@ export default (async function PgAllRows(
);
const { text, values } = sql.compile(query);
if (debugSql.enabled) debugSql(text);
const result = await pgClient.query(text, values);
const result = await pgPrepareAndRun(
pgClient,
text,
values
);

const liveCollection =
resolveInfo.rootValue &&
Expand Down
2 changes: 2 additions & 0 deletions packages/graphile-build-pg/src/plugins/PgBasicsPlugin.js
Expand Up @@ -31,6 +31,7 @@ import viaTemporaryTable from "./viaTemporaryTable";
import chalk from "chalk";
import pickBy from "lodash/pickBy";
import PgLiveProvider from "../PgLiveProvider";
import pgPrepareAndRun from "../pgPrepareAndRun";

const defaultPgColumnFilter = (_attr, _build, _context) => true;
type Keys = Array<{
Expand Down Expand Up @@ -352,6 +353,7 @@ export default (function PgBasicsPlugin(
describePgEntity,
pgField,
sqlCommentByAddingTags,
pgPrepareAndRun,
});
},
["PgBasics"]
Expand Down
Expand Up @@ -112,7 +112,12 @@ export default (function PgConnectionArgOrderBy(builder, { orderByNullsLast }) {
const cursorPrefixFromOrderBy = orderBy => {
if (orderBy) {
let cursorPrefixes = [];
for (const item of orderBy) {
for (
let itemIndex = 0, itemCount = orderBy.length;
itemIndex < itemCount;
itemIndex++
) {
const item = orderBy[itemIndex];
if (item.alias) {
cursorPrefixes.push(sql.literal(item.alias));
}
Expand Down

0 comments on commit 5bca05b

Please sign in to comment.