Skip to content

Commit

Permalink
fix(postgres): only include columns in insert statements if needed (g…
Browse files Browse the repository at this point in the history
…raphile#367)

* Only include required columns in insert statements

Columns that have no specific value set (are undefined) previously
were explicitly set to `default`. However, this causes problems if you
do not have permission to set this column, whereas not specifying the
column at all works fine (and still ends up with the default value).

This changes that, by only including columns in the insert statement if
they have a value set. If the insert statement is for multiple rows, and
not all rows have the same columns, all columns that are present in at
least one row will be included. Rows that don't have that column
specified will revert to the old behavior, being set to `default`
explicitly.

* Added a test case

* Use snapshots for the test

Also added a sanity check to make sure exactly one (1) query has been
executed

* Added a TODO comment about optimisation
  • Loading branch information
Michon van Dooren authored and calebmer committed Mar 5, 2017
1 parent 1ea8b92 commit 930b75b
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 6 deletions.
24 changes: 18 additions & 6 deletions src/postgres/inventory/collection/PgCollection.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import DataLoader = require('dataloader')
import { Client } from 'pg'
import { Collection } from '../../../interface'
import { sql, memoizeMethod } from '../../utils'
import { PgCatalog, PgCatalogNamespace, PgCatalogClass, PgCatalogAttribute } from '../../introspection'
import { PgCatalog, PgCatalogNamespace, PgCatalogClass } from '../../introspection'
import PgClassType from '../type/PgClassType'
import Options from '../Options'
import pgClientFromContext from '../pgClientFromContext'
Expand All @@ -27,7 +27,6 @@ class PgCollection implements Collection<PgClassType.Value> {
* of `PgCatalog`.
*/
private _pgNamespace: PgCatalogNamespace = this._pgCatalog.assertGetNamespace(this.pgClass.namespaceId)
private _pgAttributes: Array<PgCatalogAttribute> = this._pgCatalog.getClassAttributes(this.pgClass.id)

/**
* The name of this collection. A pluralized version of the class name. We
Expand Down Expand Up @@ -133,23 +132,36 @@ class PgCollection implements Collection<PgClassType.Value> {
async (values: Array<PgClassType.Value>): Promise<Array<PgClassType.Value>> => {
const insertionIdentifier = Symbol()

// Get the fields that actually have been used in any of the values, so
// we can leave the rest out of the query completely to prevent
// permission issues
// TODO: Use field information from GraphQL instead of iterating through values
const fields = new Map()
values.forEach((map) => {
map.forEach((value, key) => {
if (typeof value !== 'undefined') {
fields.set(key, this.type.fields.get(key))
}
})
})

// Create our insert query.
const query = sql.compile(sql.query`
with ${sql.identifier(insertionIdentifier)} as (
-- Start by defining our header which will be the class we are
-- inserting into (prefixed by namespace of course).
insert into ${sql.identifier(this._pgNamespace.name, this.pgClass.name)}
-- Add all of our attributes as columns to be inserted into. This is
-- helpful in case the columns differ from what we expect.
(${sql.join(this._pgAttributes.map(({ name }) => sql.identifier(name)), ', ')})
-- Add all of the attributes that we're going to use as columns to be inserted into.
-- This is helpful in case the columns differ from what we expect.
(${sql.join(Array.from(fields.values()).map(({ pgAttribute }) => sql.identifier(pgAttribute.name)), ', ')})
-- Next, add all of our value tuples.
values ${sql.join(values.map(value =>
// Make sure we have one value for every attribute in the class,
// if there was no such value defined, we should just use
// `default` and use the user’s default value.
sql.query`(${sql.join(Array.from(this.type.fields.values()).map(field => {
sql.query`(${sql.join(Array.from(fields.values()).map(field => {
const fieldValue = field.getValue(value)
return typeof fieldValue === 'undefined' ? sql.query`default` : field.type.transformValueIntoPgValue(fieldValue)
}), ', ')})`,
Expand Down
20 changes: 20 additions & 0 deletions src/postgres/inventory/collection/__tests__/PgCollection-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,26 @@ test('create will insert new rows into the database', withPgClient(async client
.toEqual(values.map(mapToObject))
}))

// Regression test (graphile#367): columns whose value is undefined must be
// omitted from the generated INSERT statement entirely, rather than being
// explicitly set to `default`, so inserts still work for users who lack
// permission on the omitted columns.
test('create will only include relevant columns', withPgClient(async client => {
  const context = { [$$pgClient]: client }

  // Note how the about column is not used
  const value1 = new Map([['name', 'John Smith'], ['email', 'john.smith@email.com']])
  const value2 = new Map([['name', 'Sarah Smith'], ['email', 'sarah.smith@email.com']])
  const value3 = new Map([['name', 'Budd Deey'], ['email', 'budd.deey@email.com']])

  // Reset the mock so the call count below reflects only the creates made
  // by this test.
  client.query.mockClear()

  await Promise.all([
    collection1.create(context, value1),
    collection1.create(context, value2),
    collection1.create(context, value3),
  ])

  // Sanity check: all three creates must be batched into exactly one query
  // (the collection uses a DataLoader), and the generated SQL text is pinned
  // via snapshot so it can be inspected for the omitted `about` column.
  expect(client.query.mock.calls.length).toBe(1)
  expect(client.query.mock.calls[0][0].text).toMatchSnapshot()
}))

// TODO: reimplement
// test('paginator will have the same name and type', () => {
// expect(collection1.paginator.name).toBe(collection1.name)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
exports[`test create will only include relevant columns 1`] = `"with __local_0__ as ( insert into \"c\".\"person\" (\"name\", \"email\") values ($1, $2), ($3, $4), ($5, $6) returning * ) select row_to_json(__local_0__) as object from __local_0__"`;

0 comments on commit 930b75b

Please sign in to comment.