diff --git a/.github/dependabot.yml b/.github/dependabot.yml index e2709d676..a34a93708 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -29,6 +29,10 @@ updates: directory: "/src/test/nodejs/node-postgres" schedule: interval: "daily" + - package-ecosystem: "npm" + directory: "/src/test/nodejs/sequelize-tests" + schedule: + interval: "daily" - package-ecosystem: "npm" directory: "/src/test/nodejs/typeorm/data-test" schedule: @@ -74,7 +78,13 @@ updates: directory: "/samples/java/spring-data-jpa" schedule: interval: "daily" - + + # Node.js samples + - package-ecosystem: "npm" + directory: "/samples/nodejs/sequelize" + schedule: + interval: "daily" + # Python Samples # Python psycopg3 Sample - package-ecosystem: "pip" diff --git a/.github/workflows/samples.yaml b/.github/workflows/samples.yaml index 75aac7a6c..5c0f3bcda 100644 --- a/.github/workflows/samples.yaml +++ b/.github/workflows/samples.yaml @@ -73,6 +73,11 @@ jobs: run: | npm install npm start + - name: Run Sequelize Sample tests + working-directory: ./samples/nodejs/sequelize + run: | + npm install + npm start ruby-samples: runs-on: ubuntu-latest steps: diff --git a/.gitignore b/.gitignore index f0d25a99a..dbe8c704c 100644 --- a/.gitignore +++ b/.gitignore @@ -30,6 +30,7 @@ samples/nodejs/**/.DS_Store samples/cloud-run/nodejs/**/node_modules samples/cloud-run/nodejs/**/package-lock.json samples/cloud-run/nodejs/**/.DS_Store +samples/nodejs/sequelize/**/*.js src/test/ruby/**/Gemfile.lock samples/**/ruby/**/Gemfile.lock diff --git a/README.md b/README.md index 50183d95c..f49228f0e 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,8 @@ PGAdapter can be used with the following frameworks and tools: carefully for how to set up ActiveRecord to work with PGAdapter. 1. `Knex.js` query builder can be used with PGAdapter. See [Knex.js sample application](samples/nodejs/knex) for a sample application. +1. `Sequelize.js` ORM can be used with PGAdapter. 
See [Sequelize.js sample application](samples/nodejs/sequelize) + for a sample application. ## FAQ See [Frequently Asked Questions](docs/faq.md) for answers to frequently asked questions. diff --git a/samples/nodejs/sequelize/README.md b/samples/nodejs/sequelize/README.md new file mode 100644 index 000000000..1f9879556 --- /dev/null +++ b/samples/nodejs/sequelize/README.md @@ -0,0 +1,17 @@ + + +# PGAdapter Spanner and Sequelize + +PGAdapter has experimental support for [Sequelize](https://sequelize.org/) with the standard Node.js +`pg` driver. This sample application shows how to connect to PGAdapter with Sequelize, and how to +execute queries and transactions on Cloud Spanner. + +The sample uses the Cloud Spanner emulator. You can run the sample on the emulator with this +command: + +```shell +npm start +``` + +PGAdapter and the emulator are started in a Docker test container by the sample application. +Docker is therefore required to be installed on your system to run this sample. diff --git a/samples/nodejs/sequelize/models/models.ts b/samples/nodejs/sequelize/models/models.ts new file mode 100644 index 000000000..4fc43dcd8 --- /dev/null +++ b/samples/nodejs/sequelize/models/models.ts @@ -0,0 +1,264 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import { + CreationOptional, + DataTypes, + HasManyAddAssociationMixin, + HasManyGetAssociationsMixin, + HasOneGetAssociationMixin, + HasOneSetAssociationMixin, + InferAttributes, + InferCreationAttributes, + Model, + Sequelize +} from 'sequelize'; + +export class Singer extends Model, InferCreationAttributes> { + declare id: number; + declare firstName: string; + declare lastName: string; + declare fullName: string; + declare active: boolean; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; + + declare getAlbums: HasManyGetAssociationsMixin; +} + +export class Album extends Model, InferCreationAttributes> { + declare id: number; + declare title: string; + declare SingerId: number; + declare marketingBudget: number; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; + + declare getSinger: HasOneGetAssociationMixin; + declare setSinger: HasOneSetAssociationMixin; + declare getTracks: HasManyGetAssociationsMixin; + declare addTrack: HasManyAddAssociationMixin; +} + +export class Track extends Model, InferCreationAttributes> { + declare id: number; + declare trackNumber: number; + declare title: string; + declare sampleRate: number; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; + + declare getAlbum: HasOneGetAssociationMixin; +} + +export class Venue extends Model, InferCreationAttributes> { + declare id: number; + declare name: string; + // description is mapped to a JSONB column and can be used as an object. 
+ declare description: string; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; +} + +export class Concert extends Model, InferCreationAttributes> { + declare id: number; + declare VenueId: number; + declare SingerId: number; + declare name: string; + declare startTime: Date; + declare endTime: Date; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; +} + +export class TicketSale extends Model, InferCreationAttributes> { + declare id: number; + declare ConcertId: number; + declare customerName: string; + declare price: number; + declare seats: string[]; + declare createdAt: CreationOptional; + declare updatedAt: CreationOptional; +} + +export function initModels(sequelize: Sequelize) { + Singer.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + autoIncrement: true, + }, + firstName: { + type: DataTypes.STRING, + }, + lastName: { + type: DataTypes.STRING, + }, + // The fullName property is generated by Spanner + fullName: { + type: DataTypes.STRING, + }, + active: { + type: DataTypes.BOOLEAN, + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}); + + Album.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + autoIncrement: true, + }, + title: { + type: DataTypes.STRING, + }, + SingerId: { + type: DataTypes.INTEGER, + }, + marketingBudget: { + type: DataTypes.DECIMAL, + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}); + + // Note: Track is interleaved in Album. + Track.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + // Track.id is not an auto-increment column, because it has to have the same value + // as the Album that it belongs to. 
+ autoIncrement: false, + }, + trackNumber: { + type: DataTypes.INTEGER, + primaryKey: true, + autoIncrement: false, + }, + title: { + type: DataTypes.STRING, + }, + sampleRate: { + type: DataTypes.DOUBLE, + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}); + + Venue.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + autoIncrement: true, + }, + name: { + type: DataTypes.STRING, + }, + description: { + type: DataTypes.STRING, + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}) + + Concert.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + autoIncrement: true, + }, + VenueId: { + type: DataTypes.BIGINT, + }, + SingerId: { + type: DataTypes.BIGINT, + }, + name: { + type: DataTypes.STRING, + }, + startTime: { + type: DataTypes.DATE, + }, + endTime: { + type: DataTypes.DATE, + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}) + + TicketSale.init({ + id: { + type: DataTypes.BIGINT, + primaryKey: true, + autoIncrement: true, + }, + ConcertId: { + type: DataTypes.BIGINT, + }, + customerName: { + type: DataTypes.STRING, + }, + price: { + type: DataTypes.DECIMAL, + }, + seats: { + type: DataTypes.ARRAY(DataTypes.STRING), + }, + createdAt: { + type: DataTypes.DATE, + }, + updatedAt: { + type: DataTypes.DATE, + } + }, {sequelize}) + + Singer.hasMany(Album); + Album.belongsTo(Singer); + + // Tracks is interleaved in Albums. + // This means that they both share the first primary key column `id`. + // In addition, Tracks has a `trackNumber` primary key column. + // The reference ('foreign key') column from Track to Album is therefore the `id` column. 
+ Album.hasMany(Track, {foreignKey: "id"}); + Track.belongsTo(Album, {foreignKey: "id"}); + + Venue.hasMany(Concert); + Concert.belongsTo(Venue); + + Concert.hasMany(TicketSale); + TicketSale.belongsTo(Concert); +} diff --git a/samples/nodejs/sequelize/package.json b/samples/nodejs/sequelize/package.json new file mode 100644 index 000000000..8350a6354 --- /dev/null +++ b/samples/nodejs/sequelize/package.json @@ -0,0 +1,22 @@ +{ + "name": "pgadapter-sequelize-sample", + "version": "0.0.1", + "description": "PGAdapter Sequelize Sample", + "type": "commonjs", + "devDependencies": { + "@types/node": "^20.1.4", + "sequelize-cli": "^6.6.2", + "testcontainers": "^10.7.1", + "ts-node": "10.9.1", + "typescript": "5.2.2" + }, + "dependencies": { + "pg": "^8.11.3", + "sequelize": "^6.36.0", + "umzug": "^3.6.1", + "yargs": "^17.5.1" + }, + "scripts": { + "start": "ts-node src/index.ts" + } +} diff --git a/samples/nodejs/sequelize/src/index.ts b/samples/nodejs/sequelize/src/index.ts new file mode 100644 index 000000000..0a655fb0e --- /dev/null +++ b/samples/nodejs/sequelize/src/index.ts @@ -0,0 +1,239 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {createDataModel, startPGAdapter} from './init' +import {Album, Concert, initModels, Singer, TicketSale, Track, Venue} from '../models/models'; +import {QueryTypes, Sequelize} from "sequelize"; +import {randomInt} from "crypto"; +import {randomAlbumTitle, randomFirstName, randomLastName, randomTrackTitle} from "./random"; + +async function main() { + // Start PGAdapter in a Docker test container. + // PGAdapter will by default connect to the Spanner emulator. + // The emulator runs in the same Docker container as PGAdapter. + const pgAdapter = await startPGAdapter(); + + console.log('Initializing Sequelize'); + // Connect Sequelize to PGAdapter using the standard PostgreSQL Sequelize provider. + const sequelize = new Sequelize('sample-database', null, null, { + dialect: "postgres", + + // Connect to PGAdapter on localhost and the randomly assigned port that is mapped to port 5432 + // in the test container. + host: 'localhost', + port: pgAdapter.getMappedPort(5432), + ssl: false, + + // Setting the timezone is required, as Sequelize otherwise tries to use an INTERVAL to set + // the timezone. That is not supported on PGAdapter, and you will get the following error: + // invalid value for parameter "TimeZone": "INTERVAL '+00:00' HOUR TO MINUTE" + timezone: 'UTC', + + // The following configuration options are optional. + omitNull: false, + pool: { + max: 50, min: 10, acquire: 2000, idle: 20000, + }, + logging: false, + }); + + // Create the tables that are needed for this sample (if they do not already exist). + await createDataModel(sequelize); + + // Initialize the Sequelize models. + initModels(sequelize); + + // Delete any existing test data in the database before running the sample. + await deleteAllData(); + + // Create and then print some random data. + await createRandomSingersAndAlbums(sequelize, 20); + await printSingersAlbums(); + + // Create Venues and Concerts rows. + // The "venues" table contains a JSONB column. 
+ // The "ticket_sales" table contains a text array column. + await createVenuesAndConcerts(sequelize); + + await staleRead(sequelize); + + // Close the sequelize connection pool and shut down PGAdapter. + await sequelize.close(); + await pgAdapter.stop(); +} + +async function deleteAllData() { + console.log("Deleting all existing test data..."); + await TicketSale.destroy({truncate: true}); + await Concert.destroy({truncate: true}); + await Venue.destroy({truncate: true}); + await Track.destroy({truncate: true}); + await Album.destroy({truncate: true}); + await Singer.destroy({truncate: true}); + console.log("Finished deleting all existing test data"); +} + +async function createRandomSingersAndAlbums(sequelize: Sequelize, numSingers: number) { + console.log("Creating random singers and albums..."); + await sequelize.transaction(async tx => { + // Generate some random singers. + for (let i=0; i { + // Select a random singer. + const singer = await Singer.findOne({limit: 1, transaction: tx}); + + // Create a venue. Note that we can set the description as an object. + // Description is mapped to a JSONB column. + const venue = await Venue.create({ + name: 'Avenue Park', + description: '{Capacity: 5000, Location: "New York", Country: "US"}', + }, {transaction: tx}); + + // Create a concert and a ticket sale. + const concert = await Concert.create({ + name: 'Avenue Park Open', + SingerId: singer.id, + VenueId: venue.id, + startTime: new Date('2023-02-01T20:00:00-05:00'), + endTime: new Date('2023-02-02T02:00:00-05:00'), + }, {transaction: tx}); + + // The "ticket_sales" table contains an array column "seats". 
+ await TicketSale.create({ + ConcertId: concert.id, + customerName: `${randomFirstName()} ${randomLastName()}`, + price: Math.random() * 1000, + seats: ['A19', 'A20', 'A21'], + }, {transaction: tx}); + }); + console.log("Finished creating venues and concerts"); +} + +(async () => { + await main(); +})().catch(e => { + console.error(e); +}); diff --git a/samples/nodejs/sequelize/src/init.ts b/samples/nodejs/sequelize/src/init.ts new file mode 100644 index 000000000..3027c2b40 --- /dev/null +++ b/samples/nodejs/sequelize/src/init.ts @@ -0,0 +1,121 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {QueryTypes, Sequelize} from 'sequelize'; +import {GenericContainer, PullPolicy, StartedTestContainer, TestContainer} from "testcontainers"; + +/** + * Creates the data model that is needed for this sample application. + * + * The Cloud Spanner PostgreSQL dialect does not support all system tables (pg_catalog tables) that + * are present in open-source PostgreSQL databases. Those tables are used by Sequelize migrations. + * Migrations are therefore not supported. 
+ */ +export async function createDataModel(sequelize: Sequelize) { + console.log("Checking whether tables already exists"); + const result: any = await sequelize.query( + `SELECT COUNT(1) AS c + FROM information_schema.tables + WHERE table_schema='public' + AND table_name IN ('Singers', 'Albums', 'Tracks', 'Venues', 'Concerts', 'TicketSales')`, + { type: QueryTypes.SELECT, raw: true, plain: true }); + if (result.c == '6') { + return; + } + console.log("Creating tables"); + // Create the data model. + await sequelize.query( + ` + create sequence if not exists singers_seq bit_reversed_positive; + create table "Singers" ( + id bigint not null primary key default nextval('singers_seq'), + "firstName" varchar, + "lastName" varchar, + "fullName" varchar generated always as ( + CASE WHEN "firstName" IS NULL THEN "lastName" + WHEN "lastName" IS NULL THEN "firstName" + ELSE "firstName" || ' ' || "lastName" + END) stored, + "active" boolean, + "createdAt" timestamptz, + "updatedAt" timestamptz + ); + + create sequence if not exists albums_seq bit_reversed_positive; + create table "Albums" ( + id bigint not null primary key default nextval('albums_seq'), + title varchar, + "marketingBudget" numeric, + "SingerId" bigint, + "createdAt" timestamptz, + "updatedAt" timestamptz, + constraint fk_albums_singers foreign key ("SingerId") references "Singers" (id) + ); + + create table if not exists "Tracks" ( + id bigint not null, + "trackNumber" bigint not null, + title varchar not null, + "sampleRate" float8 not null, + "createdAt" timestamptz, + "updatedAt" timestamptz, + primary key (id, "trackNumber") + ) interleave in parent "Albums" on delete cascade; + + create sequence if not exists venues_seq bit_reversed_positive; + create table if not exists "Venues" ( + id bigint not null primary key default nextval('venues_seq'), + name varchar not null, + description varchar not null, + "createdAt" timestamptz, + "updatedAt" timestamptz + ); + + create sequence if not exists 
concerts_seq bit_reversed_positive; + create table if not exists "Concerts" ( + id bigint not null primary key default nextval('concerts_seq'), + "VenueId" bigint not null, + "SingerId" bigint not null, + name varchar not null, + "startTime" timestamptz not null, + "endTime" timestamptz not null, + "createdAt" timestamptz, + "updatedAt" timestamptz, + constraint fk_concerts_venues foreign key ("VenueId") references "Venues" (id), + constraint fk_concerts_singers foreign key ("SingerId") references "Singers" (id), + constraint chk_end_time_after_start_time check ("endTime" > "startTime") + ); + + create sequence if not exists ticket_sales_seq bit_reversed_positive; + create table if not exists "TicketSales" ( + id bigint not null primary key default nextval('ticket_sales_seq'), + "ConcertId" bigint not null, + "customerName" varchar not null, + price decimal not null, + seats text[], + "createdAt" timestamptz, + "updatedAt" timestamptz, + constraint fk_ticket_sales_concerts foreign key ("ConcertId") references "Concerts" (id) + );`, + {type: QueryTypes.RAW}) +} + +export async function startPGAdapter(): Promise { + console.log("Pulling PGAdapter and Spanner emulator"); + const container: TestContainer = new GenericContainer("gcr.io/cloud-spanner-pg-adapter/pgadapter-emulator") + .withPullPolicy(PullPolicy.alwaysPull()) + .withExposedPorts(5432); + console.log("Starting PGAdapter and Spanner emulator"); + return await container.start(); +} diff --git a/samples/nodejs/sequelize/src/random.ts b/samples/nodejs/sequelize/src/random.ts new file mode 100644 index 000000000..501b2fcd5 --- /dev/null +++ b/samples/nodejs/sequelize/src/random.ts @@ -0,0 +1,134 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +export function randomInt(min: number, max: number): number { + return Math.floor(Math.random() * (max - min + 1) + min); +} + +export function randomFirstName(): string { + return randomArrayElement(first_names); +} + +export function randomLastName(): string { + return randomArrayElement(last_names); +} + +export function randomAlbumTitle(): string { + return `${randomArrayElement(adjectives)} ${randomArrayElement(nouns)}`; +} + +export function randomTrackTitle(): string { + return `${randomArrayElement(adverbs)} ${randomArrayElement(verbs)}`; +} + +function randomArrayElement(array: Array): string { + return array[Math.floor(Math.random() * array.length)]; +} + +const first_names: string[] = [ + "Saffron", "Eleanor", "Ann", "Salma", "Kiera", "Mariam", "Georgie", "Eden", "Carmen", "Darcie", + "Antony", "Benjamin", "Donald", "Keaton", "Jared", "Simon", "Tanya", "Julian", "Eugene", "Laurence"]; + +const last_names: string[] = [ + "Terry", "Ford", "Mills", "Connolly", "Newton", "Rodgers", "Austin", "Floyd", "Doherty", "Nguyen", + "Chavez", "Crossley", "Silva", "George", "Baldwin", "Burns", "Russell", "Ramirez", "Hunter", "Fuller"]; + +export const adjectives: string[] = [ + "ultra", + "happy", + "emotional", + "filthy", + "charming", + "alleged", + "talented", + "exotic", + "lamentable", + "lewd", + "old-fashioned", + "savory", + "delicate", + "willing", + "habitual", + "upset", + "gainful", + "nonchalant", + "kind", + "unruly"]; + +export const nouns: string[] = [ + "improvement", + "control", + "tennis", + "gene", + "department", + "person", 
+ "awareness", + "health", + "development", + "platform", + "garbage", + "suggestion", + "agreement", + "knowledge", + "introduction", + "recommendation", + "driver", + "elevator", + "industry", + "extent"]; + +export const verbs: string[] = [ + "instruct", + "rescue", + "disappear", + "import", + "inhibit", + "accommodate", + "dress", + "describe", + "mind", + "strip", + "crawl", + "lower", + "influence", + "alter", + "prove", + "race", + "label", + "exhaust", + "reach", + "remove"]; + +export const adverbs: string[] = [ + "cautiously", + "offensively", + "immediately", + "soon", + "judgementally", + "actually", + "honestly", + "slightly", + "limply", + "rigidly", + "fast", + "normally", + "unnecessarily", + "wildly", + "unimpressively", + "helplessly", + "rightfully", + "kiddingly", + "early", + "queasily"]; diff --git a/samples/nodejs/sequelize/src/tsconfig.json b/samples/nodejs/sequelize/src/tsconfig.json new file mode 100644 index 000000000..d73f85e26 --- /dev/null +++ b/samples/nodejs/sequelize/src/tsconfig.json @@ -0,0 +1,7 @@ +{ + "noImplicitAny": false, + "compilerOptions": { + "target": "es6", + "module": "commonjs" + }, +} diff --git a/src/test/java/com/google/cloud/spanner/pgadapter/nodejs/SequelizeMockServerTest.java b/src/test/java/com/google/cloud/spanner/pgadapter/nodejs/SequelizeMockServerTest.java new file mode 100644 index 000000000..905c558b5 --- /dev/null +++ b/src/test/java/com/google/cloud/spanner/pgadapter/nodejs/SequelizeMockServerTest.java @@ -0,0 +1,200 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.cloud.spanner.pgadapter.nodejs; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.spanner.MockSpannerServiceImpl.StatementResult; +import com.google.cloud.spanner.Statement; +import com.google.cloud.spanner.pgadapter.AbstractMockServerTest; +import com.google.common.collect.ImmutableList; +import com.google.protobuf.Duration; +import com.google.protobuf.ListValue; +import com.google.protobuf.Value; +import com.google.spanner.v1.CommitRequest; +import com.google.spanner.v1.ExecuteSqlRequest; +import com.google.spanner.v1.ResultSet; +import com.google.spanner.v1.RollbackRequest; +import com.google.spanner.v1.TypeCode; +import io.grpc.Status; +import java.io.IOException; +import java.util.List; +import java.util.stream.Collectors; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@Category(NodeJSTest.class) +@RunWith(JUnit4.class) +public class SequelizeMockServerTest extends AbstractMockServerTest { + + @BeforeClass + public static void installDependencies() throws IOException, InterruptedException { + NodeJSTest.installDependencies("sequelize-tests"); + + mockSpanner.putStatementResult( + StatementResult.query( + Statement.of( + "with pg_range as (\n" + + "select * from (select 0::bigint as rngtypid, 0::bigint as rngsubtype, 0::bigint as rngmultitypid, 0::bigint as rngcollation, 0::bigint as rngsubopc, ''::varchar as rngcanonical, ''::varchar as rngsubdiff\n" + + ") range where false),\n" + + "pg_namespace as (\n" + + " select case schema_name when 'pg_catalog' then 11 when 'public' then 2200 else 0 end as oid,\n" + + " schema_name as nspname, null as nspowner, null as nspacl\n" + + " from 
information_schema.schemata\n" + + "),\n" + + "pg_type as (\n" + + " select 16 as oid, 'bool' as typname, 11 as typnamespace, null as typowner, 1 as typlen, true as typbyval, 'b' as typtype, 'B' as typcategory, true as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1000 as typarray, 'boolin' as typinput, 'boolout' as typoutput, 'boolrecv' as typreceive, 'boolsend' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'c' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'boolean' as spanner_type union all\n" + + " select 17 as oid, 'bytea' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'U' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1001 as typarray, 'byteain' as typinput, 'byteaout' as typoutput, 'bytearecv' as typreceive, 'byteasend' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'bytea' as spanner_type union all\n" + + " select 20 as oid, 'int8' as typname, 11 as typnamespace, null as typowner, 8 as typlen, true as typbyval, 'b' as typtype, 'N' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1016 as typarray, 'int8in' as typinput, 'int8out' as typoutput, 'int8recv' as typreceive, 'int8send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'd' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'bigint' as spanner_type union all\n" + + 
" select 21 as oid, 'int2' as typname, 11 as typnamespace, null as typowner, 2 as typlen, true as typbyval, 'b' as typtype, 'N' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1005 as typarray, 'int2in' as typinput, 'int2out' as typoutput, 'int2recv' as typreceive, 'int2send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 's' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 23 as oid, 'int4' as typname, 11 as typnamespace, null as typowner, 4 as typlen, true as typbyval, 'b' as typtype, 'N' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1007 as typarray, 'int4in' as typinput, 'int4out' as typoutput, 'int4recv' as typreceive, 'int4send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 25 as oid, 'text' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'S' as typcategory, true as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1009 as typarray, 'textin' as typinput, 'textout' as typoutput, 'textrecv' as typreceive, 'textsend' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 100 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 700 as oid, 'float4' as typname, 11 as typnamespace, null as typowner, 
4 as typlen, true as typbyval, 'b' as typtype, 'N' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1021 as typarray, 'float4in' as typinput, 'float4out' as typoutput, 'float4recv' as typreceive, 'float4send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'd' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'real' as spanner_type union all\n" + + " select 701 as oid, 'float8' as typname, 11 as typnamespace, null as typowner, 8 as typlen, true as typbyval, 'b' as typtype, 'N' as typcategory, true as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1022 as typarray, 'float8in' as typinput, 'float8out' as typoutput, 'float8recv' as typreceive, 'float8send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'd' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'double precision' as spanner_type union all\n" + + " select 1043 as oid, 'varchar' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'S' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1015 as typarray, 'varcharin' as typinput, 'varcharout' as typoutput, 'varcharrecv' as typreceive, 'varcharsend' as typsend, 'varchartypmodin' as typmodin, 'varchartypmodout' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 100 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'character varying' as spanner_type union all\n" + + " select 1082 as oid, 'date' as typname, 11 as typnamespace, 
null as typowner, 4 as typlen, true as typbyval, 'b' as typtype, 'D' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1182 as typarray, 'date_in' as typinput, 'date_out' as typoutput, 'date_recv' as typreceive, 'date_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'date' as spanner_type union all\n" + + " select 1114 as oid, 'timestamp' as typname, 11 as typnamespace, null as typowner, 8 as typlen, true as typbyval, 'b' as typtype, 'D' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1115 as typarray, 'timestamp_in' as typinput, 'timestamp_out' as typoutput, 'timestamp_recv' as typreceive, 'timestamp_send' as typsend, 'timestamptypmodin' as typmodin, 'timestamptypmodout' as typmodout, '-' as typanalyze, 'd' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 1184 as oid, 'timestamptz' as typname, 11 as typnamespace, null as typowner, 8 as typlen, true as typbyval, 'b' as typtype, 'D' as typcategory, true as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1185 as typarray, 'timestamptz_in' as typinput, 'timestamptz_out' as typoutput, 'timestamptz_recv' as typreceive, 'timestamptz_send' as typsend, 'timestamptztypmodin' as typmodin, 'timestamptztypmodout' as typmodout, '-' as typanalyze, 'd' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'timestamp with time zone' as 
spanner_type union all\n" + + " select 1700 as oid, 'numeric' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'N' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 1231 as typarray, 'numeric_in' as typinput, 'numeric_out' as typoutput, 'numeric_recv' as typreceive, 'numeric_send' as typsend, 'numerictypmodin' as typmodin, 'numerictypmodout' as typmodout, '-' as typanalyze, 'i' as typalign, 'm' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'numeric' as spanner_type union all\n" + + " select 3802 as oid, 'jsonb' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'U' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 3807 as typarray, 'jsonb_in' as typinput, 'jsonb_out' as typoutput, 'jsonb_recv' as typreceive, 'jsonb_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'jsonb' as spanner_type union all\n" + + " select 1000 as oid, '_bool' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 16 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 
'boolean[]' as spanner_type union all\n" + + " select 1001 as oid, '_bytea' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 17 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'bytea[]' as spanner_type union all\n" + + " select 1016 as oid, '_int8' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 20 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'bigint[]' as spanner_type union all\n" + + " select 1005 as oid, '_int2' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 21 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union 
all\n" + + " select 1007 as oid, '_int4' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 23 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 1009 as oid, '_text' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 25 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 100 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 1021 as oid, '_float4' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 700 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'real[]' as spanner_type union all\n" + + " select 1022 as oid, '_float8' 
as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 701 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'double precision[]' as spanner_type union all\n" + + " select 1015 as oid, '_varchar' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 1043 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 100 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'character varying[]' as spanner_type union all\n" + + " select 1182 as oid, '_date' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 1082 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'date[]' as spanner_type union all\n" + + " select 1115 as oid, '_timestamp' 
as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 1114 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type union all\n" + + " select 1185 as oid, '_timestamptz' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 1184 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'timestamp with time zone[]' as spanner_type union all\n" + + " select 1231 as oid, '_numeric' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 1700 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'numeric[]' as spanner_type union all\n" + + " select 3807 as oid, '_jsonb' as 
typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'b' as typtype, 'A' as typcategory, false as typispreferred, true as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 3802 as typelem, 0 as typarray, 'array_in' as typinput, 'array_out' as typoutput, 'array_recv' as typreceive, 'array_send' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'i' as typalign, 'x' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, 'jsonb[]' as spanner_type union all\n" + + " select 705 as oid, 'unknown' as typname, 11 as typnamespace, null as typowner, -1 as typlen, false as typbyval, 'p' as typtype, 'X' as typcategory, false as typispreferred, false as typisdefined, ',' as typdelim, '0'::varchar as typrelid, 0 as typelem, 0 as typarray, 'unknownin' as typinput, 'unknownout' as typoutput, 'unknownrecv' as typreceive, 'unknownsend' as typsend, '-' as typmodin, '-' as typmodout, '-' as typanalyze, 'c' as typalign, 'p' as typstorage, false as typnotnull, 0 as typbasetype, -1 as typtypmod, 0 as typndims, 0 as typcollation, null as typdefaultbin, null as typdefault, null as typacl, null as spanner_type\n" + + "),\n" + + " ranges AS ( SELECT pg_range.rngtypid, pg_type.typname AS rngtypname, pg_type.typarray AS rngtyparray, pg_range.rngsubtype FROM pg_range LEFT OUTER JOIN pg_type ON pg_type.oid = pg_range.rngtypid)SELECT pg_type.typname, pg_type.typtype, pg_type.oid, pg_type.typarray, ranges.rngtypname, ranges.rngtypid, ranges.rngtyparray FROM pg_type LEFT OUTER JOIN ranges ON pg_type.oid = ranges.rngsubtype WHERE (pg_type.typtype IN('b', 'e'))"), + ResultSet.newBuilder() + .setMetadata( + createMetadata( + ImmutableList.of( + TypeCode.STRING, + TypeCode.STRING, + TypeCode.INT64, + TypeCode.BOOL, + TypeCode.STRING, + TypeCode.INT64, + TypeCode.BOOL))) + .build())); + mockSpanner.putStatementResult( + 
StatementResult.query(Statement.of("SELECT 1+1 AS result"), SELECT2_RESULTSET)); + mockSpanner.putStatementResult( + StatementResult.query( + Statement.of("SELECT * FROM users"), + ResultSet.newBuilder() + .setMetadata( + createMetadata( + ImmutableList.of(TypeCode.INT64, TypeCode.STRING), + ImmutableList.of("id", "name"))) + .addRows( + ListValue.newBuilder() + .addValues(Value.newBuilder().setStringValue("1").build()) + .addValues(Value.newBuilder().setStringValue("Alice").build()) + .build()) + .build())); + } + + private String getHost() { + return "localhost"; + } + + @Test + public void testSelectUsers() throws Exception { + String sql = "SELECT * FROM users"; + + String output = runTest("testSelectUsers", getHost(), pgServer.getLocalPort()); + + assertEquals("Users: 1,Alice\n", output); + + List<ExecuteSqlRequest> executeSqlRequests = + mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).stream() + .filter(request -> request.getSql().equals(sql)) + .collect(Collectors.toList()); + assertEquals(1, executeSqlRequests.size()); + ExecuteSqlRequest request = executeSqlRequests.get(0); + assertTrue(request.getTransaction().hasSingleUse()); + assertTrue(request.getTransaction().getSingleUse().hasReadOnly()); + assertFalse(request.getTransaction().getSingleUse().getReadOnly().hasStrong()); + assertTrue(request.getTransaction().getSingleUse().getReadOnly().hasMaxStaleness()); + assertEquals( + request.getTransaction().getSingleUse().getReadOnly().getMaxStaleness(), + Duration.newBuilder().setSeconds(15L).build()); + } + + @Test + public void testSelectUsersInTransaction() throws Exception { + String sql = "SELECT * FROM users"; + + String output = runTest("testSelectUsersInTransaction", getHost(), pgServer.getLocalPort()); + + assertEquals("Users: 1,Alice\n", output); + + List<ExecuteSqlRequest> executeSqlRequests = + mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).stream() + .filter(request -> request.getSql().equals(sql)) + .collect(Collectors.toList()); + assertEquals(1,
executeSqlRequests.size()); + ExecuteSqlRequest request = executeSqlRequests.get(0); + assertTrue(request.getTransaction().hasBegin()); + assertTrue(request.getTransaction().getBegin().hasReadWrite()); + assertEquals(1, mockSpanner.countRequestsOfType(CommitRequest.class)); + } + + @Test + public void testErrorInTransaction() throws Exception { + String sql = "SELECT * FROM non_existing_table"; + mockSpanner.putStatementResult( + StatementResult.exception( + Statement.of(sql), + Status.NOT_FOUND.withDescription("Table not found").asRuntimeException())); + + String output = runTest("testErrorInTransaction", getHost(), pgServer.getLocalPort()); + + assertEquals( + "Users: 1,Alice\n" + + "Transaction error: SequelizeDatabaseError: Table not found - Statement: 'SELECT * FROM non_existing_table'\n", + output); + + List<ExecuteSqlRequest> executeSqlRequests = + mockSpanner.getRequestsOfType(ExecuteSqlRequest.class).stream() + .filter(request -> request.getSql().equals(sql)) + .collect(Collectors.toList()); + assertEquals(1, executeSqlRequests.size()); + ExecuteSqlRequest request = executeSqlRequests.get(0); + assertTrue(request.getTransaction().hasId()); + assertEquals(0, mockSpanner.countRequestsOfType(CommitRequest.class)); + assertEquals(1, mockSpanner.countRequestsOfType(RollbackRequest.class)); + } + + static String runTest(String testName, String host, int port) + throws IOException, InterruptedException { + return NodeJSTest.runTest("sequelize-tests", testName, host, port, "db"); + } +} diff --git a/src/test/nodejs/sequelize-tests/package.json b/src/test/nodejs/sequelize-tests/package.json new file mode 100644 index 000000000..9f0e8b7d2 --- /dev/null +++ b/src/test/nodejs/sequelize-tests/package.json @@ -0,0 +1,19 @@ +{ + "name": "sequelize-tests", + "version": "0.0.1", + "description": "Sequelize tests", + "type": "commonjs", + "devDependencies": { + "@types/node": "^20.1.4", + "ts-node": "10.9.1", + "typescript": "5.2.2" + }, + "dependencies": { + "pg": "^8.11.3", + "sequelize":
"^6.34.0", + "yargs": "^17.5.1" + }, + "scripts": { + "start": "ts-node src/index.ts" + } +} diff --git a/src/test/nodejs/sequelize-tests/src/index.ts b/src/test/nodejs/sequelize-tests/src/index.ts new file mode 100644 index 000000000..5de62b98c --- /dev/null +++ b/src/test/nodejs/sequelize-tests/src/index.ts @@ -0,0 +1,111 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const { Sequelize, QueryTypes } = require('sequelize'); + +function runTest(host: string, port: number, database: string, test: (client) => Promise) { + const sequelize = new Sequelize(database, null, null, { + host: host, + port: port, + dialect: "postgres", + dialectOptions: { + clientMinMessages: "ignore", + options: "-c spanner.read_only_staleness='MAX_STALENESS 15s'", + }, + timestamps: false, + omitNull: false, + pool: { + max: 50, min: 10, acquire: 2000, idle: 20000, + }, + timezone: 'UTC', + logging: false, + }); + runTestWithClient(sequelize, test); +} + +function runTestWithClient(client, test: (client) => Promise) { + client.authenticate() + .then(() => { + test(client).then(() => client.close()); + }) + .catch((error) => { + console.error(error); + client.close(); + }); +} + +async function testSelectUsers(client) { + try { + const rows = await client.query("SELECT * FROM users", { type: QueryTypes.SELECT }); + if (rows) { + console.log(`Users: ${Object.values(rows[0])}`); + } else { + console.error('Could not select users'); + } + } catch (e) 
{ + console.error(`Query error: ${e}`); + } +} + +async function testSelectUsersInTransaction(client) { + const t = await client.transaction(); + try { + const rows = await client.query("SELECT * FROM users", { type: QueryTypes.SELECT, transaction: t }); + console.log(`Users: ${Object.values(rows[0])}`); + await t.commit(); + } catch (e) { + console.error(`Query error: ${e}`); + await t.rollback(); + } +} + +async function testErrorInTransaction(client) { + try { + await client.transaction(async tx => { + const rows = await client.query("SELECT * FROM users", { type: QueryTypes.SELECT, transaction: tx }); + console.log(`Users: ${Object.values(rows[0])}`); + await client.query("SELECT * FROM non_existing_table", { + type: QueryTypes.SELECT, + transaction: tx + }); + }); + } catch (e) { + console.log(`Transaction error: ${e}`); + } +} + +require('yargs') +.demand(4) +.command( + 'testSelectUsers <host> <port> <database>', + 'Executes SELECT * FROM users', + {}, + opts => runTest(opts.host, opts.port, opts.database, testSelectUsers) +) +.command( + 'testSelectUsersInTransaction <host> <port> <database>', + 'Executes SELECT * FROM users in a transaction', + {}, + opts => runTest(opts.host, opts.port, opts.database, testSelectUsersInTransaction) +) +.command( + 'testErrorInTransaction <host> <port> <database>', + 'Executes a statement in a transaction that fails', + {}, + opts => runTest(opts.host, opts.port, opts.database, testErrorInTransaction) +) +.wrap(120) +.recommendCommands() +.strict() +.help().argv; diff --git a/src/test/nodejs/sequelize-tests/src/tsconfig.json b/src/test/nodejs/sequelize-tests/src/tsconfig.json new file mode 100644 index 000000000..2f720408e --- /dev/null +++ b/src/test/nodejs/sequelize-tests/src/tsconfig.json @@ -0,0 +1,3 @@ +{ + "compilerOptions": { "noImplicitAny": false } +}