From 20ae6536a8a20a8224c375daae4c2052db1f6c1c Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Thu, 6 May 2021 12:11:56 +0200 Subject: [PATCH 1/8] feat: presto/trino integration --- funks.js | 77 ++- lib/generators-aux.js | 6 + .../Dockerfile.postgres2 | 4 + .../data_models_storage_config2.json | 14 + .../docker-compose-test.yml | 28 +- test/integration_test_misc/initUserDb2.sh | 72 +++ .../dist_presto_doctor.json | 16 + .../dist_presto_doctor_instance1.json | 17 + .../dist_trino_doctor.json | 16 + .../dist_trino_doctor_instance1.json | 17 + .../presto_doctor.json | 18 + .../trino_doctor.json | 18 + .../postgresql.properties | 4 + test/mocha_integration_amazon_s3.test.js | 28 +- test/mocha_integration_presto.test.js | 538 ++++++++++++++++++ test/mocha_integration_trino.test.js | 538 ++++++++++++++++++ test/mocha_unit.test.js | 41 ++ test/unit_test_misc/data_models_trino.js | 35 ++ .../test-describe/cassandra-storagetype.js | 2 + .../test-describe/distributed-models.js | 4 + .../test-describe/handle-error-ddm.js | 4 + .../test-describe/trino-unittest.js | 228 ++++++++ views/create-distributed-model.ejs | 4 + views/create-models-amazonS3.ejs | 4 +- views/create-models-trino.ejs | 434 ++++++++++++++ views/create-trino-adapter.ejs | 400 +++++++++++++ 26 files changed, 2556 insertions(+), 11 deletions(-) create mode 100644 test/integration_test_misc/Dockerfile.postgres2 create mode 100644 test/integration_test_misc/initUserDb2.sh create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor_instance1.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor_instance1.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/presto_doctor.json create mode 100644 
test/integration_test_misc/integration_test_models_instance2/trino_doctor.json create mode 100644 test/integration_test_misc/postgresql.properties create mode 100644 test/mocha_integration_presto.test.js create mode 100644 test/mocha_integration_trino.test.js create mode 100644 test/unit_test_misc/data_models_trino.js create mode 100644 test/unit_test_misc/test-describe/trino-unittest.js create mode 100644 views/create-models-trino.ejs create mode 100644 views/create-trino-adapter.ejs diff --git a/funks.js b/funks.js index 30af9654..8b54e4c5 100644 --- a/funks.js +++ b/funks.js @@ -978,6 +978,7 @@ generateSections = async function (sections, opts, dir_write) { case "models-cassandra": case "models-mongodb": case "models-amazonS3": + case "models-trino": //adapters case "sql-adapter": case "zendro-adapters": @@ -985,6 +986,7 @@ generateSections = async function (sections, opts, dir_write) { case "cassandra-adapter": case "mongodb-adapter": case "amazonS3-adapter": + case "trino-adapter": file_name = dir_write + "/" + section.dir + "/" + section.fileName + ".js"; break; @@ -1088,6 +1090,8 @@ getStorageType = function (dataModel) { case "cassandra": case "mongodb": case "amazon-s3": + case "trino": + case "presto": //adapters case "sql-adapter": case "ddm-adapter": @@ -1096,6 +1100,8 @@ getStorageType = function (dataModel) { case "cassandra-adapter": case "mongodb-adapter": case "amazon-s3-adapter": + case "trino-adapter": + case "presto-adapter": //ok break; @@ -1104,9 +1110,12 @@ getStorageType = function (dataModel) { valid = false; console.error( colors.red( - `ERROR: The attribute 'storageType' has an invalid value. \nOne of the following types is expected: [sql, distributed-data-model, zendro-server, generic, sql-adapter, ddm-adapter, zendro-webservice-adapter, generic-adapter]. But '${ - dataModel.storageType - }' was obtained on ${ + `ERROR: The attribute 'storageType' has an invalid value. 
\n + One of the following types is expected: [sql, distributed-data-model, + zendro-server, generic, sql-adapter, ddm-adapter, zendro-webservice-adapter, generic-adapter, + cassandra, mongodb, amazon-s3, trino, presto, cassandra-adapter, mongodb-adapter, + amazon-s3-adapter, trino-adapter, presto-adapter]. + But '${dataModel.storageType}' was obtained on ${ dataModel.adapterName !== undefined ? "adapter" : "model" } '${ dataModel.adapterName !== undefined @@ -1155,6 +1164,8 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { "models/cassandra", "models/mongodb", "models/amazonS3", + "models/trino", + "models/presto", ]; let models = []; let adapters = []; @@ -1459,6 +1470,42 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { ]; break; + case "trino": + sections = [ + { dir: "schemas", template: "schemas", fileName: opts.nameLc }, + { dir: "resolvers", template: "resolvers", fileName: opts.nameLc }, + { + dir: "models/trino", + template: "models-trino", + fileName: opts.nameLc, + }, + { + dir: "validations", + template: "validations", + fileName: opts.nameLc, + }, + { dir: "patches", template: "patches", fileName: opts.nameLc }, + ]; + break; + + case "presto": + sections = [ + { dir: "schemas", template: "schemas", fileName: opts.nameLc }, + { dir: "resolvers", template: "resolvers", fileName: opts.nameLc }, + { + dir: "models/presto", + template: "models-trino", + fileName: opts.nameLc, + }, + { + dir: "validations", + template: "validations", + fileName: opts.nameLc, + }, + { dir: "patches", template: "patches", fileName: opts.nameLc }, + ]; + break; + case "zendro-webservice-adapter": sections = [ { @@ -1541,6 +1588,28 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { { dir: "patches", template: "patches", fileName: opts.adapterName }, ]; break; + + case "trino-adapter": + sections = [ + { + dir: "models/adapters", + template: "trino-adapter", + fileName: opts.adapterName, 
+ }, + { dir: "patches", template: "patches", fileName: opts.adapterName }, + ]; + break; + + case "presto-adapter": + sections = [ + { + dir: "models/adapters", + template: "trino-adapter", + fileName: opts.adapterName, + }, + { dir: "patches", template: "patches", fileName: opts.adapterName }, + ]; + break; default: break; } @@ -1572,6 +1641,8 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { "mongodb-adapter", "cassandra-adapter", "amazon-s3-adapter", + "trino-adapter", + "presto-adapter", ].includes(opts.storageType) ) { adapters.push(opts.adapterName); diff --git a/lib/generators-aux.js b/lib/generators-aux.js index 3962a9ac..65553bac 100644 --- a/lib/generators-aux.js +++ b/lib/generators-aux.js @@ -12,6 +12,12 @@ exports.getModelDatabase = function (dataModel) { "cassandra-adapter": "default-cassandra", mongodb: "default-mongodb", "mongodb-adapter": "default-mongodb", + "amazon-s3": "default-amazonS3", + "amazon-s3-adapter": "default-amazonS3", + trino: "default-trino", + "trino-adapter": "default-trino", + presto: "default-presto", + "presto-adapter": "default-presto", }; const storageType = dataModel.storageType.toLowerCase(); diff --git a/test/integration_test_misc/Dockerfile.postgres2 b/test/integration_test_misc/Dockerfile.postgres2 new file mode 100644 index 00000000..c1e2f6aa --- /dev/null +++ b/test/integration_test_misc/Dockerfile.postgres2 @@ -0,0 +1,4 @@ +FROM postgres:11.1-alpine + +COPY initUserDb2.sh /docker-entrypoint-initdb.d/initUserDb.sh +RUN chmod 755 /docker-entrypoint-initdb.d/initUserDb.sh diff --git a/test/integration_test_misc/data_models_storage_config2.json b/test/integration_test_misc/data_models_storage_config2.json index 2a7027fa..23a752b6 100644 --- a/test/integration_test_misc/data_models_storage_config2.json +++ b/test/integration_test_misc/data_models_storage_config2.json @@ -6,5 +6,19 @@ "database": "sciencedb_development", "host": "gql_postgres2", "dialect": "postgres" + }, + "default-trino": 
{ + "storageType": "trino", + "catalog":"postgresql", + "schema":"public", + "trino_host": "gql_trino", + "trino_port": "8080" + }, + "default-presto": { + "storageType": "presto", + "catalog":"postgresql", + "schema":"public", + "presto_host": "gql_presto", + "presto_port": "8080" } } \ No newline at end of file diff --git a/test/integration_test_misc/docker-compose-test.yml b/test/integration_test_misc/docker-compose-test.yml index b6c7d49d..7dca60f6 100644 --- a/test/integration_test_misc/docker-compose-test.yml +++ b/test/integration_test_misc/docker-compose-test.yml @@ -108,9 +108,33 @@ services: container_name: postgres2 build: context: . - dockerfile: Dockerfile.postgres + dockerfile: Dockerfile.postgres2 + ports: + - 1235:5432 + networks: + - instance2 + + gql_presto: + image: ahanaio/prestodb-sandbox + container_name: presto1 + depends_on: + - gql_postgres2 + volumes: + - ./postgresql.properties:/opt/presto-server/etc/catalog/postgresql.properties + ports: + - 8081:8080 + networks: + - instance2 + + gql_trino: + image: trinodb/trino + container_name: trino1 + depends_on: + - gql_postgres2 + volumes: + - ./postgresql.properties:/etc/trino/catalog/postgresql.properties ports: - - 1235:5431 + - 8080:8080 networks: - instance2 diff --git a/test/integration_test_misc/initUserDb2.sh b/test/integration_test_misc/initUserDb2.sh new file mode 100644 index 00000000..01d78fff --- /dev/null +++ b/test/integration_test_misc/initUserDb2.sh @@ -0,0 +1,72 @@ +#!/bin/bash +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL + CREATE USER sciencedb WITH SUPERUSER PASSWORD 'sciencedb'; + CREATE DATABASE sciencedb_development OWNER sciencedb; + CREATE DATABASE sciencedb_test OWNER sciencedb; + CREATE DATABASE sciencedb_production OWNER sciencedb; +EOSQL +psql -U sciencedb -d sciencedb_development <<-EOSQL +CREATE TABLE trino_doctors ( + doctor_id varchar(255) PRIMARY KEY, + birthday timestamp, + experience integer, + rating float, + on_holiday boolean, + speciality 
text, + telephone text +); +INSERT INTO trino_doctors (doctor_id, birthday, experience, rating, on_holiday, speciality, telephone) VALUES + ('d1', '1989-12-03T10:15:30.000Z', 3, 4.9, false, '["Tinnitus","Allergology"]', '[152234,137584]'), + ('d2', '1977-12-03T10:15:30.000Z', 15, 5.0, false, '["Cardiology","Cardiothoracic Surgery"]', '[142234,127584]'), + ('d3', '1987-12-03T10:15:30.000Z', 5, 4.8, true, '["Dermatology","Allergology"]', '[162234,177584]'), + ('d4', '1988-12-03T10:15:30.000Z', 4, 4.9, false, '["Child Psychiatry","Adolescent Psychiatry"]', '[192234,197584]'), + ('d5', '1986-12-03T10:15:30.000Z', 6, 4.7, true, '["Neurology"]', '[122234,187584]'); + +CREATE TABLE presto_doctors ( + doctor_id varchar(255) PRIMARY KEY, + birthday timestamp, + experience integer, + rating float, + on_holiday boolean, + speciality text, + telephone text +); +INSERT INTO presto_doctors (doctor_id, birthday, experience, rating, on_holiday, speciality, telephone) VALUES + ('d1', '1989-12-03T10:15:30.000Z', 3, 4.9, false, '["Tinnitus","Allergology"]', '[152234,137584]'), + ('d2', '1977-12-03T10:15:30.000Z', 15, 5.0, false, '["Cardiology","Cardiothoracic Surgery"]', '[142234,127584]'), + ('d3', '1987-12-03T10:15:30.000Z', 5, 4.8, true, '["Dermatology","Allergology"]', '[162234,177584]'), + ('d4', '1988-12-03T10:15:30.000Z', 4, 4.9, false, '["Child Psychiatry","Adolescent Psychiatry"]', '[192234,197584]'), + ('d5', '1986-12-03T10:15:30.000Z', 6, 4.7, true, '["Neurology"]', '[122234,187584]'); + +CREATE TABLE dist_trino_doctors ( + doctor_id varchar(255) PRIMARY KEY, + birthday timestamp, + experience integer, + rating float, + on_holiday boolean, + speciality text, + telephone text +); +INSERT INTO dist_trino_doctors (doctor_id, birthday, experience, rating, on_holiday, speciality, telephone) VALUES + ('instance1-d1', '1989-12-03T10:15:30.000Z', 3, 4.9, false, '["Tinnitus","Allergology"]', '[152234,137584]'), + ('instance1-d2', '1977-12-03T10:15:30.000Z', 15, 5.0, false, 
'["Cardiology","Cardiothoracic Surgery"]', '[142234,127584]'), + ('instance1-d3', '1987-12-03T10:15:30.000Z', 5, 4.8, true, '["Dermatology","Allergology"]', '[162234,177584]'), + ('instance1-d4', '1988-12-03T10:15:30.000Z', 4, 4.9, false, '["Child Psychiatry","Adolescent Psychiatry"]', '[192234,197584]'), + ('instance1-d5', '1986-12-03T10:15:30.000Z', 6, 4.7, true, '["Neurology"]', '[122234,187584]'); + +CREATE TABLE dist_presto_doctors ( + doctor_id varchar(255) PRIMARY KEY, + birthday timestamp, + experience integer, + rating float, + on_holiday boolean, + speciality text, + telephone text +); +INSERT INTO dist_presto_doctors (doctor_id, birthday, experience, rating, on_holiday, speciality, telephone) VALUES + ('instance1-d1', '1989-12-03T10:15:30.000Z', 3, 4.9, false, '["Tinnitus","Allergology"]', '[152234,137584]'), + ('instance1-d2', '1977-12-03T10:15:30.000Z', 15, 5.0, false, '["Cardiology","Cardiothoracic Surgery"]', '[142234,127584]'), + ('instance1-d3', '1987-12-03T10:15:30.000Z', 5, 4.8, true, '["Dermatology","Allergology"]', '[162234,177584]'), + ('instance1-d4', '1988-12-03T10:15:30.000Z', 4, 4.9, false, '["Child Psychiatry","Adolescent Psychiatry"]', '[192234,197584]'), + ('instance1-d5', '1986-12-03T10:15:30.000Z', 6, 4.7, true, '["Neurology"]', '[122234,187584]'); +EOSQL \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor.json b/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor.json new file mode 100644 index 00000000..116062c8 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor.json @@ -0,0 +1,16 @@ +{ + "model": "dist_presto_doctor", + "storageType" : "distributed-data-model", + "registry": ["dist_presto_doctor_instance1"], + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": 
"[Int]" + }, + + "internalId" : "doctor_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor_instance1.json b/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor_instance1.json new file mode 100644 index 00000000..43f5c383 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_presto_doctor_instance1.json @@ -0,0 +1,17 @@ +{ + "model": "dist_presto_doctor", + "storageType": "presto-adapter", + "adapterName": "dist_presto_doctor_instance1", + "regex": "instance1", + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": "[Int]" + }, + + "internalId": "doctor_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor.json b/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor.json new file mode 100644 index 00000000..3d4815aa --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor.json @@ -0,0 +1,16 @@ +{ + "model": "dist_trino_doctor", + "storageType" : "distributed-data-model", + "registry": ["dist_trino_doctor_instance1"], + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": "[Int]" + }, + + "internalId" : "doctor_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor_instance1.json b/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor_instance1.json new file mode 100644 index 00000000..52426d81 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_trino_doctor_instance1.json @@ -0,0 +1,17 @@ +{ + "model": 
"dist_trino_doctor", + "storageType": "trino-adapter", + "adapterName": "dist_trino_doctor_instance1", + "regex": "instance1", + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": "[Int]" + }, + + "internalId": "doctor_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/presto_doctor.json b/test/integration_test_misc/integration_test_models_instance2/presto_doctor.json new file mode 100644 index 00000000..b331ec04 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/presto_doctor.json @@ -0,0 +1,18 @@ +{ + "model": "presto_doctor", + "storageType": "presto", + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": "[Int]" + }, + "internalId": "doctor_id", + "id": { + "name": "doctor_id", + "type": "String" + } +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/trino_doctor.json b/test/integration_test_misc/integration_test_models_instance2/trino_doctor.json new file mode 100644 index 00000000..594b6a44 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/trino_doctor.json @@ -0,0 +1,18 @@ +{ + "model": "trino_doctor", + "storageType": "trino", + "attributes": { + "doctor_id": "String", + "birthday": "DateTime", + "experience": "Int", + "rating": "Float", + "on_holiday": "Boolean", + "speciality": "[String]", + "telephone": "[Int]" + }, + "internalId": "doctor_id", + "id": { + "name": "doctor_id", + "type": "String" + } +} \ No newline at end of file diff --git a/test/integration_test_misc/postgresql.properties b/test/integration_test_misc/postgresql.properties new file mode 100644 index 00000000..d2093675 --- /dev/null +++ 
b/test/integration_test_misc/postgresql.properties @@ -0,0 +1,4 @@ +connector.name=postgresql +connection-url=jdbc:postgresql://gql_postgres2:5432/sciencedb_development +connection-user=sciencedb +connection-password=sciencedb diff --git a/test/mocha_integration_amazon_s3.test.js b/test/mocha_integration_amazon_s3.test.js index dee69b6c..6dbb88f4 100644 --- a/test/mocha_integration_amazon_s3.test.js +++ b/test/mocha_integration_amazon_s3.test.js @@ -304,6 +304,28 @@ describe("Amazon S3/ Minio - Upload/Read Operations", () => { expect(res.statusCode).to.equal(200); expect(resBody.data.readersConnection.edges.length).equal(1); + + res = itHelpers.request_graph_ql_post(` + { + readersConnection( + search:{ + operator:eq, + field:history, + value:"Critique of Pure Reason,The World as Will and Representation", + valueType:Array + }, + pagination:{first:5}) { + edges{ + node{ + reader_name + } + } + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.readersConnection.edges.length).equal(1); }); it("13. Reader: search with ne operator for array field", () => { @@ -466,7 +488,7 @@ describe("Amazon S3/ Minio - Distributed Data Models", () => { }); }); - it("02. Reader DDM: search", () => { + it("03. Reader DDM: search", () => { res = itHelpers.request_graph_ql_post( `{ dist_readersConnection( @@ -512,7 +534,7 @@ describe("Amazon S3/ Minio - Distributed Data Models", () => { }); }); - it("03. Reader DDM: paginate", () => { + it("04. Reader DDM: paginate", () => { res = itHelpers.request_graph_ql_post( `{ dist_readersConnection(pagination: { @@ -576,7 +598,7 @@ describe("Amazon S3/ Minio - Distributed Data Models", () => { }); }); - it("04. Reader DDM: count readers", () => { + it("05. 
Reader DDM: count readers", () => { res = itHelpers.request_graph_ql_post(`{countDist_readers}`); resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); diff --git a/test/mocha_integration_presto.test.js b/test/mocha_integration_presto.test.js new file mode 100644 index 00000000..5dcc1d56 --- /dev/null +++ b/test/mocha_integration_presto.test.js @@ -0,0 +1,538 @@ +const { expect } = require("chai"); +const itHelpers = require("./integration_test_misc/integration_test_helpers"); + +describe("Presto - Read Access", () => { + it("01. presto_doctor: read one record", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOnePresto_doctor(doctor_id:"d1"){ + doctor_id + birthday + experience + rating + on_holiday + speciality + telephone + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOnePresto_doctor: { + doctor_id: "d1", + birthday: "1989-12-03T10:15:30.000Z", + experience: 3, + + rating: 4.9, + on_holiday: false, + speciality: ["Tinnitus", "Allergology"], + telephone: [152234, 137584], + }, + }, + }); + }); + + it("02. presto_doctor: count presto_doctors with like operator", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{countPresto_doctors(search:{operator: like, field: doctor_id, + value: "d%"})}` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.countPresto_doctors).equal(5); + }); + + it("03. 
presto_doctor: search with and operator", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search: { + operator: and, + search: [ + {operator: eq, field: doctor_id, value: "d1"}, + {operator: eq, field: on_holiday, value: "false"} + ] + }, + pagination:{limit:5}) { + doctor_id + on_holiday + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + presto_doctors: [ + { + doctor_id: "d1", + on_holiday: false, + }, + ], + }, + }); + }); + + it("04. presto_doctor: search with or operator", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search: { + operator: or, + search: [ + {operator: eq, field: doctor_id, value: "d1"}, + {operator: eq, field: doctor_id, value: "d2"} + ] + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + presto_doctors: [ + { + doctor_id: "d1", + }, + { + doctor_id: "d2", + }, + ], + }, + }); + }); + + it("05. presto_doctor: search with in operator for primitive field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search: { + operator: in, + field: doctor_id, + value: "d1,d2,d3", + valueType: Array + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(3); + }); + + it("06. 
presto_doctor: search with not & in operators for primitive field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{ + operator: not, + search: { + operator: in, + field: doctor_id, + value: "d1,d2,d3", + valueType: Array + } + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(2); + }); + + it("07. presto_doctor: search with in operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:in, field:telephone, value:"152234"}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:in, field:speciality, value:"Tinnitus"}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(1); + }); + + it("08. presto_doctor: search with not & in operators for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{ + operator:not, + search:{operator:in, field:telephone, value:"152234"} + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(4); + }); + + it("09. 
presto_doctor: search with eq operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:eq, field:telephone, value:"[152234,137584]"}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:eq, field:telephone, value:"152234,137584", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:eq, field:speciality, value:"Tinnitus,Allergology", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(1); + }); + + it("10. presto_doctor: search with ne operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + presto_doctors( + search:{operator:ne, field:speciality, value:"Tinnitus,Allergology", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.presto_doctors.length).equal(4); + }); + + it("11. 
presto_doctor: sort", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + presto_doctors(pagination: {limit:5}, order: [{field: doctor_id, order: DESC}]) { + doctor_id + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + presto_doctors: [ + { doctor_id: "d5" }, + { doctor_id: "d4" }, + { doctor_id: "d3" }, + { doctor_id: "d2" }, + { doctor_id: "d1" }, + ], + }, + }); + }); + + it("12. presto_doctor: paginate", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + presto_doctorsConnection(pagination:{first:10}) { + edges{ + cursor + node{ + doctor_id + } + } + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + let edges = resBody.data.presto_doctorsConnection.edges; + let idArray = edges.map((edge) => edge.node.doctor_id); + let cursorArray = edges.map((edge) => edge.cursor); + res = itHelpers.request_graph_ql_post_instance2( + `{ + presto_doctorsConnection(pagination:{first: 2, after: "${cursorArray[1]}"}) { + edges{ + cursor + node{ + doctor_id + } + } + presto_doctors{ + doctor_id + } + pageInfo{ + startCursor + endCursor + hasNextPage + hasPreviousPage + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + presto_doctorsConnection: { + edges: [ + { + cursor: cursorArray[2], + node: { + doctor_id: idArray[2], + }, + }, + { + cursor: cursorArray[3], + node: { + doctor_id: idArray[3], + }, + }, + ], + presto_doctors: [ + { + doctor_id: "d3", + }, + { + doctor_id: "d4", + }, + ], + pageInfo: { + startCursor: cursorArray[2], + endCursor: cursorArray[3], + hasNextPage: true, + hasPreviousPage: true, + }, + }, + }, + }); + + res = itHelpers.request_graph_ql_post_instance2( + `{ + presto_doctorsConnection(pagination: {last: 4, before:"${cursorArray[4]}"}) { + pageInfo { + startCursor + endCursor + hasNextPage + 
hasPreviousPage + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + presto_doctorsConnection: { + pageInfo: { + startCursor: cursorArray[0], + endCursor: cursorArray[3], + hasNextPage: true, + hasPreviousPage: false, + }, + }, + }, + }); + }); + + it("13. presto_doctor: get the table template", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{csvTableTemplatePresto_doctor}` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + csvTableTemplatePresto_doctor: [ + "doctor_id,birthday,experience,rating,on_holiday,speciality,telephone", + "String,DateTime,Int,Float,Boolean,[String],[Int]", + ], + }, + }); + }); +}); + +describe("Presto - Distributed Data Models", () => { + it("01. presto_doctor: read one presto_doctor", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOneDist_presto_doctor(doctor_id:"instance1-d1"){ + doctor_id + birthday + experience + rating + on_holiday + speciality + telephone + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneDist_presto_doctor: { + doctor_id: "instance1-d1", + birthday: "1989-12-03T10:15:30.000Z", + experience: 3, + rating: 4.9, + on_holiday: false, + speciality: ["Tinnitus", "Allergology"], + telephone: [152234, 137584], + }, + }, + }); + }); + + it("02. 
presto_doctor DDM: search & sort", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_presto_doctorsConnection( + search: {field: doctor_id, value: "instance1%", operator: like}, + order: [{field: doctor_id, order: DESC}] + pagination: {first: 5}) { + dist_presto_doctors{ + doctor_id + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_presto_doctorsConnection: { + dist_presto_doctors: [ + { doctor_id: "instance1-d5" }, + { doctor_id: "instance1-d4" }, + { doctor_id: "instance1-d3" }, + { doctor_id: "instance1-d2" }, + { doctor_id: "instance1-d1" }, + ], + }, + }, + }); + }); + + it("03. presto_doctor DDM: paginate", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_presto_doctorsConnection(pagination: { + first: 2, + after: "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDEiLCJiaXJ0aGRheSI6IjE5ODktMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjMsInJhdGluZyI6NC45LCJvbl9ob2xpZGF5IjpmYWxzZSwic3BlY2lhbGl0eSI6WyJUaW5uaXR1cyIsIkFsbGVyZ29sb2d5Il0sInRlbGVwaG9uZSI6WzE1MjIzNCwxMzc1ODRdfQ==" + }){ + edges { + node { + doctor_id + } + cursor + } + dist_presto_doctors{ + doctor_id + } + pageInfo { + startCursor + endCursor + hasNextPage + hasPreviousPage + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_presto_doctorsConnection: { + edges: [ + { + cursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDIiLCJiaXJ0aGRheSI6IjE5NzctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjE1LCJyYXRpbmciOjUsIm9uX2hvbGlkYXkiOmZhbHNlLCJzcGVjaWFsaXR5IjpbIkNhcmRpb2xvZ3kiLCJDYXJkaW90aG9yYWNpYyBTdXJnZXJ5Il0sInRlbGVwaG9uZSI6WzE0MjIzNCwxMjc1ODRdfQ==", + node: { + doctor_id: "instance1-d2", + }, + }, + { + cursor: + 
"eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDMiLCJiaXJ0aGRheSI6IjE5ODctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjUsInJhdGluZyI6NC44LCJvbl9ob2xpZGF5Ijp0cnVlLCJzcGVjaWFsaXR5IjpbIkRlcm1hdG9sb2d5IiwiQWxsZXJnb2xvZ3kiXSwidGVsZXBob25lIjpbMTYyMjM0LDE3NzU4NF19", + node: { + doctor_id: "instance1-d3", + }, + }, + ], + dist_presto_doctors: [ + { doctor_id: "instance1-d2" }, + { doctor_id: "instance1-d3" }, + ], + pageInfo: { + startCursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDIiLCJiaXJ0aGRheSI6IjE5NzctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjE1LCJyYXRpbmciOjUsIm9uX2hvbGlkYXkiOmZhbHNlLCJzcGVjaWFsaXR5IjpbIkNhcmRpb2xvZ3kiLCJDYXJkaW90aG9yYWNpYyBTdXJnZXJ5Il0sInRlbGVwaG9uZSI6WzE0MjIzNCwxMjc1ODRdfQ==", + endCursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDMiLCJiaXJ0aGRheSI6IjE5ODctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjUsInJhdGluZyI6NC44LCJvbl9ob2xpZGF5Ijp0cnVlLCJzcGVjaWFsaXR5IjpbIkRlcm1hdG9sb2d5IiwiQWxsZXJnb2xvZ3kiXSwidGVsZXBob25lIjpbMTYyMjM0LDE3NzU4NF19", + hasNextPage: true, + hasPreviousPage: false, + }, + }, + }, + }); + }); + + it("04. presto_doctor DDM: count presto_doctors", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{countDist_presto_doctors}` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody.data.countDist_presto_doctors).equal(5); + }); +}); diff --git a/test/mocha_integration_trino.test.js b/test/mocha_integration_trino.test.js new file mode 100644 index 00000000..d446f7d8 --- /dev/null +++ b/test/mocha_integration_trino.test.js @@ -0,0 +1,538 @@ +const { expect } = require("chai"); +const itHelpers = require("./integration_test_misc/integration_test_helpers"); + +describe("Trino - Read Access", () => { + it("01. 
trino_doctor: read one record", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOneTrino_doctor(doctor_id:"d1"){ + doctor_id + birthday + experience + rating + on_holiday + speciality + telephone + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneTrino_doctor: { + doctor_id: "d1", + birthday: "1989-12-03T10:15:30.000Z", + experience: 3, + + rating: 4.9, + on_holiday: false, + speciality: ["Tinnitus", "Allergology"], + telephone: [152234, 137584], + }, + }, + }); + }); + + it("02. trino_doctor: count trino_doctors with like operator", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{countTrino_doctors(search:{operator: like, field: doctor_id, + value: "d%"})}` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.countTrino_doctors).equal(5); + }); + + it("03. trino_doctor: search with and operator", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search: { + operator: and, + search: [ + {operator: eq, field: doctor_id, value: "d1"}, + {operator: eq, field: on_holiday, value: "false"} + ] + }, + pagination:{limit:5}) { + doctor_id + on_holiday + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + trino_doctors: [ + { + doctor_id: "d1", + on_holiday: false, + }, + ], + }, + }); + }); + + it("04. 
trino_doctor: search with or operator", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search: { + operator: or, + search: [ + {operator: eq, field: doctor_id, value: "d1"}, + {operator: eq, field: doctor_id, value: "d2"} + ] + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + trino_doctors: [ + { + doctor_id: "d1", + }, + { + doctor_id: "d2", + }, + ], + }, + }); + }); + + it("05. trino_doctor: search with in operator for primitive field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search: { + operator: in, + field: doctor_id, + value: "d1,d2,d3", + valueType: Array + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(3); + }); + + it("06. trino_doctor: search with not & in operators for primitive field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{ + operator: not, + search: { + operator: in, + field: doctor_id, + value: "d1,d2,d3", + valueType: Array + } + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(2); + }); + + it("07. 
trino_doctor: search with in operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:in, field:telephone, value:"152234"}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:in, field:speciality, value:"Tinnitus"}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(1); + }); + + it("08. trino_doctor: search with not & in operators for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{ + operator:not, + search:{operator:in, field:telephone, value:"152234"} + }, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(4); + }); + + it("09. 
trino_doctor: search with eq operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:eq, field:telephone, value:"[152234,137584]"}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:eq, field:telephone, value:"152234,137584", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:eq, field:speciality, value:"Tinnitus,Allergology", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(1); + }); + + it("10. trino_doctor: search with ne operator for array field", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + trino_doctors( + search:{operator:ne, field:speciality, value:"Tinnitus,Allergology", valueType:Array}, + pagination:{limit:5}) { + doctor_id + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.trino_doctors.length).equal(4); + }); + + it("11. 
trino_doctor: sort", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + trino_doctors(pagination: {limit:5}, order: [{field: doctor_id, order: DESC}]) { + doctor_id + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + trino_doctors: [ + { doctor_id: "d5" }, + { doctor_id: "d4" }, + { doctor_id: "d3" }, + { doctor_id: "d2" }, + { doctor_id: "d1" }, + ], + }, + }); + }); + + it("12. trino_doctor: paginate", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + trino_doctorsConnection(pagination:{first:10}) { + edges{ + cursor + node{ + doctor_id + } + } + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + let edges = resBody.data.trino_doctorsConnection.edges; + let idArray = edges.map((edge) => edge.node.doctor_id); + let cursorArray = edges.map((edge) => edge.cursor); + res = itHelpers.request_graph_ql_post_instance2( + `{ + trino_doctorsConnection(pagination:{first: 2, after: "${cursorArray[1]}"}) { + edges{ + cursor + node{ + doctor_id + } + } + trino_doctors{ + doctor_id + } + pageInfo{ + startCursor + endCursor + hasNextPage + hasPreviousPage + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + trino_doctorsConnection: { + edges: [ + { + cursor: cursorArray[2], + node: { + doctor_id: idArray[2], + }, + }, + { + cursor: cursorArray[3], + node: { + doctor_id: idArray[3], + }, + }, + ], + trino_doctors: [ + { + doctor_id: "d3", + }, + { + doctor_id: "d4", + }, + ], + pageInfo: { + startCursor: cursorArray[2], + endCursor: cursorArray[3], + hasNextPage: true, + hasPreviousPage: true, + }, + }, + }, + }); + + res = itHelpers.request_graph_ql_post_instance2( + `{ + trino_doctorsConnection(pagination: {last: 4, before:"${cursorArray[4]}"}) { + pageInfo { + startCursor + endCursor + hasNextPage + 
hasPreviousPage + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + + expect(resBody).to.deep.equal({ + data: { + trino_doctorsConnection: { + pageInfo: { + startCursor: cursorArray[0], + endCursor: cursorArray[3], + hasNextPage: true, + hasPreviousPage: false, + }, + }, + }, + }); + }); + + it("13. trino_doctor: get the table template", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{csvTableTemplateTrino_doctor}` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + csvTableTemplateTrino_doctor: [ + "doctor_id,birthday,experience,rating,on_holiday,speciality,telephone", + "String,DateTime,Int,Float,Boolean,[String],[Int]", + ], + }, + }); + }); +}); + +describe("Trino - Distributed Data Models", () => { + it("01. trino_doctor: read one trino_doctor", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOneDist_trino_doctor(doctor_id:"instance1-d1"){ + doctor_id + birthday + experience + rating + on_holiday + speciality + telephone + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneDist_trino_doctor: { + doctor_id: "instance1-d1", + birthday: "1989-12-03T10:15:30.000Z", + experience: 3, + rating: 4.9, + on_holiday: false, + speciality: ["Tinnitus", "Allergology"], + telephone: [152234, 137584], + }, + }, + }); + }); + + it("02. 
trino_doctor DDM: search & sort", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_trino_doctorsConnection( + search: {field: doctor_id, value: "instance1%", operator: like}, + order: [{field: doctor_id, order: DESC}] + pagination: {first: 5}) { + dist_trino_doctors{ + doctor_id + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_trino_doctorsConnection: { + dist_trino_doctors: [ + { doctor_id: "instance1-d5" }, + { doctor_id: "instance1-d4" }, + { doctor_id: "instance1-d3" }, + { doctor_id: "instance1-d2" }, + { doctor_id: "instance1-d1" }, + ], + }, + }, + }); + }); + + it("03. trino_doctor DDM: paginate", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_trino_doctorsConnection(pagination: { + first: 2, + after: "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDEiLCJiaXJ0aGRheSI6IjE5ODktMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjMsInJhdGluZyI6NC45LCJvbl9ob2xpZGF5IjpmYWxzZSwic3BlY2lhbGl0eSI6WyJUaW5uaXR1cyIsIkFsbGVyZ29sb2d5Il0sInRlbGVwaG9uZSI6WzE1MjIzNCwxMzc1ODRdfQ==" + }){ + edges { + node { + doctor_id + } + cursor + } + dist_trino_doctors{ + doctor_id + } + pageInfo { + startCursor + endCursor + hasNextPage + hasPreviousPage + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_trino_doctorsConnection: { + edges: [ + { + cursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDIiLCJiaXJ0aGRheSI6IjE5NzctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjE1LCJyYXRpbmciOjUsIm9uX2hvbGlkYXkiOmZhbHNlLCJzcGVjaWFsaXR5IjpbIkNhcmRpb2xvZ3kiLCJDYXJkaW90aG9yYWNpYyBTdXJnZXJ5Il0sInRlbGVwaG9uZSI6WzE0MjIzNCwxMjc1ODRdfQ==", + node: { + doctor_id: "instance1-d2", + }, + }, + { + cursor: + 
"eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDMiLCJiaXJ0aGRheSI6IjE5ODctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjUsInJhdGluZyI6NC44LCJvbl9ob2xpZGF5Ijp0cnVlLCJzcGVjaWFsaXR5IjpbIkRlcm1hdG9sb2d5IiwiQWxsZXJnb2xvZ3kiXSwidGVsZXBob25lIjpbMTYyMjM0LDE3NzU4NF19", + node: { + doctor_id: "instance1-d3", + }, + }, + ], + dist_trino_doctors: [ + { doctor_id: "instance1-d2" }, + { doctor_id: "instance1-d3" }, + ], + pageInfo: { + startCursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDIiLCJiaXJ0aGRheSI6IjE5NzctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjE1LCJyYXRpbmciOjUsIm9uX2hvbGlkYXkiOmZhbHNlLCJzcGVjaWFsaXR5IjpbIkNhcmRpb2xvZ3kiLCJDYXJkaW90aG9yYWNpYyBTdXJnZXJ5Il0sInRlbGVwaG9uZSI6WzE0MjIzNCwxMjc1ODRdfQ==", + endCursor: + "eyJkb2N0b3JfaWQiOiJpbnN0YW5jZTEtZDMiLCJiaXJ0aGRheSI6IjE5ODctMTItMDNUMTA6MTU6MzAuMDAwWiIsImV4cGVyaWVuY2UiOjUsInJhdGluZyI6NC44LCJvbl9ob2xpZGF5Ijp0cnVlLCJzcGVjaWFsaXR5IjpbIkRlcm1hdG9sb2d5IiwiQWxsZXJnb2xvZ3kiXSwidGVsZXBob25lIjpbMTYyMjM0LDE3NzU4NF19", + hasNextPage: true, + hasPreviousPage: false, + }, + }, + }, + }); + }); + + it("04. 
trino_doctor DDM: count trino_doctors", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{countDist_trino_doctors}` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody.data.countDist_trino_doctors).equal(5); + }); +}); diff --git a/test/mocha_unit.test.js b/test/mocha_unit.test.js index 40033ac2..1a97f822 100644 --- a/test/mocha_unit.test.js +++ b/test/mocha_unit.test.js @@ -11,6 +11,7 @@ const models_generic = require("./unit_test_misc/data_models_generic"); const models_cassandra = require("./unit_test_misc/data_models_cassandra"); const models_mongodb = require("./unit_test_misc/data_models_mongodb"); const models_amazonS3 = require("./unit_test_misc/data_models_amazonS3"); +const models_trino = require("./unit_test_misc/data_models_trino"); const requireFromString = require("require-from-string"); const helpers = require("./unit_test_misc/helpers/reporting_helpers"); const { test } = require("mocha"); @@ -3016,3 +3017,43 @@ describe("Amazon S3/ Minio Unit Test", function () { testCompare(generated_model, data_test.amazonS3_adapter_readById); }); }); + +describe("Trino/Presto Unit Test", () => { + let data_test = require("./unit_test_misc/test-describe/trino-unittest.js"); + + it("Trino model - doctor constructor", async () => { + let opts = funks.getOptions(models_trino.doctor); + let generated_model = await funks.generateJs("create-models-trino", opts); + testCompare(generated_model, data_test.doctor_constructor); + }); + + it("Trino model - doctor readById", async () => { + let opts = funks.getOptions(models_trino.doctor); + let generated_model = await funks.generateJs("create-models-trino", opts); + testCompare(generated_model, data_test.doctor_readById); + }); + + it("Trino model - doctor countRecords", async () => { + let opts = funks.getOptions(models_trino.doctor); + let generated_model = await funks.generateJs("create-models-trino", opts); + testCompare(generated_model, 
data_test.doctor_countRecords); + }); + + it("Trino model - doctor readAll", async () => { + let opts = funks.getOptions(models_trino.doctor); + let generated_model = await funks.generateJs("create-models-trino", opts); + testCompare(generated_model, data_test.doctor_readAll); + }); + + it("Trino model - doctor readAllCursor", async () => { + let opts = funks.getOptions(models_trino.doctor); + let generated_model = await funks.generateJs("create-models-trino", opts); + testCompare(generated_model, data_test.doctor_readAllCursor); + }); + + it("Trino adapter - dist_doctor_instance1 readById ", async () => { + let opts = funks.getOptions(models_trino.dist_doctor_instance1); + let generated_model = await funks.generateJs("create-trino-adapter", opts); + testCompare(generated_model, data_test.trino_adapter_readById); + }); +}); diff --git a/test/unit_test_misc/data_models_trino.js b/test/unit_test_misc/data_models_trino.js new file mode 100644 index 00000000..441c49e5 --- /dev/null +++ b/test/unit_test_misc/data_models_trino.js @@ -0,0 +1,35 @@ +module.exports.doctor = { + model: "doctor", + storageType: "trino", + attributes: { + doctor_id: "String", + birthday: "DateTime", + experience: "Int", + rating: "Float", + on_holiday: "Boolean", + speciality: "[String]", + telephone: "[Int]", + }, + internalId: "doctor_id", + id: { + name: "doctor_id", + type: "String", + }, +}; + +module.exports.dist_doctor_instance1 = { + model: "dist_doctor", + storageType: "trino-adapter", + adapterName: "dist_doctor_instance1", + regex: "instance1", + attributes: { + doctor_id: "String", + birthday: "DateTime", + experience: "Int", + rating: "Float", + on_holiday: "Boolean", + speciality: "[String]", + telephone: "[Int]", + }, + internalId: "doctor_id", +}; diff --git a/test/unit_test_misc/test-describe/cassandra-storagetype.js b/test/unit_test_misc/test-describe/cassandra-storagetype.js index 645c2637..0189285b 100644 --- a/test/unit_test_misc/test-describe/cassandra-storagetype.js +++ 
b/test/unit_test_misc/test-describe/cassandra-storagetype.js @@ -456,6 +456,8 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 'trino-adapter': + case 'presto-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/distributed-models.js b/test/unit_test_misc/test-describe/distributed-models.js index bc9e77bd..2670362c 100644 --- a/test/unit_test_misc/test-describe/distributed-models.js +++ b/test/unit_test_misc/test-describe/distributed-models.js @@ -167,6 +167,8 @@ static countRecords(search, authorizedAdapters, benignErrorReporter, searchAutho case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 'trino-adapter': + case 'presto-adapter': case 'zendro-webservice-adapter': return adapter.countRecords(search, benignErrorReporter); case 'cassandra-adapter': @@ -243,6 +245,8 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 'trino-adapter': + case 'presto-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/handle-error-ddm.js b/test/unit_test_misc/test-describe/handle-error-ddm.js index d446c1f0..ba0620bc 100644 --- a/test/unit_test_misc/test-describe/handle-error-ddm.js +++ b/test/unit_test_misc/test-describe/handle-error-ddm.js @@ -47,6 +47,8 @@ static countRecords(search, authorizedAdapters, benignErrorReporter, searchAutho case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 'trino-adapter': + case 'presto-adapter': case 'zendro-webservice-adapter': return adapter.countRecords(search, benignErrorReporter); case 
'cassandra-adapter': @@ -124,6 +126,8 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 'trino-adapter': + case 'presto-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/trino-unittest.js b/test/unit_test_misc/test-describe/trino-unittest.js new file mode 100644 index 00000000..c8a8bee4 --- /dev/null +++ b/test/unit_test_misc/test-describe/trino-unittest.js @@ -0,0 +1,228 @@ +module.exports.doctor_constructor = ` +constructor(input) { + for (let key of Object.keys(input)) { + this[key] = input[key]; + } +} +`; +module.exports.doctor_readById = ` +static async readById(id) { + const query = \`SELECT * FROM doctors WHERE \${this.idAttribute()} = '\${id}'\`; + let item = null; + try { + const client = await this.storageHandler; + item = await prestoHelper.queryData(query, client, "trino"); + + if (!item) { + throw new Error(\`Record with ID = "\${id}" does not exist\`); + } + } catch (e) { + throw new Error(e); + } + item = doctor.postReadCast(item)[0]; + return validatorUtil.validateData("validateAfterRead", this, item); +} +`; +module.exports.doctor_countRecords = ` +static async countRecords(search, benignErrorReporter) { + const whereOptions = prestoHelper.searchConditionsToTrino( + search, + definition + ); + const query = \`SELECT COUNT(*) AS num FROM doctors \${whereOptions}\`; + let num = null; + try { + const client = await this.storageHandler; + const result = await prestoHelper.queryData( + query, + client, + "trino" + ); + num = result[1][0][0]; + } catch (e) { + throw new Error(e); + } + return num; +} +`; +module.exports.doctor_readAll = ` +static async readAll(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = 
errorHelper.getDefaultBenignErrorReporterIfUndef( + benignErrorReporter + ); + // build the whereOptions for limit-offset-based pagination + const whereOptions = prestoHelper.searchConditionsToTrino( + search, + definition + ); + const orderOptions = prestoHelper.orderConditionsToTrino( + order, + this.idAttribute(), + true + ); + + const limit = pagination.limit; + const offset = pagination.offset ? pagination.offset : 0; + + let query = \`SELECT * FROM (SELECT row_number() over() AS rn, * FROM trino_doctors) \`; + query += + whereOptions !== "" + ? \`\${whereOptions} AND (rn BETWEEN \${offset + 1} AND \${offset + limit})\` + : \`WHERE rn BETWEEN \${offset + 1} AND \${offset + limit}\`; + query += \` \${orderOptions}\`; + + let result = null; + try { + const client = await this.storageHandler; + result = await prestoHelper.queryData(query, client, "trino"); + } catch (e) { + throw new Error(e); + } + result = doctor.postReadCast(result); + + return validatorUtil.bulkValidateData( + "validateAfterRead", + this, + result, + benignErrorReporter + ); +} +`; + +module.exports.doctor_readAllCursor = ` +static async readAllCursor(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = errorHelper.getDefaultBenignErrorReporterIfUndef( + benignErrorReporter + ); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = prestoHelper.searchConditionsToTrino(search, definition); + let newOrder = isForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = prestoHelper.orderConditionsToTrino( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? 
newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = prestoHelper.cursorPaginationArgumentsToTrino( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) + ? pagination.first + 1 + : helper.isNotUndefinedAndNotNull(pagination.last) + ? pagination.last + 1 + : undefined; + + let query = \`SELECT * FROM doctors + \${filter} + \${sort} + LIMIT \${limit}\`; + let result = null; + + const client = await this.storageHandler; + result = await prestoHelper.queryData(query, client, "trino"); + + result = doctor.postReadCast(result); + // validationCheck after read + result = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + result, + benignErrorReporter + ); + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppResult = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. + let oppFilter = prestoHelper.searchConditionsToTrino(search, definition); + + let oppOrder = oppForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = prestoHelper.orderConditionsToTrino( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? 
oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = prestoHelper.cursorPaginationArgumentsToTrino( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) + ? oppPagination.first + 1 + : helper.isNotUndefinedAndNotNull(oppPagination.last) + ? oppPagination.last + 1 + : undefined; + query = \`SELECT * FROM doctors + \${oppFilter} + \${oppSort} + LIMIT \${oppLimit}\`; + oppResult = await prestoHelper.queryData( + query, + client, + "trino" + ); + oppResult = doctor.postReadCast(oppResult); + } + + // build the graphql Connection Object + result = result.map((res) => { + return new doctor(res); + }); + let edges = result.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppResult, pagination); + return { edges, pageInfo, doctors: edges.map((edge) => edge.node) }; +} +`; + +module.exports.trino_adapter_readById = ` +static async readById(id){ + const query = \`SELECT * FROM dist_doctors WHERE \${this.idAttribute()} = '\${id}'\`; + let item = null; + try { + const client = await this.storageHandler; + item = await prestoHelper.queryData(query, client, "trino-adapter"); + + if (!item) { + throw new Error(\`Record with ID = "\${id}" does not exist\`); + } + } catch (e) { + throw new Error(e); + } + item = dist_doctor_instance1.postReadCast(item)[0]; + return validatorUtil.validateData("validateAfterRead", this, item); +} +`; diff --git a/views/create-distributed-model.ejs b/views/create-distributed-model.ejs index afa28b5e..37432e94 100644 --- a/views/create-distributed-model.ejs +++ b/views/create-distributed-model.ejs @@ -135,6 +135,8 @@ module.exports = class <%- nameLc -%>{ case 'sql-adapter': case 'mongodb-adapter': case 'amazon-s3-adapter': + case 
'trino-adapter': + case 'presto-adapter': + case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/views/create-models-amazonS3.ejs b/views/create-models-amazonS3.ejs index 24e8b33f..c3b18fdf 100644 --- a/views/create-models-amazonS3.ejs +++ b/views/create-models-amazonS3.ejs @@ -5,9 +5,7 @@ const validatorUtil = require("../../utils/validatorUtil"); const helper = require("../../utils/helper"); const errorHelper = require("../../utils/errors"); const fs = require("fs"); -const config = require("../../config/data_models_storage_config.json")[ - "default-amazonS3" -]; +const config = require("../../config/data_models_storage_config.json")["<%- database -%>"]; const amazonS3Helper = require("../../utils/amazonS3_helper"); const path = require("path"); const uuidv4 = require("uuidv4").uuid; diff --git a/views/create-models-trino.ejs b/views/create-models-trino.ejs new file mode 100644 index 00000000..8d9d7c53 --- /dev/null +++ b/views/create-models-trino.ejs @@ -0,0 +1,434 @@ +"use strict"; + +const _ = require("lodash"); +const validatorUtil = require("../../utils/validatorUtil"); +const helper = require("../../utils/helper"); +const errorHelper = require("../../utils/errors"); +const fs = require("fs"); +const prestoHelper = require("../../utils/presto_helper"); +const path = require("path"); +const uuidv4 = require("uuidv4").uuid; +const os = require("os"); + +// An exact copy of the model definition that comes from the .json file +const definition = <%- definition -%>; +/** + * module - Creates a class to administer model + */ +module.exports = class <%- nameLc -%> { + 
constructor(input) { + for (let key of Object.keys(input)) { + this[key] = input[key]; + } + } + + get storageHandler() { + return <%- nameLc -%>.storageHandler; + } + + /** + * name - Getter for the name attribute + * + * This attribute is needed by the models' index + * @return {string} The name of the model + */ + static get name() { + return "<%- nameLc -%>"; + } + + /** + * Cast JSON string to array for the validation. + * @param {object} record Record with JSON string if necessary. + * @return {object} Parsed data record. + */ + static postReadCast(record) { + if (!record) { + return []; + } + const column_index = {}; + record[0].map((obj, index) => { + column_index[obj.name] = index; + }); + + let result = []; + for (const item of record[1]) { + let record = {}; + for (const attr in definition.attributes) { + const type = definition.attributes[attr].replace(/\s+/g, ""); + if ( + type[0] === "[" && + item[column_index[attr]] !== undefined && + item[column_index[attr]] !== null + ) { + record[attr] = JSON.parse(item[column_index[attr]]); + } else if (type === "DateTime") { + record[attr] = new Date(item[column_index[attr]]).toISOString(); + } else { + record[attr] = item[column_index[attr]]; + } + } + result.push(record); + } + return result; + } + + /** + * readById - The model implementation for reading a single record given by its ID + * + * This method is the implementation for reading a single record for the trino storage type, based on SQL. 
+ * @param {string} id - The ID of the requested record + * @return {object} The requested record as an object with the type <%- nameLc -%>, or an error object if the validation after reading fails + * @throws {Error} If the requested record does not exist + */ + static async readById(id) { + const query = `SELECT * FROM <%- namePl -%> WHERE ${this.idAttribute()} = '${id}'`; + let item = null; + try { + const client = await this.storageHandler; + item = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + + if (!item) { + throw new Error(`Record with ID = "${id}" does not exist`); + } + } catch (e) { + throw new Error(e); + } + item = <%- nameLc -%>.postReadCast(item)[0]; + return validatorUtil.validateData("validateAfterRead", this, item); + } + + /** + * countRecords - The model implementation for counting the number of records, possibly restricted by a search term + * + * This method is the implementation for counting the number of records that fulfill a given condition, or for all records in the table, + * for the trino storage type, based on SQL. 
* @param {object} search - The search term that restricts the set of records to be counted - if undefined, all records in the table + * @param {BenignErrorReporter} benignErrorReporter - The reporter used to collect and surface non-fatal (benign) errors + * @return {number} The number of records that fulfill the condition, or of all records in the table + */ + static async countRecords(search, benignErrorReporter) { + const whereOptions = prestoHelper.searchConditionsToTrino( + search, + definition + ); + const query = `SELECT COUNT(*) AS num FROM <%- namePl -%> ${whereOptions}`; + let num = null; + try { + const client = await this.storageHandler; + const result = await prestoHelper.queryData( + query, + client, + "<%- storageType -%>" + ); + num = result[1][0][0]; + } catch (e) { + throw new Error(e); + } + return num; + } + + /** + * readAll - The model implementation for limit-offset based pagination. Trino has no native OFFSET, + * so the offset window is emulated via a row_number() column; prefer *readAllCursor* where possible. + * NOTE(review): the query below hard-codes the table "trino_doctors" instead of the model's table name — confirm. + */ + static async readAll(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = errorHelper.getDefaultBenignErrorReporterIfUndef( + benignErrorReporter + ); + // build the whereOptions for limit-offset-based pagination + const whereOptions = prestoHelper.searchConditionsToTrino( + search, + definition + ); + const orderOptions = prestoHelper.orderConditionsToTrino( + order, + this.idAttribute(), + true + ); + + const limit = pagination.limit; + const offset = pagination.offset ? pagination.offset : 0; + + let query = `SELECT * FROM (SELECT row_number() over() AS rn, * FROM trino_doctors) `; + query += + whereOptions !== "" + ? 
`${whereOptions} AND (rn BETWEEN ${offset + 1} AND ${offset + limit})` + : `WHERE rn BETWEEN ${offset + 1} AND ${offset + limit}`; + query += ` ${orderOptions}`; + let result = null; + try { + const client = await this.storageHandler; + result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + } catch (e) { + throw new Error(e); + } + result = <%- nameLc -%>.postReadCast(result); + + return validatorUtil.bulkValidateData( + "validateAfterRead", + this, + result, + benignErrorReporter + ); + } + + /** + * readAllCursor - The model implementation for searching for records in Trino. This method uses cursor based pagination. + * + * @param {object} search - The search condition for which records shall be fetched + * @param {object} pagination - The parameters for pagination, which can be used to get a subset of the requested record set. + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {object} The set of records, possibly constrained by pagination, with full cursor information for all records + */ + static async readAllCursor(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = errorHelper.getDefaultBenignErrorReporterIfUndef( + benignErrorReporter + ); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = prestoHelper.searchConditionsToTrino(search, definition); + let newOrder = isForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = prestoHelper.orderConditionsToTrino( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? 
newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = prestoHelper.cursorPaginationArgumentsToTrino( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) + ? pagination.first + 1 + : helper.isNotUndefinedAndNotNull(pagination.last) + ? pagination.last + 1 + : undefined; + + let query = `SELECT * FROM <%- namePl -%> + ${filter} + ${sort} + LIMIT ${limit}`; + let result = null; + + const client = await this.storageHandler; + result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + + result = <%- nameLc -%>.postReadCast(result); + // validationCheck after read + result = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + result, + benignErrorReporter + ); + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppResult = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. + let oppFilter = prestoHelper.searchConditionsToTrino(search, definition); + + let oppOrder = oppForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = prestoHelper.orderConditionsToTrino( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? 
oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = prestoHelper.cursorPaginationArgumentsToTrino( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) + ? oppPagination.first + 1 + : helper.isNotUndefinedAndNotNull(oppPagination.last) + ? oppPagination.last + 1 + : undefined; + query = `SELECT * FROM <%- namePl -%> + ${oppFilter} + ${oppSort} + LIMIT ${oppLimit}`; + oppResult = await prestoHelper.queryData( + query, + client, + "<%- storageType -%>" + ); + oppResult = <%- nameLc -%>.postReadCast(oppResult); + } + + // build the graphql Connection Object + result = result.map((res) => { + return new <%- nameLc -%>(res); + }); + let edges = result.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppResult, pagination); + return { edges, pageInfo, <%- namePl -%>: edges.map((edge) => edge.node) }; + } + + /** + * addOne - Not implemented for Trino. + */ + static async addOne(input) { + throw new Error("Not supported by Trino"); + } + + /** + * deleteOne - Delete the whole file. + */ + static async deleteOne(id) { + throw new Error("Not supported by Trino"); + } + + /** + * updateOne - Not implemented for Trino. + */ + static async updateOne(input) { + throw new Error("Not supported by Trino"); + } + + /** + * bulkAddCsv - Add records from csv file + * + * @param {object} context - contextual information, e.g. csv file, record delimiter and column names. + */ + static async bulkAddCsv(context) { + throw new Error("Not supported by Trino"); + } + /** + * csvTableTemplate - Allows the user to download a template in CSV format with the + * properties and types of this model. 
+ * + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * GraphQL output {error: ..., data: ...}. If the function reportError of the benignErrorReporter + * is invoked, the server will include any so reported errors in the final response, i.e. the + * GraphQL response will have a non empty errors property. + */ + static async csvTableTemplate(benignErrorReporter) { + return helper.csvTableTemplate(definition); + } + + /** + * idAttribute - Check whether an attribute "internalId" is given in the JSON model. If not the standard "id" is used instead. + * + * @return {type} Name of the attribute that functions as an internalId + */ + + static idAttribute() { + return <%- nameLc -%>.definition.id.name; + } + + /** + * idAttributeType - Return the Type of the internalId. + * + * @return {type} Type given in the JSON model + */ + + static idAttributeType() { + return <%- nameLc -%>.definition.id.type; + } + + /** + * getIdValue - Get the value of the idAttribute ("id", or "internalId") for an instance of <%- nameLc -%>. + * + * @return {type} id value + */ + + getIdValue() { + return this[<%- nameLc -%>.idAttribute()]; + } + + /** + * definition - Getter for the attribute 'definition' + * @return {string} the definition string + */ + static get definition() { + return definition; + } + + /** + * base64Decode - Decode a base 64 String to UTF-8. 
+ * @param {string} cursor - The cursor to be decoded into the record, given in base 64 + * @return {string} The stringified object in UTF-8 format + */ + static base64Decode(cursor) { + return Buffer.from(cursor, "base64").toString("utf-8"); + } + + /** + * base64Enconde - Encode <%- nameLc -%> to a base 64 String + * + * @return {string} The <%- nameLc -%> object, encoded in a base 64 String + */ + base64Enconde() { + return Buffer.from(JSON.stringify(this.stripAssociations())).toString( + "base64" + ); + } + + /** + * stripAssociations - Instant method for getting all attributes of <%- nameLc -%>. + * + * @return {object} The attributes of <%- nameLc -%> in object form + */ + stripAssociations() { + let attributes = Object.keys(<%- nameLc -%>.definition.attributes); + let data_values = _.pick(this, attributes); + return data_values; + } + + /** + * externalIdsArray - Get all attributes of <%- nameLc -%> that are marked as external IDs. + * + * @return {Array} An array of all attributes of <%- nameLc -%> that are marked as external IDs + */ + static externalIdsArray() { + let externalIds = []; + if (definition.externalIds) { + externalIds = definition.externalIds; + } + + return externalIds; + } + + /** + * externalIdsObject - Get all external IDs of <%- nameLc -%>. 
+ * + * @return {object} An object that has the names of the external IDs as keys and their types as values + */ + static externalIdsObject() { + return {}; + } +}; diff --git a/views/create-trino-adapter.ejs b/views/create-trino-adapter.ejs new file mode 100644 index 00000000..cead04e2 --- /dev/null +++ b/views/create-trino-adapter.ejs @@ -0,0 +1,400 @@ +'use strict'; + +const _ = require("lodash"); +const validatorUtil = require("../../utils/validatorUtil"); +const helper = require("../../utils/helper"); +const errorHelper = require("../../utils/errors"); +const fs = require("fs"); +const prestoHelper = require("../../utils/presto_helper"); +const path = require("path"); +const uuidv4 = require("uuidv4").uuid; +const os = require("os"); + +const iriRegex = new RegExp('<%- regex -%>'); + +// An exact copy of the the model definition that comes from the .json file +const definition = <%- definition -%>; +/** + * module - Creates a class to administer model + */ +module.exports = class <%- adapterName -%> { + constructor(input) { + for (let key of Object.keys(input)) { + this[key] = input[key]; + } + } + + get storageHandler() { + return <%- adapterName-%>.storageHandler + } + + /** + * adapterName - Getter for the name attribute + * + * This attribute is needed by the models' index + * @return {string} The adapterName of the model + */ + static get adapterName(){ + return "<%- adapterName -%>"; + } + + static get adapterType(){ + return '<%- storageType -%>'; + } + + static recognizeId(iri){ + return iriRegex.test(iri); + } + + /** + * Cast JSON string to array for the validation. + * @param {object} record Record with JSON string if necessary. + * @return {object} Parsed data record. 
+ */ + static postReadCast(record) { + if (!record) { + return []; + } + const column_index = {}; + record[0].map((obj, index) => { + column_index[obj.name] = index; + }); + + let result = []; + for (const item of record[1]) { + let record = {}; + for (const attr in definition.attributes) { + const type = definition.attributes[attr].replace(/\s+/g, ""); + if ( + type[0] === "[" && + item[column_index[attr]] !== undefined && + item[column_index[attr]] !== null + ) { + record[attr] = JSON.parse(item[column_index[attr]]); + } else if (type === "DateTime") { + record[attr] = new Date(item[column_index[attr]]).toISOString(); + } else { + record[attr] = item[column_index[attr]]; + } + } + result.push(record); + } + return result; + } + + /** + * readById - The model implementation for reading a single record given by its ID + * + * This method is the implementation for reading a single record for the trino storage type, based on SQL. + * @param {string} id - The ID of the requested record + * @return {object} The requested record as an object with the type <%- nameLc -%>, or an error object if the validation after reading fails + * @throws {Error} If the requested record does not exist + */ + static async readById(id){ + const query = `SELECT * FROM <%- namePl -%> WHERE ${this.idAttribute()} = '${id}'`; + let item = null; + try { + const client = await this.storageHandler; + item = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + + if (!item) { + throw new Error(`Record with ID = "${id}" does not exist`); + } + } catch (e) { + throw new Error(e); + } + item = <%- adapterName -%>.postReadCast(item)[0]; + return validatorUtil.validateData("validateAfterRead", this, item); + } + + /** + * countRecords - The model implementation for counting the number of records, possibly restricted by a search term + * + * This method is the implementation for counting the number of records that fulfill a given condition, or for all records in the table, + * for the 
trino storage type, based on SQL. + * @param {object} search - The search term that restricts the set of records to be counted - if undefined, all records in the table + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {number} The number of records that fulfill the condition, or of all records in the table + */ + static async countRecords(search, benignErrorReporter){ + const whereOptions = prestoHelper.searchConditionsToTrino( + search, + definition + ); + const query = `SELECT COUNT(*) AS num FROM <%- namePl -%> ${whereOptions}`; + let num = null; + try { + const client = await this.storageHandler; + const result = await prestoHelper.queryData( + query, + client, + "<%- storageType -%>" + ); + num = result[1][0][0]; + } catch (e) { + throw new Error(e); + } + return num; + } + + /** + * readAllCursor - The model implementation for searching for records in Trino. This method uses cursor based pagination. + * + * @param {object} search - The search condition for which records shall be fetched + * @param {object} pagination - The parameters for pagination, which can be used to get a subset of the requested record set. + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {object} The set of records, possibly constrained by pagination, with full cursor information for all records + */ + static async readAllCursor(search, order, pagination, benignErrorReporter){ + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = errorHelper.getDefaultBenignErrorReporterIfUndef( + benignErrorReporter + ); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = prestoHelper.searchConditionsToTrino(search, definition); + let newOrder = isForwardPagination + ? 
order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = prestoHelper.orderConditionsToTrino( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = prestoHelper.cursorPaginationArgumentsToTrino( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) + ? pagination.first + 1 + : helper.isNotUndefinedAndNotNull(pagination.last) + ? pagination.last + 1 + : undefined; + + let query = `SELECT * FROM <%- namePl -%> + ${filter} + ${sort} + LIMIT ${limit}`; + let result = null; + + const client = await this.storageHandler; + result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + + result = <%- adapterName -%>.postReadCast(result); + // validationCheck after read + result = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + result, + benignErrorReporter + ); + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppResult = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. + let oppFilter = prestoHelper.searchConditionsToTrino(search, definition); + + let oppOrder = oppForwardPagination + ? 
order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = prestoHelper.orderConditionsToTrino( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = prestoHelper.cursorPaginationArgumentsToTrino( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) + ? oppPagination.first + 1 + : helper.isNotUndefinedAndNotNull(oppPagination.last) + ? oppPagination.last + 1 + : undefined; + query = `SELECT * FROM <%- namePl -%> + ${oppFilter} + ${oppSort} + LIMIT ${oppLimit}`; + oppResult = await prestoHelper.queryData( + query, + client, + "<%- storageType -%>" + ); + oppResult = <%- adapterName -%>.postReadCast(oppResult); + } + + // build the graphql Connection Object + result = result.map((res) => { + return new <%- adapterName -%>(res); + }); + let edges = result.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppResult, pagination); + return { edges, pageInfo, <%- namePl -%>: edges.map((edge) => edge.node) }; + } + + /** + * addOne - Not implemented for Trino. + */ + static async addOne(input){ + throw new Error('Not supported by Trino'); + } + + /** + * deleteOne - Delete the whole file. + */ + static async deleteOne(id){ + throw new Error('Not supported by Trino'); + } + + /** + * updateOne - Not implemented for Trino. + */ + static async updateOne(input){ + throw new Error('Not supported by Trino'); + } + + /** + * bulkAddCsv - Add records from csv file + * + * @param {object} context - contextual information, e.g. csv file, record delimiter and column names. 
+ */ + static async bulkAddCsv(context) { + throw new Error("Not supported by Trino"); + } + + /** + * csvTableTemplate - Allows the user to download a template in CSV format with the + * properties and types of this model. + * + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * GraphQL output {error: ..., data: ...}. If the function reportError of the benignErrorReporter + * is invoked, the server will include any so reported errors in the final response, i.e. the + * GraphQL response will have a non empty errors property. + */ + static async csvTableTemplate(benignErrorReporter){ + return helper.csvTableTemplate(definition); + } + + + /** + * idAttribute - Check whether an attribute "internalId" is given in the JSON model. If not the standard "id" is used instead. + * + * @return {type} Name of the attribute that functions as an internalId + */ + + static idAttribute() { + return <%- adapterName -%>.definition.id.name; + } + + /** + * idAttributeType - Return the Type of the internalId. + * + * @return {type} Type given in the JSON model + */ + + static idAttributeType() { + return <%- adapterName -%>.definition.id.type; + } + + /** + * getIdValue - Get the value of the idAttribute ("id", or "internalId") for an instance of <%- adapterName -%>. + * + * @return {type} id value + */ + + getIdValue() { + return this[<%- adapterName -%>.idAttribute()] + } + + /** + * definition - Getter for the attribute 'definition' + * @return {string} the definition string + */ + static get definition(){ + return definition; + } + + /** + * base64Decode - Decode a base 64 String to UTF-8. 
+ * @param {string} cursor - The cursor to be decoded into the record, given in base 64 + * @return {string} The stringified object in UTF-8 format + */ + static base64Decode(cursor){ + return Buffer.from(cursor, 'base64').toString('utf-8'); + } + + /** + * base64Enconde - Encode <%- adapterName -%> to a base 64 String + * + * @return {string} The <%- adapterName -%> object, encoded in a base 64 String + */ + base64Enconde(){ + return Buffer.from(JSON.stringify(this.stripAssociations())).toString('base64'); + } + + /** + * stripAssociations - Instant method for getting all attributes of <%- adapterName -%>. + * + * @return {object} The attributes of <%- adapterName -%> in object form + */ + stripAssociations(){ + let attributes = Object.keys(<%- adapterName -%>.definition.attributes); + <%if( defaultId ){-%>attributes.push('<%- idAttribute -%>'); <%}-%> + let data_values = _.pick(this, attributes); + return data_values; + } + + /** + * externalIdsArray - Get all attributes of <%- adapterName -%> that are marked as external IDs. + * + * @return {Array} An array of all attributes of <%- adapterName -%> that are marked as external IDs + */ + static externalIdsArray(){ + let externalIds = []; + if(definition.externalIds){ + externalIds = definition.externalIds; + } + + return externalIds; + } + + /** + * externalIdsObject - Get all external IDs of <%- adapterName -%>. 
+ * + * @return {object} An object that has the names of the external IDs as keys and their types as values + */ + static externalIdsObject(){ + return { + <%for(let i=0; i < externalIds.length; i++){-%> <%=externalIds[i]-%>: '<%=attributes[ externalIds[i] ]-%>' <%if(i !== (externalIds.length -1) ){ -%>,<%}-%><%}-%> + }; + } +} \ No newline at end of file From 775675be2121df5ff511589d2d6698afaff8451a Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Thu, 6 May 2021 12:21:48 +0200 Subject: [PATCH 2/8] feat: add cli configuration --- test/testenv_cli.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/testenv_cli.sh b/test/testenv_cli.sh index cb47a65b..5fba6eee 100644 --- a/test/testenv_cli.sh +++ b/test/testenv_cli.sh @@ -112,6 +112,8 @@ if [[ $OPT_RUN_TESTS == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_mongodb.test.js" mocha "${TEST_DIR}/mocha_integration_cassandra.test.js" mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" # 1. Remove docker containers, images, and volumes # 2. Remove the testing environment @@ -138,6 +140,8 @@ if [[ $OPT_GENCODE_RUNTESTS == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_mongodb.test.js" mocha "${TEST_DIR}/mocha_integration_cassandra.test.js" mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" # 1. Remove docker containers, images, and volumes # 2. 
Remove the testing environment @@ -166,6 +170,8 @@ if [[ $DEFAULT_RUN == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_mongodb.test.js" mocha "${TEST_DIR}/mocha_integration_cassandra.test.js" mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" + mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" # 1. Remove docker containers, images, and volumes # 2. Remove the testing environment From deb3aec330bc482ee5347fcd3125cadde616be37 Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Mon, 10 May 2021 11:27:01 +0200 Subject: [PATCH 3/8] update: update calling query method --- views/create-models-trino.ejs | 18 +++++------------- views/create-trino-adapter.ejs | 16 ++++------------ 2 files changed, 9 insertions(+), 25 deletions(-) diff --git a/views/create-models-trino.ejs b/views/create-models-trino.ejs index 8d9d7c53..c0657d89 100644 --- a/views/create-models-trino.ejs +++ b/views/create-models-trino.ejs @@ -85,7 +85,7 @@ module.exports = class <%- nameLc -%> { let item = null; try { const client = await this.storageHandler; - item = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + item = await prestoHelper.queryData(query, client); if (!item) { throw new Error(`Record with ID = "${id}" does not exist`); @@ -115,11 +115,7 @@ module.exports = class <%- nameLc -%> { let num = null; try { const client = await this.storageHandler; - const result = await prestoHelper.queryData( - query, - client, - "<%- storageType -%>" - ); + const result = await prestoHelper.queryData(query, client); num = result[1][0][0]; } catch (e) { throw new Error(e); @@ -160,7 +156,7 @@ module.exports = class <%- nameLc -%> { let result = null; try { const client = await this.storageHandler; - result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + result = await prestoHelper.queryData(query, client); } catch (e) { throw new Error(e); } @@ -224,7 +220,7 @@ 
module.exports = class <%- nameLc -%> { let result = null; const client = await this.storageHandler; - result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + result = await prestoHelper.queryData(query, client); result = <%- nameLc -%>.postReadCast(result); // validationCheck after read @@ -276,11 +272,7 @@ module.exports = class <%- nameLc -%> { ${oppFilter} ${oppSort} LIMIT ${oppLimit}`; - oppResult = await prestoHelper.queryData( - query, - client, - "<%- storageType -%>" - ); + oppResult = await prestoHelper.queryData(query, client); oppResult = <%- nameLc -%>.postReadCast(oppResult); } diff --git a/views/create-trino-adapter.ejs b/views/create-trino-adapter.ejs index cead04e2..fe10fe86 100644 --- a/views/create-trino-adapter.ejs +++ b/views/create-trino-adapter.ejs @@ -95,7 +95,7 @@ module.exports = class <%- adapterName -%> { let item = null; try { const client = await this.storageHandler; - item = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + item = await prestoHelper.queryData(query, client); if (!item) { throw new Error(`Record with ID = "${id}" does not exist`); @@ -125,11 +125,7 @@ module.exports = class <%- adapterName -%> { let num = null; try { const client = await this.storageHandler; - const result = await prestoHelper.queryData( - query, - client, - "<%- storageType -%>" - ); + const result = await prestoHelper.queryData(query, client); num = result[1][0][0]; } catch (e) { throw new Error(e); @@ -187,7 +183,7 @@ module.exports = class <%- adapterName -%> { let result = null; const client = await this.storageHandler; - result = await prestoHelper.queryData(query, client, "<%- storageType -%>"); + result = await prestoHelper.queryData(query, client); result = <%- adapterName -%>.postReadCast(result); // validationCheck after read @@ -239,11 +235,7 @@ module.exports = class <%- adapterName -%> { ${oppFilter} ${oppSort} LIMIT ${oppLimit}`; - oppResult = await prestoHelper.queryData( - query, - client, - 
"<%- storageType -%>" - ); + oppResult = await prestoHelper.queryData(query, client); oppResult = <%- adapterName -%>.postReadCast(oppResult); } From 631f6c5bab6f88f35b61ea2c3a748e69473fa874 Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Mon, 10 May 2021 11:40:39 +0200 Subject: [PATCH 4/8] fix: trino unittest --- .../test-describe/trino-unittest.js | 20 ++++++------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/test/unit_test_misc/test-describe/trino-unittest.js b/test/unit_test_misc/test-describe/trino-unittest.js index c8a8bee4..ade313b7 100644 --- a/test/unit_test_misc/test-describe/trino-unittest.js +++ b/test/unit_test_misc/test-describe/trino-unittest.js @@ -11,7 +11,7 @@ static async readById(id) { let item = null; try { const client = await this.storageHandler; - item = await prestoHelper.queryData(query, client, "trino"); + item = await prestoHelper.queryData(query, client); if (!item) { throw new Error(\`Record with ID = "\${id}" does not exist\`); @@ -33,11 +33,7 @@ static async countRecords(search, benignErrorReporter) { let num = null; try { const client = await this.storageHandler; - const result = await prestoHelper.queryData( - query, - client, - "trino" - ); + const result = await prestoHelper.queryData(query, client); num = result[1][0][0]; } catch (e) { throw new Error(e); @@ -75,7 +71,7 @@ static async readAll(search, order, pagination, benignErrorReporter) { let result = null; try { const client = await this.storageHandler; - result = await prestoHelper.queryData(query, client, "trino"); + result = await prestoHelper.queryData(query, client); } catch (e) { throw new Error(e); } @@ -133,7 +129,7 @@ static async readAllCursor(search, order, pagination, benignErrorReporter) { let result = null; const client = await this.storageHandler; - result = await prestoHelper.queryData(query, client, "trino"); + result = await prestoHelper.queryData(query, client); result = doctor.postReadCast(result); // validationCheck after read @@ 
-185,11 +181,7 @@ static async readAllCursor(search, order, pagination, benignErrorReporter) { \${oppFilter} \${oppSort} LIMIT \${oppLimit}\`; - oppResult = await prestoHelper.queryData( - query, - client, - "trino" - ); + oppResult = await prestoHelper.queryData(query, client); oppResult = doctor.postReadCast(oppResult); } @@ -214,7 +206,7 @@ static async readById(id){ let item = null; try { const client = await this.storageHandler; - item = await prestoHelper.queryData(query, client, "trino-adapter"); + item = await prestoHelper.queryData(query, client); if (!item) { throw new Error(\`Record with ID = "\${id}" does not exist\`); From cf228c125f42588bdb86d751341077cf3b2519bd Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Wed, 26 May 2021 18:24:27 +0200 Subject: [PATCH 5/8] feat: neo4j integration --- funks.js | 48 +- lib/generators-aux.js | 2 + views/create-amazonS3-adapter.ejs | 4 +- views/create-distributed-model.ejs | 2 + views/create-models-neo4j.ejs | 530 ++++++++++++++++++ views/create-neo4j-adapter.ejs | 482 ++++++++++++++++ views/includes/bulkAssociations-models.ejs | 37 ++ .../create-adapter-fields-mutations.ejs | 87 +++ .../create-models-fieldMutations-neo4j.ejs | 97 ++++ 9 files changed, 1280 insertions(+), 9 deletions(-) create mode 100644 views/create-models-neo4j.ejs create mode 100644 views/create-neo4j-adapter.ejs create mode 100644 views/includes/create-models-fieldMutations-neo4j.ejs diff --git a/funks.js b/funks.js index 69c059c0..cef9c62b 100644 --- a/funks.js +++ b/funks.js @@ -462,8 +462,8 @@ writeAcls = async function (dir_write, models, adapters) { //set file name let file_name = dir_write + "/acl_rules.js"; //set names - const modelsNames = models.map(item => item[0]); - let adminModelsNames = ['role', 'user', 'role_to_user']; + const modelsNames = models.map((item) => item[0]); + let adminModelsNames = ["role", "user", "role_to_user"]; //generate await generateSection( "acl_rules", @@ -744,7 +744,8 @@ module.exports.parseAssociations = 
function (dataModel) { if (isStandardAssociation) { //standard associations_info.associations.push(association); - association.targetStorageType = association.targetStorageType.toLowerCase(); + association.targetStorageType = + association.targetStorageType.toLowerCase(); associations_info.foreignKeyAssociations[name] = association.targetKey; } else { //generic @@ -977,6 +978,7 @@ generateSections = async function (sections, opts, dir_write) { case "models-mongodb": case "models-amazonS3": case "models-trino": + case "models-neo4j": //adapters case "sql-adapter": case "zendro-adapters": @@ -985,6 +987,7 @@ generateSections = async function (sections, opts, dir_write) { case "mongodb-adapter": case "amazonS3-adapter": case "trino-adapter": + case "neo4j-adapter": file_name = dir_write + "/" + section.dir + "/" + section.fileName + ".js"; break; @@ -1090,6 +1093,7 @@ getStorageType = function (dataModel) { case "amazon-s3": case "trino": case "presto": + case "neo4j": //adapters case "sql-adapter": case "ddm-adapter": @@ -1100,6 +1104,7 @@ getStorageType = function (dataModel) { case "amazon-s3-adapter": case "trino-adapter": case "presto-adapter": + case "neo4j-adapter": //ok break; @@ -1111,8 +1116,8 @@ getStorageType = function (dataModel) { `ERROR: The attribute 'storageType' has an invalid value. \n One of the following types is expected: [sql, distributed-data-model, zendro-server, generic, sql-adapter, ddm-adapter, zendro-webservice-adapter, generic-adapter, - cassandra, mongodb, amazon-s3, trino, presto, cassandra-adapter, mongodb-adapter, - amazon-s3-adapter, trino-adapter, presto-adapter]. + cassandra, mongodb, amazon-s3, trino, presto, neo4j, cassandra-adapter, mongodb-adapter, + amazon-s3-adapter, trino-adapter, presto-adapter, neo4j-adapter]. But '${dataModel.storageType}' was obtained on ${ dataModel.adapterName !== undefined ? 
"adapter" : "model" } '${ @@ -1164,6 +1169,7 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { "models/amazonS3", "models/trino", "models/presto", + "models/neo4j", ]; let models = []; let adapters = []; @@ -1504,6 +1510,24 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { ]; break; + case "neo4j": + sections = [ + { dir: "schemas", template: "schemas", fileName: opts.nameLc }, + { dir: "resolvers", template: "resolvers", fileName: opts.nameLc }, + { + dir: "models/neo4j", + template: "models-neo4j", + fileName: opts.nameLc, + }, + { + dir: "validations", + template: "validations", + fileName: opts.nameLc, + }, + { dir: "patches", template: "patches", fileName: opts.nameLc }, + ]; + break; + case "zendro-webservice-adapter": sections = [ { @@ -1608,6 +1632,18 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { { dir: "patches", template: "patches", fileName: opts.adapterName }, ]; break; + + case "neo4j-adapter": + sections = [ + { + dir: "models/adapters", + template: "neo4j-adapter", + fileName: opts.adapterName, + }, + { dir: "patches", template: "patches", fileName: opts.adapterName }, + ]; + break; + default: break; } @@ -1643,7 +1679,7 @@ module.exports.generateCode = async function (json_dir, dir_write, options) { ) { adapters.push(opts.adapterName); } else { - models.push([opts.name , opts.namePl, opts.nameLc]); + models.push([opts.name, opts.namePl, opts.nameLc]); } } //msg diff --git a/lib/generators-aux.js b/lib/generators-aux.js index 65553bac..3f3b5662 100644 --- a/lib/generators-aux.js +++ b/lib/generators-aux.js @@ -18,6 +18,8 @@ exports.getModelDatabase = function (dataModel) { "trino-adapter": "default-trino", presto: "default-presto", "presto-adapter": "default-presto", + neo4j: "default-neo4j", + "neo4j-adapter": "default-neo4j", }; const storageType = dataModel.storageType.toLowerCase(); diff --git a/views/create-amazonS3-adapter.ejs 
b/views/create-amazonS3-adapter.ejs index c93e2879..291f82ad 100644 --- a/views/create-amazonS3-adapter.ejs +++ b/views/create-amazonS3-adapter.ejs @@ -5,9 +5,7 @@ const validatorUtil = require("../../utils/validatorUtil"); const helper = require("../../utils/helper"); const errorHelper = require("../../utils/errors"); const fs = require("fs"); -const config = require("../../config/data_models_storage_config.json")[ - "default-amazonS3" -]; +const config = require("../../config/data_models_storage_config.json")["<%- database -%>"]; const amazonS3Helper = require("../../utils/amazonS3_helper"); const path = require("path"); const uuidv4 = require("uuidv4").uuid; diff --git a/views/create-distributed-model.ejs b/views/create-distributed-model.ejs index 37432e94..e7f58011 100644 --- a/views/create-distributed-model.ejs +++ b/views/create-distributed-model.ejs @@ -137,6 +137,7 @@ module.exports = class <%- nameLc -%>{ case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.countRecords(search, benignErrorReporter); case 'cassandra-adapter': @@ -213,6 +214,7 @@ module.exports = class <%- nameLc -%>{ case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/views/create-models-neo4j.ejs b/views/create-models-neo4j.ejs new file mode 100644 index 00000000..7a52f9dc --- /dev/null +++ b/views/create-models-neo4j.ejs @@ -0,0 +1,530 @@ +"use strict"; + +const _ = require("lodash"); +const validatorUtil = require("../../utils/validatorUtil"); +const path = require("path"); +const helper = require("../../utils/helper"); +const neo4jHelper = require("../../utils/neo4j_helper"); +const errorHelper = require("../../utils/errors"); +const neo4j = require("neo4j-driver"); +const config = 
require("../../config/data_models_storage_config.json")["<%- database -%>"]; +const models = require(path.join(__dirname, "..", "index.js")); + +// An exact copy of the the model definition that comes from the .json file +const definition = <%- definition -%>; + +/** + * module - Creates a class for Neo4j data model + * + */ + +module.exports = class <%- nameLc -%> { + constructor(input) { + for (let key of Object.keys(input)) { + this[key] = input[key]; + } + } + + /** + * Get the storage handler, which is a static property of the data model class. + * @returns connected neo4j client + */ + get storageHandler() { + return <%- nameLc -%>.storageHandler; + } + + /** + * name - Getter for the name attribute + * + * This attribute is needed by the models' index + * @return {string} The name of the model + */ + static get name() { + return "<%- nameLc -%>"; + } + + /** + * readById - The model implementation for reading a single record given by its ID + * + * This method is the implementation for reading a single record for the neo4j storage type, based on neo4j node driver. 
+ * @param {string} id - The ID of the requested record + * @return {object} The requested record as an object with the type <%- nameLc -%>, or an error object if the validation after reading fails + * @throws {Error} If the requested record does not exist + */ + static async readById(id) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) RETURN n`, + { id: id } + ); + if (result.records.length === 0) { + throw new Error(`Record with ID = "${id}" does not exist`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return validatorUtil.validateData( + "validateAfterRead", + this, + new <%- nameLc -%>(node.properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * countRecords - The model implementation for counting the number of records, possibly restricted by a search term + * + * This method is the implementation for counting the number of records that fulfill a given condition, or for all records in the table, + * for the Neo4j storage type, based on Neo4j node driver. 
+ * @param {object} search - The search term that restricts the set of records to be counted - if undefined, all records in the table + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {number} The number of records that fulfill the condition, or of all records in the table + */ + static async countRecords(search) { + const whereOptions = neo4jHelper.searchConditionsToNeo4j( + search, + definition + ); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%>) ${whereOptions} RETURN COUNT(n)` + ); + const singleRecord = result.records[0]; + const num = singleRecord.get(0); + return num; + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * readAll - The model implementation for searching for records in Neo4j. This method uses limit-offset-based pagination. 
+ * + * @param {object} search - Search argument for filtering records + * @param {array} order - Type of sorting (ASC, DESC) for each field + * @param {object} pagination - Offset and limit to get the records from and to respectively + * @param {BenignErrorReporter} - benignErrorReporter can be used to generate the standard + * @return {array} Array of records holding conditions specified by search, order and pagination argument + */ + static async readAll(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = + errorHelper.getDefaultBenignErrorReporterIfUndef(benignErrorReporter); + // build the filter object for limit-offset-based pagination + const whereOptions = neo4jHelper.searchConditionsToNeo4j( + search, + definition + ); + const orderOptions = neo4jHelper.orderConditionsToNeo4j( + order, + this.idAttribute(), + true + ); + + const limit = pagination.limit; + const offset = pagination.offset ? pagination.offset : 0; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%>) ${whereOptions} RETURN n ${orderOptions} SKIP ${offset} LIMIT ${limit} ` + ); + const nodes = result.records.map((res) => res.get(0).properties); + return validatorUtil.bulkValidateData( + "validateAfterRead", + this, + nodes, + benignErrorReporter + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * readAllCursor - The model implementation for searching for records in Neo4j. This method uses cursor based pagination. 
+ * + * @param {object} search - The search condition for which records shall be fetched + * @param {array} order - Type of sorting (ASC, DESC) for each field + * @param {object} pagination - The parameters for pagination, which can be used to get a subset of the requested record set. + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {object} The set of records, possibly constrained by pagination, with full cursor information for all records + */ + static async readAllCursor(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = + errorHelper.getDefaultBenignErrorReporterIfUndef(benignErrorReporter); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = neo4jHelper.searchConditionsToNeo4j(search, definition); + let newOrder = isForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = neo4jHelper.orderConditionsToNeo4j( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) + ? pagination.first + 1 + : helper.isNotUndefinedAndNotNull(pagination.last) + ? 
pagination.last + 1 + : undefined; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + let nodes = []; + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%>) ${filter} RETURN n ${sort} LIMIT ${limit}` + ); + nodes = result.records.map((res) => new <%- nameLc -%>(res.get(0).properties)); + nodes = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + nodes, + benignErrorReporter + ); + } catch (error) { + throw error; + } + + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppNodes = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. + let oppFilter = neo4jHelper.searchConditionsToNeo4j(search, definition); + + let oppOrder = oppForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = neo4jHelper.orderConditionsToNeo4j( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) + ? 
oppPagination.first + 1 + : helper.isNotUndefinedAndNotNull(oppPagination.last) + ? oppPagination.last + 1 + : undefined; + + try { + const oppResult = await session.run( + `MATCH (n:<%- nameCp -%>) ${oppFilter} RETURN n ${oppSort} LIMIT ${oppLimit}` + ); + oppNodes = oppResult.records.map( + (res) => new <%- nameLc -%>(res.get(0).properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + // build the graphql Connection Object + let edges = nodes.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppNodes, pagination); + return { edges, pageInfo, <%- namePl -%>: edges.map((edge) => edge.node) }; + } + + /** + * addOne - The model implementation method for adding a record in Neo4j, based on Neo4j Node driver. + * + * @param {object} input - The input object. + * @return {object} The created record + * @throw {Error} If the process fails, an error is thrown + */ + static async addOne(input) { + // validate input + await validatorUtil.validateData("validateForCreate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + delete input.skipAssociationsExistenceChecks; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run(`CREATE (a:<%- nameCp -%> $props) RETURN a`, { + props: input, + }); + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new <%- nameLc -%>(node.properties); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * deleteOne - The model implementation for deleting a single record, given by its ID, in Neo4j. 
+ * + * @param {string} id - The ID of the record to be deleted + * @returns {string} A success message is returned + * @throw {Error} If the record could not be deleted - this means a record with the ID is still present + */ + static async deleteOne(id) { + //validate id + await validatorUtil.validateData("validateForDelete", this, id); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) DELETE n`, + { id: id } + ); + if (result.records.length !== 0) { + throw new Error(`Record with ID = ${id} has not been deleted!`); + } + return "Item successfully deleted"; + } catch (error) { + console.log(`Record with ID = ${id} could not be deleted`); + throw error; + } finally { + await session.close(); + } + } + + /** + * updateOne - The model implementation for updating a single record in Neo4j. + * + * @param {object} input - The input object. 
+ * @returns {object} The updated record + * @throw {Error} If this method fails, an error is thrown + */ + static async updateOne(input) { + //validate input + await validatorUtil.validateData("validateForUpdate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + const id = input[this.idAttribute()]; + try { + delete input.skipAssociationsExistenceChecks; + delete input[this.idAttribute()]; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) SET n+=$props RETURN n`, + { id: id, props: input } + ); + if (result.records.length !== 1) { + throw new Error(`Record with ID = ${id} has not been updated!`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new <%- nameLc -%>(node.properties); + } catch (error) { + console.log(`Record with ID = ${id} could not be updated`); + throw error; + } finally { + await session.close(); + } + } + + /** + * bulkAddCsv - Add records from csv file + * + * @param {object} context - contextual information, e.g. csv file, record delimiter and column names. + */ + static async bulkAddCsv(context) { + let field_delim = config.fieldDelimiter ?? ","; + let array_delim = config.arrayDelimiter ?? 
";"; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + let query = `LOAD CSV WITH HEADERS FROM 'file:///<%- nameLc -%>.csv' AS line FIELDTERMINATOR '${field_delim}' CREATE (:Movie {`; + for (let attr of Object.keys(definition.attributes)) { + let type = definition.attributes[attr].replace(/\s+/g, ""); + if (type[0] === "[") { + type = type.slice(1, type.length - 1); + if (type === "Int") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toInteger(i)], `; + } else if (type === "Boolean") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toBoolean(i)], `; + } else if (type === "Float") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toFloat(i)], `; + } else { + query += `${attr}: split(line.${attr}, "${array_delim}"), `; + } + } else { + if (type === "Int") { + query += `${attr}: toInteger(line.${attr}), `; + } else if (type === "Boolean") { + query += `${attr}: toBoolean(line.${attr}), `; + } else if (type === "Float") { + query += `${attr}: toFloat(line.${attr}), `; + } else { + query += `${attr}: line.${attr}, `; + } + } + } + query = query.slice(0, query.length - 2) + "})"; + const result = await session.run(query); + return `Successfully upload file`; + } catch (e) { + throw new Error(e); + } finally { + await session.close(); + } + } + + /** + * csvTableTemplate - Allows the user to download a template in CSV format with the + * properties and types of this model. + * + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * GraphQL output {error: ..., data: ...}. If the function reportError of the benignErrorReporter + * is invoked, the server will include any so reported errors in the final response, i.e. the + * GraphQL response will have a non empty errors property. 
+ */ + static async csvTableTemplate(benignErrorReporter) { + return helper.csvTableTemplate(definition); + } + + <%- include('./includes/create-generic-fieldResolvers'); %> + <%- include('./includes/create-models-fieldMutations-neo4j', {op: "add"}); %> + <%- include('./includes/create-models-fieldMutations-neo4j', {op: "remove"}); %> + <%- include('./includes/create-models-fieldMutations-generic-associations', {op: "add"}); %> + <%- include('./includes/create-models-fieldMutations-generic-associations', {op: "remove"}); %> + <%- include('./includes/bulkAssociations-models', {op: "add"}); %> + <%- include('./includes/bulkAssociations-models', {op: "remove"}); %> + + /** + * idAttribute - Check whether an attribute "internalId" is given in the JSON model. If not the standard "id" is used instead. + * + * @return {type} Name of the attribute that functions as an internalId + */ + static idAttribute() { + return <%- nameLc -%>.definition.id.name; + } + + /** + * idAttributeType - Return the Type of the internalId. + * + * @return {type} Type given in the JSON model + */ + static idAttributeType() { + return <%- nameLc -%>.definition.id.type; + } + + /** + * getIdValue - Get the value of the idAttribute ("id", or "internalId") for an instance of movie. 
+ * + * @return {type} id value + */ + getIdValue() { + return this[<%- nameLc -%>.idAttribute()]; + } + + static get definition() { + return definition; + } + + static base64Decode(cursor) { + return Buffer.from(cursor, "base64").toString("utf-8"); + } + + base64Enconde() { + return Buffer.from(JSON.stringify(this.stripAssociations())).toString( + "base64" + ); + } + + stripAssociations(){ + let attributes = Object.keys(<%- nameLc -%>.definition.attributes); + <%if( defaultId ){-%>attributes.push('<%- idAttribute -%>'); <%}-%> + let data_values = _.pick(this, attributes); + return data_values; + } + + static externalIdsArray() { + let externalIds = []; + if (definition.externalIds) { + externalIds = definition.externalIds; + } + + return externalIds; + } + + static externalIdsObject(){ + return { + <%for(let i=0; i < externalIds.length; i++){-%> <%=externalIds[i]-%>: '<%=attributes[ externalIds[i] ]-%>' <%if(i !== (externalIds.length -1) ){ -%>,<%}-%><%}-%> + }; + } +}; diff --git a/views/create-neo4j-adapter.ejs b/views/create-neo4j-adapter.ejs new file mode 100644 index 00000000..3f8c427f --- /dev/null +++ b/views/create-neo4j-adapter.ejs @@ -0,0 +1,482 @@ +const _ = require("lodash"); +const validatorUtil = require("../../utils/validatorUtil"); +const path = require("path"); +const helper = require("../../utils/helper"); +const neo4jHelper = require("../../utils/neo4j_helper"); +const errorHelper = require("../../utils/errors"); +const neo4j = require("neo4j-driver"); +const config = require("../../config/data_models_storage_config.json")["<%- database -%>"]; +const models = require(path.join(__dirname, "..", "index.js")); + +const remoteZendroURL = "<%- url -%>"; +const iriRegex = new RegExp('<%- regex -%>'); + +// An exact copy of the the model definition that comes from the .json file +const definition = <%- definition -%>; + +/** + * module - Creates a class for Neo4j data model + * + */ + +module.exports = class <%- adapterName -%> { + + 
constructor(input){ + for (let key of Object.keys(input)) { + this[key] = input[key]; + } + } + + /** + * Get the storage handler, which is a static property of the data model class. + * @returns connected Neo4j client + */ + get storageHandler() { + return <%- adapterName-%>.storageHandler; + } + + static get adapterName(){ + return '<%- adapterName -%>'; + } + + static get adapterType(){ + return '<%- storageType -%>'; + } + + static recognizeId(iri){ + return iriRegex.test(iri); + } + + /** + * readById - The model implementation for reading a single record given by its ID + * + * This method is the implementation for reading a single record for the neo4j storage type, based on neo4j node driver. + * @param {string} id - The ID of the requested record + * @return {object} The requested record as an object with the type <%- nameLc -%>, or an error object if the validation after reading fails + * @throws {Error} If the requested record does not exist + */ + static async readById(id) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) RETURN n`, + { id: id } + ); + if (result.records.length === 0) { + throw new Error(`Record with ID = "${id}" does not exist`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return validatorUtil.validateData( + "validateAfterRead", + this, + new <%- adapterName-%>(node.properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * countRecords - The model implementation for counting the number of records, possibly restricted by a search term + * + * This method is the implementation for counting the number of records that fulfill a given condition, or for all records in the table, + * for the Neo4j storage type, based on Neo4j node driver. 
+ * @param {object} search - The search term that restricts the set of records to be counted - if undefined, all records in the table + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {number} The number of records that fulfill the condition, or of all records in the table + */ + static async countRecords(search) { + const whereOptions = neo4jHelper.searchConditionsToNeo4j( + search, + definition + ); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%>) ${whereOptions} RETURN COUNT(n)` + ); + const singleRecord = result.records[0]; + const num = singleRecord.get(0); + return num; + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * readAllCursor - The model implementation for searching for records in Neo4j. This method uses cursor based pagination. + * + * @param {object} search - The search condition for which records shall be fetched + * @param {array} order - Type of sorting (ASC, DESC) for each field + * @param {object} pagination - The parameters for pagination, which can be used to get a subset of the requested record set. + * @param {BenignErrorReporter} benignErrorReporter can be used to generate the standard + * @return {object} The set of records, possibly constrained by pagination, with full cursor information for all records + */ + static async readAllCursor(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = + errorHelper.getDefaultBenignErrorReporterIfUndef(benignErrorReporter); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = neo4jHelper.searchConditionsToNeo4j(search, definition); + let newOrder = isForwardPagination + ? 
order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = neo4jHelper.orderConditionsToNeo4j( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) + ? pagination.first + 1 + : helper.isNotUndefinedAndNotNull(pagination.last) + ? pagination.last + 1 + : undefined; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + let nodes = []; + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%>) ${filter} RETURN n ${sort} LIMIT ${limit}` + ); + nodes = result.records.map((res) => new <%- adapterName-%>(res.get(0).properties)); + nodes = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + nodes, + benignErrorReporter + ); + } catch (error) { + throw error; + } + + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppNodes = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. 
+ let oppFilter = neo4jHelper.searchConditionsToNeo4j(search, definition); + + let oppOrder = oppForwardPagination + ? order + : helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = neo4jHelper.orderConditionsToNeo4j( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) + ? oppPagination.first + 1 + : helper.isNotUndefinedAndNotNull(oppPagination.last) + ? oppPagination.last + 1 + : undefined; + + try { + const oppResult = await session.run( + `MATCH (n:<%- nameCp -%>) ${oppFilter} RETURN n ${oppSort} LIMIT ${oppLimit}` + ); + oppNodes = oppResult.records.map( + (res) => new <%- adapterName-%>(res.get(0).properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + // build the graphql Connection Object + let edges = nodes.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppNodes, pagination); + return { edges, pageInfo, <%- namePl -%>: edges.map((edge) => edge.node) }; + } + + /** + * addOne - The model implementation method for adding a record in Neo4j, based on Neo4j Node driver. + * + * @param {object} input - The input object. 
+ * @return {object} The created record + * @throw {Error} If the process fails, an error is thrown + */ + static async addOne(input) { + // validate input + await validatorUtil.validateData("validateForCreate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + delete input.skipAssociationsExistenceChecks; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run(`CREATE (a:<%- nameCp -%> $props) RETURN a`, { + props: input, + }); + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new <%- adapterName-%>(node.properties); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + /** + * deleteOne - The model implementation for deleting a single record, given by its ID, in Neo4j. + * + * @param {string} id - The ID of the record to be deleted + * @returns {string} A success message is returned + * @throw {Error} If the record could not be deleted - this means a record with the ID is still present + */ + static async deleteOne(id) { + //validate id + await validatorUtil.validateData("validateForDelete", this, id); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) DELETE n`, + { id: id } + ); + if (result.records.length !== 0) { + throw new Error(`Record with ID = ${id} has not been deleted!`); + } + return "Item successfully deleted"; + } catch (error) { + console.log(`Record with ID = ${id} could not be deleted`); + throw error; + } finally { + await session.close(); + } + } + + /** + * updateOne - The model implementation for updating a single record in Neo4j. + * + * @param {object} input - The input object. 
+ * @returns {object} The updated record + * @throw {Error} If this method fails, an error is thrown + */ + static async updateOne(input) { + //validate input + await validatorUtil.validateData("validateForUpdate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + const id = input[this.idAttribute()]; + try { + delete input.skipAssociationsExistenceChecks; + delete input[this.idAttribute()]; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run( + `MATCH (n:<%- nameCp -%> {${this.idAttribute()}:$id}) SET n+=$props RETURN n`, + { id: id, props: input } + ); + if (result.records.length !== 1) { + throw new Error(`Record with ID = ${id} has not been updated!`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new <%- adapterName-%>(node.properties); + } catch (error) { + console.log(`Record with ID = ${id} could not be updated`); + throw error; + } finally { + await session.close(); + } + } + + + <%# + /** + * Add and remove methods for to-one association where the foreign key + * is stored in this model and therefore this adapter is the responsible to update the foreign key. + */ + -%> + <%- include('./includes/create-adapter-fields-mutations', {op: "add"}); %> + <%- include('./includes/create-adapter-fields-mutations', {op: "remove"}); %> + + /** + * bulkAddCsv - Add records from csv file + * + * @param {object} context - contextual information, e.g. csv file, record delimiter and column names. + */ + static async bulkAddCsv(context) { + let field_delim = config.fieldDelimiter ?? ","; + let array_delim = config.arrayDelimiter ?? 
";"; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + let query = `LOAD CSV WITH HEADERS FROM 'file:///<%- adapterName-%>.csv' AS line FIELDTERMINATOR '${field_delim}' CREATE (:Movie {`; + for (let attr of Object.keys(definition.attributes)) { + let type = definition.attributes[attr].replace(/\s+/g, ""); + if (type[0] === "[") { + type = type.slice(1, type.length - 1); + if (type === "Int") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toInteger(i)], `; + } else if (type === "Boolean") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toBoolean(i)], `; + } else if (type === "Float") { + query += `${attr}: [i in split(line.${attr}, "${array_delim}") | toFloat(i)], `; + } else { + query += `${attr}: split(line.${attr}, "${array_delim}"), `; + } + } else { + if (type === "Int") { + query += `${attr}: toInteger(line.${attr}), `; + } else if (type === "Boolean") { + query += `${attr}: toBoolean(line.${attr}), `; + } else if (type === "Float") { + query += `${attr}: toFloat(line.${attr}), `; + } else { + query += `${attr}: line.${attr}, `; + } + } + } + query = query.slice(0, query.length - 2) + "})"; + const result = await session.run(query); + return `Successfully upload file`; + } catch (e) { + throw new Error(e); + } finally { + await session.close(); + } + } + + static csvTableTemplate(){ + return helper.csvTableTemplate(definition); + } + + <%- include('./includes/bulkAssociations-models', {op: "add"}); %> + <%- include('./includes/bulkAssociations-models', {op: "remove"}); %> + + /** + * idAttribute - Check whether an attribute "internalId" is given in the JSON model. If not the standard "id" is used instead. 
+ * + * @return {type} Name of the attribute that functions as an internalId + */ + + static idAttribute() { + return <%- adapterName -%>.definition.id.name; + } + + /** + * idAttributeType - Return the Type of the internalId. + * + * @return {type} Type given in the JSON model + */ + + static idAttributeType() { + return <%- adapterName -%>.definition.id.type; + } + + /** + * getIdValue - Get the value of the idAttribute ("id", or "internalId") for an instance of <%- adapterName -%>. + * + * @return {type} id value + */ + + getIdValue() { + return this[<%- adapterName -%>.idAttribute()] + } + + static get definition(){ + return definition; + } + + static base64Decode(cursor){ + return Buffer.from(cursor, 'base64').toString('utf-8'); + } + + base64Enconde(){ + return Buffer.from(JSON.stringify(this.stripAssociations())).toString('base64'); + } + + stripAssociations(){ + let attributes = Object.keys(<%- adapterName -%>.definition.attributes); + <%if( defaultId ){-%>attributes.push('<%- idAttribute -%>'); <%}-%> + let data_values = _.pick(this, attributes); + return data_values; + } + + static externalIdsArray(){ + let externalIds = []; + if(definition.externalIds){ + externalIds = definition.externalIds; + } + + return externalIds; + } + + static externalIdsObject(){ + return { + <%for(let i=0; i < externalIds.length; i++){-%> <%=externalIds[i]-%>: '<%=attributes[ externalIds[i] ]-%>' <%if(i !== (externalIds.length -1) ){ -%>,<%}-%><%}-%> + }; + } + +} diff --git a/views/includes/bulkAssociations-models.ejs b/views/includes/bulkAssociations-models.ejs index 33444c0c..d820ae16 100644 --- a/views/includes/bulkAssociations-models.ejs +++ b/views/includes/bulkAssociations-models.ejs @@ -94,6 +94,43 @@ return "Records successfully updated!" 
} <%}_%> + <% if(storageType === 'neo4j' || storageType === 'neo4j-adapter'){_%> + static async bulk<% if(op === 'remove'){%>Dis<%}%>Associate<%-nameCp-%>With<%-associationsArguments["to_one"][i].targetKey_cp-%>(bulkAssociationInput, benignErrorReporter){ + let mappedForeignKeys = helper.mapForeignKeysToPrimaryKeyArray(bulkAssociationInput, "<%-idAttribute%>", "<%-associationsArguments["to_one"][i].targetKey%>"); + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = `MATCH (n:<%- nameCp -%>) WHERE n.<%-idAttribute%> IN $id + SET n.<%-associationsArguments["to_one"][i].targetKey-%> = $target` + + <% if (op == 'remove') { %>let delete_relationships = `MATCH (a:<%- nameCp -%>)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]-> (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> IN $id AND b.<%-associationsArguments["to_one"][i].targetKey-%> = $target + DELETE r`<% } + else { %>let create_relationships = `MATCH (a:<%- nameCp -%>), (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> IN $id AND b.<%-associationsArguments["to_one"][i].targetKey-%> = $target + CREATE (a)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]->(b)`<%}-%> + + try { + for (let { <%-associationsArguments["to_one"][i].targetKey-%>, <%-idAttribute%> } of mappedForeignKeys) { + await session.run(foreignKey, {id: <%-idAttribute%>, target: <% if (op == 'remove') { _%>null<% } else { %><%-associationsArguments["to_one"][i].targetKey-%><%}-%>}) + await session.run(<% if (op == 'remove') { %>delete_relationships<% } else { %>create_relationships<%}-%>, + { + id: <%-idAttribute%>, + target: <%-associationsArguments["to_one"][i].targetKey-%>, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } + + return "Records successfully updated!" 
+ } + <%}_%> <% if(storageType === 'cassandra' || storageType === 'cassandra-adapter'){_%> static async bulk<% if(op === 'remove'){%>Dis<%}%>Associate<%-nameCp-%>With<%-associationsArguments["to_one"][i].targetKey_cp-%>(bulkAssociationInput, benignErrorReporter){ let mappedForeignKeys = helper.mapForeignKeysToPrimaryKeyArray(bulkAssociationInput, "<%-idAttribute%>", "<%-associationsArguments["to_one"][i].targetKey%>"); diff --git a/views/includes/create-adapter-fields-mutations.ejs b/views/includes/create-adapter-fields-mutations.ejs index c1816b2b..42d7d290 100644 --- a/views/includes/create-adapter-fields-mutations.ejs +++ b/views/includes/create-adapter-fields-mutations.ejs @@ -93,6 +93,47 @@ <%}-%> <%# /*** End of the the mongodb-adapter case */ -%> + <%# + /** + * check the type of adapter and handle neo4j-adapter + */ + -%> + <%if(storageType === 'neo4j-adapter'){-%> + static async <%- op -%>_<%-associationsArguments["to_one"][i].targetKey-%>(<%- idAttribute-%>, <%-associationsArguments["to_one"][i].targetKey-%>) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = `MATCH (n:<%- nameCp -%>) WHERE n.<%-idAttribute%> = $id + SET n.<%-associationsArguments["to_one"][i].targetKey-%> = $target RETURN head(collect(n))` + + <% if (op == 'remove') { %>let delete_relationships = `MATCH (a:<%- nameCp -%>)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]-> (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_one"][i].target-%>.idAttribute()} = $target + DELETE r`<% } + else { %>let create_relationships = `MATCH (a:<%- nameCp -%>), (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_one"][i].target-%>.idAttribute()} = $target + CREATE 
(a)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]->(b)`<%}-%> + + try{ + const result = await session.run(foreignKey, {id: <%-idAttribute%>, target: <% if (op == 'remove') { _%>null<% } else { %><%-associationsArguments["to_one"][i].targetKey-%><%}-%>}); + await session.run(<% if (op == 'remove') { %>delete_relationships<% } else { %>create_relationships<%}-%>, + { + id: <%-idAttribute%>, + target: <%-associationsArguments["to_one"][i].targetKey-%>, + }) + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new <%- adapterName -%>(node) + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + <%}-%> + <%# /*** End of the the cassandra-adapter case */ -%> + <%# /** * check the type of adapter and handle cassandra-adapter @@ -168,7 +209,53 @@ } <%}-%> <%# /*** End of the the mongodb-adapter case */ -%> + <%if(storageType === 'neo4j-adapter'){-%> + static async <%- op -%>_<%-associationsArguments["to_many"][i].sourceKey-%>(<%- idAttribute-%>, <%-associationsArguments["to_many"][i].sourceKey-%>, benignErrorReporter, handle_inverse = true) { + //handle inverse association + if(handle_inverse){ + let promises = []; + <%-associationsArguments["to_many"][i].sourceKey-%>.forEach( idx =>{ + promises.push(models.<%-associationsArguments["to_many"][i].target_lc-%>.<%- op %>_<%-associationsArguments["to_many"][i].targetKey%>(idx, [`${<%- idAttribute-%>}`], benignErrorReporter, false) ); + }); + await Promise.all(promises); + } + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = `MATCH (n:<%- nameCp -%>) WHERE n.<%-idAttribute%> = $id + SET n.<%-associationsArguments["to_many"][i].sourceKey-%> = $updated_ids` + + <% if (op == 'remove') { %>let delete_relationships = `MATCH (a:<%- nameCp -%>)-[r:${"<%-associationsArguments["to_many"][i].name-%>".toUpperCase() + 
"_EDGE"}]-> (b:<%-associationsArguments["to_many"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_many"][i].target-%>.idAttribute()} IN $source + DELETE r`<% } + else { %>let create_relationships = `MATCH (a:<%- nameCp -%>), (b:<%-associationsArguments["to_many"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_many"][i].target-%>.idAttribute()} IN $source + CREATE (a)-[r:${"<%-associationsArguments["to_many"][i].name-%>".toUpperCase() + "_EDGE"}]->(b)`<%}-%> + + try{ + let record = await this.readById(<%- idAttribute-%>); + + if (record!==null){ + let updated_ids = helper.<% if(op === 'remove'){%>differenceIds<%}else{%>unionIds<%}-%>(record.<%-associationsArguments["to_many"][i].sourceKey-%>, <%-associationsArguments["to_many"][i].sourceKey-%>); + await session.run(foreignKey, {id: <%-idAttribute%>, updated_ids: updated_ids}); + await session.run(<% if (op == 'remove') { %>delete_relationships<% } else { %>create_relationships<%}-%>, + { + id: <%-idAttribute%>, + source: <%-associationsArguments["to_many"][i].sourceKey-%>, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + <%}-%> + <%# /*** End of the the neo4j-adapter case */ -%> <% if(storageType === 'zendro-webservice-adapter' || storageType === 'ddm-adapter'){-%> static async <%- op -%>_<%-associationsArguments["to_many"][i].sourceKey-%>(<%- idAttribute-%>, <%-associationsArguments["to_many"][i].sourceKey-%>, benignErrorReporter) { let query = ` diff --git a/views/includes/create-models-fieldMutations-neo4j.ejs b/views/includes/create-models-fieldMutations-neo4j.ejs new file mode 100644 index 00000000..1379630c --- /dev/null +++ b/views/includes/create-models-fieldMutations-neo4j.ejs @@ -0,0 +1,97 @@ +<%for(let i=0; i < associationsArguments["to_one"].length; i++){-%> + <% if (associationsArguments["to_one"][i].holdsForeignKey) { -%> + /** + * <%- op 
%>_<%-associationsArguments["to_one"][i].targetKey-%> - field Mutation (model-layer) for to_one associationsArguments to <%- op %> + * + * @param {Id} <%- idAttribute-%> IdAttribute of the root model to be updated + * @param {Id} <%-associationsArguments["to_one"][i].targetKey-%> Foreign Key (stored in "Me") of the Association to be updated. + */ + static async <%- op -%>_<%-associationsArguments["to_one"][i].targetKey-%>(<%- idAttribute-%>, <%-associationsArguments["to_one"][i].targetKey-%>) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = `MATCH (n:<%- nameCp -%> ) WHERE n.<%-idAttribute%> = $id + SET n.<%-associationsArguments["to_one"][i].targetKey-%> = $target RETURN head(collect(n))` + + <% if (op == 'remove') { %>let delete_relationships = `MATCH (a:<%- nameCp -%>)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]-> (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_one"][i].target-%>.idAttribute()} = $target + DELETE r`<% } + else { %>let create_relationships = `MATCH (a:<%- nameCp -%>), (b:<%-associationsArguments["to_one"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_one"][i].target-%>.idAttribute()} = $target + CREATE (a)-[r:${"<%-associationsArguments["to_one"][i].name-%>".toUpperCase() + "_EDGE"}]->(b)`<%}-%> + + try{ + const result = await session.run(foreignKey, {id: <%-idAttribute%>, target: <% if (op == 'remove') { _%>null<% } else { %><%-associationsArguments["to_one"][i].targetKey-%><%}-%>}); + await session.run(<% if (op == 'remove') { %>delete_relationships<% } else { %>create_relationships<%}-%>, + { + id: <%-idAttribute%>, + target: <%-associationsArguments["to_one"][i].targetKey-%>, + }) + const singleRecord = result.records[0]; + const node = 
singleRecord.get(0); + return new <%- name -%>(node) + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + <%} -%> + <%}-%> + + <%for(let i=0; i < associationsArguments["to_many"].length; i++){-%> + <% if (associationsArguments["to_many"][i].assocThroughArray) { -%> + /** + * <%- op %>_<%-associationsArguments["to_many"][i].sourceKey-%> - field Mutation (model-layer) for to_many associationsArguments to <%- op %> + * + * @param {Id} <%- idAttribute-%> IdAttribute of the root model to be updated + * @param {Array} <%-associationsArguments["to_many"][i].sourceKey-%> Array foreign Key (stored in "Me") of the Association to be updated. + */ + static async <%- op -%>_<%-associationsArguments["to_many"][i].sourceKey-%>(<%- idAttribute-%>, <%-associationsArguments["to_many"][i].sourceKey-%>, benignErrorReporter, handle_inverse = true) { + //handle inverse association + if(handle_inverse){ + let promises = []; + <%-associationsArguments["to_many"][i].sourceKey-%>.forEach( idx =>{ + promises.push(models.<%-associationsArguments["to_many"][i].target_lc-%>.<%- op %>_<%-associationsArguments["to_many"][i].targetKey%>(idx, [`${<%- idAttribute-%>}`], benignErrorReporter, false) ); + }); + await Promise.all(promises); + } + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = `MATCH (n:<%- nameCp -%> ) WHERE n.<%-idAttribute%> = $id + SET n.<%-associationsArguments["to_many"][i].sourceKey-%> = $updated_ids` + + <% if (op == 'remove') { %>let delete_relationships = `MATCH (a:<%- nameCp -%>)-[r:${"<%-associationsArguments["to_many"][i].name-%>".toUpperCase() + "_EDGE"}]-> (b:<%-associationsArguments["to_many"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_many"][i].target-%>.idAttribute()} IN $source + DELETE r`<% } + else { %>let create_relationships = `MATCH (a:<%- nameCp -%>), 
(b:<%-associationsArguments["to_many"][i].target_cp-%>) + WHERE a.<%-idAttribute%> = $id AND b.${models.<%-associationsArguments["to_many"][i].target-%>.idAttribute()} IN $source + CREATE (a)-[r:${"<%-associationsArguments["to_many"][i].name-%>".toUpperCase() + "_EDGE"}]->(b)`<%}-%> + + try{ + let record = await this.readById(<%- idAttribute-%>); + + if (record!==null){ + let updated_ids = helper.<% if(op === 'remove'){%>differenceIds<%}else{%>unionIds<%}-%>(record.<%-associationsArguments["to_many"][i].sourceKey-%>, <%-associationsArguments["to_many"][i].sourceKey-%>); + await session.run(foreignKey, {id: <%-idAttribute%>, updated_ids: updated_ids}); + + await session.run(<% if (op == 'remove') { %>delete_relationships<% } else { %>create_relationships<%}-%>, + { + id: <%-idAttribute%>, + source: <%-associationsArguments["to_many"][i].sourceKey-%>, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + <%} -%> + <%}-%> \ No newline at end of file From f78ec567635725bb46a87064e3ae44a9ff9bb0fe Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Thu, 27 May 2021 18:28:38 +0200 Subject: [PATCH 6/8] test: neo4j unit test --- test/mocha_unit.test.js | 176 +++-- test/unit_test_misc/data_models_neo4j.js | 70 ++ .../test-describe/cassandra-storagetype.js | 1 + .../test-describe/distributed-models.js | 2 + .../test-describe/handle-error-ddm.js | 2 + .../test-describe/neo4j-unittest.js | 634 ++++++++++++++++++ 6 files changed, 845 insertions(+), 40 deletions(-) create mode 100644 test/unit_test_misc/data_models_neo4j.js create mode 100644 test/unit_test_misc/test-describe/neo4j-unittest.js diff --git a/test/mocha_unit.test.js b/test/mocha_unit.test.js index 1a97f822..46ea0322 100644 --- a/test/mocha_unit.test.js +++ b/test/mocha_unit.test.js @@ -12,6 +12,7 @@ const models_cassandra = require("./unit_test_misc/data_models_cassandra"); const models_mongodb = require("./unit_test_misc/data_models_mongodb"); const models_amazonS3 = 
require("./unit_test_misc/data_models_amazonS3"); const models_trino = require("./unit_test_misc/data_models_trino"); +const models_neo4j = require("./unit_test_misc/data_models_neo4j"); const requireFromString = require("require-from-string"); const helpers = require("./unit_test_misc/helpers/reporting_helpers"); const { test } = require("mocha"); @@ -66,10 +67,8 @@ describe("Lower-case models", function () { ); expect(g_model, "Incorrect model").to.have.string(test_model_attributes); - let test_model_associations = data_test.individual_model_associations.replace( - /\s/g, - "" - ); + let test_model_associations = + data_test.individual_model_associations.replace(/\s/g, ""); expect(g_model, "Incorrect model").to.have.string(test_model_associations); }); }); @@ -988,10 +987,11 @@ describe("Cursor based pagination", function () { let opts = funks.getOptions(models_zendro.person); let generated_model = await funks.generateJs("create-models-zendro", opts); let g_model = generated_model.replace(/\s/g, ""); - let test_model = data_test.many_to_many_association_connection_zendro_server.replace( - /\s/g, - "" - ); + let test_model = + data_test.many_to_many_association_connection_zendro_server.replace( + /\s/g, + "" + ); expect(g_model, "No method found").to.have.string(test_model); }); }); @@ -1743,10 +1743,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let generated_resolver = await funks.generateJs("create-resolvers", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test.add_assoc_to_one_fieldMutation_resolver.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.add_assoc_to_one_fieldMutation_resolver.replace(/\s/g, ""); expect(g_resolver).to.have.string(test_resolver); }); @@ -1754,10 +1752,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let 
generated_resolver = await funks.generateJs("create-resolvers", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test.remove_assoc_to_one_fieldMutation_resolver.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.remove_assoc_to_one_fieldMutation_resolver.replace(/\s/g, ""); expect(g_resolver).to.have.string(test_resolver); }); @@ -1765,10 +1761,11 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models.researcher); let generated_resolvers = await funks.generateJs("create-resolvers", opts); let g_resolvers = generated_resolvers.replace(/\s/g, ""); - let test_resolver = data_test.add_assoc_to_one_fieldMutation_resolver_fK_in_target.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.add_assoc_to_one_fieldMutation_resolver_fK_in_target.replace( + /\s/g, + "" + ); expect(g_resolvers).to.have.string(test_resolver); }); @@ -1776,10 +1773,11 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models.researcher); let generated_resolvers = await funks.generateJs("create-resolvers", opts); let g_resolvers = generated_resolvers.replace(/\s/g, ""); - let test_resolver = data_test.remove_assoc_to_one_fieldMutation_resolver_fK_in_target.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.remove_assoc_to_one_fieldMutation_resolver_fK_in_target.replace( + /\s/g, + "" + ); expect(g_resolvers).to.have.string(test_resolver); }); @@ -1787,10 +1785,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let generated_resolver = await funks.generateJs("create-resolvers", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test.add_assoc_to_many_fieldMutation_resolver.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.add_assoc_to_many_fieldMutation_resolver.replace(/\s/g, ""); 
expect(g_resolver).to.have.string(test_resolver); }); @@ -1798,10 +1794,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let generated_resolver = await funks.generateJs("create-resolvers", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test.remove_assoc_to_many_fieldMutation_resolver.replace( - /\s/g, - "" - ); + let test_resolver = + data_test.remove_assoc_to_many_fieldMutation_resolver.replace(/\s/g, ""); expect(g_resolver).to.have.string(test_resolver); }); @@ -1809,10 +1803,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let generated_resolver = await funks.generateJs("create-models", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test._addAssoc_to_one_fieldMutation_sql_model.replace( - /\s/g, - "" - ); + let test_resolver = + data_test._addAssoc_to_one_fieldMutation_sql_model.replace(/\s/g, ""); expect(g_resolver).to.have.string(test_resolver); }); @@ -1820,10 +1812,8 @@ describe("Refactor associations - add / update SQL models", function () { let opts = funks.getOptions(models_refactoring.accession); let generated_resolver = await funks.generateJs("create-models", opts); let g_resolver = generated_resolver.replace(/\s/g, ""); - let test_resolver = data_test._removeAssoc_to_one_fieldMutation_sql_model.replace( - /\s/g, - "" - ); + let test_resolver = + data_test._removeAssoc_to_one_fieldMutation_sql_model.replace(/\s/g, ""); expect(g_resolver).to.have.string(test_resolver); }); }); @@ -3057,3 +3047,109 @@ describe("Trino/Presto Unit Test", () => { testCompare(generated_model, data_test.trino_adapter_readById); }); }); + +describe("Neo4j Unit Test", function () { + let data_test = require("./unit_test_misc/test-describe/neo4j-unittest"); + + it("neo4j model - movie constructor", async function () { + 
let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_constructor); + }); + + it("neo4j model - movie readById", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_readById); + }); + + it("neo4j model - movie countRecords", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_countRecords); + }); + + it("neo4j model - movie readAll", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_readAll); + }); + + it("neo4j model - movie readAllCursor", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_readAllCursor); + }); + + it("neo4j model - movie addOne", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_addOne); + }); + + it("neo4j model - movie deleteOne", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_deleteOne); + }); + + it("neo4j model - movie updateOne", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_updateOne); + }); + + it("neo4j model - movie 
bulkAddCsv", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_bulkAddCsv); + }); + + it("neo4j model - fieldMutations toOne - add director to movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_fieldMutation_add_director); + }); + + it("neo4j model - fieldMutations toOne - remove director from movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_fieldMutation_remove_director); + }); + + it("neo4j model - fieldMutations toMany - add actor to movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_fieldMutation_add_actor); + }); + + it("neo4j model - fieldMutations toMany - remove actor from movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare(generated_model, data_test.movie_fieldMutation_remove_actor); + }); + + it("neo4j model - fieldMutations bulkAssociation - add director to movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare( + generated_model, + data_test.movie_fieldMutation_bulkAssociate_add + ); + }); + + it("neo4j model - fieldMutations bulkAssociation - remove director from movie", async function () { + let opts = funks.getOptions(models_neo4j.movie); + let generated_model = await funks.generateJs("create-models-neo4j", opts); + testCompare( + 
generated_model, + data_test.movie_fieldMutation_bulkAssociate_remove + ); + }); + + it("neo4j adapter - dist_movie_instance1 readById ", async function () { + let opts = funks.getOptions(models_neo4j.dist_movie_instance1); + let generated_model = await funks.generateJs("create-neo4j-adapter", opts); + testCompare(generated_model, data_test.neo4j_adapter_readById); + }); +}); diff --git a/test/unit_test_misc/data_models_neo4j.js b/test/unit_test_misc/data_models_neo4j.js new file mode 100644 index 00000000..49cc6546 --- /dev/null +++ b/test/unit_test_misc/data_models_neo4j.js @@ -0,0 +1,70 @@ +module.exports.movie = { + model: "movie", + storageType: "neo4j", + attributes: { + movie_id: "String", + release: "DateTime", + runtime: "Int", + box_office: "Float", + is_adult: "Boolean", + genres: "[String]", + votes: "[Int]", + director_id: "String", + actor_ids: "[String]", + }, + associations: { + director: { + type: "to_one", + target: "director", + targetKey: "director_id", + keyIn: "movie", + targetStorageType: "neo4j", + label: "director_name", + }, + actor: { + type: "to_many", + reverseAssociationType: "to_many", + target: "actor", + targetKey: "movie_ids", + sourceKey: "actor_ids", + keyIn: "movie", + targetStorageType: "neo4j", + }, + unique_review: { + type: "to_one", + target: "review", + targetKey: "movie_id", + keyIn: "tracker", + targetStorageType: "neo4j", + }, + }, + internalId: "movie_id", +}; + +module.exports.dist_movie_instance1 = { + model: "dist_movie", + storageType: "neo4j-adapter", + adapterName: "dist_movie_instance1", + regex: "instance1", + attributes: { + movie_id: "String", + release: "DateTime", + runtime: "Int", + box_office: "Float", + is_adult: "Boolean", + genres: "[String]", + votes: "[Int]", + director_id: "String", + }, + associations: { + dist_director: { + type: "to_one", + target: "dist_director", + targetKey: "director_id", + keyIn: "dist_movie", + targetStorageType: "distributed-data-model", + }, + }, + + internalId: 
"movie_id", +}; diff --git a/test/unit_test_misc/test-describe/cassandra-storagetype.js b/test/unit_test_misc/test-describe/cassandra-storagetype.js index 0189285b..e79108bb 100644 --- a/test/unit_test_misc/test-describe/cassandra-storagetype.js +++ b/test/unit_test_misc/test-describe/cassandra-storagetype.js @@ -458,6 +458,7 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/distributed-models.js b/test/unit_test_misc/test-describe/distributed-models.js index 2670362c..fc1309dc 100644 --- a/test/unit_test_misc/test-describe/distributed-models.js +++ b/test/unit_test_misc/test-describe/distributed-models.js @@ -169,6 +169,7 @@ static countRecords(search, authorizedAdapters, benignErrorReporter, searchAutho case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.countRecords(search, benignErrorReporter); case 'cassandra-adapter': @@ -247,6 +248,7 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/handle-error-ddm.js b/test/unit_test_misc/test-describe/handle-error-ddm.js index ba0620bc..fa657eef 100644 --- a/test/unit_test_misc/test-describe/handle-error-ddm.js +++ b/test/unit_test_misc/test-describe/handle-error-ddm.js @@ -49,6 +49,7 @@ static countRecords(search, authorizedAdapters, benignErrorReporter, searchAutho case 'amazon-s3-adapter': case 'trino-adapter': 
case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.countRecords(search, benignErrorReporter); case 'cassandra-adapter': @@ -128,6 +129,7 @@ static readAllCursor(search, order, pagination, authorizedAdapters, benignErrorR case 'amazon-s3-adapter': case 'trino-adapter': case 'presto-adapter': + case 'neo4j-adapter': case 'zendro-webservice-adapter': return adapter.readAllCursor(search, order, pagination, benignErrorReporter); case 'cassandra-adapter': diff --git a/test/unit_test_misc/test-describe/neo4j-unittest.js b/test/unit_test_misc/test-describe/neo4j-unittest.js new file mode 100644 index 00000000..ed0655c6 --- /dev/null +++ b/test/unit_test_misc/test-describe/neo4j-unittest.js @@ -0,0 +1,634 @@ +module.exports.movie_constructor = ` +constructor(input) { + for (let key of Object.keys(input)) { + this[key] = input[key]; + } +} +`; + +module.exports.movie_readById = ` +static async readById(id) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + \`MATCH (n:Movie {\${this.idAttribute()}:\$id}) RETURN n\`, { + id: id + } + ); + if (result.records.length === 0) { + throw new Error(\`Record with ID = "\${id}" does not exist\`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return validatorUtil.validateData( + "validateAfterRead", + this, + new movie(node.properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_countRecords = ` +static async countRecords(search) { + const whereOptions = neo4jHelper.searchConditionsToNeo4j( + search, + definition + ); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + \`MATCH (n:Movie) 
\${whereOptions} RETURN COUNT(n)\` + ); + const singleRecord = result.records[0]; + const num = singleRecord.get(0); + return num; + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_readAll = ` +static async readAll(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = + errorHelper.getDefaultBenignErrorReporterIfUndef(benignErrorReporter); + // build the filter object for limit-offset-based pagination + const whereOptions = neo4jHelper.searchConditionsToNeo4j( + search, + definition + ); + const orderOptions = neo4jHelper.orderConditionsToNeo4j( + order, + this.idAttribute(), + true + ); + + const limit = pagination.limit; + const offset = pagination.offset ? pagination.offset : 0; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + \`MATCH (n:Movie) \${whereOptions} RETURN n \${orderOptions} SKIP \${offset} LIMIT \${limit} \` + ); + const nodes = result.records.map((res) => res.get(0).properties); + return validatorUtil.bulkValidateData( + "validateAfterRead", + this, + nodes, + benignErrorReporter + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_readAllCursor = ` +static async readAllCursor(search, order, pagination, benignErrorReporter) { + //use default BenignErrorReporter if no BenignErrorReporter defined + benignErrorReporter = + errorHelper.getDefaultBenignErrorReporterIfUndef(benignErrorReporter); + let isForwardPagination = helper.isForwardPagination(pagination); + // build the whereOptions. + let filter = neo4jHelper.searchConditionsToNeo4j(search, definition); + let newOrder = isForwardPagination ? 
+ order : + helper.reverseOrderConditions(order); + // depending on the direction build the order object + let sort = neo4jHelper.orderConditionsToNeo4j( + newOrder, + this.idAttribute(), + isForwardPagination + ); + let orderFields = newOrder ? newOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + filter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + pagination, + sort, + filter, + orderFields, + this.idAttribute(), + definition.attributes + ); + + // add +1 to the LIMIT to get information about following pages. + let limit = helper.isNotUndefinedAndNotNull(pagination.first) ? + pagination.first + 1 : + helper.isNotUndefinedAndNotNull(pagination.last) ? + pagination.last + 1 : + undefined; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + let nodes = []; + try { + const result = await session.run( + \`MATCH (n:Movie) \${filter} RETURN n \${sort} LIMIT \${limit}\` + ); + nodes = result.records.map((res) => new movie(res.get(0).properties)); + nodes = await validatorUtil.bulkValidateData( + "validateAfterRead", + this, + nodes, + benignErrorReporter + ); + } catch (error) { + throw error; + } + + // get the first record (if exists) in the opposite direction to determine pageInfo. + // if no cursor was given there is no need for an extra query as the results will start at the first (or last) page. + let oppNodes = []; + if (pagination && (pagination.after || pagination.before)) { + // reverse the pagination Arguement. after -> before; set first/last to 0, so LIMIT 1 is executed in the reverse Search + let oppPagination = helper.reversePaginationArgument({ + ...pagination, + includeCursor: false, + }); + let oppForwardPagination = helper.isForwardPagination(oppPagination); + // build the filter object. + let oppFilter = neo4jHelper.searchConditionsToNeo4j(search, definition); + + let oppOrder = oppForwardPagination ? 
+ order : + helper.reverseOrderConditions(order); + // depending on the direction build the order object + let oppSort = neo4jHelper.orderConditionsToNeo4j( + oppOrder, + this.idAttribute(), + oppForwardPagination + ); + let oppOrderFields = oppOrder ? oppOrder.map((x) => x.field) : []; + // extend the filter for the given order and cursor + oppFilter = neo4jHelper.cursorPaginationArgumentsToNeo4j( + oppPagination, + oppSort, + oppFilter, + oppOrderFields, + this.idAttribute(), + definition.attributes + ); + // add +1 to the LIMIT to get information about following pages. + let oppLimit = helper.isNotUndefinedAndNotNull(oppPagination.first) ? + oppPagination.first + 1 : + helper.isNotUndefinedAndNotNull(oppPagination.last) ? + oppPagination.last + 1 : + undefined; + + try { + const oppResult = await session.run( + \`MATCH (n:Movie) \${oppFilter} RETURN n \${oppSort} LIMIT \${oppLimit}\` + ); + oppNodes = oppResult.records.map( + (res) => new movie(res.get(0).properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } + } + + // build the graphql Connection Object + let edges = nodes.map((res) => { + return { + node: res, + cursor: res.base64Enconde(), + }; + }); + const pageInfo = helper.buildPageInfo(edges, oppNodes, pagination); + return { + edges, + pageInfo, + movies: edges.map((edge) => edge.node) + }; +} +`; + +module.exports.movie_addOne = ` +static async addOne(input) { + // validate input + await validatorUtil.validateData("validateForCreate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + delete input.skipAssociationsExistenceChecks; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run(\`CREATE (a:Movie $props) RETURN a\`, { + props: input, + }); + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new 
movie(node.properties); + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_deleteOne = ` +static async deleteOne(id) { + //validate id + await validatorUtil.validateData("validateForDelete", this, id); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + const result = await session.run( + \`MATCH (n:Movie {\${this.idAttribute()}:$id}) DELETE n\`, { + id: id + } + ); + if (result.records.length !== 0) { + throw new Error(\`Record with ID = \${id} has not been deleted!\`); + } + return "Item successfully deleted"; + } catch (error) { + console.log(\`Record with ID = \${id} could not be deleted\`); + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_updateOne = ` +static async updateOne(input) { + //validate input + await validatorUtil.validateData("validateForUpdate", this, input); + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + const id = input[this.idAttribute()]; + try { + delete input.skipAssociationsExistenceChecks; + delete input[this.idAttribute()]; + input = neo4jHelper.processDateTime(input, definition.attributes); + + const result = await session.run( + \`MATCH (n:Movie {\${this.idAttribute()}:$id}) SET n+=$props RETURN n\`, { + id: id, + props: input + } + ); + if (result.records.length !== 1) { + throw new Error(\`Record with ID = \${id} has not been updated!\`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new movie(node.properties); + } catch (error) { + console.log(\`Record with ID = \${id} could not be updated\`); + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_bulkAddCsv = ` +static async bulkAddCsv(context) { + let field_delim = config.fieldDelimiter 
?? ","; + let array_delim = config.arrayDelimiter ?? ";"; + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + try { + let query = \`LOAD CSV WITH HEADERS FROM 'file:///movie.csv' AS line FIELDTERMINATOR '\${field_delim}' CREATE (:Movie {\`; + for (let attr of Object.keys(definition.attributes)) { + let type = definition.attributes[attr].replace(/\\s+/g, ""); + if (type[0] === "[") { + type = type.slice(1, type.length - 1); + if (type === "Int") { + query += \`\${attr}: [i in split(line.\${attr}, "\${array_delim}") | toInteger(i)], \`; + } else if (type === "Boolean") { + query += \`\${attr}: [i in split(line.\${attr}, "\${array_delim}") | toBoolean(i)], \`; + } else if (type === "Float") { + query += \`\${attr}: [i in split(line.\${attr}, "\${array_delim}") | toFloat(i)], \`; + } else { + query += \`\${attr}: split(line.\${attr}, "\${array_delim}"), \`; + } + } else { + if (type === "Int") { + query += \`\${attr}: toInteger(line.\${attr}), \`; + } else if (type === "Boolean") { + query += \`\${attr}: toBoolean(line.\${attr}), \`; + } else if (type === "Float") { + query += \`\${attr}: toFloat(line.\${attr}), \`; + } else { + query += \`\${attr}: line.\${attr}, \`; + } + } + } + query = query.slice(0, query.length - 2) + "})"; + const result = await session.run(query); + return \`Successfully upload file\`; + } catch (e) { + throw new Error(e); + } finally { + await session.close(); + } +`; + +module.exports.movie_fieldMutation_add_director = ` +static async add_director_id(movie_id, director_id) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = \`MATCH (n:Movie ) WHERE n.movie_id = $id + SET n.director_id = $target RETURN head(collect(n))\` + + let create_relationships = \`MATCH (a:Movie), (b:Director) + WHERE a.movie_id = $id AND 
b.\${models.director.idAttribute()} = $target + CREATE (a)-[r:\${"director".toUpperCase() + "_EDGE"}]->(b)\` + try { + const result = await session.run(foreignKey, { + id: movie_id, + target: director_id + }); + await session.run(create_relationships, { + id: movie_id, + target: director_id, + }) + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new movie(node) + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_fieldMutation_remove_director = ` +static async remove_director_id(movie_id, director_id) { + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = \`MATCH (n:Movie ) WHERE n.movie_id = $id + SET n.director_id = $target RETURN head(collect(n))\` + + let delete_relationships = \`MATCH (a:Movie)-[r:\${"director".toUpperCase() + "_EDGE"}]-> (b:Director) + WHERE a.movie_id = $id AND b.\${models.director.idAttribute()} = $target + DELETE r\` + try { + const result = await session.run(foreignKey, { + id: movie_id, + target: null + }); + await session.run(delete_relationships, { + id: movie_id, + target: director_id, + }) + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return new movie(node) + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_fieldMutation_add_actor = ` +static async add_actor_ids(movie_id, actor_ids, benignErrorReporter, handle_inverse = true) { + //handle inverse association + if (handle_inverse) { + let promises = []; + actor_ids.forEach(idx => { + promises.push(models.actor.add_movie_ids(idx, [\`\${movie_id}\`], benignErrorReporter, false)); + }); + await Promise.all(promises); + } + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let 
foreignKey = \`MATCH (n:Movie ) WHERE n.movie_id = $id + SET n.actor_ids = $updated_ids\` + + let create_relationships = \`MATCH (a:Movie), (b:Actor) + WHERE a.movie_id = $id AND b.\${models.actor.idAttribute()} IN $source + CREATE (a)-[r:\${"actor".toUpperCase() + "_EDGE"}]->(b)\` + try { + let record = await this.readById(movie_id); + + if (record !== null) { + let updated_ids = helper.unionIds(record.actor_ids, actor_ids); + await session.run(foreignKey, { + id: movie_id, + updated_ids: updated_ids + }); + + await session.run(create_relationships, { + id: movie_id, + source: actor_ids, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_fieldMutation_remove_actor = ` +static async remove_actor_ids(movie_id, actor_ids, benignErrorReporter, handle_inverse = true) { + //handle inverse association + if (handle_inverse) { + let promises = []; + actor_ids.forEach(idx => { + promises.push(models.actor.remove_movie_ids(idx, [\`\${movie_id}\`], benignErrorReporter, false)); + }); + await Promise.all(promises); + } + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = \`MATCH (n:Movie ) WHERE n.movie_id = $id + SET n.actor_ids = $updated_ids\` + + let delete_relationships = \`MATCH (a:Movie)-[r:\${"actor".toUpperCase() + "_EDGE"}]-> (b:Actor) + WHERE a.movie_id = $id AND b.\${models.actor.idAttribute()} IN $source + DELETE r\` + try { + let record = await this.readById(movie_id); + + if (record !== null) { + let updated_ids = helper.differenceIds(record.actor_ids, actor_ids); + await session.run(foreignKey, { + id: movie_id, + updated_ids: updated_ids + }); + + await session.run(delete_relationships, { + id: movie_id, + source: actor_ids, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; + +module.exports.movie_fieldMutation_bulkAssociate_add 
= ` +static async bulkAssociateMovieWithDirector_id(bulkAssociationInput, benignErrorReporter) { + let mappedForeignKeys = helper.mapForeignKeysToPrimaryKeyArray(bulkAssociationInput, "movie_id", "director_id"); + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = \`MATCH (n:Movie) WHERE n.movie_id IN $id + SET n.director_id = $target\` + + let create_relationships = \`MATCH (a:Movie), (b:Director) + WHERE a.movie_id IN $id AND b.director_id = $target + CREATE (a)-[r:\${"director".toUpperCase() + "_EDGE"}]->(b)\` + try { + for (let { + director_id, + movie_id + } of mappedForeignKeys) { + await session.run(foreignKey, { + id: movie_id, + target: director_id + }) + await session.run(create_relationships, { + id: movie_id, + target: director_id, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } + + return "Records successfully updated!" 
+} +`; +module.exports.movie_fieldMutation_bulkAssociate_remove = ` +static async bulkDisAssociateMovieWithDirector_id(bulkAssociationInput, benignErrorReporter) { + let mappedForeignKeys = helper.mapForeignKeysToPrimaryKeyArray(bulkAssociationInput, "movie_id", "director_id"); + + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.WRITE, + }); + let foreignKey = \`MATCH (n:Movie) WHERE n.movie_id IN $id + SET n.director_id = $target\` + + let delete_relationships = \`MATCH (a:Movie)-[r:\${"director".toUpperCase() + "_EDGE"}]-> (b:Director) + WHERE a.movie_id IN $id AND b.director_id = $target + DELETE r\` + try { + for (let { + director_id, + movie_id + } of mappedForeignKeys) { + await session.run(foreignKey, { + id: movie_id, + target: null + }) + await session.run(delete_relationships, { + id: movie_id, + target: director_id, + }) + } + } catch (error) { + throw error; + } finally { + await session.close(); + } + + return "Records successfully updated!" 
+} +`; + +module.exports.neo4j_adapter_readById = ` +static async readById(id){ + const driver = await this.storageHandler; + const session = driver.session({ + database: config.database, + defaultAccessMode: neo4j.session.READ, + }); + try { + const result = await session.run( + \`MATCH (n:Dist_movie {\${this.idAttribute()}:\$id}) RETURN n\`, { + id: id + } + ); + if (result.records.length === 0) { + throw new Error(\`Record with ID = "\${id}" does not exist\`); + } + const singleRecord = result.records[0]; + const node = singleRecord.get(0); + return validatorUtil.validateData( + "validateAfterRead", + this, + new dist_movie_instance1(node.properties) + ); + } catch (error) { + throw error; + } finally { + await session.close(); + } +} +`; From 51d6195866fa7f2a95b439d5786f3bb83ec17d1a Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Wed, 2 Jun 2021 12:01:00 +0200 Subject: [PATCH 7/8] test: add neo4j integration tests --- .../data_models_storage_config2.json | 8 + .../docker-compose-test.yml | 19 + test/integration_test_misc/import/movie.csv | 7 + .../actor.json | 27 + .../director.json | 24 + .../dist_director.json | 20 + .../dist_director_instance1.json | 22 + .../dist_movie.json | 27 + .../dist_movie_instance1.json | 28 + .../movie.json | 42 + .../review.json | 23 + test/mocha_integration_neo4j.test.js | 998 ++++++++++++++++++ test/testenv_cli.sh | 7 +- test/testenv_docker_up.sh | 2 +- test/testenv_remove.sh | 2 +- 15 files changed, 1252 insertions(+), 4 deletions(-) create mode 100644 test/integration_test_misc/import/movie.csv create mode 100644 test/integration_test_misc/integration_test_models_instance2/actor.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/director.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_director.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_director_instance1.json create mode 100644 
test/integration_test_misc/integration_test_models_instance2/dist_movie.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/dist_movie_instance1.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/movie.json create mode 100644 test/integration_test_misc/integration_test_models_instance2/review.json create mode 100644 test/mocha_integration_neo4j.test.js diff --git a/test/integration_test_misc/data_models_storage_config2.json b/test/integration_test_misc/data_models_storage_config2.json index 23a752b6..4d891137 100644 --- a/test/integration_test_misc/data_models_storage_config2.json +++ b/test/integration_test_misc/data_models_storage_config2.json @@ -20,5 +20,13 @@ "schema":"public", "presto_host": "gql_presto", "presto_port": "8080" + }, + "default-neo4j": { + "storageType": "neo4j", + "username": "neo4j", + "password": "sciencedb", + "database": "neo4j", + "host": "gql_neo4j", + "port": "7687" } } \ No newline at end of file diff --git a/test/integration_test_misc/docker-compose-test.yml b/test/integration_test_misc/docker-compose-test.yml index 1c7809ce..5e38572c 100644 --- a/test/integration_test_misc/docker-compose-test.yml +++ b/test/integration_test_misc/docker-compose-test.yml @@ -58,6 +58,8 @@ services: depends_on: - gql_postgres1 - gql_mongodb1 + - gql_cassandra1 + - gql_minio1 build: context: . dockerfile: Dockerfile.graphql_server @@ -139,10 +141,27 @@ services: networks: - instance2 + gql_neo4j: + image: neo4j + container_name: neo4j1 + user: ${UID_GID} + ports: + - 7474:7474 + - 7687:7687 + volumes: + - ./import:/var/lib/neo4j/import + environment: + NEO4J_AUTH: neo4j/sciencedb + networks: + - instance2 + gql_science_db_graphql_server2: container_name: server2 depends_on: - gql_postgres2 + - gql_presto + - gql_trino + - gql_neo4j build: context: . 
dockerfile: Dockerfile.graphql_server diff --git a/test/integration_test_misc/import/movie.csv b/test/integration_test_misc/import/movie.csv new file mode 100644 index 00000000..6391ccb5 --- /dev/null +++ b/test/integration_test_misc/import/movie.csv @@ -0,0 +1,7 @@ +movie_id,release,runtime,box_office,is_adult,genres,votes +m1,2008-12-03T10:15:30Z,130,17845632.32,true,action;thriller,50;200;140;1200;150 +m2,2018-12-03T10:15:30Z,110,15645632.32,true,comedy,1;2;14;1200;15000 +m3,2017-12-03T10:15:30Z,120,12345632.32,false,crime;horror,10;21;40;1000;10000 +m4,2018-12-03T10:15:30Z,120,15645632.32,false,crime;horror,1;2;14;1000;15000 +m5,2002-12-03T10:15:30Z,100,145632.32,false,action;mystery,5;20;150;1300;1500 +m6,2021-12-03T10:15:30Z,109,175632.32,true,wuxia;mystery,2;50;140;800;14000 diff --git a/test/integration_test_misc/integration_test_models_instance2/actor.json b/test/integration_test_misc/integration_test_models_instance2/actor.json new file mode 100644 index 00000000..ab42dfae --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/actor.json @@ -0,0 +1,27 @@ +{ + "model" : "actor", + "storageType" : "neo4j", + "attributes" : { + "actor_id": "String", + "actor_name": "String", + "movie_ids": "[String]" + }, + + "associations":{ + "movies":{ + "type": "to_many", + "reverseAssociationType": "to_many", + "target": "movie", + "targetKey": "actor_ids", + "sourceKey": "movie_ids", + "keyIn": "actor", + "targetStorageType": "neo4j" + } + }, + + "internalId": "actor_id", + "id":{ + "name": "actor_id", + "type": "String" + } +} diff --git a/test/integration_test_misc/integration_test_models_instance2/director.json b/test/integration_test_misc/integration_test_models_instance2/director.json new file mode 100644 index 00000000..c5a1d2e8 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/director.json @@ -0,0 +1,24 @@ +{ + "model": "director", + "storageType": "neo4j", + "attributes": { + "director_id": "String", + 
"director_name": "String", + "nationality": "String" + }, + "associations": { + "movies": { + "type": "to_many", + "target": "movie", + "targetKey": "director_id", + "keyIn": "movie", + "targetStorageType": "neo4j", + "label": "movie_id" + } + }, + "internalId": "director_id", + "id": { + "name": "director_id", + "type": "String" + } +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_director.json b/test/integration_test_misc/integration_test_models_instance2/dist_director.json new file mode 100644 index 00000000..8654b523 --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_director.json @@ -0,0 +1,20 @@ +{ + "model": "dist_director", + "storageType" : "distributed-data-model", + "registry": ["dist_director_instance1"], + "attributes": { + "director_id": "String", + "director_name": "String", + "nationality": "String" + }, + "associations": { + "dist_movies": { + "type": "to_many", + "target": "dist_movie", + "targetKey": "director_id", + "keyIn": "dist_movie", + "targetStorageType": "distributed-data-model" + } + }, + "internalId": "director_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_director_instance1.json b/test/integration_test_misc/integration_test_models_instance2/dist_director_instance1.json new file mode 100644 index 00000000..776f86bd --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_director_instance1.json @@ -0,0 +1,22 @@ +{ + "model": "dist_director", + "storageType": "neo4j-adapter", + "adapterName": "dist_director_instance1", + "regex": "instance1", + "attributes": { + "director_id": "String", + "director_name": "String", + "nationality": "String" + }, + "associations": { + "dist_movies": { + "type": "to_many", + "target": "dist_movie", + "targetKey": "director_id", + "keyIn": "dist_movie", + "targetStorageType": "distributed-data-model" + } + }, + 
"internalId": "director_id" +} + \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_movie.json b/test/integration_test_misc/integration_test_models_instance2/dist_movie.json new file mode 100644 index 00000000..3d9a93fa --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_movie.json @@ -0,0 +1,27 @@ +{ + "model": "dist_movie", + "storageType" : "distributed-data-model", + "registry": ["dist_movie_instance1"], + "attributes": { + "movie_id": "String", + "release": "DateTime", + "runtime": "Int", + "box_office": "Float", + "is_adult": "Boolean", + "genres": "[String]", + "votes": "[Int]", + "director_id": "String" + }, + + "associations": { + "dist_director": { + "type": "to_one", + "target": "dist_director", + "targetKey": "director_id", + "keyIn" : "dist_movie", + "targetStorageType": "distributed-data-model" + } + }, + + "internalId" : "movie_id" +} \ No newline at end of file diff --git a/test/integration_test_misc/integration_test_models_instance2/dist_movie_instance1.json b/test/integration_test_misc/integration_test_models_instance2/dist_movie_instance1.json new file mode 100644 index 00000000..5816349a --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/dist_movie_instance1.json @@ -0,0 +1,28 @@ +{ + "model": "dist_movie", + "storageType": "neo4j-adapter", + "adapterName": "dist_movie_instance1", + "regex": "instance1", + "attributes": { + "movie_id": "String", + "release": "DateTime", + "runtime": "Int", + "box_office": "Float", + "is_adult": "Boolean", + "genres": "[String]", + "votes": "[Int]", + "director_id": "String" + }, + "associations": { + "dist_director": { + "type": "to_one", + "target": "dist_director", + "targetKey": "director_id", + "keyIn": "dist_movie", + "targetStorageType": "distributed-data-model" + } + }, + + "internalId": "movie_id" +} + \ No newline at end of file diff --git 
a/test/integration_test_misc/integration_test_models_instance2/movie.json b/test/integration_test_misc/integration_test_models_instance2/movie.json new file mode 100644 index 00000000..a06dca5d --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/movie.json @@ -0,0 +1,42 @@ +{ + "model" : "movie", + "storageType" : "neo4j", + "attributes" : { + "movie_id": "String", + "release":"DateTime", + "runtime": "Int", + "box_office": "Float", + "is_adult": "Boolean", + "genres": "[String]", + "votes":"[Int]", + "director_id":"String", + "actor_ids": "[String]" + }, + "associations": { + "director": { + "type": "to_one", + "target": "director", + "targetKey": "director_id", + "keyIn": "movie", + "targetStorageType": "neo4j", + "label": "director_name" + }, + "actor":{ + "type": "to_many", + "reverseAssociationType": "to_many", + "target": "actor", + "targetKey": "movie_ids", + "sourceKey": "actor_ids", + "keyIn": "movie", + "targetStorageType": "neo4j" + }, + "unique_review":{ + "type" : "to_one", + "target" : "review", + "targetKey" : "movie_id", + "keyIn": "tracker", + "targetStorageType" : "neo4j" + } + }, + "internalId": "movie_id" +} diff --git a/test/integration_test_misc/integration_test_models_instance2/review.json b/test/integration_test_misc/integration_test_models_instance2/review.json new file mode 100644 index 00000000..2c5006da --- /dev/null +++ b/test/integration_test_misc/integration_test_models_instance2/review.json @@ -0,0 +1,23 @@ +{ + "model" : "review", + "storageType" : "neo4j", + "attributes" : { + "review_id": "String", + "movie_id": "String", + "rating": "Float" + }, + "associations": { + "unique_movie": { + "type" : "to_one", + "target" : "movie", + "targetKey" : "movie_id", + "keyIn": "review", + "targetStorageType" : "neo4j" + } + }, + "internalId": "review_id", + "id": { + "name": "review_id", + "type": "String" + } +} \ No newline at end of file diff --git a/test/mocha_integration_neo4j.test.js 
b/test/mocha_integration_neo4j.test.js new file mode 100644 index 00000000..1624eb6e --- /dev/null +++ b/test/mocha_integration_neo4j.test.js @@ -0,0 +1,998 @@ +const { expect } = require("chai"); +const delay = require("delay"); +const path = require("path"); +const itHelpers = require("./integration_test_misc/integration_test_helpers"); + +describe("Neo4j - Basic CRUD Operations", () => { + after(async () => { + // Delete all movies + let res = itHelpers.request_graph_ql_post_instance2( + "{ movies(pagination:{limit:25}) {movie_id} }" + ); + let movies = JSON.parse(res.body.toString("utf8")).data.movies; + + for (let i = 0; i < movies.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteMovie (movie_id: "${movies[i].movie_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt = JSON.parse(res.body.toString("utf8")).data.countMovies; + expect(cnt).to.equal(0); + }); + + it("01. Movie: empty table", () => { + let res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.countMovies).equal(0); + }); + + it("02. Movie: add", async () => { + // movie_id,release,runtime,box_office,is_adult,genres,votes + // m1,2008-12-03T10:15:30Z,130,17845632.32,true,action;thriller,50;200;140;1200;150 + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + addMovie(movie_id:"m0", release:"1998-12-03T10:15:30Z", runtime:110, box_office:13145632.32, + is_adult:false, genres:["action","thriller"], votes: [50,200,140,1200,150]) + { + movie_id + genres + votes + } + }` + ); + + expect(res.statusCode).to.equal(200); + + res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt = JSON.parse(res.body.toString("utf8")).data.countMovies; + expect(cnt).to.equal(1); + }); + + it("03. 
Movie: update", () => { + let res = itHelpers.request_graph_ql_post_instance2( + '{movies(search:{field:is_adult operator:eq value:"false"}, pagination:{limit:25}){movie_id}}' + ); + let resBody = JSON.parse(res.body.toString("utf8")); + let movie = resBody.data.movies[0].movie_id; + + res = itHelpers.request_graph_ql_post_instance2( + `mutation { updateMovie(movie_id: "${movie}", runtime:111) { + movie_id + runtime + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateMovie: { + movie_id: `${movie}`, + runtime: 111, + }, + }, + }); + }); + + it("04. Movie: read", () => { + let res = itHelpers.request_graph_ql_post_instance2( + '{movies(search:{field:release, operator:eq, value:"1998-12-03T10:15:30.000Z"}, pagination:{limit:25}){movie_id}}' + ); + let resBody = JSON.parse(res.body.toString("utf8")); + let movie = resBody.data.movies[0].movie_id; + + res = itHelpers.request_graph_ql_post_instance2( + `{ + readOneMovie(movie_id : "${movie}") { + movie_id + release + runtime + box_office + is_adult + genres + votes + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneMovie: { + movie_id: "m0", + release: "1998-12-03T10:15:30.000Z", + runtime: 111, + box_office: 13145632.32, + is_adult: false, + genres: ["action", "thriller"], + votes: [50, 200, 140, 1200, 150], + }, + }, + }); + }); + + it("05. Movie: search with regex", () => { + let res = itHelpers.request_graph_ql_post_instance2( + '{movies(search:{field:movie_id, value:"m.*", operator:regexp},pagination:{limit:25}) {movie_id}}' + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.movies.length).equal(1); + }); + + it("06. 
Movie: delete", async () => { + let res = itHelpers.request_graph_ql_post_instance2( + "{ movies(pagination:{limit:25}) {movie_id} }" + ); + let movies = JSON.parse(res.body.toString("utf8")).data.movies; + + for (let i = 0; i < movies.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteMovie (movie_id: "${movies[i].movie_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + + res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt = JSON.parse(res.body.toString("utf8")).data.countMovies; + expect(cnt).to.equal(0); + }); + + it("07. Movie: CSV bulkUpload", async () => { + let res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt1 = JSON.parse(res.body.toString("utf8")).data.countMovies; + + res = itHelpers.request_graph_ql_post_instance2( + "mutation {bulkAddMovieCsv}" + ); + + expect(JSON.parse(res.body.toString("utf8")).data.bulkAddMovieCsv).equal( + "Successfully upload file" + ); + await delay(500); + + res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt2 = JSON.parse(res.body.toString("utf8")).data.countMovies; + expect(cnt2 - cnt1).to.equal(6); + }); + + it("08. 
Movie: paginate", () => { + let res = itHelpers.request_graph_ql_post_instance2( + "{movies(pagination:{limit:1}) {movie_id}}" + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody.data.movies.length).equal(1); + + res = itHelpers.request_graph_ql_post_instance2( + `{ + moviesConnection(pagination:{first:10}) { + edges{ + cursor + node{ + movie_id + } + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + let edges = resBody.data.moviesConnection.edges; + let idArray = edges.map((edge) => edge.node.movie_id); + let cursorArray = edges.map((edge) => edge.cursor); + res = itHelpers.request_graph_ql_post_instance2( + `{ + moviesConnection(pagination:{first: 2, after: "${cursorArray[1]}"}) { + movies{ + movie_id + } + edges{ + cursor + node{ + movie_id + } + } + pageInfo{ + endCursor + hasNextPage + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + moviesConnection: { + movies: [ + { + movie_id: "m3", + }, + { + movie_id: "m4", + }, + ], + edges: [ + { + cursor: cursorArray[2], + node: { + movie_id: idArray[2], + }, + }, + { + cursor: cursorArray[3], + node: { + movie_id: idArray[3], + }, + }, + ], + pageInfo: { + endCursor: cursorArray[3], + hasNextPage: true, + }, + }, + }, + }); + }); + + it("09. Movie: sort", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + movies(pagination: {limit:2}, order: [{field: runtime, order: DESC}]) { + movie_id + runtime + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + movies: [ + { movie_id: "m1", runtime: 130 }, + { movie_id: "m3", runtime: 120 }, + ], + }, + }); + }); + + it("10. 
Movie: get the table template", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{csvTableTemplateMovie}` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + csvTableTemplateMovie: [ + "movie_id,release,runtime,box_office,is_adult,genres,votes,director_id,actor_ids", + "String,DateTime,Int,Float,Boolean,[String],[Int],String,[String]", + ], + }, + }); + }); +}); + +describe("Neo4j - Association", () => { + // set up the environment + before(async () => { + let res = itHelpers.request_graph_ql_post_instance2( + "mutation {bulkAddMovieCsv}" + ); + + expect(JSON.parse(res.body.toString("utf8")).data.bulkAddMovieCsv).equal( + "Successfully upload file" + ); + await delay(500); + }); + + // clean up records + after(async () => { + // Delete all movies + let res = itHelpers.request_graph_ql_post_instance2( + "{ movies(pagination:{limit:25}) {movie_id} }" + ); + let movies = JSON.parse(res.body.toString("utf8")).data.movies; + + for (let i = 0; i < movies.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteMovie (movie_id: "${movies[i].movie_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + + res = itHelpers.request_graph_ql_post_instance2("{ countMovies }"); + let cnt = JSON.parse(res.body.toString("utf8")).data.countMovies; + expect(cnt).to.equal(0); + + // Delete all directors + res = itHelpers.request_graph_ql_post_instance2( + "{ directors(pagination:{limit:25}) {director_id} }" + ); + let directors = JSON.parse(res.body.toString("utf8")).data.directors; + + for (let i = 0; i < directors.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteDirector (director_id: "${directors[i].director_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + + res = itHelpers.request_graph_ql_post_instance2("{ countDirectors }"); + cnt = JSON.parse(res.body.toString("utf8")).data.countDirectors; 
+ expect(cnt).to.equal(0); + + // Delete all kinds of actor + res = itHelpers.request_graph_ql_post_instance2( + "{ actors(pagination:{limit:25}) {actor_id} }" + ); + let actor = JSON.parse(res.body.toString("utf8")).data.actors; + + for (let i = 0; i < actor.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteActor (actor_id: "${actor[i].actor_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + + res = itHelpers.request_graph_ql_post_instance2("{ countActors }"); + cnt = JSON.parse(res.body.toString("utf8")).data.countActors; + expect(cnt).to.equal(0); + + // Delete all reviews + res = itHelpers.request_graph_ql_post_instance2( + "{ reviews(pagination:{limit:25}) {review_id} }" + ); + let review = JSON.parse(res.body.toString("utf8")).data.reviews; + + for (let i = 0; i < review.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteReview (review_id: "${review[i].review_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + + res = itHelpers.request_graph_ql_post_instance2("{ countReviews }"); + cnt = JSON.parse(res.body.toString("utf8")).data.countReviews; + expect(cnt).to.equal(0); + }); + + it("01. Movie : Director (n:1) - add movies to director", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + addDirector( director_id: "d1", director_name: "Chloé Zhao", addMovies: ["m1", "m2"] ){ + director_name + moviesFilter(pagination:{limit:10}){ + movie_id + genres + } + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + addDirector: { + moviesFilter: [ + { + movie_id: "m1", + genres: ["action", "thriller"], + }, + { + movie_id: "m2", + genres: ["comedy"], + }, + ], + director_name: "Chloé Zhao", + }, + }, + }); + }); + it("02. 
Movie : Director (n:1) - read one associated movie", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + readOneMovie(movie_id: "m1"){ + movie_id + director{ + director_name + } + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneMovie: { + director: { + director_name: "Chloé Zhao", + }, + movie_id: "m1", + }, + }, + }); + }); + + it("03. Movie : Director (n:1) - deleting the director record with associations fails", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteDirector (director_id: "d1") }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(500); + expect(resBody).to.deep.equal({ + errors: [ + { + message: `director with director_id d1 has associated records and is NOT valid for deletion. Please clean up before you delete.`, + locations: [ + { + column: 12, + line: 1, + }, + ], + path: ["deleteDirector"], + }, + ], + data: null, + }); + }); + + it("04. Movie : Director (n:1) - delete the associations in the director record", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{updateDirector(director_id: "d1", removeMovies: ["m1", "m2"]) { + director_name + moviesFilter(pagination:{limit:10}){ + movie_id + } + moviesConnection(pagination:{first:5}){ + movies{ + movie_id + } + } + } + }` + ); + + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateDirector: { + moviesFilter: [], + director_name: "Chloé Zhao", + moviesConnection: { + movies: [], + }, + }, + }, + }); + }); + + it("05. 
Movie : Actor (n:n) - add movies to actor", () => { + const actors = ["Yokohama Ryusei", "Minami Hamabe"]; + for (let i = 1; i < 3; i++) { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + addActor(actor_id:"a${i}", actor_name:"${actors[i - 1]}", + addMovies:["m3","m4"]){ + actor_id + movie_ids + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + addActor: { + actor_id: `a${i}`, + movie_ids: ["m3", "m4"], + }, + }, + }); + } + }); + + it("06. Movie : Actor (n:n) - read one associated movie", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOneMovie(movie_id: "m3"){ + movie_id + actor_ids + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneMovie: { + movie_id: "m3", + actor_ids: ["a1", "a2"], + }, + }, + }); + }); + + it("07. Movie : Actor (n:n) - delete the associations in the actor records", () => { + for (let i = 1; i < 3; i++) { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + updateActor(actor_id:"a${i}", removeMovies:["m3","m4"]){ + actor_id + movie_ids + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateActor: { + actor_id: `a${i}`, + movie_ids: [], + }, + }, + }); + } + }); + + it("08. Movie : Review (1:1) - add movie to review", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + addReview(review_id:"r1", rating:4.9, addUnique_movie:"m5"){ + review_id + movie_id + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + addReview: { + review_id: "r1", + movie_id: "m5", + }, + }, + }); + }); + + it("09. 
Movie : Review (1:1) - read one associated movie", () => { + let res = itHelpers.request_graph_ql_post_instance2(` + { + readOneMovie(movie_id: "m5"){ + movie_id + unique_review { + review_id + rating + } + } + }`); + let resBody = JSON.parse(res.body.toString("utf8")); + + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + readOneMovie: { + movie_id: "m5", + unique_review: { + review_id: "r1", + rating: 4.9, + }, + }, + }, + }); + }); + + it("10. Movie : Review (1:1) - violate the unique rule", () => { + itHelpers.request_graph_ql_post_instance2( + `mutation{ + addReview(review_id:"r2", rating:3.5, addUnique_movie:"m5"){ + review_id + movie_id + } + }` + ); + res = itHelpers.request_graph_ql_post_instance2( + `{ + readOneMovie(movie_id: "m5"){ + movie_id + unique_review { + review_id + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + errors: [ + { + message: + 'Not unique "to_one" association Error: Found > 1 reviews matching movie with movie_id m5. Consider making this a "to_many" association, or using unique constraints, or moving the foreign key into the movie model. Returning first review.', + locations: "", + }, + ], + data: { + readOneMovie: { + movie_id: "m5", + unique_review: { + review_id: "r1", + }, + }, + }, + }); + }); + + it("11. 
Movie : Review (1:1) - delete the associations in the review record", () => { + for (let i = 1; i < 3; i++) { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation{ + updateReview(review_id:"r${i}", removeUnique_movie:"m5"){ + review_id + movie_id + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateReview: { + review_id: `r${i}`, + movie_id: null, + }, + }, + }); + } + }); +}); + +describe("Neo4j - Distributed Data Models", () => { + after(async () => { + // Delete all movies + let res = itHelpers.request_graph_ql_post_instance2( + "{ dist_moviesConnection(pagination:{first:10}) {edges {node {movie_id}}}}" + ); + let edges = JSON.parse(res.body.toString("utf8")).data.dist_moviesConnection + .edges; + for (let edge of edges) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteDist_movie (movie_id: "${edge.node.movie_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + res = itHelpers.request_graph_ql_post_instance2("{ countDist_movies }"); + let cnt = JSON.parse(res.body.toString("utf8")).data.countDist_movies; + expect(cnt).to.equal(0); + // Delete all directors + res = itHelpers.request_graph_ql_post_instance2( + "{ dist_directorsConnection(pagination:{first:10}) {edges {node {director_id}}}}" + ); + edges = JSON.parse(res.body.toString("utf8")).data.dist_directorsConnection + .edges; + for (let edge of edges) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { deleteDist_director (director_id: "${edge.node.director_id}") }` + ); + expect(res.statusCode).to.equal(200); + } + res = itHelpers.request_graph_ql_post_instance2("{ countDist_directors }"); + cnt = JSON.parse(res.body.toString("utf8")).data.countDist_directors; + expect(cnt).to.equal(0); + }); + + it("01. 
Movie DDM: create a director and 3 movies", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation { + addDist_director(director_id: "instance1-01", director_name: "Chloé Zhao") { + director_id + director_name + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + addDist_director: { + director_id: "instance1-01", + director_name: "Chloé Zhao", + }, + }, + }); + const runtime = [110, 145, 120]; + for (let i = 0; i < runtime.length; i++) { + res = itHelpers.request_graph_ql_post_instance2( + `mutation { + addDist_movie(movie_id: "instance1-0${i + 2}", + runtime: ${runtime[i]}) + { + movie_id + runtime + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + addDist_movie: { + movie_id: `instance1-0${i + 2}`, + runtime: runtime[i], + }, + }, + }); + } + }); + it("02. Movie DDM: update the director to associate with movies", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation { + updateDist_director(director_id: "instance1-01", addDist_movies: ["instance1-02", "instance1-03"]) { + director_name + countFilteredDist_movies + dist_moviesConnection(pagination: {first: 5}) { + edges { + node { + runtime + } + } + dist_movies{ + movie_id + } + } + } + } + ` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateDist_director: { + director_name: "Chloé Zhao", + countFilteredDist_movies: 2, + dist_moviesConnection: { + edges: [ + { + node: { + runtime: 110, + }, + }, + { + node: { + runtime: 145, + }, + }, + ], + dist_movies: [ + { movie_id: "instance1-02" }, + { movie_id: "instance1-03" }, + ], + }, + }, + }, + }); + }); + it("03. 
Movie DDM: read all", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_directorsConnection(pagination: {first: 25}) { + edges { + node { + director_id + countFilteredDist_movies + } + } + } + } + ` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_directorsConnection: { + edges: [ + { + node: { + director_id: "instance1-01", + countFilteredDist_movies: 2, + }, + }, + ], + }, + }, + }); + }); + it("04. Movie DDM: search, sort and pagination", () => { + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_moviesConnection(search: {field: movie_id, value: "instance.*", operator: regexp}, pagination: {first: 5}) { + edges { + node { + movie_id + runtime + } + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_moviesConnection: { + edges: [ + { + node: { + movie_id: "instance1-02", + runtime: 110, + }, + }, + { + node: { + movie_id: "instance1-03", + runtime: 145, + }, + }, + { + node: { + movie_id: "instance1-04", + runtime: 120, + }, + }, + ], + }, + }, + }); + // The same search, but order by name descending + res = itHelpers.request_graph_ql_post_instance2( + `{ + dist_moviesConnection(search: {field: movie_id, value: "instance.*", operator: regexp}, + order:{field:runtime order:DESC}, pagination: {first: 5}) { + edges { + node { + movie_id + runtime + } + } + } + }` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_moviesConnection: { + edges: [ + { + node: { + movie_id: "instance1-03", + runtime: 145, + }, + }, + { + node: { + movie_id: "instance1-04", + runtime: 120, + }, + }, + { + node: { + movie_id: "instance1-02", + runtime: 110, + }, + }, + ], + }, + }, + }); + res = itHelpers.request_graph_ql_post_instance2( + `{ + 
dist_moviesConnection(pagination: {first: 5, + after:"eyJtb3ZpZV9pZCI6Imluc3RhbmNlMS0wMiIsInJ1bnRpbWUiOjExMCwiZGlyZWN0b3JfaWQiOiJpbnN0YW5jZTEtMDEifQ==" + }){ + edges { + node { + movie_id + runtime + } + cursor + } + pageInfo { + startCursor + endCursor + hasNextPage + hasPreviousPage + } + } + } + ` + ); + resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + dist_moviesConnection: { + edges: [ + { + cursor: + "eyJtb3ZpZV9pZCI6Imluc3RhbmNlMS0wMyIsInJ1bnRpbWUiOjE0NSwiZGlyZWN0b3JfaWQiOiJpbnN0YW5jZTEtMDEifQ==", + node: { + movie_id: "instance1-03", + runtime: 145, + }, + }, + { + cursor: + "eyJtb3ZpZV9pZCI6Imluc3RhbmNlMS0wNCIsInJ1bnRpbWUiOjEyMH0=", + node: { + movie_id: "instance1-04", + runtime: 120, + }, + }, + ], + pageInfo: { + startCursor: + "eyJtb3ZpZV9pZCI6Imluc3RhbmNlMS0wMyIsInJ1bnRpbWUiOjE0NSwiZGlyZWN0b3JfaWQiOiJpbnN0YW5jZTEtMDEifQ==", + endCursor: + "eyJtb3ZpZV9pZCI6Imluc3RhbmNlMS0wNCIsInJ1bnRpbWUiOjEyMH0=", + hasNextPage: false, + hasPreviousPage: false, + }, + }, + }, + }); + }); + it("05. Movie DDM: update the director to remove associations", () => { + let res = itHelpers.request_graph_ql_post_instance2( + `mutation { + updateDist_director(director_id:"instance1-01" removeDist_movies:["instance1-02", "instance1-03"]) { + director_name + countFilteredDist_movies + dist_moviesConnection(pagination:{first:5}){ + edges { + node { + runtime + } + } + dist_movies{ + movie_id + } + } + } + }` + ); + let resBody = JSON.parse(res.body.toString("utf8")); + expect(res.statusCode).to.equal(200); + expect(resBody).to.deep.equal({ + data: { + updateDist_director: { + director_name: "Chloé Zhao", + countFilteredDist_movies: 0, + dist_moviesConnection: { + edges: [], + dist_movies: [], + }, + }, + }, + }); + }); +}); diff --git a/test/testenv_cli.sh b/test/testenv_cli.sh index 5fba6eee..3275bf17 100644 --- a/test/testenv_cli.sh +++ b/test/testenv_cli.sh @@ -98,7 +98,7 @@ fi # 1. 
Stop docker containers and remove anonymous volumes # 2. Re-start the docker containers if [[ $OPT_RESTART_DOCKER == "true" ]]; then - docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v + UID_GID="$(id -u):$(id -g)" docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v bash "${TEST_DIR}/testenv_docker_up.sh" exit 0 fi @@ -114,6 +114,7 @@ if [[ $OPT_RUN_TESTS == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" + mocha "${TEST_DIR}/mocha_integration_neo4j.test.js" # 1. Remove docker containers, images, and volumes # 2. Remove the testing environment @@ -132,7 +133,7 @@ fi # 4. Run integration tests # 5. Perform a full cleanup (optionally disabled) if [[ $OPT_GENCODE_RUNTESTS == "true" ]]; then - docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v + UID_GID="$(id -u):$(id -g)" docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v bash "${TEST_DIR}/testenv_generate_code.sh" bash "${TEST_DIR}/testenv_sync.sh" bash "${TEST_DIR}/testenv_docker_up.sh" @@ -142,6 +143,7 @@ if [[ $OPT_GENCODE_RUNTESTS == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" + mocha "${TEST_DIR}/mocha_integration_neo4j.test.js" # 1. Remove docker containers, images, and volumes # 2. Remove the testing environment @@ -172,6 +174,7 @@ if [[ $DEFAULT_RUN == "true" ]]; then mocha "${TEST_DIR}/mocha_integration_amazon_s3.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_trino.test.js" mocha --timeout 10000 "${TEST_DIR}/mocha_integration_presto.test.js" + mocha "${TEST_DIR}/mocha_integration_neo4j.test.js" # 1. 
Remove docker containers, images, and volumes # 2. Remove the testing environment diff --git a/test/testenv_docker_up.sh b/test/testenv_docker_up.sh index b323c690..04d49a43 100644 --- a/test/testenv_docker_up.sh +++ b/test/testenv_docker_up.sh @@ -35,7 +35,7 @@ source "${SCRIPT_DIR}/testenv_constants.sh" printBlockHeader "START" "UP DOCKER CONTAINERS" # Up detached docker containers -docker-compose \ +UID_GID="$(id -u):$(id -g)" docker-compose \ -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" up -d \ --force-recreate \ --remove-orphans \ diff --git a/test/testenv_remove.sh b/test/testenv_remove.sh index b0efd575..58a0da00 100644 --- a/test/testenv_remove.sh +++ b/test/testenv_remove.sh @@ -10,7 +10,7 @@ source "${SCRIPT_DIR}/testenv_constants.sh" printBlockHeader "START" "REMOVE TESTING ENVIRONMENT" # Remove docker containers, images, and volumes -docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v --rmi all +UID_GID="$(id -u):$(id -g)" docker-compose -f "${TEST_DIR}/integration_test_misc/docker-compose-test.yml" down -v --rmi all # Remove testing environment echo "Removing ${ENV_DIR}" From cf4f829d8b50b20109885a785a5afd66cf24017a Mon Sep 17 00:00:00 2001 From: wunderbarr Date: Mon, 14 Jun 2021 10:59:26 +0200 Subject: [PATCH 8/8] refactor: format for integration tests --- README.md | 2 +- test/mocha_integration.test.js | 130 ++++++++++++++++----------------- 2 files changed, 64 insertions(+), 68 deletions(-) diff --git a/README.md b/README.md index 11b995ac..c5c3f641 100644 --- a/README.md +++ b/README.md @@ -303,7 +303,7 @@ For relevant files see `package.json` (section scripts), directories `.test` and Zendro is the product of a joint effort between the Forschungszentrum Jülich, Germany and the Comisión Nacional para el Conocimiento y Uso de la Biodiversidad, México, to generate a tool that allows efficiently building data warehouses capable of dealing with diverse data generated by different research groups in 
the context of the FAIR principles and multidisciplinary projects. The name Zendro comes from the words Zenzontle and Drossel, which are Mexican and German words denoting a mockingbird, a bird capable of “talking” different languages, similar to how Zendro can connect your data warehouse from any programming language or data analysis pipeline. ### Zendro contributors in alphabetical order -Francisca Acevedo1, Vicente Arriaga1, Katja Dohm3, Constantin Eiteneuer2, Sven Fahrner2, Frank Fischer4, Asis Hallab2, Alicia Mastretta-Yanes1, Roland Pieruschka2, Alejandro Ponce1, Yaxal Ponce2, Francisco Ramírez1, Irene Ramos1, Bernardo Terroba1, Tim Rehberg3, Verónica Suaste1, Björn Usadel2, David Velasco2, Thomas Voecking3 +Francisca Acevedo1, Vicente Arriaga1, Katja Dohm3, Constantin Eiteneuer2, Sven Fahrner2, Frank Fischer4, Asis Hallab2, Alicia Mastretta-Yanes1, Roland Pieruschka2, Alejandro Ponce1, Yaxal Ponce2, Francisco Ramírez1, Irene Ramos1, Bernardo Terroba1, Tim Rehberg3, Verónica Suaste1, Björn Usadel2, David Velasco2, Thomas Voecking3, Dan Wang2 #### Author affiliations 1. 
CONABIO - Comisión Nacional para el Conocimiento y Uso de la Biodiversidad, México diff --git a/test/mocha_integration.test.js b/test/mocha_integration.test.js index 956012e7..f139f428 100644 --- a/test/mocha_integration.test.js +++ b/test/mocha_integration.test.js @@ -383,8 +383,8 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - data:{t:[{gene:"Gene D"}],i:[{name:"Zazaniza"}]} - }) + data: { t: [{ gene: "Gene D" }], i: [{ name: "Zazaniza" }] }, + }); res = itHelpers.request_graph_ql_post( `{ individuals (search: {field: name, operator: regexp, value: "Zazan[aeiou]za"},pagination:{limit:25}) {name}}` @@ -420,10 +420,10 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - "data":{ - "firstPerson":[{"name":"Zazanaza"}], - "secondPerson":[{"name":"Zazaniza"}] - } + data: { + firstPerson: [{ name: "Zazanaza" }], + secondPerson: [{ name: "Zazaniza" }], + }, }); res = await itHelpers.request_metaquery_post( @@ -436,38 +436,36 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo ); resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); - - expect(resBody).to.deep.equal( - { - "errors": [ - { - "message": "Cannot query field \"names\" on type \"individual\". Did you mean \"name\"?", - "locations": [ - { - "line": 2, - "column": 114 - } - ] - }, - { - "message": "Cannot query field \"names\" on type \"individual\". Did you mean \"name\"?", - "locations": [ - { - "line": 3, - "column": 116 - } - ] - } - ] - } - ); + expect(resBody).to.deep.equal({ + errors: [ + { + message: + 'Cannot query field "names" on type "individual". 
Did you mean "name"?', + locations: [ + { + line: 2, + column: 114, + }, + ], + }, + { + message: + 'Cannot query field "names" on type "individual". Did you mean "name"?', + locations: [ + { + line: 3, + column: 116, + }, + ], + }, + ], + }); res = await itHelpers.request_metaquery_post( `{ firstPerson: individuals (search: {field: name, operator: eq, value: "Zazanaza"},pagination:{limit:10}) {name} secondPerson: individuals (search: {field: name, operator: eq, value: "Zazaniza"},pagination:{limit:10}) {name} - }` - , + }`, ".firstPerson", null ); @@ -475,13 +473,12 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - "data": [ + data: [ { - "name": "Zazanaza" - } - ] - } - ); + name: "Zazanaza", + }, + ], + }); res = await itHelpers.request_metaquery_post( `{ @@ -494,16 +491,16 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); - + expect(resBody).to.deep.equal({ - "data": null, - "errors": [ + data: null, + errors: [ { - "message": "jq: error: syntax error, unexpected INVALID_CHARACTER (Unix shell quoting issues?) at , line 1:\n.~firstPerson \njq: error: try .[\"field\"] instead of .field for unusually named fields at , line 1:\n.~firstPerson\njq: 2 compile errors\n" - } - ] - } - ); + message: + 'jq: error: syntax error, unexpected INVALID_CHARACTER (Unix shell quoting issues?) 
at , line 1:\n.~firstPerson \njq: error: try .["field"] instead of .field for unusually named fields at , line 1:\n.~firstPerson\njq: 2 compile errors\n', + }, + ], + }); res = await itHelpers.request_metaquery_post( `{ @@ -517,20 +514,19 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - "data": { - "firstPerson": [ + data: { + firstPerson: [ { - "name": "Zazanaza" - } + name: "Zazanaza", + }, ], - "secondPerson": [ + secondPerson: [ { - "name": "Zazaniza" - } - ] - } - } - ); + name: "Zazaniza", + }, + ], + }, + }); res = await itHelpers.request_metaquery_post( `{ @@ -542,14 +538,14 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo ); resBody = JSON.parse(res.body.toString("utf8")); - + expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - "data": [ + data: [ { - "name": "Zazanaza" - } - ] + name: "Zazanaza", + }, + ], }); res = await itHelpers.request_metaquery_post( @@ -564,11 +560,11 @@ describe('Clean GraphQL Server: one new basic function per test ("Individual" mo resBody = JSON.parse(res.body.toString("utf8")); expect(res.statusCode).to.equal(200); expect(resBody).to.deep.equal({ - "data": [ + data: [ { - "name": "Zazanaza" - } - ] + name: "Zazanaza", + }, + ], }); res = await itHelpers.request_metaquery_post(