diff --git a/seeders/20211209205139-add-units.cjs b/seeders/20211209205139-add-units.cjs index 76b001e2..41d13dd6 100644 --- a/seeders/20211209205139-add-units.cjs +++ b/seeders/20211209205139-add-units.cjs @@ -1,20 +1,15 @@ 'use strict'; const UnitStub = require('../src/models/units/units.stub.json'); -const JunctionStub = [ - { - warehouseUnitId: '5c960ac1-a180-45a4-9850-be177e26d2fb', - qualificationId: '702cafbb-c624-4273-9cdc-c617ad5675df', - }, - { - warehouseUnitId: '5c960ac1-a180-45a4-9850-be177e26d2fb', - qualificationId: '76903895-840e-406c-b2a0-f90244acf02d', - }, -]; +const QualificationUnitStub = require('../src/models/qualificationUnits/qualificationUnits.stub.json'); module.exports = { up: async (queryInterface) => { await queryInterface.bulkInsert('units', UnitStub, {}); - await queryInterface.bulkInsert('qualification_unit', JunctionStub, {}); + await queryInterface.bulkInsert( + 'qualification_unit', + QualificationUnitStub, + {}, + ); }, down: async (queryInterface) => { diff --git a/src/controllers/vintage.controller.js b/src/controllers/vintage.controller.js index 1e44bad5..faab5a85 100644 --- a/src/controllers/vintage.controller.js +++ b/src/controllers/vintage.controller.js @@ -6,7 +6,7 @@ export const findAll = async (req, res) => { try { await assertHomeOrgExists(); const homeOrg = await Organization.getHomeOrg(); - console.log({ orgUid: Object.keys(homeOrg)[0] }); + return res.json( await Vintage.findAll({ where: { orgUid: Object.keys(homeOrg)[0] }, diff --git a/src/fullnode/data-layer-utils.js b/src/fullnode/data-layer-utils.js deleted file mode 100644 index 9b88894c..00000000 --- a/src/fullnode/data-layer-utils.js +++ /dev/null @@ -1,28 +0,0 @@ -export const changeListFactory = (action, id, record) => { - console.log({ action, id, record }); - switch (action) { - case 'INSERT': - return { - action: 'insert', - key: Buffer.from(id).toString('hex'), - value: Buffer.from(JSON.stringify(record)).toString('hex'), - }; - case 'UPDATE': - return [ - { - action: 'delete', - key: Buffer.from(id).toString('hex'), - }, - { - action: 'insert', - key: Buffer.from(id).toString('hex'), - value: Buffer.from(JSON.stringify(record)).toString('hex'), - }, - ]; - case 'DELETE': - return { - action: 'delete', - key: Buffer.from(id).toString('hex'), - }; - } -}; diff --git a/src/fullnode/dataLayerReadService.js b/src/fullnode/dataLayerReadService.js deleted file mode 100644 index 74fcc75d..00000000 --- a/src/fullnode/dataLayerReadService.js +++ /dev/null @@ -1,69 +0,0 @@ -import _ from 'lodash'; -import { Sequelize } from 'sequelize'; - -import { Meta } from '../models'; -import * as dataLayer from './persistance'; -import * as simulator from './simulatorV2'; - -const Op = Sequelize.Op; - -let updateInterval; - -export const startDataLayerUpdatePolling = () => { - console.log('Start Dataayer Update Polling'); - /* updateInterval = setInterval(async () => { - const tablesToUpdate = await dataLayerWasUpdated(); - _.keys(tablesToUpdate).forEach((tableMetaId) => {}); - }, 10000);*/ -}; - -export const stopDataLayerUpdatePolling = () => { - clearInterval(updateInterval); -}; - -export const dataLayerWasUpdated = async () => { - const tableIdsMeta = await Meta.findAll({ - where: { - metaKey: { - [Op.like]: '%TableStoreId', - }, - }, - raw: true, - }); - - const tableHashesMeta = await Meta.findAll({ - where: { - metaKey: { - [Op.like]: '%TableStoreHash', - }, - }, - raw: true, - }); - - const tableHashMap = {}; - - tableIdsMeta.forEach((meta) => { - tableHashMap[meta.metaKey] = null; - }); 
- - tableHashesMeta.forEach((meta) => { - const tableKey = meta.metaKey.replace('Hash', 'Id'); - tableHashMap[tableKey] = meta.metaValue; - }); - - let newHashes; - if (process.env.USE_SIMULATOR === 'true') { - newHashes = await simulator.getRoots(_.keys(tableHashMap)); - } else { - newHashes = await dataLayer.getRoots(_.keys(tableHashMap)); - } - - console.log(newHashes); - - const tablesWereUpdatedMap = {}; - _.keys(tableHashMap).map((key, index) => { - tablesWereUpdatedMap[key] = tableHashMap[key] !== newHashes[index]; - }); - - return tablesWereUpdatedMap; -}; diff --git a/src/fullnode/dataLayerWriteServiceOld.js b/src/fullnode/dataLayerWriteServiceOld.js deleted file mode 100644 index 5bda33d1..00000000 --- a/src/fullnode/dataLayerWriteServiceOld.js +++ /dev/null @@ -1,114 +0,0 @@ -import { Meta, Organization } from '../models'; -import * as dataLayer from './persistance'; -import * as simulator from './simulatorV2'; - -const strToHex = (str) => { - return new Buffer(str).toString('hex'); -}; - -const createChangeObject = (type, key, value) => { - return { action: type, key: strToHex(key), value: strToHex(value) }; -}; - -const ensureDataLayerStore = async (metaKey) => { - const storeMeta = await Meta.findOne({ - where: { metaKey }, - }); - - if (!storeMeta) { - let storeId; - if (process.env.USE_SIMULATOR === 'true') { - storeId = await simulator.createDataLayerStore(); - } else { - storeId = await dataLayer.createDataLayerStore(); - } - - await Meta.create({ metaKey, metaValue: storeId }); - return storeId; - } - - return storeMeta.metaValue; -}; - -const ensureRegistryStore = async (orgUid) => { - const storeId = await ensureDataLayerStore(`registryId`); - const changeList = [createChangeObject('insert', 'registryId', storeId)]; - - if (process.env.USE_SIMULATOR === 'true') { - await simulator.pushChangeListToDataLayer(orgUid, changeList); - } else { - await dataLayer.pushChangeListToDataLayer(orgUid, changeList); - } - - return storeId; -}; - -const ensureRegistryTableStore = async (tableName) => { - const myOrganization = await Organization.findOne({ - where: { isHome: true }, - raw: true, - }); - - const orgUid = myOrganization.orgUid; - const registryId = await ensureRegistryStore(orgUid); - const metaKey = `${tableName}TableStoreId`; - const storeId = await ensureDataLayerStore(metaKey); - const changeList = [createChangeObject('insert', metaKey, storeId)]; - - if (process.env.USE_SIMULATOR === 'true') { - await simulator.pushChangeListToDataLayer(registryId, changeList); - } else { - await dataLayer.pushChangeListToDataLayer(registryId, changeList); - } - - return storeId; -}; - -export const pushChangeListToRegistryTable = async (tableName, changeList) => { - const storeId = await ensureRegistryTableStore(tableName); - - if (storeId) { - if (process.env.USE_SIMULATOR === 'true') { - return simulator.pushChangeListToDataLayer(storeId, changeList); - } else { - return dataLayer.pushChangeListToDataLayer(storeId, changeList); - } - } - - throw new Error('Could not create datalayer store'); -}; - -export const updateOrganization = async (orgName, orgIconUrl, orgWebSite) => { - const orgUid = await ensureDataLayerStore(); - - const changeList = []; - const metaUpdateList = []; - if (orgName) { - changeList.push[createChangeObject('insert', 'name', orgName)]; - metaUpdateList.push({ metaKey: 'organizationName', metaValue: orgName }); - } - - if (orgIconUrl) { - changeList.push[createChangeObject('insert', 'iconUrl', orgIconUrl)]; - metaUpdateList.push({ - metaKey: 
'organizationIconUrl', - metaValue: orgIconUrl, - }); - } - - if (orgWebSite) { - changeList.push[createChangeObject('insert', 'website', orgWebSite)]; - metaUpdateList.push({ - metaKey: 'organizationWebsite', - metaValue: orgWebSite, - }); - } - - await Meta.bulkCreate(metaUpdateList); - - if (process.env.USE_SIMULATOR === 'true') { - return simulator.pushChangeListToDataLayer(orgUid, changeList); - } else { - return dataLayer.pushChangeListToDataLayer(orgUid, changeList); - } -}; diff --git a/src/fullnode/dispatcher.js b/src/fullnode/dispatcher.js deleted file mode 100644 index ad5f1177..00000000 --- a/src/fullnode/dispatcher.js +++ /dev/null @@ -1,39 +0,0 @@ -import * as simulator from './simulator'; - -export const updateProjectRecord = async (uuid, record, stagingRecordId) => { - record.updatedAt = Date.now(); - const encoded = btoa(JSON.stringify(record)); - await simulator.updateProjectRecord(uuid, encoded, stagingRecordId); -}; - -export const createProjectRecord = async (uuid, record, stagingRecordId) => { - const encoded = btoa(JSON.stringify(record)); - await simulator.createProjectRecord(uuid, encoded, stagingRecordId); -}; - -export const deleteProjectRecord = async (uuid, stagingRecordId) => { - await simulator.deleteProjectRecord(uuid, stagingRecordId); -}; - -export const updateUnitRecord = async (uuid, record, stagingRecordId) => { - if (Array.isArray(record)) { - record.forEach(async (_record) => { - _record.updatedAt = Date.now(); - const encoded = btoa(JSON.stringify(_record)); - simulator.updateUnitRecord(uuid, encoded, stagingRecordId); - }); - } else { - record.updatedAt = Date.now(); - const encoded = btoa(JSON.stringify(record)); - await simulator.updateUnitRecord(uuid, encoded, stagingRecordId); - } -}; - -export const createUnitRecord = async (uuid, record, stagingRecordId) => { - const encoded = btoa(JSON.stringify(record)); - await simulator.createUnitRecord(uuid, encoded, stagingRecordId); -}; - -export const deleteUnitRecord = async (uuid, stagingRecordId) => { - await simulator.deleteUnitRecord(uuid, stagingRecordId); -}; diff --git a/src/fullnode/index.js b/src/fullnode/index.js index e9e13781..4353803b 100644 --- a/src/fullnode/index.js +++ b/src/fullnode/index.js @@ -1,2 +1,2 @@ -export * from './dataLayerWriteService'; -export * from './dataLayerReadService'; +export * from './writeService'; +export * from './syncService'; diff --git a/src/fullnode/simulator.js b/src/fullnode/simulator.js index 2a815945..a9fc9c27 100644 --- a/src/fullnode/simulator.js +++ b/src/fullnode/simulator.js @@ -1,158 +1,116 @@ -import _ from 'lodash'; -import { Project, Unit, Staging } from '../models'; -export const WAIT_TIME = 1500; - -// Simulate 30 seconds passing before commited to node - -export const updateProjectRecord = async ( - uuid, - encodedRecord, - stagingRecordId, -) => { - let record = JSON.parse(atob(encodedRecord)); - record = Array.isArray(record) ? 
_.head(record) : record; - - return new Promise((resolve) => { - setTimeout(async () => { - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, - }); - - await Project.destroy({ - where: { - warehouseProjectId: uuid, - }, - }); +import { uuid as uuidv4 } from 'uuidv4'; +import { Simulator, Organization } from '../models'; +import { Sequelize } from 'sequelize'; +import { createHash } from 'crypto'; - await Project.create({ - ...record, - warehouseProjectId: uuid, - }); +const Op = Sequelize.Op; - resolve(); - } - }, WAIT_TIME); - }); +export const createDataLayerStore = async () => { + return uuidv4(); }; -export const createProjectRecord = (uuid, encodedRecord, stagingRecordId) => { - let record = JSON.parse(atob(encodedRecord)); - record = Array.isArray(record) ? _.head(record) : record; - - return new Promise((resolve) => { - setTimeout(async () => { - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, +export const pushChangeListToDataLayer = async (storeId, changeList) => { + await Promise.all( + changeList.map(async (change) => { + if (change.action === 'insert') { + await Simulator.upsert({ + key: `${storeId}_${change.key}`, + value: change.value, + }); + } else if (change.action === 'delete') { + await Simulator.destroy({ + where: { key: `${storeId}_${change.key}` }, }); } + }), + ); +}; - await Project.create({ - ...record, - warehouseProjectId: uuid, - }); +export const getStoreData = async (storeId) => { + if (storeId) { + const results = await await Simulator.findAll({ + where: { + key: { [Op.like]: `${storeId}%` }, + }, + raw: true, + }); - resolve(); - }, WAIT_TIME); - }); + // return the store data in a form that mirrors that datalayer response + return { + root: createHash('md5').update(JSON.stringify(results)).digest('hex'), + keys_values: results + .filter((result) => result.value) + .map((result) => { + const simulatedResult = result; + + simulatedResult.hash = createHash('md5') + .update(result.value) + .digest('hex'); + simulatedResult.atom = null; + simulatedResult.key = result.key; + simulatedResult.value = result.value; + return simulatedResult; + }), + }; + } + + return new Error('Error getting datalayer store data'); }; -export const deleteProjectRecord = (uuid, stagingRecordId) => { - return new Promise((resolve) => { - setTimeout(async () => { - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, - }); - } +// eslint-disable-next-line +export const getRoot = async (storeId) => { + const simulatorTable = await Simulator.findAll({ raw: true }); - await Project.destroy({ - where: { - warehouseProjectId: uuid, - }, - }); - - resolve(); - }, WAIT_TIME); + const myOrganization = await Organization.findOne({ + where: { isHome: true }, + raw: true, }); -}; -export const updateUnitRecord = async ( - uuid, - encodedRecord, - stagingRecordId, -) => { - setTimeout(async () => { - let record = JSON.parse(atob(encodedRecord)); - await Unit.create({ - ...record, + if (!myOrganization) { + console.log('Cant get root, Home org does not yet exist'); + return Promise.resolve({ + hash: null, + success: false, }); + } - await Unit.destroy({ - where: { - warehouseUnitId: uuid, - }, - }); + let hash = 0; - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, - }); - } - }, WAIT_TIME); -}; + if (myOrganization.registryId === storeId) { + createHash('md5').update(JSON.stringify(simulatorTable)).digest('hex'); + } -export const createUnitRecord = 
(uuid, encodedRecord, stagingRecordId) => { - let record = JSON.parse(atob(encodedRecord)); - record = Array.isArray(record) ? _.head(record) : record; - - // eslint-disable-next-line no-async-promise-executor - return new Promise(async (resolve) => { - setTimeout(async () => { - await Unit.create({ - ...record, - }); - - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, - }); - } - - resolve(); - }, WAIT_TIME); + return Promise.resolve({ + hash, + success: true, }); }; -export const deleteUnitRecord = (uuid, stagingRecordId) => { - return new Promise((resolve) => { - setTimeout(async () => { - await Unit.destroy({ - where: { - warehouseUnitId: uuid, - }, - }); - - if (stagingRecordId) { - await Staging.destroy({ - where: { - id: stagingRecordId, - }, - }); +export const getRoots = async (storeIds) => { + const simulatorTable = await Simulator.findAll({ raw: true }); + const myOrganization = await Organization.findOne({ + where: { isHome: true }, + raw: true, + }); + + if (!myOrganization) { + console.log('Cant get root, Home org does not yet exist'); + return Promise.resolve({ + hash: null, + success: false, + }); + } + + return Promise.resolve({ + hash: storeIds.map((storeId) => { + if (myOrganization.registryId === storeId) { + return createHash('md5') + .update(JSON.stringify(simulatorTable)) + .digest('hex'); } - resolve(); - }, WAIT_TIME); + return 0; + }), + success: true, }); }; diff --git a/src/fullnode/simulatorV2.js b/src/fullnode/simulatorV2.js deleted file mode 100644 index af2e199c..00000000 --- a/src/fullnode/simulatorV2.js +++ /dev/null @@ -1,79 +0,0 @@ -import { uuid as uuidv4 } from 'uuidv4'; -import { Simulator } from '../models'; -import { Sequelize } from 'sequelize'; -import { RandomHash } from 'random-hash'; -import { randomBytes } from 'crypto'; - -const Op = Sequelize.Op; - -const generateHash = new RandomHash({ - length: 55, - charset: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_', - rng: randomBytes, -}); - -export const createDataLayerStore = async () => { - return uuidv4(); -}; - -export const pushChangeListToDataLayer = async (storeId, changeList) => { - console.log(storeId, changeList); - await Promise.all( - changeList.map(async (change) => { - if (change.action === 'insert') { - await Simulator.upsert({ - key: `${storeId}_${change.key}`, - value: change.value, - }); - } else if (change.action === 'delete') { - await Simulator.destroy({ - where: { key: `${storeId}_${change.key}` }, - }); - } - }), - ); -}; - -export const getStoreData = async (storeId) => { - if (storeId) { - const results = await await Simulator.findAll({ - where: { - key: { [Op.like]: `${storeId}%` }, - }, - }); - - // return the store data in a form that mirrors that datalayer response - return { - root: `0x${generateHash()}`, - keys_values: results.map((result) => { - const simulatedResult = result; - simulatedResult.hash = result.metaValue.split('').reduce((a, b) => { - a = (a << 5) - a + b.charCodeAt(0); - return a & a; - }, 0); - simulatedResult.atom = null; - simulatedResult.key = result.metaKey; - simulatedResult.value = result.metaValue; - return simulatedResult; - }), - }; - } - - return new Error('Error getting datalayer store data'); -}; - -// eslint-disable-next-line -export const getRoot = (storeId) => { - return Promise.resolve({ - // fake hash - hash: `0x${generateHash()}`, - success: true, - }); -}; - -export const getRoots = (storeIds) => { - return Promise.resolve({ - hash: storeIds.map(() => 
`0x${generateHash()}`),
-    success: true,
-  });
-};
diff --git a/src/fullnode/syncService.js b/src/fullnode/syncService.js
new file mode 100644
index 00000000..42cdf2dc
--- /dev/null
+++ b/src/fullnode/syncService.js
@@ -0,0 +1,120 @@
+import _ from 'lodash';
+
+import {
+  Organization,
+  Unit,
+  Project,
+  RelatedProject,
+  Qualification,
+  Vintage,
+  CoBenefit,
+  ProjectLocation,
+  QualificationUnit,
+  Staging,
+} from '../models';
+import * as dataLayer from './persistance';
+import * as simulator from './simulator';
+
+let updateInterval;
+
+export const startDataLayerUpdatePolling = () => {
+  console.log('Start Datalayer Update Polling');
+  updateInterval = setInterval(async () => {
+    const tablesToUpdate = await dataLayerWasUpdated();
+    _.keys(tablesToUpdate).forEach((storeId) => {
+      if (tablesToUpdate[storeId]) {
+        syncDataLayerStore(storeId);
+      }
+    });
+  }, 10000);
+};
+
+export const stopDataLayerUpdatePolling = () => {
+  clearInterval(updateInterval);
+};
+
+export const syncDataLayerStore = async (storeId) => {
+  let storeData;
+
+  if (process.env.USE_SIMULATOR === 'true') {
+    storeData = await simulator.getStoreData(storeId);
+  } else {
+    storeData = await dataLayer.getStoreData(storeId);
+  }
+
+  await Promise.all([
+    Unit.destroy({ where: {}, truncate: true }),
+    Project.destroy({ where: {}, truncate: true }),
+    RelatedProject.destroy({ where: {}, truncate: true }),
+    QualificationUnit.destroy({ where: {}, truncate: true }),
+    CoBenefit.destroy({ where: {}, truncate: true }),
+    Vintage.destroy({ where: {}, truncate: true }),
+    ProjectLocation.destroy({ where: {}, truncate: true }),
+    Qualification.destroy({ where: {}, truncate: true }),
+  ]);
+
+  await Promise.all(
+    storeData.keys_values.map(async (kv) => {
+      const key = Buffer.from(
+        kv.key.replace(`${storeId}_`, ''),
+        'hex',
+      ).toString();
+      const value = Buffer.from(kv.value, 'hex').toString();
+
+      if (key.includes('unit')) {
+        const data = JSON.parse(value);
+        await Unit.upsert(data);
+        await Staging.destroy({ where: { uuid: data.warehouseUnitId } });
+      } else if (key.includes('project')) {
+        const data = JSON.parse(value);
+        await Project.upsert(data);
+        await Staging.destroy({ where: { uuid: data.warehouseProjectId } });
+      } else if (key.includes('relatedProjects')) {
+        await RelatedProject.upsert(JSON.parse(value));
+      } else if (key.includes('qualification_units')) {
+        await QualificationUnit.upsert(JSON.parse(value));
+      } else if (key.includes('coBenefits')) {
+        await CoBenefit.upsert(JSON.parse(value));
+      } else if (key.includes('vintages')) {
+        await Vintage.upsert(JSON.parse(value));
+      } else if (key.includes('projectLocations')) {
+        await ProjectLocation.upsert(JSON.parse(value));
+      } else if (key.includes('qualifications')) {
+        await Qualification.upsert(JSON.parse(value));
+      }
+    }),
+  );
+};
+
+export const dataLayerWasUpdated = async () => {
+  const organizations = await Organization.findAll({
+    attributes: ['registryId', 'registryHash'],
+    raw: true,
+  });
+
+  let hashMap = {};
+
+  organizations.forEach((org) => {
+    hashMap[org.registryId] = org.registryHash;
+  });
+
+  let newHashes;
+  if (process.env.USE_SIMULATOR === 'true') {
+    newHashes = await simulator.getRoots(_.keys(hashMap));
+  } else {
+    newHashes = await dataLayer.getRoots(_.keys(hashMap));
+  }
+
+  const tablesWereUpdatedMap = {};
+  await Promise.all(
+    _.keys(hashMap).map(async (key, index) => {
+      await Organization.update(
+        { registryHash: newHashes.hash[index] },
+        { where: { registryId: key } },
+      );
+      tablesWereUpdatedMap[key] =
hashMap[key] !== newHashes.hash[index]; + }), + ); + + return tablesWereUpdatedMap; +}; diff --git a/src/fullnode/dataLayerWriteService.js b/src/fullnode/writeService.js similarity index 95% rename from src/fullnode/dataLayerWriteService.js rename to src/fullnode/writeService.js index 57326f8a..5a7234e6 100644 --- a/src/fullnode/dataLayerWriteService.js +++ b/src/fullnode/writeService.js @@ -1,5 +1,5 @@ import * as dataLayer from './persistance'; -import * as simulator from './simulatorV2'; +import * as simulator from './simulator'; export const createDataLayerStore = async () => { let storeId; diff --git a/src/models/co-benefits/co-benefits.model.js b/src/models/co-benefits/co-benefits.model.js index d307f7b8..28003c25 100644 --- a/src/models/co-benefits/co-benefits.model.js +++ b/src/models/co-benefits/co-benefits.model.js @@ -33,17 +33,6 @@ class CoBenefit extends Model { safeMirrorDbHandler(() => CoBenefitMirror.destroy(values)); return super.destroy(values); } - - static async generateChangeListFromStagedData( - // eslint-disable-next-line - action, - // eslint-disable-next-line - id, - // eslint-disable-next-line - stagedData, - ) { - return {}; - } } CoBenefit.init(ModelTypes, { diff --git a/src/models/index.js b/src/models/index.js index db521fb3..b5e06ddc 100644 --- a/src/models/index.js +++ b/src/models/index.js @@ -28,3 +28,4 @@ export * from './staging'; export * from './organizations'; export * from './meta'; export * from './simulator'; +export * from './qualificationUnits'; diff --git a/src/models/locations/locations.model.js b/src/models/locations/locations.model.js index 16e042ed..2ae636fa 100644 --- a/src/models/locations/locations.model.js +++ b/src/models/locations/locations.model.js @@ -35,17 +35,6 @@ class ProjectLocation extends Model { safeMirrorDbHandler(() => ProjectLocationMirror.destroy(values)); return super.destroy(values); } - - static async generateChangeListFromStagedData( - // eslint-disable-next-line - action, - // eslint-disable-next-line - id, - // eslint-disable-next-line - stagedData, - ) { - return {}; - } } ProjectLocation.init(ModelTypes, { diff --git a/src/models/organizations/organizations.model.js b/src/models/organizations/organizations.model.js index dd7a1d2e..cd6690d8 100644 --- a/src/models/organizations/organizations.model.js +++ b/src/models/organizations/organizations.model.js @@ -44,7 +44,11 @@ class Organization extends Model { return myOrganization.orgUid; } - const newOrganizationId = await createDataLayerStore(); + const newOrganizationId = + process.env.USE_SIMULATOR === 'true' + ? 
'f1c54511-865e-4611-976c-7c3c1f704662' + : await createDataLayerStore(); + const newRegistryId = await createDataLayerStore(); const registryVersionId = await createDataLayerStore(); @@ -60,41 +64,9 @@ class Organization extends Model { [dataVersion]: registryVersionId, }); - // Create the TableStores - const coBenefitsStoreId = await createDataLayerStore(); - const projectLocationStoreId = await createDataLayerStore(); - const projectsStoreId = await createDataLayerStore(); - const projectRatingStoreId = await createDataLayerStore(); - const relatedProjectsStoreId = await createDataLayerStore(); - const qualificationsStoreId = await createDataLayerStore(); - const unitsStoreId = await createDataLayerStore(); - const vintagesStoreId = await createDataLayerStore(); - const qualificationUnitJunctionStoreId = await createDataLayerStore(); - - await syncDataLayer(registryVersionId, { - coBenefitsStoreId, - projectLocationStoreId, - projectsStoreId, - projectRatingStoreId, - relatedProjectsStoreId, - qualificationsStoreId, - unitsStoreId, - vintagesStoreId, - qualificationUnitJunctionStoreId, - }); - await Organization.create({ orgUid: newOrganizationId, registryId: registryVersionId, - coBenefitsStoreId, - projectLocationStoreId, - projectsStoreId, - projectRatingStoreId, - relatedProjectsStoreId, - qualificationsStoreId, - qualificationUnitJunctionStoreId, - unitsStoreId, - vintagesStoreId, isHome: true, subscribed: true, name, diff --git a/src/models/organizations/organizations.modeltypes.cjs b/src/models/organizations/organizations.modeltypes.cjs index c7dae3ab..63f821d2 100644 --- a/src/models/organizations/organizations.modeltypes.cjs +++ b/src/models/organizations/organizations.modeltypes.cjs @@ -13,24 +13,7 @@ module.exports = { name: Sequelize.STRING, icon: Sequelize.STRING, registryId: Sequelize.STRING, - projectLocationStoreId: Sequelize.STRING, - projectLocationStoreHash: Sequelize.STRING, - projectRatingStoreId: Sequelize.STRING, - projectRatingStoreHash: Sequelize.STRING, - coBenefitsStoreId: Sequelize.STRING, - coBenefitsStoreHash: Sequelize.STRING, - projectsStoreId: Sequelize.STRING, - projectsStoreIdHash: Sequelize.STRING, - relatedProjectsStoreId: Sequelize.STRING, - relatedProjectsStoreHash: Sequelize.STRING, - vintagesStoreId: Sequelize.STRING, - vintagesStoreHash: Sequelize.STRING, - qualificationsStoreId: Sequelize.STRING, - qualificationsStoreHash: Sequelize.STRING, - qualificationUnitJunctionStoreId: Sequelize.STRING, - qualificationUnitJunctionStoreHash: Sequelize.STRING, - unitsStoreId: Sequelize.STRING, - unitsStoreHash: Sequelize.STRING, + registryHash: Sequelize.STRING, subscribed: { type: Sequelize.BOOLEAN, defaultValue: false, diff --git a/src/models/projects/projects.model.js b/src/models/projects/projects.model.js index 28130d56..d785dd06 100644 --- a/src/models/projects/projects.model.js +++ b/src/models/projects/projects.model.js @@ -78,9 +78,11 @@ class Project extends Model { safeMirrorDbHandler(() => ProjectMirror.destroy(values)); const record = await super.findOne(values.where); - const { orgUid } = record.dataValues; - Project.changes.next(['projects', orgUid]); + if (record) { + const { orgUid } = record.dataValues; + Project.changes.next(['projects', orgUid]); + } return super.destroy(values); } @@ -245,11 +247,11 @@ class Project extends Model { ); const primaryKeyMap = { - projects: 'warehouseProjectId', + project: 'warehouseProjectId', projectLocations: 'id', qualifications: 'id', vintages: 'id', - coBenifets: 'id', + coBenefits: 'id', 
relatedProjects: 'id', }; @@ -283,9 +285,9 @@ class Project extends Model { ..._.get(insertChangeList, 'vintages', []), ..._.get(updateChangeList, 'vintages', []), ], - coBenifets: [ - ..._.get(insertChangeList, 'coBenifets', []), - ..._.get(updateChangeList, 'coBenifets', []), + coBenefits: [ + ..._.get(insertChangeList, 'coBenefits', []), + ..._.get(updateChangeList, 'coBenefits', []), ], relatedProjects: [ ..._.get(insertChangeList, 'relatedProjects', []), diff --git a/src/models/qualificationUnits/index.js b/src/models/qualificationUnits/index.js new file mode 100644 index 00000000..55729955 --- /dev/null +++ b/src/models/qualificationUnits/index.js @@ -0,0 +1,2 @@ +export * from './qualificationUnits.model'; +export * from './qualificationUnits.mock'; diff --git a/src/models/qualificationUnits/qualificationUnits.mock.js b/src/models/qualificationUnits/qualificationUnits.mock.js new file mode 100644 index 00000000..c022baf7 --- /dev/null +++ b/src/models/qualificationUnits/qualificationUnits.mock.js @@ -0,0 +1,8 @@ +import stub from './qualificationUnits.stub.json'; + +export const QualificationMock = { + findAll: () => stub, + findOne: (id) => { + return stub.find((record) => record.id == id); + }, +}; diff --git a/src/models/qualificationUnits/qualificationUnits.model.js b/src/models/qualificationUnits/qualificationUnits.model.js new file mode 100644 index 00000000..ed418cda --- /dev/null +++ b/src/models/qualificationUnits/qualificationUnits.model.js @@ -0,0 +1,28 @@ +'use strict'; + +import Sequelize from 'sequelize'; +const { Model } = Sequelize; +import { sequelize, safeMirrorDbHandler } from '../database'; + +import ModelTypes from './qualificationUnits.modeltypes.cjs'; +import { QualificationUnitMirror } from './qualificationUnits.model.mirror'; + +class QualificationUnit extends Model { + static async create(values, options) { + safeMirrorDbHandler(() => QualificationUnitMirror.create(values, options)); + return super.create(values, options); + } + + static async destroy(values) { + safeMirrorDbHandler(() => QualificationUnitMirror.destroy(values)); + return super.destroy(values); + } +} + +QualificationUnit.init(ModelTypes, { + sequelize, + modelName: 'qualification_unit', + freezeTableName: true, +}); + +export { QualificationUnit }; diff --git a/src/models/qualificationUnits/qualificationUnits.model.mirror.js b/src/models/qualificationUnits/qualificationUnits.model.mirror.js new file mode 100644 index 00000000..87202685 --- /dev/null +++ b/src/models/qualificationUnits/qualificationUnits.model.mirror.js @@ -0,0 +1,19 @@ +'use strict'; + +import Sequelize from 'sequelize'; +const { Model } = Sequelize; + +import { sequelizeMirror, safeMirrorDbHandler } from '../database'; +import ModelTypes from './qualificationUnits.modeltypes.cjs'; + +class QualificationUnitMirror extends Model {} + +safeMirrorDbHandler(() => { + QualificationUnitMirror.init(ModelTypes, { + sequelize: sequelizeMirror, + modelName: 'qualification_unit', + freezeTableName: true, + }); +}); + +export { QualificationUnitMirror }; diff --git a/src/models/qualificationUnits/qualificationUnits.modeltypes.cjs b/src/models/qualificationUnits/qualificationUnits.modeltypes.cjs new file mode 100644 index 00000000..95ddabae --- /dev/null +++ b/src/models/qualificationUnits/qualificationUnits.modeltypes.cjs @@ -0,0 +1,15 @@ +const Sequelize = require('sequelize'); + +module.exports = { + warehouseUnitId: Sequelize.STRING, + qualificationId: Sequelize.STRING, + createdAt: { + type: Sequelize.DATE, + defaultValue: 
Sequelize.NOW, + }, + updatedAt: { + type: Sequelize.DATE, + defaultValue: Sequelize.NOW, + allowNull: false, + }, +}; diff --git a/src/models/qualificationUnits/qualificationUnits.stub.json b/src/models/qualificationUnits/qualificationUnits.stub.json new file mode 100644 index 00000000..46acd246 --- /dev/null +++ b/src/models/qualificationUnits/qualificationUnits.stub.json @@ -0,0 +1,10 @@ +[ + { + "warehouseUnitId": "5c960ac1-a180-45a4-9850-be177e26d2fb", + "qualificationId": "702cafbb-c624-4273-9cdc-c617ad5675df" + }, + { + "warehouseUnitId": "5c960ac1-a180-45a4-9850-be177e26d2fb", + "qualificationId": "76903895-840e-406c-b2a0-f90244acf02d" + } +] diff --git a/src/models/ratings/ratings.model.js b/src/models/ratings/ratings.model.js index ce5efe11..147f96f6 100644 --- a/src/models/ratings/ratings.model.js +++ b/src/models/ratings/ratings.model.js @@ -33,17 +33,6 @@ class Rating extends Model { safeMirrorDbHandler(() => RatingMirror.destroy(values)); return super.destroy(values); } - - static async generateChangeListFromStagedData( - // eslint-disable-next-line - action, - // eslint-disable-next-line - id, - // eslint-disable-next-line - stagedData, - ) { - return {}; - } } Rating.init(ModelTypes, { diff --git a/src/models/related-projects/related-projects.model.js b/src/models/related-projects/related-projects.model.js index cbb5e1ab..e14e745f 100644 --- a/src/models/related-projects/related-projects.model.js +++ b/src/models/related-projects/related-projects.model.js @@ -34,17 +34,6 @@ class RelatedProject extends Model { safeMirrorDbHandler(() => RelatedProjectMirror.destroy(values)); return super.destroy(values); } - - static async generateChangeListFromStagedData( - // eslint-disable-next-line - action, - // eslint-disable-next-line - id, - // eslint-disable-next-line - stagedData, - ) { - return {}; - } } RelatedProject.init(ModelTypes, { diff --git a/src/models/staging/staging.model.js b/src/models/staging/staging.model.js index afffb8d3..0cc17e33 100644 --- a/src/models/staging/staging.model.js +++ b/src/models/staging/staging.model.js @@ -1,12 +1,15 @@ 'use strict'; +import _ from 'lodash'; import Sequelize from 'sequelize'; const { Model } = Sequelize; -import { Project, Unit } from '../../models'; +import { Project, Unit, Organization } from '../../models'; import rxjs from 'rxjs'; import { sequelize } from '../database'; +import { pushDataLayerChangeList } from '../../fullnode'; + import ModelTypes from './staging.modeltypes.cjs'; class Staging extends Model { @@ -30,6 +33,11 @@ class Staging extends Model { stagedData .filter((stagingRecord) => stagingRecord.table === table) .forEach((stagingRecord) => { + // TODO: Think of a better place to mark the records as commited + Staging.update( + { commited: true }, + { where: { uuid: stagingRecord.uuid } }, + ); if (stagingRecord.action === 'INSERT') { insertRecords.push(...JSON.parse(stagingRecord.data)); } else if (stagingRecord.action === 'UPDATE') { @@ -63,7 +71,16 @@ class Staging extends Model { ], }; - console.log(unifiedChangeList); + const myOrganization = await Organization.findOne({ + where: { isHome: true }, + raw: true, + }); + + await pushDataLayerChangeList( + myOrganization.registryId, + // sort so that deletes are first and inserts second + _.sortBy(_.flatten(_.values(unifiedChangeList)), 'action'), + ); } } diff --git a/src/models/units/units.model.js b/src/models/units/units.model.js index 22bae31d..b3c704bd 100644 --- a/src/models/units/units.model.js +++ b/src/models/units/units.model.js @@ -114,9 +114,11 @@ 
class Unit extends Model { safeMirrorDbHandler(() => UnitMirror.destroy(values)); const record = await super.findOne(values.where); - const { orgUid } = record.dataValues; - Unit.changes.next(['units', orgUid]); + if (record) { + const { orgUid } = record.dataValues; + Unit.changes.next(['units', orgUid]); + } return super.destroy(values); } @@ -292,6 +294,7 @@ class Unit extends Model { unit: 'warehouseUnitId', qualifications: 'id', qualification_units: 'qualificationunitId', + vintages: 'id', }; const insertChangeList = transformFullXslsToChangeList( diff --git a/src/models/units/units.modeltypes.cjs b/src/models/units/units.modeltypes.cjs index f4c6d40c..ba379f33 100644 --- a/src/models/units/units.modeltypes.cjs +++ b/src/models/units/units.modeltypes.cjs @@ -42,6 +42,5 @@ module.exports = { updatedAt: { type: Sequelize.DATE, defaultValue: Sequelize.NOW, - allowNull: false, }, }; diff --git a/src/models/vintages/vintages.model.js b/src/models/vintages/vintages.model.js index 87273407..9650f4db 100644 --- a/src/models/vintages/vintages.model.js +++ b/src/models/vintages/vintages.model.js @@ -40,17 +40,6 @@ class Vintage extends Model { safeMirrorDbHandler(() => VintageMirror.destroy(values)); return super.destroy(values); } - - static async generateChangeListFromStagedData( - // eslint-disable-next-line - action, - // eslint-disable-next-line - id, - // eslint-disable-next-line - stagedData, - ) { - return {}; - } } Vintage.init(ModelTypes, { diff --git a/src/utils/data-assertions.js b/src/utils/data-assertions.js index ce720c12..1dd004be 100644 --- a/src/utils/data-assertions.js +++ b/src/utils/data-assertions.js @@ -37,7 +37,8 @@ export const assertCsvFileInRequest = (req) => { export const assertOrgIsHomeOrg = async (orgUid) => { const homeOrg = await Organization.getHomeOrg(); - if (!homeOrg[orgUid]) { + + if (homeOrg.orgUid !== orgUid) { throw new Error( `Restricted data: can not modify this record with orgUid ${orgUid}`, ); diff --git a/src/utils/helpers.js b/src/utils/helpers.js index b6442fc4..b51580ed 100644 --- a/src/utils/helpers.js +++ b/src/utils/helpers.js @@ -71,7 +71,6 @@ export const transformSerialNumberBlock = ( // serial number format: ABC1000-ABC1010 serialNumberPattern, ) => { - console.log(serialNumberPattern); const unitBlocks = serialNumberBlock.match(serialNumberPattern); if (!unitBlocks) { diff --git a/src/utils/xls.js b/src/utils/xls.js index 623ac020..208ffa3c 100644 --- a/src/utils/xls.js +++ b/src/utils/xls.js @@ -1,3 +1,4 @@ +import _ from 'lodash'; import xlsx from 'node-xlsx'; import stream from 'stream'; @@ -30,7 +31,7 @@ export const createXlsFromSequelizeResults = ( rows, model, hex = false, - csv = false, + toStructuredCsv = false, ) => { rows = JSON.parse(JSON.stringify(rows)); // Sadly this is the best way to simplify sequelize's return shape @@ -159,7 +160,7 @@ export const createXlsFromSequelizeResults = ( return sheets; }, initialReduceValue); - if (!csv) { + if (!toStructuredCsv) { return xlsx.build(Object.values(xlsData)); } else { return xlsData; @@ -197,26 +198,31 @@ export const transformFullXslsToChangeList = ( // filter out the header row .filter((r) => r[primaryKeyIndex] !== headerRow[primaryKeyIndex]) .forEach((r) => { + console.log(_.zipObject(headerRow, r)); + const dataLayerKey = Buffer.from( + `${key}_${r[primaryKeyIndex]}`, + ).toString('hex'); + if (action === 'update') { changeList[key].push( { action: 'delete', - id: Buffer.from(r[primaryKeyIndex]).toString('hex'), + key: dataLayerKey, }, { action: 'insert', - id: 
Buffer.from(r[primaryKeyIndex]).toString('hex'), - data: Buffer.from( - r.filter((x) => typeof x === 'string').join(','), + key: dataLayerKey, + value: Buffer.from( + JSON.stringify(_.zipObject(headerRow, r)), ).toString('hex'), }, ); } else { changeList[key].push({ action: action, - id: Buffer.from(r[primaryKeyIndex]).toString('hex'), - data: Buffer.from( - r.filter((x) => typeof x === 'string').join(','), + key: dataLayerKey, + value: Buffer.from( + JSON.stringify(_.zipObject(headerRow, r)), ).toString('hex'), }); }
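
Note (not part of the diff): the change-list entries produced by transformFullXslsToChangeList are now keyed by '<table>_<primaryKey>' and carry the whole row as hex-encoded JSON, which is the shape syncService.syncDataLayerStore decodes before upserting into the local tables. A minimal standalone sketch of that round trip follows; it assumes lodash is installed, uses hypothetical sample values taken from the seed stub, and skips the storeId prefix the simulator adds to keys.

const _ = require('lodash');

// One hypothetical xlsx row for the qualification_units sheet.
const headerRow = ['warehouseUnitId', 'qualificationId'];
const row = [
  '5c960ac1-a180-45a4-9850-be177e26d2fb',
  '702cafbb-c624-4273-9cdc-c617ad5675df',
];

// Encode the way the write path does: key = '<table>_<primaryKey>',
// value = hex of the row zipped into an object keyed by the header row.
const change = {
  action: 'insert',
  key: Buffer.from(`qualification_units_${row[0]}`).toString('hex'),
  value: Buffer.from(JSON.stringify(_.zipObject(headerRow, row))).toString('hex'),
};

// Decode the way the sync path does before calling upsert on the matching model.
const table = Buffer.from(change.key, 'hex').toString(); // 'qualification_units_5c960ac1-...'
const record = JSON.parse(Buffer.from(change.value, 'hex').toString());

console.log(table, record);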