diff --git a/src/server/models/Reading.js b/src/server/models/Reading.js index e257b74a4..ff1991af8 100644 --- a/src/server/models/Reading.js +++ b/src/server/models/Reading.js @@ -171,11 +171,6 @@ class Reading { // Separate the result rows by meter_id and return a nested object. const compressedReadingsByGroupID = mapToObject(groupIDs, () => []); // Returns { 1: [], 2: [], ... } for (const row of allCompressedReadings) { - // HACK. Something in the SQL is causing these to be weird for some rows. - // This solves the problem but is not elegant. - if (compressedReadingsByGroupID[row.group_id] === undefined) { - compressedReadingsByGroupID[row.group_id] = []; - } compressedReadingsByGroupID[row.group_id].push( { reading_rate: row.reading_rate, start_timestamp: row.start_timestamp, end_timestamp: row.end_timestamp } ); diff --git a/src/server/sql/group/create_groups_tables.sql b/src/server/sql/group/create_groups_tables.sql index 3f076bb8b..cdd3dead6 100644 --- a/src/server/sql/group/create_groups_tables.sql +++ b/src/server/sql/group/create_groups_tables.sql @@ -108,11 +108,16 @@ CREATE VIEW groups_deep_meters AS */ WITH all_deep_meters(group_id, meter_id) AS ( - SELECT DISTINCT -- Distinct because two children might include the same meter, and we only want it once. - COALESCE(gdc.parent_id, gim.group_id) AS group_id, - gim.meter_id - FROM groups_deep_children gdc - RIGHT JOIN groups_immediate_meters gim ON gdc.parent_id = gim.group_id OR gdc.child_id = gim.group_id + SELECT DISTINCT -- Distinct because two children might include the same meter, and we only want it once. 
+ gdc.parent_id AS group_id, + gim.meter_id AS meter_id + FROM groups_immediate_meters gim + INNER JOIN groups_deep_children gdc ON gdc.child_id = gim.group_id + UNION + SELECT + gim.group_id AS group_id, + gim.meter_id AS meter_id + FROM groups_immediate_meters gim ) SELECT adm.group_id AS group_id, diff --git a/src/server/sql/reading/create_function_get_compressed_groups_readings.sql b/src/server/sql/reading/create_function_get_compressed_groups_readings.sql index d39d798ff..cd356b5d5 100644 --- a/src/server/sql/reading/create_function_get_compressed_groups_readings.sql +++ b/src/server/sql/reading/create_function_get_compressed_groups_readings.sql @@ -13,7 +13,7 @@ CREATE OR REPLACE FUNCTION compressed_group_readings( BEGIN SELECT array_agg(meter_id) INTO meter_ids - FROM groups_immediate_meters gdm + FROM groups_deep_meters gdm INNER JOIN unnest(group_ids) gids(id) ON gids.id = gdm.group_id; RETURN QUERY @@ -24,6 +24,9 @@ CREATE OR REPLACE FUNCTION compressed_group_readings( compressed.end_timestamp AS end_timestamp FROM compressed_readings(meter_ids, from_timestamp, to_timestamp, num_points) compressed INNER JOIN groups_deep_meters gdm ON gdm.meter_id = compressed.meter_id + -- The previous line would include groups that are parents of the groups we want, + -- so we remove those groups that are not requested here + INNER JOIN unnest(group_ids) gids(group_id) ON gdm.group_id = gids.group_id GROUP by gdm.group_id, compressed.start_timestamp, compressed.end_timestamp ORDER BY gdm.group_id, compressed.start_timestamp; END; diff --git a/src/server/sql/reading/create_function_get_group_barchart_readings.sql b/src/server/sql/reading/create_function_get_group_barchart_readings.sql index ba6b09ecd..94f3c17ee 100644 --- a/src/server/sql/reading/create_function_get_group_barchart_readings.sql +++ b/src/server/sql/reading/create_function_get_group_barchart_readings.sql @@ -9,7 +9,7 @@ CREATE OR REPLACE FUNCTION barchart_group_readings( BEGIN SELECT array_agg(meter_id) 
INTO meter_ids - FROM groups_immediate_meters gdm + FROM groups_deep_meters gdm INNER JOIN unnest(group_ids) gids(id) ON gids.id = gdm.group_id; RETURN QUERY @@ -20,6 +20,9 @@ CREATE OR REPLACE FUNCTION barchart_group_readings( compressed.end_timestamp AS end_timestamp FROM barchart_readings(meter_ids, duration, from_timestamp, to_timestamp) compressed INNER JOIN groups_deep_meters gdm ON gdm.meter_id = compressed.meter_id + -- The previous line would include groups that are parents of the groups we want, + -- so we remove those groups that are not requested here + INNER JOIN unnest(group_ids) gids(group_id) ON gdm.group_id = gids.group_id GROUP by gdm.group_id, compressed.start_timestamp, compressed.end_timestamp; END; $$ LANGUAGE plpgsql; diff --git a/src/server/test/db/compressedReadingsTests.js b/src/server/test/db/compressedReadingsTests.js index f871380a7..147f220d3 100644 --- a/src/server/test/db/compressedReadingsTests.js +++ b/src/server/test/db/compressedReadingsTests.js @@ -11,6 +11,7 @@ const expect = chai.expect; const recreateDB = require('./common').recreateDB; const db = require('../../models/database').db; +const Group = require('../../models/Group'); const Meter = require('../../models/Meter'); const Reading = require('../../models/Reading'); @@ -96,4 +97,50 @@ mocha.describe('Compressed Readings', () => { expect(result).to.deep.equal({ [meter.id]: [] }); }); + + mocha.describe('With groups, meters, and readings set up', async () => { + let meter2; + const startTimestamp = moment('2017-01-01'); + const endTimestamp = moment('2017-01-01').add(1, 'hour'); + const readingValue = 10; + mocha.beforeEach(async () => { + // Groups A and B will each contain a meter + const groupA = new Group(undefined, 'A'); + const groupB = new Group(undefined, 'B'); + // Group C will contain A and B + const groupC = new Group(undefined, 'C'); + await Promise.all([groupA, groupB, groupC].map(group => group.insert())); + + // Create and get a handle to a new meter + await 
new Meter(undefined, 'Meter2', null, false, Meter.type.MAMAC).insert(); + meter2 = await Meter.getByName('Meter2'); + + // Place meters & groups in hierarchy + await groupA.adoptMeter(meter.id); + await groupB.adoptMeter(meter2.id); + await groupC.adoptGroup(groupA.id); + await groupC.adoptGroup(groupB.id); + + // Add some readings to the meters + const reading1 = new Reading(meter.id, readingValue, startTimestamp, endTimestamp); + const reading2 = new Reading(meter2.id, readingValue, startTimestamp, endTimestamp); + await Reading.insertAll([reading1, reading2]); + }); + mocha.it('can get readings from a group containing meters', async () => { + const groupA = await Group.getByName('A'); + const actualReadings = await Reading.getCompressedGroupReadings([groupA.id], null, null, 1); + expect(actualReadings[groupA.id]).to.have.lengthOf(1); + expect(actualReadings[groupA.id][0].start_timestamp.isSame(startTimestamp)).to.equal(true); + expect(actualReadings[groupA.id][0].end_timestamp.isSame(endTimestamp)).to.equal(true); + expect(actualReadings[groupA.id][0].reading_rate).to.equal(readingValue); + }); + mocha.it('can get readings from a group containing groups containing meters', async () => { + const groupC = await Group.getByName('C'); + const actualReadings = await Reading.getCompressedGroupReadings([groupC.id], null, null, 1); + expect(actualReadings[groupC.id]).to.have.lengthOf(1); + expect(actualReadings[groupC.id][0].start_timestamp.isSame(startTimestamp)).to.equal(true); + expect(actualReadings[groupC.id][0].end_timestamp.isSame(endTimestamp)).to.equal(true); + expect(actualReadings[groupC.id][0].reading_rate).to.equal(readingValue * 2); + }); + }); });