Skip to content

Commit

Permalink
Merge pull request #3141 from SEED-platform/Add-Create-Sensor-Readings-button
Browse files Browse the repository at this point in the history

Add create sensor readings button
  • Loading branch information
haneslinger committed Feb 24, 2022
2 parents fdd7a36 + 3668ebf commit b266222
Show file tree
Hide file tree
Showing 19 changed files with 7,846 additions and 33 deletions.
78 changes: 78 additions & 0 deletions seed/data_importer/sensor_readings_parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# !/usr/bin/env python
# encoding: utf-8

from config.settings.common import TIME_ZONE

from pytz import timezone

from seed.lib.mcm import reader
from seed.models import Sensor


class SensorsReadingsParser(object):
    """Parse and validate sensor readings from an Import File before creation.

    The expected input is a csv/xlsx. The column headers should be:
        - timestamp : datetime (required, unique)
    followed by any number of columns whose header is a Sensor ``column_name``
    and whose rows are numeric readings.

    Produces a collection of Sensor Reading details keyed by sensor name.
    """

    # All timestamps are interpreted in the server's configured time zone.
    _tz = timezone(TIME_ZONE)

    def __init__(self, org_id, sensor_readings_details, property_id):
        """
        :param org_id: int, organization the readings belong to
        :param sensor_readings_details: dict, {sensor column_name: {timestamp: reading}}
        :param property_id: int, property the sensors are attached to
        """
        self.sensor_readings_details = sensor_readings_details
        self._org_id = org_id
        self._property_id = property_id

    @classmethod
    def factory(cls, sensor_readings_file, org_id, property_id):
        """Factory function for SensorsReadingsParser

        :param sensor_readings_file: File
        :param org_id: int
        :param property_id: int, id of property - required if sensor data is for a specific property
        :return: SensorsReadingsParser
        :raises ValueError: if the file has no rows or has no "timestamp" column
        """
        parser = reader.MCMParser(sensor_readings_file)
        raw_sensor_readings_data = list(parser.data)

        try:
            keys = list(raw_sensor_readings_data[0].keys())
        except IndexError:
            raise ValueError("File has no rows")

        if "timestamp" not in keys:
            raise ValueError("File does not contain correct columns")

        # Every non-timestamp column is treated as a sensor. Build a new list
        # instead of aliasing `keys` and mutating it in place.
        sensor_names = [key for key in keys if key != "timestamp"]

        sensor_readings_by_sensor_name = {sensor_name: {} for sensor_name in sensor_names}

        for reading in raw_sensor_readings_data:
            timestamp = reading["timestamp"]
            for sensor_name in sensor_names:
                sensor_readings_by_sensor_name[sensor_name][timestamp] = reading[sensor_name]

        return cls(org_id, sensor_readings_by_sensor_name, property_id=property_id)

    def get_validation_report(self):
        """Summarize the parsed readings against the property's existing Sensors.

        :return: list of dicts with keys "column_name", "exists" (whether a
            Sensor with that column_name already exists on the property) and
            "num_readings" (count of readings that are neither the string "0"
            nor None).
        """
        sensor_names = list(Sensor.objects.filter(sensor_property_id=self._property_id).values_list('column_name', flat=True))

        result = [
            {
                "column_name": sensor_name,
                "exists": sensor_name in sensor_names,
                # NOTE(review): only the *string* "0" is excluded here; a
                # numeric 0 reading would still be counted — confirm intended.
                "num_readings": sum((v != "0" and v is not None) for v in readings.values())
            }
            for sensor_name, readings in self.sensor_readings_details.items()
        ]

        return result
55 changes: 55 additions & 0 deletions seed/data_importer/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
from django.utils import timezone as tz
from django.utils.timezone import make_naive
from past.builtins import basestring
from seed.models.sensors import SensorReading
from unidecode import unidecode

from seed.building_sync import validation_client
Expand All @@ -42,6 +43,7 @@
match_and_link_incoming_properties_and_taxlots,
)
from seed.data_importer.meters_parser import MetersParser
from seed.data_importer.sensor_readings_parser import SensorsReadingsParser
from seed.data_importer.models import (
ImportFile,
ImportRecord,
Expand Down Expand Up @@ -746,6 +748,10 @@ def finish_raw_save(results, file_pk, progress_key):
import_file.cycle_id = None
new_summary = _append_sensor_import_results_to_summary(results)
finished_progress_data = progress_data.finish_with_success(new_summary)

elif import_file.source_type == "SensorReadings":
finished_progress_data = progress_data.finish_with_success(results)

else:
finished_progress_data = progress_data.finish_with_success()

Expand Down Expand Up @@ -867,6 +873,53 @@ def _save_sensor_data_create_tasks(file_pk, progress_key):
return finish_raw_save(sensors, file_pk, progress_data.key)


@shared_task
def _save_sensor_readings_data_create_tasks(file_pk, progress_key):
    """Parse an uploaded SensorReadings file and upsert SensorReading rows.

    :param file_pk: int, ImportFile primary key
    :param progress_key: str, ProgressData cache key
    :return: finished progress data including a per-sensor import summary
    """
    progress_data = ProgressData.from_key(progress_key)

    import_file = ImportFile.objects.get(pk=file_pk)
    org_id = import_file.cycle.organization.id
    property_id = import_file.matching_results_data['property_id']

    # matching_results_data gets cleared out since the field wasn't meant for this
    import_file.matching_results_data = {}
    import_file.save()

    parser = SensorsReadingsParser.factory(
        import_file.local_file,
        org_id,
        property_id=property_id
    )
    sensor_readings_data = parser.sensor_readings_details

    sensor_readings_summary = []
    for sensor_column_name, readings in sensor_readings_data.items():
        # Scope the lookup to this property (matching the preview's validation
        # report) and use .first() so an unknown column yields None instead of
        # raising DoesNotExist / MultipleObjectsReturned.
        sensor = Sensor.objects.filter(
            column_name=sensor_column_name,
            sensor_property_id=property_id,
        ).first()

        num_readings = 0
        if sensor is not None:
            for timestamp, value in readings.items():
                # Upsert: re-importing a timestamp overwrites the prior reading.
                SensorReading.objects.update_or_create(
                    sensor=sensor,
                    timestamp=timestamp,
                    defaults={"reading": value},
                )
                num_readings += 1

        sensor_readings_summary.append({
            # Fall back to the raw column name when no Sensor exists rather
            # than dereferencing sensor.display_name on None.
            "column_name": sensor.display_name if sensor else sensor_column_name,
            "exists": sensor is not None,
            "num_readings": num_readings
        })

    # add in the proposed_imports into the progress key to be used later. (This used to be the summary).
    progress_data.total = 0
    progress_data.save()

    return finish_raw_save(sensor_readings_summary, file_pk, progress_data.key)


@shared_task
def _save_greenbutton_data_task(readings, meter_id, meter_usage_point_id, progress_key):
"""
Expand Down Expand Up @@ -1167,6 +1220,8 @@ def save_raw_data(file_pk):
_save_greenbutton_data_create_tasks.s(file_pk, progress_data.key).delay()
elif import_file.source_type == 'SensorMetaData':
_save_sensor_data_create_tasks.s(file_pk, progress_data.key).delay()
elif import_file.source_type == 'SensorReadings':
_save_sensor_readings_data_create_tasks.s(file_pk, progress_data.key).delay()
else:
_save_raw_data_create_tasks.s(file_pk, progress_data.key).delay()
except StopIteration:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ angular.module('BE.seed.controller.inventory_detail_sensors', [])
};

$scope.usageGridOptions = {
data: 'data',
data: property_sensor_usage.readings,
columnDefs: property_sensor_usage.column_defs,
enableColumnResizing: true,
enableFiltering: true,
Expand Down Expand Up @@ -201,6 +201,7 @@ angular.module('BE.seed.controller.inventory_detail_sensors', [])

$scope.data = readings;
$scope.usageGridOptions.columnDefs = columnDefs;
$scope.usageGridOptions.data = readings;
$scope.has_sensor_readings = $scope.data.length > 0;
$scope.apply_column_settings();
};
Expand Down Expand Up @@ -247,6 +248,29 @@ angular.module('BE.seed.controller.inventory_detail_sensors', [])
});
};

$scope.open_sensor_readings_upload_modal = function () {
  // Launch the sensor-readings upload workflow in a modal dialog, handing the
  // modal controller the context it needs via resolve bindings.
  var modal_options = {
    templateUrl: urls.static_url + 'seed/partials/sensor_readings_upload_modal.html',
    controller: 'sensor_readings_upload_modal_controller',
    resolve: {
      filler_cycle: function () {
        return $scope.filler_cycle;
      },
      organization_id: function () {
        return $scope.organization.id;
      },
      view_id: function () {
        return $scope.inventory.view_id;
      },
      datasets: function () {
        return dataset_service.get_datasets().then(function (result) {
          return result.datasets;
        });
      }
    }
  };
  $uibModal.open(modal_options);
};

$scope.inventory_display_name = function (property_type) {
let error = '';
let field = property_type == 'property' ? $scope.organization.property_display_field : $scope.organization.taxlot_display_field;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
/**
 * Modal workflow for uploading a sensor-readings file:
 *   step 1 — choose and upload a csv/xlsx file
 *   step 2 — preview the proposed import and confirm
 *   step 3 — save progress bar
 *   step 4 — import results summary
 */
angular.module('BE.seed.controller.sensor_readings_upload_modal', [])
  .controller('sensor_readings_upload_modal_controller', [
    '$scope',
    '$state',
    '$uibModalInstance',
    'uiGridConstants',
    'filler_cycle',
    'dataset_service',
    'organization_id',
    'uploader_service',
    'view_id',
    'datasets',
    function (
      $scope,
      $state,
      $uibModalInstance,
      uiGridConstants,
      filler_cycle,
      dataset_service,
      organization_id,
      uploader_service,
      view_id,
      datasets
    ) {
      $scope.step = {
        number: 1
      };
      $scope.view_id = view_id;
      $scope.selectedCycle = filler_cycle;
      $scope.organization_id = organization_id;
      $scope.datasets = datasets;

      // Pre-select the first dataset when any exist.
      if (datasets.length) $scope.selectedDataset = datasets[0];

      $scope.uploader = {
        invalid_file_contents: false,
        invalid_csv_extension_alert: false,
        progress: 0,
        status_message: ''
      };

      $scope.datasetChanged = function (dataset) {
        // set selectedDataset to null to rerender button
        $scope.selectedDataset = null;
        $scope.selectedDataset = dataset;
      };

      $scope.cancel = function () {
        // If step 2, GB import confirmation was not accepted by user, so delete file
        if ($scope.step.number === 2) {
          dataset_service.delete_file($scope.file_id).then(function (/*results*/) {
            $uibModalInstance.dismiss('cancel');
          });
        } else {
          $uibModalInstance.dismiss('cancel');
        }
      };

      // Callback for the file uploader directive; reacts to extension errors
      // and upload completion.
      $scope.uploaderfunc = function (event_message, file/*, progress*/) {
        switch (event_message) {
          case 'invalid_extension':
            $scope.$apply(function () {
              $scope.uploader.invalid_csv_extension_alert = true;
              $scope.uploader.invalid_file_contents = false;
            });
            break;

          case 'upload_complete':
            $scope.file_id = file.file_id;
            $scope.filename = file.filename;
            show_confirmation_info();
            break;
        }
      };

      var saveFailure = function (error) {
        // Delete file and present error message

        // file_id source varies depending on which step the error occurs
        var file_id = $scope.file_id || error.config.data.file_id;
        dataset_service.delete_file(file_id);

        $scope.uploader.invalid_csv_extension_alert = false;
        $scope.uploader.invalid_file_contents = true;

        // Be sure user is back to step 1 where the error is shown and they can upload another file
        $scope.step.number = 1;
      };

      // Shared column definitions for both the preview and the results grids.
      var base_sensor_readings_col_defs = [{
        field: 'column_name',
        displayName: 'column name',
        enableHiding: false,
        type: 'string'
      }, {
        field: 'exists',
        enableHiding: false
      }, {
        field: 'num_readings',
        displayName: 'number of readings',
        enableHiding: false
      }
      ];

      // NOTE(review): a `successfully_imported` column definition previously
      // declared here was never referenced and has been removed.

      // Show up to five rows before the grid scrolls.
      var grid_rows_to_display = function (data) {
        return Math.min(data.length, 5);
      };

      // Fetch the server-side preview of the uploaded file and advance to the
      // confirmation step (step 2).
      var show_confirmation_info = function () {
        uploader_service.sensor_readings_preview($scope.file_id, $scope.organization_id, $scope.view_id).then(function (result) {
          $scope.proposed_imports_options = {
            data: result,
            columnDefs: base_sensor_readings_col_defs,
            enableColumnResizing: true,
            enableHorizontalScrollbar: uiGridConstants.scrollbars.NEVER,
            enableVerticalScrollbar: result.length <= 5 ? uiGridConstants.scrollbars.NEVER : uiGridConstants.scrollbars.WHEN_NEEDED,
            minRowsToShow: grid_rows_to_display(result)
          };

          // Widen the dialog so the preview grid fits.
          var modal_element = angular.element(document.getElementsByClassName('modal-dialog'));
          modal_element.addClass('modal-lg');

          $scope.step.number = 2;
        }).catch(saveFailure);
      };

      var saveSuccess = function (progress_data) {
        // recheck progress in order to ensure message has been appended to progress_data
        uploader_service.check_progress(progress_data.progress_key).then(function (data) {
          $scope.uploader.status_message = 'saving complete';
          $scope.uploader.progress = 100;
          buildImportResults(data.message);
          $scope.step.number = 4;
        });
      };

      // Build the step-4 results grid from the import summary message.
      var buildImportResults = function (message) {
        $scope.import_result_options = {
          data: message,
          columnDefs: base_sensor_readings_col_defs,
          enableColumnResizing: true,
          enableHorizontalScrollbar: uiGridConstants.scrollbars.NEVER,
          enableVerticalScrollbar: message.length <= 5 ? uiGridConstants.scrollbars.NEVER : uiGridConstants.scrollbars.WHEN_NEEDED,
          minRowsToShow: grid_rows_to_display(message)
        };
      };

      // User accepted the preview: kick off the save task and poll progress.
      $scope.accept_sensor_readings = function () {
        uploader_service.save_raw_data($scope.file_id, $scope.selectedCycle).then(function (data) {
          $scope.uploader.status_message = 'saving data';
          $scope.uploader.progress = 0;
          $scope.step.number = 3;

          var progress = _.clamp(data.progress, 0, 100);

          uploader_service.check_progress_loop(
            data.progress_key,
            progress,
            1 - (progress / 100),
            saveSuccess,
            saveFailure, // difficult to reach this as failures should be caught in confirmation step
            $scope.uploader
          );
        });
      };

      $scope.refresh_page = function () {
        $state.reload();
        $uibModalInstance.dismiss('cancel');
      };

    }]);

0 comments on commit b266222

Please sign in to comment.