Skip to content

Commit

Permalink
Merge pull request #3166 from SEED-platform/Add-Data-Logger-creation
Browse files Browse the repository at this point in the history
Add data logger creation
  • Loading branch information
haneslinger committed Apr 1, 2022
2 parents 5c81ed4 + b649812 commit d0f9697
Show file tree
Hide file tree
Showing 24 changed files with 537 additions and 152 deletions.
4 changes: 3 additions & 1 deletion seed/api/v3/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
from seed.views.v3.portfolio_manager import PortfolioManagerViewSet
from seed.views.v3.postoffice import PostOfficeViewSet, PostOfficeEmailViewSet
from seed.views.v3.progress import ProgressViewSet
from seed.views.v3.data_logger import DataLoggerViewSet
from seed.views.v3.properties import PropertyViewSet
from seed.views.v3.property_states import PropertyStateViewSet
from seed.views.v3.property_views import PropertyViewViewSet
Expand All @@ -52,6 +53,8 @@
api_v3_router.register(r'column_mapping_profiles', ColumnMappingProfileViewSet, basename='column_mapping_profiles')
api_v3_router.register(r'columns', ColumnViewSet, basename='columns')
api_v3_router.register(r'cycles', CycleViewSet, basename='cycles')
api_v3_router.register(r'data_loggers', DataLoggerViewSet, basename="data_logger")
api_v3_router.register(r'data_quality_checks', DataQualityCheckViewSet, basename='data_quality_checks')
api_v3_router.register(r'datasets', DatasetViewSet, basename='datasets')
api_v3_router.register(r'derived_columns', DerivedColumnViewSet, basename='derived_columns')
api_v3_router.register(r'gbr_properties', GBRPropertyViewSet, basename="properties")
Expand All @@ -60,7 +63,6 @@
api_v3_router.register(r'green_assessment_urls', GreenAssessmentURLViewSet, basename="green_assessment_urls")
api_v3_router.register(r'green_assessments', GreenAssessmentViewSet, basename="green_assessments")
api_v3_router.register(r'labels', LabelViewSet, basename='labels')
api_v3_router.register(r'data_quality_checks', DataQualityCheckViewSet, basename='data_quality_checks')
api_v3_router.register(r'import_files', ImportFileViewSet, basename='import_files')
api_v3_router.register(r'measures', MeasureViewSet, basename='measures')
api_v3_router.register(r'meters', MeterViewSet, basename='meters')
Expand Down
12 changes: 6 additions & 6 deletions seed/data_importer/sensor_readings_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,19 @@ class SensorsReadingsParser(object):

_tz = timezone(TIME_ZONE)

def __init__(self, org_id, sensor_readings_details, property_id):
def __init__(self, org_id, sensor_readings_details, data_logger_id):
# defaulted to None to show it hasn't been cached yet
self.sensor_readings_details = sensor_readings_details
self._org_id = org_id
self._property_id = property_id
self._data_logger_id = data_logger_id

@classmethod
def factory(cls, sensor_readings_file, org_id, property_id):
def factory(cls, sensor_readings_file, org_id, data_logger_id):
"""Factory function for sensorReadingsParser
:param sensor_readings_file: File
:param org_id: int
:param property_id: int, id of property - required if sensor data is for a specific property
:param data_logger_id: int, id of data_logger
:return: SensorReadingsParser
"""
parser = reader.MCMParser(sensor_readings_file)
Expand All @@ -61,10 +61,10 @@ def factory(cls, sensor_readings_file, org_id, property_id):
for sensor_name in sensor_names:
sensor_readings_by_sensor_name[sensor_name][timestamp] = reading[sensor_name]

return cls(org_id, sensor_readings_by_sensor_name, property_id=property_id)
return cls(org_id, sensor_readings_by_sensor_name, data_logger_id=data_logger_id)

def get_validation_report(self):
sensor_names = list(Sensor.objects.filter(sensor_property_id=self._property_id).values_list('column_name', flat=True))
sensor_names = Sensor.objects.filter(data_logger=self._data_logger_id).values_list('column_name', flat=True)

result = [
{
Expand Down
18 changes: 9 additions & 9 deletions seed/data_importer/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,12 +64,12 @@
Column,
ColumnMapping,
Meter,
Property,
PropertyState,
PropertyView,
TaxLotView,
TaxLotState,
Sensor,
DataLogger,
DATA_STATE_IMPORT,
DATA_STATE_MAPPING,
DATA_STATE_MATCHING,
Expand Down Expand Up @@ -841,8 +841,8 @@ def _save_sensor_data_create_tasks(file_pk, progress_key):
progress_data = ProgressData.from_key(progress_key)

import_file = ImportFile.objects.get(pk=file_pk)
property_id = import_file.matching_results_data['property_id']
sensor_property = Property.objects.get(id=property_id)
data_logger_id = import_file.matching_results_data['data_logger_id']
data_logger = DataLogger.objects.get(id=data_logger_id)

# matching_results_data gets cleared out since the field wasn't meant for this
import_file.matching_results_data = {}
Expand All @@ -855,7 +855,7 @@ def _save_sensor_data_create_tasks(file_pk, progress_key):
for sensor_datum in sensor_data:
s, _ = Sensor.objects.get_or_create(**{
"column_name": sensor_datum["column_name"],
"sensor_property": sensor_property
"data_logger": data_logger
})
s.display_name = sensor_datum["display_name"]
s.location_identifier = sensor_datum["location_identifier"]
Expand All @@ -879,7 +879,7 @@ def _save_sensor_readings_data_create_tasks(file_pk, progress_key):

import_file = ImportFile.objects.get(pk=file_pk)
org_id = import_file.cycle.organization.id
property_id = import_file.matching_results_data['property_id']
data_logger_id = import_file.matching_results_data['data_logger_id']

# matching_results_data gets cleared out since the field wasn't meant for this
import_file.matching_results_data = {}
Expand All @@ -888,7 +888,7 @@ def _save_sensor_readings_data_create_tasks(file_pk, progress_key):
parser = SensorsReadingsParser.factory(
import_file.local_file,
org_id,
property_id=property_id
data_logger_id=data_logger_id
)
sensor_readings_data = parser.sensor_readings_details

Expand All @@ -897,7 +897,7 @@ def _save_sensor_readings_data_create_tasks(file_pk, progress_key):
for sensor_column_name, readings in sensor_readings_data.items():
readings_tuples = [t for t in readings.items()]
for batch_readings in batch(readings_tuples, chunk_size):
tasks.append(_save_sensor_readings_task.s(batch_readings, sensor_column_name, progress_data.key))
tasks.append(_save_sensor_readings_task.s(batch_readings, data_logger_id, sensor_column_name, progress_data.key))

progress_data.total = len(tasks)
progress_data.save()
Expand All @@ -906,12 +906,12 @@ def _save_sensor_readings_data_create_tasks(file_pk, progress_key):


@shared_task
def _save_sensor_readings_task(readings_tuples, sensor_column_name, progress_key):
def _save_sensor_readings_task(readings_tuples, data_logger_id, sensor_column_name, progress_key):
progress_data = ProgressData.from_key(progress_key)

result = {}
try:
sensor = Sensor.objects.get(column_name=sensor_column_name)
sensor = Sensor.objects.get(data_logger_id=data_logger_id, column_name=sensor_column_name)

except Sensor.DoesNotExist:
result[sensor_column_name] = {'error': 'No such sensor.'}
Expand Down
50 changes: 50 additions & 0 deletions seed/migrations/0159_auto_20220310_1648.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Generated by Django 3.2.12 on 2022-03-10 16:48

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Introduce the DataLogger model and re-parent Sensor onto it.

    Sensors were previously attached directly to a Property via
    ``sensor_property``; this migration inserts a DataLogger between the
    two, moves the uniqueness of sensor names/columns from table-wide to
    per-data-logger scope, and drops the old ``sensor_property`` FK.
    """

    dependencies = [
        ('seed', '0158_sensorreading'),
    ]

    operations = [
        # Redefine column_name without its previous table-wide unique
        # constraint; uniqueness is re-established per data logger by the
        # AlterUniqueTogether operation below.
        migrations.AlterField(
            model_name='sensor',
            name='column_name',
            field=models.CharField(max_length=255),
        ),
        # Same treatment for display_name: drop the table-wide constraint.
        migrations.AlterField(
            model_name='sensor',
            name='display_name',
            field=models.CharField(max_length=255),
        ),
        # New intermediate model: a physical logger attached to a property,
        # uniquely identified by its display name within that property.
        migrations.CreateModel(
            name='DataLogger',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('display_name', models.CharField(max_length=255)),
                ('location_identifier', models.CharField(default='', max_length=2047)),
                ('property', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='data_loggers', to='seed.property')),
            ],
            options={
                'unique_together': {('property', 'display_name')},
            },
        ),
        # Backfill existing sensors with data_logger_id=1, then discard the
        # default (preserve_default=False keeps it out of the model state).
        # NOTE(review): this assumes a DataLogger with pk=1 exists whenever
        # the sensor table is non-empty at migration time — confirm, or the
        # backfill will violate the FK constraint.
        migrations.AddField(
            model_name='sensor',
            name='data_logger',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='sensors', to='seed.datalogger'),
            preserve_default=False,
        ),
        # Sensor column names only need to be unique within one data logger.
        migrations.AlterUniqueTogether(
            name='sensor',
            unique_together={('data_logger', 'column_name')},
        ),
        # The property link now lives on DataLogger, so the direct FK goes.
        migrations.RemoveField(
            model_name='sensor',
            name='sensor_property',
        ),
    ]
26 changes: 22 additions & 4 deletions seed/models/sensors.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,39 @@
from seed.models import Property


class Sensor(models.Model):
sensor_property = models.ForeignKey(
class DataLogger(models.Model):
property = models.ForeignKey(
Property,
on_delete=models.CASCADE,
related_name='data_loggers',
)

display_name = models.CharField(max_length=255)
location_identifier = models.CharField(max_length=2047, default="")

class Meta:
unique_together = ('property', 'display_name')


class Sensor(models.Model):
data_logger = models.ForeignKey(
DataLogger,
on_delete=models.CASCADE,
related_name='sensors',
)

display_name = models.CharField(unique=True, max_length=255)
display_name = models.CharField(max_length=255)
location_identifier = models.CharField(max_length=2047, default="")
description = models.CharField(max_length=2047, default="")

sensor_type = models.CharField(max_length=63)
units = models.CharField(max_length=63)

column_name = models.CharField(unique=True, max_length=255)
column_name = models.CharField(max_length=255)

class Meta:
unique_together = ('data_logger', 'display_name')
unique_together = ('data_logger', 'column_name')


class SensorReading(models.Model):
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/**
 * Modal controller for creating a DataLogger on a property view.
 *
 * Validates that a display name was entered, delegates creation to
 * sensor_service, and reloads the page on success. Fixes over the
 * original: strict equality everywhere (was `==`), consistent
 * semicolons, guard clause instead of a nested else, and removal of a
 * trailing comma in the function parameter list (a syntax error on
 * pre-ES2017 engines).
 */
angular.module('BE.seed.controller.data_logger_upload_modal', [])
  .controller('data_logger_upload_modal_controller', [
    '$scope',
    '$state',
    '$uibModalInstance',
    'filler_cycle',
    'organization_id',
    'sensor_service',
    'view_id',
    function (
      $scope,
      $state,
      $uibModalInstance,
      filler_cycle,
      organization_id,
      sensor_service,
      view_id
    ) {
      $scope.view_id = view_id;
      $scope.selectedCycle = filler_cycle;
      $scope.organization_id = organization_id;

      // Model backing the creation form; id stays null until the server
      // assigns one.
      $scope.data_logger = {
        display_name: null,
        location_identifier: '',
        id: null,
        number_of_sensors: 0
      };

      // Validate the name, then ask the server to create the data logger.
      $scope.create_data_logger = function () {
        var display_name = $scope.data_logger.display_name;

        // Guard: covers null, undefined, and the empty string.
        if (!display_name) {
          $scope.data_logger_display_name_not_entered_alert = true;
          return;
        }
        $scope.data_logger_display_name_not_entered_alert = false;

        sensor_service.create_data_logger(
          $scope.view_id,
          $scope.organization_id,
          display_name,
          $scope.data_logger.location_identifier
        ).then(function (result) {
          $scope.data_logger = result;
          $scope.refresh_page();
        }).catch(function (err) {
          // NOTE(review): assumes the API signals a duplicate display
          // name with HTTP 400 — confirm against the server-side
          // DataLoggerViewSet.
          if (err.status === 400) {
            $scope.data_logger_display_name_not_unique_alert = true;
          }
        });
      };

      // Reload the current state and close the modal.
      $scope.refresh_page = function () {
        $state.reload();
        $uibModalInstance.dismiss('cancel');
      };
    }]);

0 comments on commit d0f9697

Please sign in to comment.