Skip to content

Commit

Permalink
Merge pull request #3008 from SEED-platform/develop
Browse files Browse the repository at this point in the history
Release 2.12.3 to main branch
  • Loading branch information
nllong committed Nov 11, 2021
2 parents 6a31f3d + 5e23df5 commit f92d4b4
Show file tree
Hide file tree
Showing 42 changed files with 244 additions and 209 deletions.
14 changes: 14 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,17 @@
# SEED Version 2.12.3

Date Range: 11/05/21 - 11/10/21

Closed Issues and Features (Total: 7):
- Fixed [#2702]( https://github.com/SEED-platform/seed/issues/2702 ), Add sample data record for BETTER
- Fixed [#2930]( https://github.com/SEED-platform/seed/issues/2930 ), BuildingSync: improve UI/UX for post-mapping messages
- Fixed [#2945]( https://github.com/SEED-platform/seed/issues/2945 ), Deprecate PropertyState's analysis_state field
- Fixed [#2958]( https://github.com/SEED-platform/seed/issues/2958 ), Improve progress data text when validating BuildingSync files
- Fixed [#2997]( https://github.com/SEED-platform/seed/issues/2997 ), CO2 Analysis -- eGRID Subregion Code not picked up by program
- Fixed [#2999]( https://github.com/SEED-platform/seed/issues/2999 ), Bug: property details "notes" tab breaks links
- Fixed [#3001]( https://github.com/SEED-platform/seed/issues/3001 ), Tax Lot Inventory List View -- clicking on "i" to get to Detail view generates error

# SEED Version 2.12.2

Date Range: 09/28/21 - 11/04/21
Expand Down
4 changes: 2 additions & 2 deletions docker/backup_k8s/backup_database.sh
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ do
send_slack_notification "[ERROR-$ENVIRONMENT]-PostgreSQL-backup-file-was-empty"
else
aws s3 cp $file $S3_BUCKET/$RUN_DATE/
send_slack_notification "[$ENVIRONMENT]-PostgreSQL-uploaded-to-s3"
send_slack_notification "[$ENVIRONMENT]-PostgreSQL-uploaded-to-s3://$S3_BUCKET/$RUN_DATE/$file"
fi
done

Expand All @@ -106,7 +106,7 @@ do
send_slack_notification "[ERROR-$ENVIRONMENT]-Mediadata-backup-file-was-empty"
else
aws s3 cp $file $S3_BUCKET/$RUN_DATE/
send_slack_notification "[$ENVIRONMENT]-Mediadata-uploaded-to-s3"
send_slack_notification "[$ENVIRONMENT]-Mediadata-uploaded-to-s3://$S3_BUCKET/$RUN_DATE/$file"
fi
done

Expand Down
2 changes: 1 addition & 1 deletion docs/scripts/change_log.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Instructions:
# Get a token from github's settings (https://github.com/settings/tokens)
# Install github3 using pip (pip install --pre github3.py)
# Install github3 using pip (pip install github3.py)
#
# Example:
# python change_log.py -k abcdefghijklmnopqrstuvwxyz -s 2020-12-29
Expand Down
2 changes: 1 addition & 1 deletion npm-shrinkwrap.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "seed",
"version": "2.12.2",
"version": "2.12.3",
"description": "Standard Energy Efficiency Data (SEED) Platform™",
"license": "SEE LICENSE IN LICENSE",
"directories": {
Expand Down
2 changes: 2 additions & 0 deletions seed/analysis_pipelines/better/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

class BuildingAnalysis:
"""Used to track AnalysisPropertyViews and BETTER Building and Analysis IDs"""

def __init__(self, analysis_property_view_id, better_building_id, better_analysis_id):
self.analysis_property_view_id = analysis_property_view_id
self.better_building_id = better_building_id
Expand All @@ -25,6 +26,7 @@ def __init__(self, analysis_property_view_id, better_building_id, better_analysi

class BETTERPipelineContext:
"""Datastructure to avoid multiple pass-through variables"""

def __init__(self, analysis, progress_data, better_client):
"""
:param analysis: Analysis
Expand Down
2 changes: 1 addition & 1 deletion seed/analysis_pipelines/better/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -485,7 +485,7 @@ def _process_results(self, analysis_id):
# that fuel type wasn't valid (e.g. if electricity model is invalid,
# set "potential electricity savings" to null)
for col_name, value in simplified_results.items():
value = value if type(value) is not float else round(value, 2)
value = value if not isinstance(value, float) else round(value, 2)
if col_name.endswith('_electricity') and col_name != BETTER_VALID_MODEL_E_COL:
cleaned_results[col_name] = value if electricity_model_is_valid else None
elif col_name.endswith('_fuel') and col_name != BETTER_VALID_MODEL_F_COL:
Expand Down
1 change: 1 addition & 0 deletions seed/analysis_pipelines/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ class AnalysisPipeline(abc.ABC):
AnalysisPipeline is an abstract class for defining workflows for preparing,
running, and post processing analyses.
"""

def __init__(self, analysis_id):
self._analysis_id = analysis_id

Expand Down
2 changes: 1 addition & 1 deletion seed/analysis_pipelines/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def get_json_path(json_path, data):
for key in json_path:
result = result.get(key, {})

if type(result) is dict and not result:
if isinstance(result, dict) and not result:
# path was probably not valid in the data...
return None
else:
Expand Down
22 changes: 1 addition & 21 deletions seed/api/v2_1/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
from seed.lib.superperms.orgs.decorators import has_perm_class
from seed.models import (
PropertyView,
PropertyState,
BuildingFile,
Cycle,
ColumnMappingProfile,
Expand All @@ -40,14 +39,13 @@ class PropertyViewFilterSet(FilterSet, OrgMixin):
Advanced filtering for PropertyView sets version 2.1.
"""
address_line_1 = CharFilter(field_name="state__address_line_1", lookup_expr='contains')
analysis_state = CharFilter(method='analysis_state_filter')
identifier = CharFilter(method='identifier_filter')
cycle_start = DateFilter(field_name='cycle__start', lookup_expr='lte')
cycle_end = DateFilter(field_name='cycle__end', lookup_expr='gte')

class Meta:
model = PropertyView
fields = ['identifier', 'address_line_1', 'cycle', 'property', 'cycle_start', 'cycle_end', 'analysis_state']
fields = ['identifier', 'address_line_1', 'cycle', 'property', 'cycle_start', 'cycle_end']

def identifier_filter(self, queryset, name, value):
address_line_1 = Q(state__address_line_1__icontains=value)
Expand All @@ -65,24 +63,6 @@ def identifier_filter(self, queryset, name, value):
)
return queryset.filter(query).order_by('-state__id')

def analysis_state_filter(self, queryset, name, value):
# For some reason a ChoiceFilter doesn't work on this object. I wanted to have it
# magically look up the map from the analysis_state string to the analysis_state ID, but
# it isn't working. Forcing it manually.

# If the user puts in a bogus filter, then it will return All, for now

state_id = None
for state in PropertyState.ANALYSIS_STATE_TYPES:
if state[1].upper() == value.upper():
state_id = state[0]
break

if state_id is not None:
return queryset.filter(Q(state__analysis_state__exact=state_id)).order_by('-state__id')
else:
return queryset.order_by('-state__id')


class PropertyViewSetV21(SEEDOrgReadOnlyModelViewSet):
"""
Expand Down
2 changes: 1 addition & 1 deletion seed/data_importer/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -1622,7 +1622,7 @@ def pair_new_states(merged_property_views, merged_taxlot_views):
def _validate_use_cases(file_pk, progress_key):
import_file = ImportFile.objects.get(pk=file_pk)
progress_data = ProgressData.from_key(progress_key)
progress_data.step('validating data with Selection Tool')
progress_data.step('Validating data at buildingsync.net')
try:
found_version = 0

Expand Down
3 changes: 0 additions & 3 deletions seed/lib/merging/tests/test_merging.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,6 @@ def test_property_state(self):

expected = (('address_line_1', 'address_line_1'),
('address_line_2', 'address_line_2'),
('analysis_end_time', 'analysis_end_time'),
('analysis_start_time', 'analysis_start_time'),
('analysis_state_message', 'analysis_state_message'),
('building_certification', 'building_certification'),
('building_count', 'building_count'),
('city', 'city'),
Expand Down
82 changes: 82 additions & 0 deletions seed/migrations/0156_auto_20211104_1638.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# Generated by Django 3.2.7 on 2021-11-04 23:38

from django.db import migrations


def forwards(apps, schema_editor):
    """Delete the deprecated ``analysis_*`` Column records for every organization.

    The PropertyState model fields themselves are dropped by the RemoveField
    operations in this migration; this data migration cleans up the matching
    rows in the Column table so they no longer appear in mapping/list views.

    :param apps: historical app registry supplied by Django's migration runner
    :param schema_editor: unused, required by the RunPython signature
    """
    Column = apps.get_model("seed", "Column")
    Organization = apps.get_model("orgs", "Organization")

    columns_to_remove = [
        'analysis_state', 'analysis_state_message', 'analysis_end_time', 'analysis_start_time'
    ]

    # Go through all the organizations and find all the columns to remove.
    # There is really no reason to use org.id, but it is there, so doing it that way.
    for org in Organization.objects.all():
        for column_to_remove in columns_to_remove:
            columns = Column.objects.filter(
                organization_id=org.id,
                table_name='PropertyState',
                column_name=column_to_remove,
                is_extra_data=False,
            )

            # Evaluate the count once instead of issuing a COUNT query per branch.
            num_found = columns.count()
            if num_found == 0:
                # no column found (should not be the case)
                continue
            elif num_found == 1:
                # Exactly one matching column definition: delete it.
                c = columns.first()
                c.delete()
                print(f'deleting column {column_to_remove} for org {org.id}')
            else:
                # Duplicate column definitions are unexpected; leave them for
                # manual inspection rather than deleting ambiguously.
                print(" More than one column returned!")


class Migration(migrations.Migration):
    # Removes the deprecated analysis_* fields from both the Scenario and
    # PropertyState models, then runs the `forwards` data migration to delete
    # the corresponding Column records per organization.

    dependencies = [
        ('data_importer', '0015_auto_20210712_2134'),
        ('seed', '0155_propertystate_egrid_subregion_code'),
    ]

    operations = [
        # Drop the deprecated analysis tracking fields from Scenario.
        migrations.RemoveField(
            model_name='scenario',
            name='analysis_end_time',
        ),
        migrations.RemoveField(
            model_name='scenario',
            name='analysis_start_time',
        ),
        migrations.RemoveField(
            model_name='scenario',
            name='analysis_state',
        ),
        migrations.RemoveField(
            model_name='scenario',
            name='analysis_state_message',
        ),
        # Rebuild PropertyState's index_together without the
        # ('analysis_state', 'organization') entry, which referenced a field
        # removed below.
        migrations.AlterIndexTogether(
            name='propertystate',
            index_together={('import_file', 'data_state', 'merge_state'), ('import_file', 'data_state', 'source_type'), ('hash_object',), ('import_file', 'data_state')},
        ),
        # Drop the deprecated analysis tracking fields from PropertyState.
        migrations.RemoveField(
            model_name='propertystate',
            name='analysis_end_time',
        ),
        migrations.RemoveField(
            model_name='propertystate',
            name='analysis_start_time',
        ),
        migrations.RemoveField(
            model_name='propertystate',
            name='analysis_state',
        ),
        migrations.RemoveField(
            model_name='propertystate',
            name='analysis_state_message',
        ),
        # Data migration: delete the matching Column rows (no reverse step —
        # this migration is not reversible past this point).
        migrations.RunPython(forwards),
    ]
2 changes: 1 addition & 1 deletion seed/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
from .analysis_input_files import * # noqa
from .analysis_output_files import * # noqa
from .analysis_messages import * # noqa
from .postoffice import * # noqa
from .postoffice import * # noqa

from .certification import ( # noqa
GreenAssessment,
Expand Down
29 changes: 0 additions & 29 deletions seed/models/columns.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,6 @@ class Column(models.Model):

# These are the columns that are removed when looking to see if the records are the same
COLUMN_EXCLUDE_FIELDS = [
'analysis_state',
'bounding_box',
'centroid',
'created',
Expand Down Expand Up @@ -139,10 +138,6 @@ class Column(models.Model):

# These are columns that should not be offered as suggestions during mapping
UNMAPPABLE_PROPERTY_FIELDS = [
'analysis_end_time',
'analysis_start_time',
'analysis_state',
'analysis_state_message',
'campus',
'created',
'geocoding_confidence',
Expand Down Expand Up @@ -521,30 +516,6 @@ class Column(models.Model):
'table_name': 'PropertyState',
'display_name': 'Building Certification',
'data_type': 'string',
}, {
'column_name': 'analysis_start_time',
'table_name': 'PropertyState',
'display_name': 'Analysis Start Time',
'data_type': 'datetime',
# 'type': 'date',
# 'cellFilter': 'date:\'yyyy-MM-dd h:mm a\'',
}, {
'column_name': 'analysis_end_time',
'table_name': 'PropertyState',
'display_name': 'Analysis End Time',
'data_type': 'datetime',
# 'type': 'date',
# 'cellFilter': 'date:\'yyyy-MM-dd h:mm a\'',
}, {
'column_name': 'analysis_state',
'table_name': 'PropertyState',
'display_name': 'Analysis State',
'data_type': 'string',
}, {
'column_name': 'analysis_state_message',
'table_name': 'PropertyState',
'display_name': 'Analysis State Message',
'data_type': 'string',
}, {
'column_name': 'number_properties',
'table_name': 'TaxLotState',
Expand Down
1 change: 1 addition & 0 deletions seed/models/derived_columns.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ def evaluate(self, parameters=None):

class InvalidExpression(Exception):
"""Raised when parsing an expression"""

def __init__(self, expression, error_position=None):
super().__init__()
self.expression = expression
Expand Down
28 changes: 1 addition & 27 deletions seed/models/properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,19 +129,6 @@ def copy_meters(self, source_property_id, source_persists=True):

class PropertyState(models.Model):
"""Store a single property. This contains all the state information about the property"""
ANALYSIS_STATE_NOT_STARTED = 0
ANALYSIS_STATE_STARTED = 1
ANALYSIS_STATE_COMPLETED = 2
ANALYSIS_STATE_FAILED = 3
ANALYSIS_STATE_QUEUED = 4 # analysis queue was added after the others above.

ANALYSIS_STATE_TYPES = (
(ANALYSIS_STATE_NOT_STARTED, 'Not Started'),
(ANALYSIS_STATE_QUEUED, 'Queued'),
(ANALYSIS_STATE_STARTED, 'Started'),
(ANALYSIS_STATE_COMPLETED, 'Completed'),
(ANALYSIS_STATE_FAILED, 'Failed'),
)

# Support finding the property by the import_file and source_type
import_file = models.ForeignKey(ImportFile, on_delete=models.CASCADE, null=True, blank=True)
Expand Down Expand Up @@ -227,13 +214,6 @@ class PropertyState(models.Model):
space_alerts = models.TextField(null=True, blank=True)
building_certification = models.CharField(max_length=255, null=True, blank=True)

analysis_start_time = models.DateTimeField(null=True)
analysis_end_time = models.DateTimeField(null=True)
analysis_state = models.IntegerField(choices=ANALYSIS_STATE_TYPES,
default=ANALYSIS_STATE_NOT_STARTED,
null=True)
analysis_state_message = models.TextField(null=True)

# Need to add another field eventually to define the source of the EUI's and other
# reported fields. Ideally would have the ability to provide the same field from
# multiple data sources. For example, site EUI (portfolio manager), site EUI (calculated),
Expand Down Expand Up @@ -283,7 +263,6 @@ class Meta:
['import_file', 'data_state'],
['import_file', 'data_state', 'merge_state'],
['import_file', 'data_state', 'source_type'],
['analysis_state', 'organization'],
]

def promote(self, cycle, property_id=None):
Expand Down Expand Up @@ -589,10 +568,6 @@ def coparent(cls, state_id):
ps.energy_alerts,
ps.space_alerts,
ps.building_certification,
ps.analysis_start_time,
ps.analysis_end_time,
ps.analysis_state,
ps.analysis_state_message,
ps.egrid_subregion_code,
ps.extra_data,
NULL
Expand All @@ -617,8 +592,7 @@ def coparent(cls, state_id):
'home_energy_score_id', 'generation_date', 'release_date',
'source_eui_weather_normalized', 'site_eui_weather_normalized',
'source_eui', 'source_eui_modeled', 'energy_alerts', 'space_alerts',
'building_certification', 'analysis_start_time', 'analysis_end_time',
'analysis_state', 'analysis_state_message', 'extra_data', ]
'building_certification', 'extra_data', ]
coparents = [{key: getattr(c, key) for key in keep_fields} for c in coparents]

return coparents, len(coparents)
Expand Down

0 comments on commit f92d4b4

Please sign in to comment.