Skip to content

Commit

Permalink
Smart history update
Browse files Browse the repository at this point in the history
Past data from entities is uploaded in the background in fragments. Data is no longer uploaded in the config_flow stage.
  • Loading branch information
Big-Tree committed Jan 15, 2024
1 parent 2d95d46 commit 854eb75
Show file tree
Hide file tree
Showing 6 changed files with 292 additions and 90 deletions.
53 changes: 41 additions & 12 deletions custom_components/optispark/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import gzip
import base64
from .const import LOGGER
import traceback


class OptisparkApiClientError(Exception):
Expand Down Expand Up @@ -91,31 +92,57 @@ def __init__(
"""Sample API Client."""
self._session = session

def extra_to_datetime(self, extra):
    """Convert unix timestamps in *extra* to ``datetime`` objects.

    When the lambda response carries both 'oldest_dates' and
    'newest_dates', each non-None value under those keys is replaced
    in place with the equivalent local ``datetime``.  The (possibly
    mutated) dict is returned either way.
    """
    if 'oldest_dates' not in extra or 'newest_dates' not in extra:
        # Response without date bookkeeping — nothing to convert.
        return extra
    for key in ('oldest_dates', 'newest_dates'):
        dates = extra[key]
        for column, unix_time in dates.items():
            if unix_time is not None:
                dates[column] = datetime.fromtimestamp(unix_time)
    return extra

async def upload_history(self, dynamo_data):
    """Upload historical data to dynamoDB without calculating heat pump profile.

    Sends *dynamo_data* to the lambda with the ``upload_only`` flag set,
    then returns the ``(oldest_dates, newest_dates)`` pair reported back
    by the lambda, with unix times converted to ``datetime`` objects.
    """
    lambda_url = 'https://lhyj2mknjfmatuwzkxn4uuczrq0fbsbd.lambda-url.eu-west-2.on.aws/'
    payload = {'dynamo_data': dynamo_data, 'upload_only': True}
    response = await self._api_wrapper(
        method="post",
        url=lambda_url,
        data=payload,
    )
    dates = self.extra_to_datetime(response)
    return dates['oldest_dates'], dates['newest_dates']

async def get_data_dates(self, dynamo_data: dict):
    """Call lambda and only get the newest and oldest dates in dynamo.

    *dynamo_data* will only contain the user_hash.  Returns the
    ``(oldest_dates, newest_dates)`` pair with unix times converted to
    ``datetime`` objects.
    """
    lambda_url = 'https://lhyj2mknjfmatuwzkxn4uuczrq0fbsbd.lambda-url.eu-west-2.on.aws/'
    payload = {
        'dynamo_data': dynamo_data,
        'get_newest_oldest_data_date_only': True,
    }
    response = await self._api_wrapper(
        method="post",
        url=lambda_url,
        data=payload,
        headers={"Content-type": "application/json; charset=UTF-8"},
    )
    dates = self.extra_to_datetime(response)
    return dates['oldest_dates'], dates['newest_dates']

async def async_get_data(self, lambda_args: dict, dynamo_data: dict) -> any:
"""Upload historical data to dynamoDB and calculate heat pump profile."""
async def async_get_profile(self, lambda_args: dict):
"""Get heat pump profile only."""
lambda_url = 'https://lhyj2mknjfmatuwzkxn4uuczrq0fbsbd.lambda-url.eu-west-2.on.aws/'

payload = lambda_args
payload['dynamo_data'] = dynamo_data
results, errors, extra = await self._api_wrapper(
payload['get_profile_only'] = True
LOGGER.debug('----------Lambda get profile----------')
results, errors = await self._api_wrapper(
method="post",
url=lambda_url,
data=payload,
headers={"Content-type": "application/json; charset=UTF-8"},
)
LOGGER.debug('----------Lambda request----------')
if errors['success'] is False:
LOGGER.debug(f'OptisparkApiClientLambdaError: {errors["error_message"]}')
raise OptisparkApiClientLambdaError(errors['error_message'])
Expand All @@ -124,18 +151,18 @@ async def async_get_data(self, lambda_args: dict, dynamo_data: dict) -> any:
results['projected_percent_savings'] = 100
else:
results['projected_percent_savings'] = results['base_cost']/results['optimised_cost']*100 - 100
return results, extra['oldest_dates']
return results

async def _api_wrapper(
self,
method: str,
url: str,
data: dict | None = None,
headers: dict | None = None,
):
"""Call the Lambda function."""
try:
data['dynamo_data'] = floats_to_decimal(data['dynamo_data'])
if 'dynamo_data' in data:
data['dynamo_data'] = floats_to_decimal(data['dynamo_data'])
uncompressed_data = pickle.dumps(data)
compressed_data = gzip.compress(uncompressed_data)
LOGGER.debug(f'len(uncompressed_data): {len(uncompressed_data)}')
Expand All @@ -146,7 +173,6 @@ async def _api_wrapper(
response = await self._session.request(
method=method,
url=url,
#headers=headers,
json=base64_string,
)
if response.status in (401, 403):
Expand All @@ -162,16 +188,19 @@ async def _api_wrapper(
return await response.json()

except asyncio.TimeoutError as exception:
LOGGER.error(traceback.format_exc())
LOGGER.error('OptisparkApiClientTimeoutError:\n Timeout error fetching information')
raise OptisparkApiClientTimeoutError(
"Timeout error fetching information",
) from exception
except (aiohttp.ClientError, socket.gaierror) as exception:
LOGGER.error(traceback.format_exc())
LOGGER.error('OptisparkApiClientCommunicationError:\n Error fetching information')
raise OptisparkApiClientCommunicationError(
"Error fetching information",
) from exception
except Exception as exception: # pylint: disable=broad-except
LOGGER.error(traceback.format_exc())
LOGGER.error('OptisparkApiClientError:\n Something really wrong happened!')
raise OptisparkApiClientError(
"Something really wrong happened!"
Expand Down
42 changes: 4 additions & 38 deletions custom_components/optispark/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import voluptuous as vol
from homeassistant import config_entries
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import selector
from geopy.adapters import AioHTTPAdapter
from geopy.geocoders import Nominatim
Expand All @@ -13,15 +12,10 @@
from .api import (
OptisparkApiClientPostcodeError,
OptisparkApiClientUnitError,
OptisparkApiClient,
OptisparkApiClientTimeoutError,
OptisparkApiClientCommunicationError,
OptisparkApiClientError
)
from . import OptisparkGetEntityError
from .const import DOMAIN, LOGGER
from . import const, get_entity, get_username
from .history import get_history, OptisparkGetHistoryError
from . import get_entity, get_username


class OptisparkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
Expand Down Expand Up @@ -155,37 +149,9 @@ async def async_step_accept(self, user_input: dict | None = None, reject=False)
user_hash = hashlib.sha256(user_hash.encode('utf-8')).hexdigest()
user_input['user_hash'] = user_hash

try:
dynamo_data = await get_history(
hass=self.hass,
history_days=const.HISTORY_DAYS,
climate_entity_id=user_input['climate_entity_id'],
heat_pump_power_entity_id=user_input['heat_pump_power_entity_id'],
external_temp_entity_id=user_input['external_temp_entity_id'],
user_hash=user_hash,
postcode=user_input['postcode'],
tariff=user_input['tariff'],
include_user_info=False)
LOGGER.debug('************ Uploading history ***********')
tmp_client = OptisparkApiClient(
session=async_get_clientsession(self.hass))

await tmp_client.upload_history(dynamo_data)
LOGGER.debug('************ Upload complete ***********')

except OptisparkApiClientTimeoutError:
errors['base'] = 'optispark_timeout_error'
except OptisparkApiClientCommunicationError:
errors['base'] = 'optispark_communication_error'
except OptisparkApiClientError:
errors['base'] = 'optispark_communication_error'
except OptisparkGetHistoryError:
errors['base'] = 'optispark_history_error'

if errors == {}:
return self.async_create_entry(
title='OptiSpark Entry',
data=user_input)
return self.async_create_entry(
title='OptiSpark Entry',
data=user_input)
else:
errors['base'] = 'accept_agreement'
data_schema = {}
Expand Down
6 changes: 4 additions & 2 deletions custom_components/optispark/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

NAME = "Optispark"
DOMAIN = "optispark"
VERSION = "0.1.5"
VERSION = "0.1.6"
ATTRIBUTION = "Data provided by http://jsonplaceholder.typicode.com/"

LAMBDA_TEMP = 'temps'
Expand All @@ -21,7 +21,9 @@
LAMBDA_POSTCODE = 'postcode'
LAMBDA_HOUSE_CONFIG = 'house_config'

HISTORY_DAYS = 28
HISTORY_DAYS = 3 # the number of days initially required by our algorithm
DYNAMO_HISTORY_DAYS = 365*5
MAX_UPLOAD_HISTORY_READINGS = 5000
DATABASE_COLUMN_SENSOR_HEAT_PUMP_POWER = 'heat_pump_power'
DATABASE_COLUMN_SENSOR_EXTERNAL_TEMPERATURE = 'external_temperature'
DATABASE_COLUMN_SENSOR_CLIMATE_ENTITY = 'climate_entity'
Expand Down
Loading

0 comments on commit 854eb75

Please sign in to comment.