Skip to content

Commit

Permalink
Add PCHIP spline to forecast remaining
Browse files — browse the repository at this point in the history
  • Loading branch information
autoSteve committed Jul 10, 2024
1 parent 3a877d9 commit d31c181
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 11 deletions.
2 changes: 1 addition & 1 deletion custom_components/solcast_solar/manifest.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"issue_tracker": "https://github.com/BJReplay/ha-solcast-solar/issues",
"requirements": ["aiohttp>=3.8.5", "datetime>=4.3", "isodate>=0.6.1"],
"requirements": ["aiohttp>=3.8.5", "aiofiles>=24.1.0", "datetime>=4.3", "isodate>=0.6.1", "scipy>=1.0.0"],
"version": "4.0.37"
}
41 changes: 31 additions & 10 deletions custom_components/solcast_solar/solcastapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import traceback
import random
import re
from scipy.interpolate import PchipInterpolator
from dataclasses import dataclass
from datetime import datetime as dt
from datetime import timedelta, timezone
Expand Down Expand Up @@ -280,7 +281,9 @@ def redact(s):
_LOGGER.error(f"Cached sites are not yet available for {self.redact_api_key(spl)} to cope with Solcast API call failure")
_LOGGER.error(f"At least one successful API 'get sites' call is needed, so the integration cannot function")
if error:
_LOGGER.error("Timed out getting Solcast sites, and one or more site caches failed to load. This is critical, and the integration cannot function reliably. Suggestion: Attempt integration reload.")
_LOGGER.error("Timed out getting Solcast sites, and one or more site caches failed to load.")
_LOGGER.error("This is critical, and the integration cannot function reliably.")
_LOGGER.error("Suggestion: Double check your configuration, and solcast.com connectivity, and attempt integration reload.")
except Exception as e:
pass
except Exception as e:
Expand Down Expand Up @@ -664,7 +667,7 @@ def get_forecast_remaining_today(self, _use_data_field=None) -> float:
# time remaining today
start_utc = self.get_now_utc()
end_utc = self.get_day_start_utc() + timedelta(days=1)
res = round(0.5 * self.get_forecast_pv_estimates(start_utc, end_utc, site=None, _use_data_field=_use_data_field), 4)
res = round(0.5 * self.get_forecast_pv_estimates(start_utc, end_utc, site=None, _use_data_field=_use_data_field, interpolate=True), 4)
return res

def get_forecasts_remaining_today(self) -> Dict[str, Any]:
Expand Down Expand Up @@ -706,24 +709,37 @@ def get_forecast_list_slice(self, _data, start_utc, end_utc, search_past=False):
end_i = 0
return st_i, end_i

def get_forecast_pv_estimates(self, start_utc, end_utc, site=None, _use_data_field=None) -> float:
def get_forecast_pv_estimates(self, start_utc, end_utc, site=None, _use_data_field=None, interpolate=False) -> float:
"""Return Solcast pv_estimates for interval [start_utc, end_utc)"""
try:
_data = self._data_forecasts if site is None else self._site_data_forecasts[site]
_data_field = self._use_data_field if _use_data_field is None else _use_data_field
res = 0
st_i, end_i = self.get_forecast_list_slice(_data, start_utc, end_utc)
def pchip(xx, i):
x = [-1800, 0, 1800, 3600, ]
y = [_data[i-1][_data_field] + _data[i][_data_field], _data[i][_data_field], 0, -1 * _data[i+1][_data_field], ]
i = PchipInterpolator(x, y)
return float(i([xx])[0])
# Calculate remaining
for d in _data[st_i:end_i]:
d1 = d['period_start']
d2 = d1 + timedelta(seconds=1800)
s = 1800
if not interpolate:
s = 1800
f = d[_data_field]
if start_utc > d1:
s -= (start_utc - d1).total_seconds()
if not interpolate:
s -= (start_utc - d1).total_seconds()
else:
f = pchip((start_utc - d1).total_seconds(), st_i)
if end_utc < d2:
s -= (d2 - end_utc).total_seconds()
if s < 1800:
f *= s / 1800
if not interpolate:
s -= (d2 - end_utc).total_seconds()
else:
f = pchip((d2 - end_utc).total_seconds(), end_i)
if not interpolate and s < 1800:
f *= s / 1800 # Simple linear interpolation
res += f
if _SENSOR_DEBUG_LOGGING: _LOGGER.debug("Get estimate: %s()%s st %s end %s st_i %d end_i %d res %s",
currentFuncName(1),
Expand Down Expand Up @@ -772,13 +788,15 @@ async def http_data(self, dopast = False):
return

failure = False
sitesAttempted = 0
for site in self._sites:
sitesAttempted += 1
_LOGGER.info(f"Getting forecast update for Solcast site {site['resource_id']}")
result = await self.http_data_call(self.get_api_usage_cache_filename(site['apikey']), site['resource_id'], site['apikey'], dopast)
if not result:
failure = True

if not failure:
if sitesAttempted > 0 and not failure:
self._data["last_updated"] = dt.now(timezone.utc).isoformat()
#self._data["weather"] = self._weather

Expand All @@ -788,7 +806,10 @@ async def http_data(self, dopast = False):

await self.serialize_data()
else:
_LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast data has not been built")
if sitesAttempted > 0:
_LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast data has not been built")
else:
_LOGGER.error("No Solcast sites were attempted, so forecast data has not been built - check for earlier failure to retrieve sites")
except Exception as ex:
_LOGGER.error("Exception in http_data: %s - Forecast data has not been built", ex)
_LOGGER.error(traceback.format_exc())
Expand Down

0 comments on commit d31c181

Please sign in to comment.