refactor: upgrade CA-SK with event classes (#6013)
* refactor: upgrade CA-SK with event classes

Upgrade the parser to use the ProductionBreakdown, ProductionMix,
TotalConsumption, and ZoneKey classes. This should yield no functional
change. Note that this does not resolve the current issue with the
fetch_consumption function. Additionally:

- Comply with Black's 88-column default line length limit.
- Factor out duplicate code.

Refs: #5986, #6011, #6013
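
For context, the event classes accumulate validated data points and then serialize them back into the plain dictionaries the backend expects. A minimal sketch of the pattern, based on the usage in the diff below (the mode, value, and timestamp are illustrative):

    from datetime import datetime, timezone
    from logging import getLogger

    from electricitymap.contrib.config import ZoneKey
    from electricitymap.contrib.lib.models.event_lists import ProductionBreakdownList
    from electricitymap.contrib.lib.models.events import ProductionMix

    # Build a production mix, append it to an event list, then serialize.
    mix = ProductionMix()
    mix.add_value("hydro", 123.4)
    breakdowns = ProductionBreakdownList(getLogger(__name__))
    breakdowns.append(
        zoneKey=ZoneKey("CA-SK"),
        datetime=datetime(2023, 10, 25, tzinfo=timezone.utc),
        source="saskpower.com",
        production=mix,
    )
    events = breakdowns.to_list()  # plain dicts, ready for the backend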

* refactor: address review comments

- Be more specific about the return type of the _request function.
- Import the Any type directly.
- Swap out pytz for zoneinfo.
- Use the ProductionMix.add_value method instead of a dictionary
  comprehension.

Refs: #5986, #6011, #6013
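
The pytz-to-zoneinfo swap is more than cosmetic: a pytz timezone passed directly as a tzinfo (for example via datetime.replace) silently picks up the zone's historical local-mean-time offset, whereas zoneinfo.ZoneInfo is a regular tzinfo from the standard library (Python 3.9+) that is safe to use that way, which is how the parser attaches it. A short sketch:

    from datetime import datetime
    from zoneinfo import ZoneInfo

    TIMEZONE = ZoneInfo("America/Regina")
    # Safe with replace(); with pytz this would need localize() instead.
    aware = datetime(2023, 10, 25).replace(hour=12, tzinfo=TIMEZONE)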

* refactor: switch to TotalConsumptionList

Swap out TotalConsumption for TotalConsumptionList, as the former is
considered an implementation detail while the latter is part of the
public API.

Refs: #5986, #6011, #6013
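
With that change, the parser only ever touches the list wrapper. A sketch of the resulting TotalConsumptionList usage, mirroring the diff below (the consumption figure is illustrative):

    from datetime import datetime, timezone
    from logging import getLogger

    from electricitymap.contrib.config import ZoneKey
    from electricitymap.contrib.lib.models.event_lists import TotalConsumptionList

    consumption = TotalConsumptionList(getLogger(__name__))
    consumption.append(
        zoneKey=ZoneKey("CA-SK"),
        datetime=datetime(2023, 10, 25, 12, 0, tzinfo=timezone.utc),
        consumption=2500.0,
        source="saskpower.com",
    )
    events = consumption.to_list()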

* refactor: narrow down payload types

Refs: #5986, #6011, #6013
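
Narrowing here means the shared _request helper is annotated as returning dict | str (per the isinstance checks in the diff, the production endpoint yields a JSON object while the consumption endpoint yields a bare string), and each caller asserts the branch it expects before using it. A hypothetical reduced example of the pattern:

    def handle(payload: dict | str) -> float:
        # isinstance() narrows the union for the type checker and the runtime.
        if isinstance(payload, dict):
            return float(payload["total"])  # "total" is an illustrative key
        return float(payload)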

---------

Co-authored-by: Viktor Andersson <30777521+VIKTORVAV99@users.noreply.github.com>
kruschk and VIKTORVAV99 committed Oct 25, 2023
1 parent e673724 commit 266eff0
Showing 1 changed file with 76 additions and 102 deletions.
178 changes: 76 additions & 102 deletions parsers/CA_SK.py
@@ -1,142 +1,116 @@
 from datetime import datetime, timedelta
 from logging import Logger, getLogger
+from typing import Any
+from zoneinfo import ZoneInfo
 
-from pytz import timezone
-from requests import Response, Session
+from requests import Session
 
+from electricitymap.contrib.config import ZoneKey
+from electricitymap.contrib.lib.models.event_lists import (
+    ProductionBreakdownList,
+    TotalConsumptionList,
+)
+from electricitymap.contrib.lib.models.events import ProductionMix
 from parsers.lib.exceptions import ParserException
 
-TIMEZONE = timezone("America/Regina")
-
-# URLs for the different endpoints.
+TIMEZONE = ZoneInfo("America/Regina")
 PRODUCTION_URL = (
-    "https://www.saskpower.com/ignitionapi/PowerUseDashboard/GetPowerUseDashboardData"
+    "https://www.saskpower.com/ignitionapi/PowerUseDashboard"
+    "/GetPowerUseDashboardData"
 )
 CONSUMPTION_URL = "https://www.saskpower.com/ignitionapi/Content/GetNetLoad"
 
 PRODUCTION_MAPPING = {
     "Hydro": "hydro",
     "Wind": "wind",
     "Solar": "solar",
     "Natural Gas": "gas",
     "Coal": "coal",
-    "Other": "unknown",  # This is internal consumption, losses, heat recovery facilities and small independent power producers.
+    # "Other" represents internal consumption, losses, heat recovery facilities
+    # and small independent power producers.
+    "Other": "unknown",
 }
 
-USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"
-
-
-def validate_zone_key(zone_key: str) -> None:
-    if zone_key != "CA-SK":
-        raise ParserException(
-            "CA_SK.py",
-            f"CA_SK.py is not designed to parse zone_key: {zone_key}.",
-            zone_key,
-        )
+USER_AGENT = (
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+    "(KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"
+)
 
 
-def validate_no_datetime(target_datetime: datetime | None, zone_key) -> None:
+def _request(
+    session: Session | None,
+    target_datetime: datetime | None,
+    url: str,
+    zone_key: ZoneKey,
+) -> dict | str:
+    # The source does not offer historical data, so bail out if it's requested.
     if target_datetime:
+        raise ParserException("CA_SK.py", "Unable to fetch historical data", zone_key)
+    # The zone key must be "CA-SK"; bail out otherwise.
+    if zone_key != "CA-SK":
+        raise ParserException("CA_SK.py", f"Cannot parse zone '{zone_key}'", zone_key)
+    session = session or Session()
+    # Mimic a user browser in the headers or the API will respond with a 403.
+    response = session.get(url, headers={"user-agent": USER_AGENT})
+    if not response.ok:
         raise ParserException(
             "CA_SK.py",
-            "This parser is unable to fetch historical data.",
+            f"Request to {url} failed. Response Code: {response.status_code}\n"
+            f"Error:\n{response.text}",
             zone_key,
         )
+    return response.json()
 
 
 def fetch_production(
-    zone_key: str = "CA-SK",
+    zone_key: ZoneKey = ZoneKey("CA-SK"),
     session: Session | None = None,
     target_datetime: datetime | None = None,
     logger: Logger = getLogger(__name__),
-):
-    """This parser function will currently return the daily average of the day in question as hourly data.
-    This is because the API only returns daily data but the backend expects hourly values.
-    This is in order to facilitate the estimation of the hourly values from the daily average.
-    """
-    # Validate that the zone key is equal to CA-SK.
-    validate_zone_key(zone_key)
-    # Validate that the target_datetime is None as this parser is unable to fetch historical data.
-    validate_no_datetime(target_datetime, zone_key)
-
-    session = session or Session()
-
-    # Set the headers to mimic a user browser as the API will return a 403 if not.
-    headers = {"user-agent": USER_AGENT}
-    response: Response = session.get(PRODUCTION_URL, headers=headers)
-
-    if not response.ok:
-        raise ParserException(
-            "CA_SK.py",
-            f"Failed to fetch production data. Response Code: {response.status_code}\nError:\n{response.text}",
-            zone_key,
-        )
-
-    raw_data = response.json()
+) -> list[dict[str, Any]]:
+    payload = _request(session, target_datetime, PRODUCTION_URL, zone_key)
+    if not isinstance(payload, dict):
+        raise ParserException("CA_SK.py", "Unexpected payload type", zone_key)
     # Date is in the format "Jan 01, 2020"
-    raw_date = raw_data["SupplyDataText"]
-    date = datetime.strptime(raw_date, "%b %d, %Y")
-    production_data = {}
-
-    for value in raw_data["PowerCacheData"]["generationByType"]:
-        production_data[PRODUCTION_MAPPING[value["type"]]] = value[
-            "totalGenerationForType"
-        ]
-
-    data_list: list[dict] = []
-    # Hack to return hourly data from daily data for the backend as it expects hourly data.
-    for hour in range(0, 24):
-        data_list.append(
-            {
-                "zoneKey": zone_key,
-                "datetime": date.replace(hour=hour, tzinfo=TIMEZONE),
-                "production": production_data,
-                "source": "saskpower.com",
-            }
+    date = datetime.strptime(payload["SupplyDataText"], "%b %d, %Y")
+    production_mix = ProductionMix()
+    for generation_by_type in payload["PowerCacheData"]["generationByType"]:
+        production_mix.add_value(
+            PRODUCTION_MAPPING[generation_by_type["type"]],
+            generation_by_type["totalGenerationForType"],
         )
-
-    return data_list
+    production_breakdown_list = ProductionBreakdownList(logger)
+    # Copy the daily average returned by the API into hourly values. This is a
+    # bit of a hack, but it's required because the back-end requires hourly
+    # datapoints while the API only provides daily averages.
+    for hour in range(24):
+        production_breakdown_list.append(
+            datetime=date.replace(hour=hour, tzinfo=TIMEZONE),
+            production=production_mix,
+            source="saskpower.com",
+            zoneKey=ZoneKey(zone_key),
+        )
+    return production_breakdown_list.to_list()
 
 
 def fetch_consumption(
-    zone_key: str = "CA-SK",
+    zone_key: ZoneKey = ZoneKey("CA-SK"),
     session: Session | None = None,
     target_datetime: datetime | None = None,
     logger: Logger = getLogger(__name__),
-):
-    # Validate that the zone key is equal to CA-SK.
-    validate_zone_key(zone_key)
-    # Validate that the target_datetime is None as this parser is unable to fetch historical data.
-    validate_no_datetime(target_datetime, zone_key)
-
-    session = session or Session()
-
-    # Set the headers to mimic a user browser as the API will return a 403 if not.
-    headers = {"user-agent": USER_AGENT}
-
-    response: Response = session.get(CONSUMPTION_URL, headers=headers)
-
-    if not response.ok:
-        raise ParserException(
-            "CA_SK.py",
-            f"Failed to fetch consumption data. Response Code: {response.status_code}\nError:\n{response.text}",
-            zone_key,
-        )
-
-    raw_data = response.json()
-
+) -> list[dict[str, Any]]:
+    payload = _request(session, target_datetime, CONSUMPTION_URL, zone_key)
+    if not isinstance(payload, str):
+        raise ParserException("CA_SK.py", "Unexpected payload type", zone_key)
+    # The source refreshes every 5 minutes, so we assume the current data is
+    # from 5 minutes before the most recent multiple of 5 minutes.
     now = datetime.now(TIMEZONE)
-
-    # Data is updated every 5 minutes so we assume the data is from a multiple of 5 minutes and has a 5 minute delay from that multiple.
     assumed_datetime = now.replace(second=0, microsecond=0) - timedelta(
-        minutes=(now.minute % 5) + 5
+        minutes=(5 + now.minute % 5)
     )
-
-    return [
-        {
-            "zoneKey": zone_key,
-            "datetime": assumed_datetime,
-            "consumption": int(raw_data),
-            "source": "saskpower.com",
-        }
-    ]
+    total_consumption = TotalConsumptionList(logger)
+    total_consumption.append(
+        consumption=float(payload),
+        datetime=assumed_datetime,
+        source="saskpower.com",
+        zoneKey=zone_key,
+    )
+    return total_consumption.to_list()
