refactor: upgrade CA-SK with event classes
Upgrade the parser to use the ProductionBreakdown, ProductionMix,
TotalConsumption, and ZoneKey classes. This should yield no functional
change to the parser. Note that this does not resolve the current issue
with the parser's fetch_consumption function. Additionally:

- Conform to Black's 88-column default line length limit.
- Factor out duplicate code.

Refs: electricitymaps#5986, electricitymaps#6011
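
For reference, the event classes replace hand-rolled result dictionaries with
validated models. A minimal sketch of the pattern this commit adopts, using
only the API surface visible in the diff below (the numbers and the timestamp
are made up for illustration):

from datetime import datetime, timezone
from logging import getLogger

from electricitymap.contrib.config import ZoneKey
from electricitymap.contrib.lib.models.event_lists import ProductionBreakdownList
from electricitymap.contrib.lib.models.events import ProductionMix

# Illustrative numbers only; the keyword names are the same production modes
# the parser maps to via PRODUCTION_MAPPING.
mix = ProductionMix(hydro=120.0, wind=250.0, gas=300.0)

breakdowns = ProductionBreakdownList(getLogger(__name__))
breakdowns.append(
    datetime=datetime(2023, 10, 15, 12, tzinfo=timezone.utc),
    production=mix,
    source="saskpower.com",
    zoneKey=ZoneKey("CA-SK"),
)

# to_list() serializes the validated events back into plain dicts for the
# backend, which is why the refactor should yield no functional change.
print(breakdowns.to_list())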
kruschk committed Oct 15, 2023
1 parent 82f5942 commit 3577bfa
Showing 1 changed file with 67 additions and 101 deletions.
parsers/CA_SK.py
@@ -1,142 +1,108 @@
+import typing
 from datetime import datetime, timedelta
 from logging import Logger, getLogger
 
 from pytz import timezone
-from requests import Response, Session
+from requests import Session
 
+from electricitymap.contrib.config import ZoneKey
+from electricitymap.contrib.lib.models.event_lists import ProductionBreakdownList
+from electricitymap.contrib.lib.models.events import ProductionMix, TotalConsumption
 from parsers.lib.exceptions import ParserException
 
 TIMEZONE = timezone("America/Regina")
 
 # URLs for the different endpoints.
 PRODUCTION_URL = (
-    "https://www.saskpower.com/ignitionapi/PowerUseDashboard/GetPowerUseDashboardData"
+    "https://www.saskpower.com/ignitionapi/PowerUseDashboard"
+    "/GetPowerUseDashboardData"
 )
 CONSUMPTION_URL = "https://www.saskpower.com/ignitionapi/Content/GetNetLoad"
 
 PRODUCTION_MAPPING = {
     "Hydro": "hydro",
     "Wind": "wind",
     "Solar": "solar",
     "Natural Gas": "gas",
     "Coal": "coal",
-    "Other": "unknown",  # This is internal consumption, losses, heat recovery facilities and small independent power producers.
+    # "Other" represents internal consumption, losses, heat recovery facilities
+    # and small independent power producers.
+    "Other": "unknown",
 }
 
-USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"
-
-
-def validate_zone_key(zone_key: str) -> None:
-    if zone_key != "CA-SK":
-        raise ParserException(
-            "CA_SK.py",
-            f"CA_SK.py is not designed to parse zone_key: {zone_key}.",
-            zone_key,
-        )
-
-
-def validate_no_datetime(target_datetime: datetime | None, zone_key) -> None:
-    if target_datetime:
-        raise ParserException(
-            "CA_SK.py",
-            "This parser is unable to fetch historical data.",
-            zone_key,
-        )
+USER_AGENT = (
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+    "(KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"
+)
+
+
+def _request(
+    session: Session | None,
+    target_datetime: datetime | None,
+    url: str,
+    zone_key: ZoneKey,
+) -> typing.Any:
+    # The source does not offer historical data, so bail out if it's requested.
+    if target_datetime:
+        raise ParserException("CA_SK.py", "Unable to fetch historical data", zone_key)
+    # The zone key must be "CA-SK"; bail out otherwise.
+    if zone_key != "CA-SK":
+        raise ParserException("CA_SK.py", f"Cannot parse zone '{zone_key}'", zone_key)
+    session = session or Session()
+    # Mimic a user browser in the headers or the API will respond with a 403.
+    response = session.get(url, headers={"user-agent": USER_AGENT})
+    if not response.ok:
+        raise ParserException(
+            "CA_SK.py",
+            f"Request to {url} failed. Response Code: {response.status_code}\n"
+            f"Error:\n{response.text}",
+            zone_key,
+        )
+    return response.json()
 
 
 def fetch_production(
-    zone_key: str = "CA-SK",
-    session: Session | None = None,
+    zone_key: ZoneKey = ZoneKey("CA-SK"),  # TODO: type?
+    session: Session | None = None,  # TODO: default to Session()...?
     target_datetime: datetime | None = None,
     logger: Logger = getLogger(__name__),
-):
-    """This parser function will currently return the daily average of the day in question as hourly data.
-    This is because the API only returns daily data but the backend expects hourly values.
-    This is in order to facilitate the estimation of the hourly values from the daily average.
-    """
-    # Validate that the zone key is equal to CA-SK.
-    validate_zone_key(zone_key)
-    # Validate that the target_datetime is None as this parser is unable to fetch historical data.
-    validate_no_datetime(target_datetime, zone_key)
-
-    session = session or Session()
-
-    # Set the headers to mimic a user browser as the API will return a 403 if not.
-    headers = {"user-agent": USER_AGENT}
-    response: Response = session.get(PRODUCTION_URL, headers=headers)
-
-    if not response.ok:
-        raise ParserException(
-            "CA_SK.py",
-            f"Failed to fetch production data. Response Code: {response.status_code}\nError:\n{response.text}",
-            zone_key,
-        )
-
-    raw_data = response.json()
+) -> list[dict[str, typing.Any]]:
+    payload = _request(session, target_datetime, PRODUCTION_URL, zone_key)
     # Date is in the format "Jan 01, 2020"
-    raw_date = raw_data["SupplyDataText"]
-    date = datetime.strptime(raw_date, "%b %d, %Y")
-    production_data = {}
-
-    for value in raw_data["PowerCacheData"]["generationByType"]:
-        production_data[PRODUCTION_MAPPING[value["type"]]] = value[
-            "totalGenerationForType"
-        ]
-
-    data_list: list[dict] = []
-    # Hack to return hourly data from daily data for the backend as it expects hourly data.
-    for hour in range(0, 24):
-        data_list.append(
-            {
-                "zoneKey": zone_key,
-                "datetime": date.replace(hour=hour, tzinfo=TIMEZONE),
-                "production": production_data,
-                "source": "saskpower.com",
-            }
-        )
-
-    return data_list
+    date = datetime.strptime(payload["SupplyDataText"], "%b %d, %Y")
+    production_mix = ProductionMix(
+        **{
+            PRODUCTION_MAPPING[value["type"]]: value["totalGenerationForType"]
+            for value in payload["PowerCacheData"]["generationByType"]
+        }
+    )
+    production_breakdown_list = ProductionBreakdownList(logger)
+    # Convert the daily average returned by the API into hourly values. This is
+    # a bit of a hack, but it's required because the back-end requires hourly
+    # datapoints while the API only provides daily averages.
+    for hour in range(24):
+        production_breakdown_list.append(
+            datetime=date.replace(hour=hour, tzinfo=TIMEZONE),
+            production=production_mix,
+            source="saskpower.com",
+            zoneKey=ZoneKey(zone_key),
+        )
+    return production_breakdown_list.to_list()
 
 
 def fetch_consumption(
-    zone_key: str = "CA-SK",
+    zone_key: ZoneKey = ZoneKey("CA-SK"),
     session: Session | None = None,
     target_datetime: datetime | None = None,
     logger: Logger = getLogger(__name__),
-):
-    # Validate that the zone key is equal to CA-SK.
-    validate_zone_key(zone_key)
-    # Validate that the target_datetime is None as this parser is unable to fetch historical data.
-    validate_no_datetime(target_datetime, zone_key)
-
-    session = session or Session()
-
-    # Set the headers to mimic a user browser as the API will return a 403 if not.
-    headers = {"user-agent": USER_AGENT}
-
-    response: Response = session.get(CONSUMPTION_URL, headers=headers)
-
-    if not response.ok:
-        raise ParserException(
-            "CA_SK.py",
-            f"Failed to fetch consumption data. Response Code: {response.status_code}\nError:\n{response.text}",
-            zone_key,
-        )
-
-    raw_data = response.json()
-
+) -> dict[str, typing.Any]:
+    payload = _request(session, target_datetime, CONSUMPTION_URL, zone_key)
+    # The source refreshes every 5 minutes, so we assume the current data is
+    # from 5 minutes before the most recent multiple of 5 minutes.
     now = datetime.now(TIMEZONE)
-
-    # Data is updated every 5 minutes so we assume the data is from a multiple of 5 minutes and has a 5 minute delay from that multiple.
     assumed_datetime = now.replace(second=0, microsecond=0) - timedelta(
-        minutes=(now.minute % 5) + 5
+        minutes=(5 + now.minute % 5)
     )
-
-    return [
-        {
-            "zoneKey": zone_key,
-            "datetime": assumed_datetime,
-            "consumption": int(raw_data),
-            "source": "saskpower.com",
-        }
-    ]
+    return TotalConsumption.create(
+        consumption=float(payload),
+        datetime=assumed_datetime,
+        logger=logger,
+        source="saskpower.com",
+        zoneKey=zone_key,
+    ).to_dict()
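
A rough usage sketch, not part of the commit: both parsers remain plain
functions, so they can be exercised directly. This assumes network access to
saskpower.com and that the serialized dicts keep the same keys as before (the
commit message claims no functional change):

from requests import Session

from parsers.CA_SK import fetch_consumption, fetch_production

session = Session()

# fetch_production returns 24 hourly datapoints, each carrying the same
# daily-average production mix.
for event in fetch_production(session=session):
    print(event["datetime"], event["production"])

# fetch_consumption returns a single datapoint stamped five minutes before the
# most recent multiple of five minutes: e.g. at 12:07, minutes = 5 + (7 % 5)
# = 7, so the datapoint is stamped 12:00. (Per the commit message, the known
# fetch_consumption issue is not resolved by this refactor.)
print(fetch_consumption(session=session))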
