This repository was archived by the owner on Sep 11, 2023. It is now read-only.
Merged
53 changes: 37 additions & 16 deletions nowcasting_dataset/data_sources/gsp/eso.py
@@ -40,33 +40,54 @@
rename_load_columns = {v: k for k, v in rename_save_columns.items()}


def get_gsp_metadata_from_eso(calculate_centroid: bool = True) -> pd.DataFrame:
def get_gsp_metadata_from_eso(
calculate_centroid: bool = True, load_local_file: bool = True, save_local_file: bool = False
) -> pd.DataFrame:
"""
Get the metadata for the GSPs from ESO.

Args:
calculate_centroid: Also load the shape file and calculate the centroid
load_local_file: Load from a local file, not from ESO
save_local_file: Save to a local file; only needed if the data is updated.

Returns: Dataframe of ESO Metadata

"""
logger.debug("Getting GSP shape file")

# Call the ESO website. There is a possibility that this API will be replaced, and it is unclear
# whether the original API will stay operational
url = (
"https://data.nationalgrideso.com/api/3/action/datastore_search?"
"resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400"
)
with urllib.request.urlopen(url) as fileobj:
d = json.loads(fileobj.read())

# make dataframe
results = d["result"]["records"]
metadata = pd.DataFrame(results)

# drop duplicates
metadata = metadata.drop_duplicates(subset=["gsp_id"])
local_file = f"{os.path.dirname(os.path.realpath(__file__))}/eso_metadata.csv"

if not os.path.isfile(local_file):
logger.debug("There is no local file so going to get it from ESO, and save it afterwards")
load_local_file = False
save_local_file = True

if load_local_file:
logger.debug("loading local file for ESO metadata")
metadata = pd.read_csv(local_file)
# rename the columns to full name
logger.debug("loading local file for ESO metadata:done")
else:
# Call the ESO website. There is a possibility that this API will be replaced, and it is unclear
# whether the original API will stay operational
url = (
"https://data.nationalgrideso.com/api/3/action/datastore_search?"
"resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400"
)
with urllib.request.urlopen(url) as fileobj:
d = json.loads(fileobj.read())

# make dataframe
results = d["result"]["records"]
metadata = pd.DataFrame(results)

# drop duplicates
metadata = metadata.drop_duplicates(subset=["gsp_id"])

if save_local_file:
# save file
metadata.to_csv(local_file)

if calculate_centroid:
# get shape data from eso
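For context, a minimal usage sketch of the updated function as it appears in this diff. The import path follows the file location above; the flag combinations shown are illustrative only and are not part of the change itself:

```python
# Sketch only: illustrates the new caching flags added in this diff.
from nowcasting_dataset.data_sources.gsp.eso import get_gsp_metadata_from_eso

# Default behaviour after this change: read the bundled eso_metadata.csv
# next to eso.py instead of calling the ESO API on every run.
metadata = get_gsp_metadata_from_eso(calculate_centroid=False)

# Force a refresh from the ESO API and overwrite the local cache
# (only needed when the upstream data has been updated).
metadata = get_gsp_metadata_from_eso(
    calculate_centroid=False, load_local_file=False, save_local_file=True
)
print(metadata.columns)
```

If the local `eso_metadata.csv` is missing, the function falls back to fetching from ESO and saving the file regardless of the flags passed, as the diff above shows.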
340 changes: 340 additions & 0 deletions nowcasting_dataset/data_sources/gsp/eso_metadata.csv

Large diffs are not rendered by default.
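The cache round-trip in the diff above uses plain `to_csv`/`read_csv`. A small, self-contained sketch of that pattern (file name and column values are illustrative); note that `DataFrame.to_csv` writes the index by default, so the re-loaded frame gains an `Unnamed: 0` column unless `index=False` is passed or the extra column is dropped on load:

```python
import pandas as pd

# Illustrative frame standing in for the ESO metadata.
metadata = pd.DataFrame({"gsp_id": [1, 2], "gsp_name": ["GSP_A", "GSP_B"]})

local_file = "eso_metadata.csv"  # illustrative path

# Save as in the diff: without index=False the row index becomes a column.
metadata.to_csv(local_file)

reloaded = pd.read_csv(local_file)
print(reloaded.columns.tolist())  # ['Unnamed: 0', 'gsp_id', 'gsp_name']

# One way to avoid the extra column would be:
# metadata.to_csv(local_file, index=False)
```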

@@ -0,0 +1 @@
ISO-8859-1
Binary file not shown.
@@ -0,0 +1 @@
PROJCS["British_National_Grid",GEOGCS["GCS_OSGB_1936",DATUM["D_OSGB_1936",SPHEROID["Airy_1830",6377563.396,299.3249646]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",-100000.0],PARAMETER["Central_Meridian",-2.0],PARAMETER["Scale_Factor",0.9996012717],PARAMETER["Latitude_Of_Origin",49.0],UNIT["Meter",1.0]]
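The one-line hunks above look like shapefile sidecar files: a `.cpg` declaring the attribute-table encoding (ISO-8859-1) and a `.prj` carrying the British National Grid projection (OSGB 1936, commonly referenced as EPSG:27700); the "Binary file not shown" entries would then be the binary shapefile components. A hedged sketch of reading such a shapefile and computing centroids, assuming geopandas is available and using an illustrative file name (neither is shown in this diff):

```python
import geopandas as gpd

# Illustrative path; the actual shapefile name is not visible in this diff.
shape_file = "gsp_regions.shp"

# The .cpg above declares ISO-8859-1, so pass it explicitly if needed.
gsp_shapes = gpd.read_file(shape_file, encoding="ISO-8859-1")

# The .prj above is British National Grid (EPSG:27700); centroids are
# best computed in this projected CRS, then converted to lat/lon.
centroids = gsp_shapes.geometry.centroid
centroids_wgs84 = centroids.to_crs(epsg=4326)

print(centroids_wgs84.head())
```

Computing centroids in the projected CRS before converting to WGS84 avoids the distortion that comes from taking centroids directly in geographic coordinates.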
Binary file not shown.
Binary file not shown.