Skip to content

Commit

Permalink
move load paths to snakefile
Browse files Browse the repository at this point in the history
  • Loading branch information
pz-max committed Mar 18, 2022
1 parent a0dad25 commit f593c9d
Show file tree
Hide file tree
Showing 2 changed files with 127 additions and 10 deletions.
4 changes: 3 additions & 1 deletion Snakefile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ from shutil import copyfile
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider

from scripts.download_osm_data import create_country_list
from scripts.add_electricity import get_load_paths_gegis

HTTP = HTTPRemoteProvider()

Expand All @@ -23,6 +24,7 @@ configfile: "config.yaml"
# convert country list according to the desired region
config["countries"] = create_country_list(config["countries"])

load_data_paths = get_load_paths_gegis(config)
COSTS = "data/costs.csv"
ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 20)

Expand Down Expand Up @@ -247,7 +249,7 @@ rule add_electricity:
tech_costs=COSTS,
regions="resources/regions_onshore.geojson",
powerplants='resources/powerplants.csv',
load='resources/ssp2-2.6/2030/era5_2013/Africa.nc',
load=load_data_paths,
gadm_shapes='resources/gadm_shapes.geojson',
hydro_capacities='data/hydro_capacities.csv',
**{f"profile_{tech}": f"resources/profile_{tech}.nc"
Expand Down
133 changes: 124 additions & 9 deletions scripts/add_electricity.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,127 @@ def load_powerplants(ppl_fn=None):
columns=["efficiency"]).replace({"carrier": carrier_dict}))


def get_load_paths_gegis(config):
    """
    Create the load paths for the GEGIS outputs.

    The paths are created automatically according to the included
    countries, weather year, prediction year and ssp scenario, one
    file per continent covered by the country list.

    Parameters
    ----------
    config : dict
        Configuration mapping with "countries" and "load_options"
        ("weather_year", "prediction_year", "ssp") entries.

    Returns
    -------
    list of str
        e.g. ["resources/ssp2-2.6/2030/era5_2013/Africa.nc"]
    """
    countries = config.get("countries")
    region_load = getContinent(countries)
    # read the load options once instead of re-fetching per key
    load_options = config.get("load_options")
    weather_year = load_options["weather_year"]
    prediction_year = load_options["prediction_year"]
    ssp = load_options["ssp"]

    # one file per continent:
    # resources/<ssp>/<prediction_year>/era5_<weather_year>/<Continent>.nc
    return [
        os.path.join(
            "resources",
            str(ssp),
            str(prediction_year),
            "era5_" + str(weather_year),
            str(continent).capitalize() + ".nc",
        )
        for continent in region_load
    ]


def attach_load(
    n,
    load_paths,
    regions,
    admin_shapes,
    countries,
    scale,
):
    """
    Add load to the network and distribute it according to GDP and population.

    Parameters
    ----------
    n : pypsa network
        Network the loads are attached to (modified in place).
    load_paths : list of str
        Paths of the GEGIS load .nc files to merge.
    regions : .geojson
        Contains bus_id of low voltage substations and
        bus region shapes (voronoi cells)
    admin_shapes : .geojson
        contains subregional gdp, population and shape data
    countries : list
        List of countries that is config input
    scale : float
        The scale factor is multiplied with the load (1.3 = 30% more load)

    Returns
    -------
    None
        The network ``n`` is modified in place via ``n.madd``.
    """
    substation_lv_i = n.buses.index[n.buses["substation_lv"]]
    regions = (
        gpd.read_file(regions).set_index("name").reindex(substation_lv_i)
    ).dropna(
        axis="rows")  # TODO: check if dropna required here. NaN shapes exist?

    # Merge load .nc files: https://stackoverflow.com/questions/47226429/join-merge-multiple-netcdf-files-using-xarray
    gegis_load = xr.open_mfdataset(load_paths, combine='nested')
    gegis_load = gegis_load.to_dataframe().reset_index().set_index("time")
    # filter load for analysed countries
    gegis_load = gegis_load.loc[gegis_load.region_code.isin(countries)]
    logger.info(f"Load data scaled with scaling factor {scale}.")
    # Scale only the demand column: scaling the whole frame would also hit
    # the string "region_code" column and raise for non-integer factors.
    gegis_load["Electricity demand"] *= scale
    shapes = gpd.read_file(admin_shapes).set_index("GADM_ID")
    # repair invalid geometries before the shape-to-shape transfer below
    shapes.loc[:,
               "geometry"] = shapes["geometry"].apply(lambda x: make_valid(x))

    def upsample(cntry, group):
        """
        Distribute a country's load over its bus regions by population and GDP.
        """
        l = gegis_load.loc[gegis_load.region_code ==
                           cntry]["Electricity demand"]
        if len(group) == 1:
            # single bus region: it receives the whole country load
            return pd.DataFrame({group.index[0]: l})
        else:
            shapes_cntry = shapes.loc[shapes.country == cntry]
            transfer = vtransfer.Shapes2Shapes(group,
                                               shapes_cntry.geometry,
                                               normed=False).T.tocsr()
            gdp_n = pd.Series(transfer.dot(
                shapes_cntry["gdp"].fillna(1.0).values),
                              index=group.index)
            pop_n = pd.Series(transfer.dot(
                shapes_cntry["pop"].fillna(1.0).values),
                              index=group.index)

            # relative factors 0.6 and 0.4 have been determined from a linear
            # regression on the country to EU continent load data
            # (refer to vresutils.load._upsampling_weights)
            # TODO: require adjustment for Africa
            factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
            return pd.DataFrame(
                factors.values * l.values[:, np.newaxis],
                index=l.index,
                columns=factors.index,
            )

    load = pd.concat(
        [
            upsample(cntry, group)
            for cntry, group in regions.geometry.groupby(regions.country)
        ],
        axis=1,
    )

    n.madd("Load", substation_lv_i, bus=substation_lv_i, p_set=load)


def attach_load(

This comment has been minimized.

Copy link
@davide-f

davide-f Mar 18, 2022

Member

Is this function redundant now?

n,
regions,
Expand Down Expand Up @@ -738,24 +859,18 @@ def add_nice_carrier_names(n, config=None):
# Snakemake imports:
regions = snakemake.input.regions

load_paths = snakemake.input["load"]
countries = snakemake.config["countries"]
weather_year = snakemake.config["load_options"]["weather_year"]
prediction_year = snakemake.config["load_options"]["prediction_year"]
region_load = getContinent(countries)
ssp = snakemake.config["load_options"]["ssp"]
scale = snakemake.config["load_options"]["scale"]
admin_shapes = snakemake.input.gadm_shapes
scale = snakemake.config["load_options"]["scale"]

costs = load_costs(Nyears)
ppl = load_powerplants()

attach_load(
n,
load_paths,
regions,
weather_year,
prediction_year,
region_load,
ssp,
admin_shapes,
countries,
scale,
Expand Down

0 comments on commit f593c9d

Please sign in to comment.