Commit

Merge pull request #678 from openego/features/#663-update-pypsa-version
Features/#663 update pypsa version
ClaraBuettner committed Dec 1, 2023
2 parents 58b953f + 530d753 commit 68920de
Showing 11 changed files with 193 additions and 152 deletions.
1 change: 0 additions & 1 deletion .github/workflows/ci.yaml
@@ -22,7 +22,6 @@ jobs:
with:
python-version: |
3
3.8
3.9
3.10
3.11
1 change: 1 addition & 0 deletions etrago/appl.py
@@ -679,6 +679,7 @@ def run_etrago(args, json_path):

# spatial clustering
etrago.spatial_clustering()

etrago.spatial_clustering_gas()

# snapshot clustering
86 changes: 38 additions & 48 deletions etrago/cluster/electrical.py
@@ -27,9 +27,8 @@
import logging

from pypsa import Network
from pypsa.networkclustering import (
from pypsa.clustering.spatial import (
aggregatebuses,
aggregategenerators,
aggregateoneport,
get_clustering_from_busmap,
)
@@ -43,7 +42,9 @@
group_links,
kmean_clustering,
kmedoids_dijkstra_clustering,
strategies_buses,
strategies_generators,
strategies_lines,
strategies_one_ports,
)
from etrago.tools.utilities import set_control_strategies
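The moved import is the core of this pull request: from PyPSA 0.25 onwards the spatial clustering helpers live in pypsa.clustering.spatial rather than pypsa.networkclustering. A compatibility sketch (not part of the commit) that works on either side of the move:

# Hedged sketch: prefer the new module path used above and fall back to the
# old one on PyPSA releases that still ship pypsa.networkclustering.
try:
    from pypsa.clustering.spatial import (
        aggregatebuses,
        aggregateoneport,
        get_clustering_from_busmap,
    )
except ImportError:  # older PyPSA
    from pypsa.networkclustering import (
        aggregatebuses,
        aggregateoneport,
        get_clustering_from_busmap,
    )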
@@ -275,6 +276,8 @@ def cluster_on_extra_high_voltage(etrago, busmap, with_time=True):
{
"x": _leading(busmap, network.buses),
"y": _leading(busmap, network.buses),
"geom": lambda x: np.nan,
"country": lambda x: "",
},
)
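These custom strategies are presumably handed to aggregatebuses: coordinates follow the leading bus, while geometry and country are blanked because they have no meaningful aggregate. A minimal sketch of such a call, assuming `network` is a pypsa.Network and `busmap` maps bus ids to cluster ids:

import numpy as np
from pypsa.clustering.spatial import aggregatebuses

buses_c = aggregatebuses(
    network,
    busmap,
    custom_strategies={
        "geom": lambda x: np.nan,  # merged buses have no single geometry
        "country": lambda x: "",   # country is ambiguous after merging
    },
)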

@@ -327,27 +330,20 @@ def cluster_on_extra_high_voltage(etrago, busmap, with_time=True):
io.import_series_from_dataframe(network_c, df, "Link", attr)

# dealing with generators
network.generators["weight"] = 1
# network.generators["weight"] = 1

new_df, new_pnl = aggregategenerators(
network, busmap, with_time, custom_strategies=strategies_generators()
)
io.import_components_from_dataframe(network_c, new_df, "Generator")
for attr, df in iteritems(new_pnl):
io.import_series_from_dataframe(network_c, df, "Generator", attr)

# dealing with all other components
aggregate_one_ports = network.one_port_components.copy()
aggregate_one_ports.discard("Generator")
for one_port in network.one_port_components.copy():
if one_port == "Generator":
custom_strategies = strategies_generators()

for one_port in aggregate_one_ports:
one_port_strategies = strategies_one_ports()
else:
custom_strategies = strategies_one_ports().get(one_port, {})
new_df, new_pnl = aggregateoneport(
network,
busmap,
component=one_port,
with_time=with_time,
custom_strategies=one_port_strategies.get(one_port, {}),
custom_strategies=custom_strategies,
)
io.import_components_from_dataframe(network_c, new_df, one_port)
for attr, df in iteritems(new_pnl):
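Because the new clustering module no longer provides a separate aggregategenerators helper, generators are folded into the same one-port loop. A sketch of the generator case on its own, assuming `network`, `busmap` and the strategy helpers from etrago.cluster.spatial are in scope:

from pypsa.clustering.spatial import aggregateoneport

new_df, new_pnl = aggregateoneport(
    network,
    busmap,
    component="Generator",
    with_time=True,
    custom_strategies=strategies_generators(),  # eTraGo generator strategies
)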
@@ -454,6 +450,13 @@ def ehv_clustering(self):
self.update_busmap(busmap)
self.buses_by_country()

# Drop nan values in timeseries after clustering
for c in self.network.iterate_components():
for pnl in c.attrs[
(c.attrs.status == "Output") & (c.attrs.varying)
].index:
c.pnl[pnl] = pd.DataFrame(index=self.network.snapshots)

logger.info("Network clustered to EHV-grid")
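The added loop clears every time-varying output attribute, since clustering can leave stale NaN-filled columns behind. The same pattern for an arbitrary network, as a sketch (assuming `network` is a clustered pypsa.Network):

import pandas as pd

for c in network.iterate_components():
    varying_outputs = c.attrs[(c.attrs.status == "Output") & (c.attrs.varying)].index
    for pnl in varying_outputs:
        # reset the output time series to an empty frame on the snapshot index
        c.pnl[pnl] = pd.DataFrame(index=network.snapshots)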


@@ -684,6 +687,7 @@ def preprocessing(etrago):
[
"bus0",
"bus1",
"r",
"x",
"s_nom",
"capital_cost",
@@ -696,6 +700,7 @@
x=network.transformers.x
* (380.0 / transformer_voltages.max(axis=1)) ** 2,
length=1,
v_nom=380.0,
)
.set_index("T" + trafo_index),
"Line",
@@ -857,8 +862,22 @@ def postprocessing(etrago, busmap, busmap_foreign, medoid_idx=None):
generator_strategies=strategies_generators(),
aggregate_one_ports=aggregate_one_ports,
line_length_factor=settings["line_length_factor"],
bus_strategies=strategies_buses(),
line_strategies=strategies_lines(),
)
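The call now passes explicit bus and line strategies so that attributes such as geom are handled instead of tripping up the aggregation. A hedged sketch restricted to the arguments visible in this hunk (assuming `network`, `busmap`, `aggregate_one_ports` and `settings` are defined as in the surrounding code):

from pypsa.clustering.spatial import get_clustering_from_busmap

clustering = get_clustering_from_busmap(
    network,
    busmap,
    aggregate_one_ports=aggregate_one_ports,
    line_length_factor=settings["line_length_factor"],
    generator_strategies=strategies_generators(),
    bus_strategies=strategies_buses(),    # e.g. {"geom": nan_links}
    line_strategies=strategies_lines(),   # e.g. {"geom": nan_links}
)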

# Drop nan values after clustering
clustering.network.links.min_up_time.fillna(0, inplace=True)
clustering.network.links.min_down_time.fillna(0, inplace=True)
clustering.network.links.up_time_before.fillna(0, inplace=True)
clustering.network.links.down_time_before.fillna(0, inplace=True)
# Drop nan values in timeseries after clustering
for c in clustering.network.iterate_components():
for pnl in c.attrs[
(c.attrs.status == "Output") & (c.attrs.varying)
].index:
c.pnl[pnl] = pd.DataFrame(index=clustering.network.snapshots)

if method == "kmedoids-dijkstra":
for i in clustering.network.buses[
clustering.network.buses.carrier == "AC"
@@ -928,41 +947,12 @@ def calc_availability_factor(gen):
cannot be found in the dictionary, it is assumed to be 1.
"""

if gen["carrier"] in time_dependent:
if gen.name in network.generators_t.p_max_pu.columns:
cf = network.generators_t["p_max_pu"].loc[:, gen.name].mean()
else:
try:
cf = fixed_capacity_fac[gen["carrier"]]
except KeyError:
cf = 1
return cf
cf = network.generators.loc[gen.name, "p_max_pu"]

time_dependent = [
"solar_rooftop",
"solar",
"wind_onshore",
"wind_offshore",
]
fixed_capacity_fac = {
# A value of 1 is given to power plants where its availability
# does not depend on the weather
"industrial_gas_CHP": 1,
"industrial_biomass_CHP": 1,
"biomass": 1,
"central_biomass_CHP": 1,
"central_gas_CHP": 1,
"OCGT": 1,
"other_non_renewable": 1,
"run_of_river": 0.50,
"reservoir": 1,
"gas": 1,
"oil": 1,
"others": 1,
"coal": 1,
"lignite": 1,
"nuclear": 1,
}
return cf
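One plausible reading of the simplified lookup above, as a sketch (assuming `network` is a pypsa.Network and `gen` is a row of network.generators): take the mean of the p_max_pu time series when one exists, otherwise fall back to the static value.

if gen.name in network.generators_t.p_max_pu.columns:
    cf = network.generators_t.p_max_pu[gen.name].mean()
else:
    cf = network.generators.loc[gen.name, "p_max_pu"]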

gen = network.generators[network.generators.carrier != "load shedding"][
["bus", "carrier", "p_nom"]
46 changes: 29 additions & 17 deletions etrago/cluster/gas.py
@@ -26,12 +26,12 @@
if "READTHEDOCS" not in os.environ:
import logging

from pypsa import Network
from pypsa.networkclustering import (
from pypsa.clustering.spatial import (
aggregatebuses,
aggregateoneport,
busmap_by_kmeans,
)
from pypsa.components import Network
from six import iteritems
import numpy as np
import pandas as pd
@@ -84,10 +84,15 @@ def preprocessing(etrago):
# Create network_ch4 (grid nodes in order to create the busmap basis)
network_ch4 = Network()

buses_ch4 = etrago.network.buses
links_ch4 = etrago.network.links
buses_ch4 = etrago.network.buses.copy()
links_ch4 = etrago.network.links.copy()

io.import_components_from_dataframe(network_ch4, buses_ch4, "Bus")
io.import_components_from_dataframe(network_ch4, links_ch4, "Link")
network_ch4.madd(
"Link", links_ch4.index, **links_ch4.loc[:, ~links_ch4.isna().any()]
)

network_ch4.buses["country"] = buses_ch4.country

# Cluster ch4 buses
settings = etrago.args["network_clustering"]
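Copying the frames and filtering out columns that contain NaN before madd() avoids mutating the original network and avoids importing columns that newer PyPSA versions reject. A hypothetical standalone example of the same pattern (buses and links invented for illustration):

import pypsa

source = pypsa.Network()
source.madd("Bus", ["b0", "b1"], carrier="CH4")
source.madd("Link", ["l0"], bus0="b0", bus1="b1", p_nom=10.0)

links = source.links.copy()

target = pypsa.Network()
target.madd("Bus", ["b0", "b1"], carrier="CH4")
# re-add the links, keeping only columns without NaN values
target.madd("Link", links.index, **links.loc[:, ~links.isna().any()])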
@@ -482,6 +487,7 @@ def gas_postprocessing(etrago, busmap, medoid_idx=None):
network_gasgrid_c, etrago.network.carriers, "Carrier"
)

network_gasgrid_c.consistency_check()
network_gasgrid_c.determine_network_topology()

# Adjust x and y coordinates of 'CH4' and 'H2_grid' medoids
@@ -493,32 +499,34 @@
if cluster in busmap[medoid_idx].values:
medoid = busmap[medoid_idx][
busmap[medoid_idx] == cluster
].index
].index[0]
h2_idx = network_gasgrid_c.buses.loc[
(network_gasgrid_c.buses.carrier == "H2_grid")
& (
network_gasgrid_c.buses.y
== network_gasgrid_c.buses.at[i, "y"]
== network_gasgrid_c.buses.loc[i, "y"]
)
& (
network_gasgrid_c.buses.x
== network_gasgrid_c.buses.at[i, "x"]
== network_gasgrid_c.buses.loc[i, "x"]
)
]
if len(h2_idx) > 0:
h2_idx = h2_idx.index.tolist()[0]
network_gasgrid_c.buses.at[
network_gasgrid_c.buses.loc[
h2_idx, "x"
] = etrago.network.buses["x"].loc[medoid]
network_gasgrid_c.buses.at[
network_gasgrid_c.buses.loc[
h2_idx, "y"
] = etrago.network.buses["y"].loc[medoid]
network_gasgrid_c.buses.at[i, "x"] = etrago.network.buses[
"x"
].loc[medoid]
network_gasgrid_c.buses.at[i, "y"] = etrago.network.buses[
"y"
].loc[medoid]

network_gasgrid_c.buses.loc[i, "x"] = etrago.network.buses.loc[
medoid, "x"
]
network_gasgrid_c.buses.loc[i, "y"] = etrago.network.buses.loc[
medoid, "y"
]

return (network_gasgrid_c, busmap)
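The switch to .loc together with .index[0] makes sure a single scalar label is used when looking up the medoid coordinates. A toy pandas illustration (data invented, not from the repository):

import pandas as pd

busmap_series = pd.Series({"bus_1": "cluster_7", "bus_2": "cluster_7"})
# pick one representative (medoid) label instead of passing the whole Index
medoid = busmap_series[busmap_series == "cluster_7"].index[0]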


Expand Down Expand Up @@ -859,6 +867,7 @@ def get_clustering_from_busmap(
A new gas grid pypsa.Network object with aggregated components based
on the bus mapping.
"""

network_gasgrid_c = Network()

# Aggregate buses
@@ -919,14 +928,17 @@

# import the links and the respective time series with the bus0 and bus1
# values updated from the busmap
io.import_components_from_dataframe(network_gasgrid_c, new_links, "Link")
io.import_components_from_dataframe(
network_gasgrid_c, new_links.loc[:, ~new_links.isna().all()], "Link"
)

if with_time:
for attr, df in network.links_t.items():
if not df.empty:
io.import_series_from_dataframe(
network_gasgrid_c, df, "Link", attr
)

return network_gasgrid_c


30 changes: 26 additions & 4 deletions etrago/cluster/spatial.py
@@ -29,10 +29,10 @@
import multiprocessing as mp

from networkx import NetworkXNoPath
from pypsa.networkclustering import (
_flatten_multiindex,
from pypsa.clustering.spatial import (
busmap_by_kmeans,
busmap_by_stubs,
flatten_multiindex,
get_clustering_from_busmap,
)
from sklearn.cluster import KMeans
@@ -108,6 +108,18 @@ def sum_with_inf(x):
return x.sum()


def strategies_buses():
return {
"geom": nan_links,
}


def strategies_lines():
return {
"geom": nan_links,
}


def strategies_one_ports():
return {
"StorageUnit": {
@@ -128,6 +140,8 @@ def strategies_one_ports():
"e_nom_min": np.sum,
"e_nom_max": sum_with_inf,
"e_initial": np.sum,
"e_min_pu": np.mean,
"e_max_pu": np.mean,
},
}

@@ -170,6 +184,11 @@ def strategies_links():
"country": nan_links,
"build_year": np.mean,
"lifetime": np.mean,
"min_up_time": np.mean,
"min_down_time": np.mean,
"up_time_before": np.mean,
"down_time_before": np.mean,
"committable": np.all,
}


@@ -235,8 +254,11 @@ def arrange_dc_bus0_bus1(network):
)
strategies = strategies_links()
strategies.update(cus_strateg)
strategies.pop("topo")
strategies.pop("geom")

new_df = links.groupby(grouper, axis=0).agg(strategies)
new_df.index = _flatten_multiindex(new_df.index).rename("name")
new_df.index = flatten_multiindex(new_df.index).rename("name")
new_df = pd.concat(
[new_df, network.links.loc[~links_agg_b]], axis=0, sort=False
)
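How the link strategy dictionary and the now-public flatten_multiindex helper interact can be sketched with toy data (values invented for illustration):

import numpy as np
import pandas as pd
from pypsa.clustering.spatial import flatten_multiindex

links = pd.DataFrame(
    {"p_nom": [100.0, 50.0], "min_up_time": [2.0, 4.0], "committable": [True, True]},
    index=["l1", "l2"],
)
# group two parallel links between the same pair of clustered buses
grouper = [
    pd.Series(["b0", "b0"], index=links.index),
    pd.Series(["b1", "b1"], index=links.index),
]
strategies = {"p_nom": np.sum, "min_up_time": np.mean, "committable": np.all}

new_df = links.groupby(grouper).agg(strategies)
new_df.index = flatten_multiindex(new_df.index).rename("name")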
Expand All @@ -256,7 +278,7 @@ def arrange_dc_bus0_bus1(network):
weighting.loc[df_agg.columns], axis=1
)
pnl_df = df_agg.groupby(grouper, axis=1).sum()
pnl_df.columns = _flatten_multiindex(pnl_df.columns).rename(
pnl_df.columns = flatten_multiindex(pnl_df.columns).rename(
"name"
)
new_pnl[attr] = pd.concat(
