Skip to content

Commit

Permalink
Merge pull request #632 from ekatef/fix_isolated_buses23
Browse files Browse the repository at this point in the history
Fix islanded buses and add clustering by networks - clean branch
  • Loading branch information
davide-f committed Mar 14, 2023
2 parents 6f01921 + 8b85e63 commit 8018eeb
Show file tree
Hide file tree
Showing 7 changed files with 233 additions and 20 deletions.
2 changes: 2 additions & 0 deletions config.default.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,8 @@ cluster_options:
exclude_carriers: []
remove_stubs: true
remove_stubs_across_borders: true
p_threshold_drop_isolated: 10 # [MW] isolated buses are discarded if the mean power of the bus is below this threshold
p_threshold_merge_isolated: 300 # [MW] isolated buses are merged into a single isolated bus if the mean power of the bus is above this threshold
cluster_network:
algorithm: kmeans
feature: solar+onwind-time
Expand Down
2 changes: 2 additions & 0 deletions config.tutorial.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ cluster_options:
exclude_carriers: []
remove_stubs: true
remove_stubs_across_borders: true
p_threshold_drop_isolated: 10 # [MW] isolated buses are discarded if the mean power of the bus is below this threshold
p_threshold_merge_isolated: 300 # [MW] isolated buses are merged into a single isolated bus if the mean power of the bus is above this threshold
cluster_network:
algorithm: kmeans
feature: solar+onwind-time
Expand Down
3 changes: 3 additions & 0 deletions doc/configtables/clustering.csv
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@
-- -- remove_stubs,bool,"true/false",
-- remove stubs,bool,"true/false","Controls whether radial parts of the network should be recursively aggregated. Defaults to true."
-- -- remove_stubs_across_borders,bool,"true/false","Controls whether radial parts of the network should be recursively aggregated across borders. Defaults to true."
-- -- p_threshold_drop_isolated,MW,float,"Isolated buses are discarded if the mean power of the bus is below the specified threshold."
-- -- p_threshold_merge_isolated,MW,float,"Isolated buses are merged into a single isolated bus if the mean power of the bus is above the specified threshold."

-- cluster_network,,,
-- -- algorithm,str,"One of {‘kmeans’, ‘hac’}","Clustering algorithm"
-- -- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.","Is used for hierarchical clustering"
Expand Down
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,8 @@ Upcoming Release

* Revise OSM cleaning to improve the cleaning process and error resilience `PR #620 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/620>`__

* Fix isolated buses when simplifying the network and add clustering by networks `PR #632 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/632>`__

PyPSA-Earth 0.1.0
=================

Expand Down
2 changes: 1 addition & 1 deletion scripts/build_demand_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ def upsample(cntry, group):
demand_profiles = demand_profiles.loc[start_date:end_date]
demand_profiles.to_csv(out_path, header=True)

logger.info(f"Demand_profiles csv file created for the corrisponding snapshots.")
logger.info(f"Demand_profiles csv file created for the corresponding snapshots.")


if __name__ == "__main__":
Expand Down
49 changes: 33 additions & 16 deletions scripts/cluster_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ def get_feature_for_hac(n, buses_i=None, feature=None):
if "offwind" in carriers:
carriers.remove("offwind")
carriers = np.append(
carriers, network.generators.carrier.filter(like="offwind").unique()
carriers, n.generators.carrier.filter(like="offwind").unique()
)

if feature.split("-")[1] == "cap":
Expand Down Expand Up @@ -249,13 +249,14 @@ def distribute_clusters(
n.loads_t.p_set.mean()
.groupby(n.loads.bus)
.sum()
.groupby([n.buses.country])
.groupby([n.buses.country, n.buses.sub_network])
.sum()
.pipe(normed)
)
assert len(L.index) == len(n.buses.country.unique()), (
countries_in_L = pd.unique(L.index.get_level_values(0))
assert len(countries_in_L) == len(n.buses.country.unique()), (
"The following countries have no load: "
f"{list(set(L.index).symmetric_difference(set(n.buses.country.unique())))}"
f"{list(set(countries_in_L).symmetric_difference(set(n.buses.country.unique())))}"
)
distribution_factor = L

Expand All @@ -264,11 +265,17 @@ def distribute_clusters(
columns={"name": "country"}
)
add_population_data(
df_pop_c, country_list, year, update, out_logging, nprocesses=nprocesses
df_pop_c, country_list, "standard", year, update, out_logging
)
P = df_pop_c.loc[:, ("country", "pop")]
P = P.groupby(P["country"]).sum().pipe(normed).squeeze()
distribution_factor = P
n_df = n.buses.copy()[["country", "sub_network"]]

pop_dict = P.set_index("country")["pop"].to_dict()
n_df["pop"] = n_df["country"].map(pop_dict)

distribution_factor = (
n_df.groupby(["country", "sub_network"]).sum().pipe(normed).squeeze()
)

if distribution_cluster == ["gdp"]:
df_gdp_c = gpd.read_file(inputs.country_shapes).rename(
Expand All @@ -281,12 +288,19 @@ def distribute_clusters(
out_logging,
name_file_nc="GDP_PPP_1990_2015_5arcmin_v2.nc",
)

G = df_gdp_c.loc[:, ("country", "gdp")]
G = G.groupby(df_gdp_c["country"]).sum().pipe(normed).squeeze()
distribution_factor = G
n_df = n.buses.copy()[["country", "sub_network"]]

gdp_dict = G.set_index("country")["gdp"].to_dict()
n_df["gdp"] = n_df["country"].map(gdp_dict)

distribution_factor = (
n_df.groupby(["country", "sub_network"]).sum().pipe(normed).squeeze()
)

# TODO: 1. Check if sub_networks can be added here i.e. ["country", "sub_network"]
N = n.buses.groupby(["country"]).size()
N = n.buses.groupby(["country", "sub_network"]).size()

assert (
n_clusters >= len(N) and n_clusters <= N.sum()
Expand Down Expand Up @@ -318,7 +332,7 @@ def distribute_clusters(

m = po.ConcreteModel()

def n_bounds(model, n_id):
def n_bounds(model, *n_id):
"""
Create a function that makes a bound pair for pyomo
Expand Down Expand Up @@ -379,7 +393,11 @@ def locate_bus(coords, co):
buses["gadm_{}".format(gadm_level)] = buses[["x", "y", "country"]].apply(
lambda bus: locate_bus(bus[["x", "y"]], bus["country"]), axis=1
)
busmap = buses["gadm_{}".format(gadm_level)]

buses["gadm_subnetwork"] = (
buses["gadm_{}".format(gadm_level)] + "_" + buses["carrier"].astype(str)
)
busmap = buses["gadm_subnetwork"]

return busmap

Expand Down Expand Up @@ -476,11 +494,10 @@ def reduce_network(n, buses):
def busmap_for_country(x):
# A number of the countries in the clustering can be > 1
if isinstance(n_clusters, pd.Series):
n_cluster_c = n_clusters[x.name]
if isinstance(x.name, tuple):
n_cluster_c = n_clusters[x.name[0]]
prefix = x.name[0] + x.name[1] + " "
else:
n_cluster_c = n_clusters[x.name]
prefix = x.name + " "
else:
n_cluster_c = n_clusters
Expand Down Expand Up @@ -519,8 +536,8 @@ def busmap_for_country(x):

return (
n.buses.groupby(
["country"],
# ["country", "sub_network"] # TODO: 2. Add sub_networks (see previous TODO)
# ["country"],
["country", "sub_network"], # TODO: 2. Add sub_networks (see previous TODO)
group_keys=False,
)
.apply(busmap_for_country)
Expand Down

0 comments on commit 8018eeb

Please sign in to comment.