Commit

Merge branch 'master' of github.com:PyPSA/pypsa-eur into eurostat-march2024
fneum committed May 21, 2024
2 parents 288ccbd + fd7dcb2 commit c600d32
Showing 2 changed files with 17 additions and 14 deletions.
scripts/add_brownfield.py (12 changes: 5 additions & 7 deletions)
@@ -132,22 +132,20 @@ def disable_grid_expansion_if_limit_hit(n):
     minimum and extendable is turned off; the corresponding global
     constraint is then dropped.
     """
-    cols = {"cost": "capital_cost", "volume": "length"}
-    for limit_type in ["cost", "volume"]:
-        glcs = n.global_constraints.query(
-            f"type == 'transmission_expansion_{limit_type}_limit'"
-        )
+    types = {"expansion_cost": "capital_cost", "volume_expansion": "length"}
+    for limit_type in types:
+        glcs = n.global_constraints.query(f"type == 'transmission_{limit_type}_limit'")
 
         for name, glc in glcs.iterrows():
             total_expansion = (
                 (
                     n.lines.query("s_nom_extendable")
-                    .eval(f"s_nom_min * {cols[limit_type]}")
+                    .eval(f"s_nom_min * {types[limit_type]}")
                     .sum()
                 )
                 + (
                     n.links.query("carrier == 'DC' and p_nom_extendable")
-                    .eval(f"p_nom_min * {cols[limit_type]}")
+                    .eval(f"p_nom_min * {types[limit_type]}")
                     .sum()
                 )
             ).sum()
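Context for the rename above: PyPSA-Eur adds these limits as global constraints of type 'transmission_expansion_cost_limit' and 'transmission_volume_expansion_limit', with differing word order, which is what the new dict keys encode. A minimal sketch (plain Python; the constraint type names are restated here from PyPSA usage, not quoted from this commit) of the query strings each version builds:

    # Old pattern: f"transmission_expansion_{limit_type}_limit"
    old = [f"transmission_expansion_{t}_limit" for t in ["cost", "volume"]]
    # -> ['transmission_expansion_cost_limit', 'transmission_expansion_volume_limit']
    # The second name matches no constraint type, so the volume query found nothing.

    # New pattern: f"transmission_{limit_type}_limit" over the dict keys
    types = {"expansion_cost": "capital_cost", "volume_expansion": "length"}
    new = [f"transmission_{t}_limit" for t in types]
    # -> ['transmission_expansion_cost_limit', 'transmission_volume_expansion_limit']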
scripts/prepare_sector_network.py (19 changes: 12 additions & 7 deletions)
@@ -3634,15 +3634,13 @@ def set_temporal_aggregation(n, resolution, snapshot_weightings):
logger.info("Use every %s snapshot as representative", sn)
n.set_snapshots(n.snapshots[::sn])
n.snapshot_weightings *= sn
return n
else:
# Otherwise, use the provided snapshots
snapshot_weightings = pd.read_csv(
snapshot_weightings, index_col=0, parse_dates=True
)

n.set_snapshots(snapshot_weightings.index)
n.snapshot_weightings = snapshot_weightings

# Define a series used for aggregation, mapping each hour in
# n.snapshots to the closest previous timestep in
# snapshot_weightings.index
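The early return makes the nH branch self-contained. A runnable sketch of what that branch does, with illustrative values (the "3H" string and parsing it via int(resolution[:-1]) are assumptions, not quoted from the commit):

    import pandas as pd

    resolution = "3H"          # illustrative, e.g. from config
    sn = int(resolution[:-1])  # assumed parsing of the nH string

    snapshots = pd.date_range("2013-01-01", periods=9, freq="h")
    weightings = pd.Series(1.0, index=snapshots)

    # Keep every 3rd snapshot and scale its weighting so the total
    # number of weighted hours is unchanged.
    kept = weightings[::sn] * sn
    assert kept.sum() == weightings.sum()  # 9.0 weighted hours either way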
@@ -3656,16 +3654,23 @@ def set_temporal_aggregation(n, resolution, snapshot_weightings):
             .map(lambda i: snapshot_weightings.index[i])
         )
 
+        m = n.copy(with_time=False)
+        m.set_snapshots(snapshot_weightings.index)
+        m.snapshot_weightings = snapshot_weightings
+
         # Aggregate all time-varying data.
         for c in n.iterate_components():
+            pnl = getattr(m, c.list_name + "_t")
             for k, df in c.pnl.items():
                 if not df.empty:
                     if c.list_name == "stores" and k == "e_max_pu":
-                        c.pnl[k] = df.groupby(aggregation_map).min()
+                        pnl[k] = df.groupby(aggregation_map).min()
                     elif c.list_name == "stores" and k == "e_min_pu":
-                        c.pnl[k] = df.groupby(aggregation_map).max()
+                        pnl[k] = df.groupby(aggregation_map).max()
                     else:
-                        c.pnl[k] = df.groupby(aggregation_map).mean()
+                        pnl[k] = df.groupby(aggregation_map).mean()
+
+        return m
 
 
 def lossy_bidirectional_links(n, carrier, efficiencies={}):
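The aggregation itself is an ordinary pandas groupby over a map from each hourly snapshot to its representative timestep, and writing into the copy m leaves n untouched. A self-contained sketch of the pattern (the searchsorted construction mirrors the comment above; the index values and data are made up):

    import pandas as pd

    snapshots = pd.date_range("2013-01-01", periods=6, freq="h")
    representatives = pd.DatetimeIndex(["2013-01-01 00:00", "2013-01-01 04:00"])

    # Map each hour to the closest previous representative timestep.
    pos = representatives.searchsorted(snapshots, side="right") - 1
    aggregation_map = pd.Series(representatives[pos], index=snapshots)

    df = pd.DataFrame({"e_max_pu": [0.9, 0.7, 0.8, 0.6, 0.5, 0.4]}, index=snapshots)
    print(df.groupby(aggregation_map).mean())  # default: averages
    print(df.groupby(aggregation_map).min())   # stores' e_max_pu
    print(df.groupby(aggregation_map).max())   # stores' e_min_pu

Taking min for e_max_pu and max for e_min_pu, rather than the mean, keeps the aggregated bounds feasible for every hour a representative timestep stands in for.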
@@ -3818,7 +3823,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={})
if options["allam_cycle"]:
add_allam(n, costs)

set_temporal_aggregation(
n = set_temporal_aggregation(
n, snakemake.params.time_resolution, snakemake.input.snapshot_weightings
)
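The call-site change follows from the refactor: in the weightings branch the function now returns a new network m instead of mutating n, so the result must be rebound. A toy illustration of the failure mode the old call style would cause (plain Python, nothing PyPSA-specific):

    def halve(xs):
        return xs[::2]  # builds a new object, like the copy-based branch

    xs = list(range(6))
    halve(xs)           # old call style: return value dropped, xs unchanged
    assert xs == [0, 1, 2, 3, 4, 5]
    xs = halve(xs)      # new call style, as in the hunk above
    assert xs == [0, 2, 4]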

