Skip to content

Commit

Permalink
Merge pull request #981 from openego/features/#955-add-voltage-level-buildings-peak-load
Browse files Browse the repository at this point in the history

Add voltage level buildings peak load
  • Loading branch information
nailend committed Nov 15, 2022
2 parents 8f094a6 + 5da3c55 commit b5081f8
Show file tree
Hide file tree
Showing 4 changed files with 92 additions and 23 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,8 @@ Added
`#868 <https://github.com/openego/eGon-data/issues/868>`_
* Write simBEV metadata to DB table
`PR #978 <https://github.com/openego/eGon-data/pull/978>`_
* Add voltage level for electricity building loads
`#955 <https://github.com/openego/eGon-data/issues/955>`_

.. _PR #159: https://github.com/openego/eGon-data/pull/159
.. _PR #703: https://github.com/openego/eGon-data/pull/703
Expand Down Expand Up @@ -633,4 +635,3 @@ Bug Fixes
.. _#343: https://github.com/openego/eGon-data/issues/343
.. _#556: https://github.com/openego/eGon-data/issues/556
.. _#641: https://github.com/openego/eGon-data/issues/641
.. _#669: https://github.com/openego/eGon-data/issues/669
Original file line number Diff line number Diff line change
Expand Up @@ -247,12 +247,13 @@ class CtsDemandBuildings(Dataset):
def __init__(self, dependencies):
    """
    Set up the CtsDemandBuildings dataset.

    Parameters
    ----------
    dependencies : list
        Datasets this dataset depends on.
    """
    # Task graph: build the CTS buildings first, then disaggregate
    # electricity/heat demand and derive peak loads in parallel, and
    # finally annotate every building with its voltage level.
    task_graph = (
        cts_buildings,
        {cts_electricity, cts_heat},
        {get_cts_electricity_peak_load, get_cts_heat_peak_load},
        assign_voltage_level_to_buildings,
    )
    super().__init__(
        name="CtsDemandBuildings",
        version="0.0.1",
        dependencies=dependencies,
        tasks=task_graph,
    )

Expand Down Expand Up @@ -1356,7 +1357,6 @@ def cts_electricity():
write_table_to_postgres(
df_demand_share,
EgonCtsElectricityDemandBuildingShare,
engine=engine,
drop=True,
)
log.info("Profile share exported to DB!")
Expand Down Expand Up @@ -1395,7 +1395,6 @@ def cts_heat():
write_table_to_postgres(
df_demand_share,
EgonCtsHeatDemandBuildingShare,
engine=engine,
drop=True,
)
log.info("Profile share exported to DB!")
Expand Down Expand Up @@ -1466,7 +1465,6 @@ def get_cts_electricity_peak_load():
write_table_to_postgres(
df_peak_load,
BuildingElectricityPeakLoads,
engine=engine,
drop=False,
index=False,
if_exists="append",
Expand Down Expand Up @@ -1542,10 +1540,70 @@ def get_cts_heat_peak_load():
write_table_to_postgres(
df_peak_load,
BuildingHeatPeakLoads,
engine=engine,
drop=False,
index=False,
if_exists="append",
)

log.info(f"Peak load for {scenario} exported to DB!")


def assign_voltage_level_to_buildings():
    """
    Add voltage level to all buildings by summed peak demand.

    All entries sharing a building id receive the voltage level that
    corresponds to their summed residential and CTS peak demand.
    """
    # Fetch all building peak loads from the database.
    with db.session_scope() as session:
        cells_query = session.query(BuildingElectricityPeakLoads)

    df_peak_loads = pd.read_sql(
        cells_query.statement,
        cells_query.session.bind,
    )

    # Sum the sector peak loads per (building, scenario) pair.
    df_total = (
        df_peak_loads.groupby(["building_id", "scenario"])["peak_load_in_w"]
        .sum()
        .to_frame()
    )
    df_total["voltage_level"] = 0

    # Voltage-level thresholds defined in the eGon project (in W).
    # Everything at or below 0.1 MW is level 7; larger summed loads are
    # bumped up step by step, so the highest matching threshold wins.
    df_total.loc[df_total["peak_load_in_w"] <= 0.1 * 1e6, "voltage_level"] = 7
    for threshold_w, level in (
        (0.1 * 1e6, 6),
        (0.2 * 1e6, 5),
        (5.5 * 1e6, 4),
        (20 * 1e6, 3),
        (120 * 1e6, 1),
    ):
        df_total.loc[
            df_total["peak_load_in_w"] > threshold_w, "voltage_level"
        ] = level

    # Attach the per-building voltage level back to every sector row.
    df_peak_load = pd.merge(
        left=df_peak_loads.drop(columns="voltage_level"),
        right=df_total["voltage_level"],
        how="left",
        left_on=["building_id", "scenario"],
        right_index=True,
    )

    # Write peak loads into db: remove the table and replace it by the
    # newly annotated data.
    write_table_to_postgres(
        df_peak_load,
        BuildingElectricityPeakLoads,
        drop=True,
        index=False,
        if_exists="append",
    )
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,7 @@ class BuildingElectricityPeakLoads(Base):
scenario = Column(String, primary_key=True)
sector = Column(String, primary_key=True)
peak_load_in_w = Column(REAL)
voltage_level = Column(Integer, index=True)


def match_osm_and_zensus_data(
Expand Down
43 changes: 26 additions & 17 deletions src/egon/data/datasets/electricity_demand_timeseries/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import numpy as np
import pandas as pd

from egon.data import db
from egon.data import db, logger

engine = db.engine()

Expand Down Expand Up @@ -139,21 +139,20 @@ def psql_insert_copy(table, conn, keys, data_iter):


def write_table_to_postgres(
    df, db_table, drop=False, index=False, if_exists="append"
):
    """
    Append dataframe data to a table in the db. Fast string-copy is used.

    Only the columns defined on ``db_table`` are written; dataframe columns
    the table does not define are dropped, while table columns missing from
    the dataframe are logged as a warning. Dtypes of columns are taken from
    the table definition. The writing process happens in a scoped session.

    Parameters
    ----------
    df: pd.DataFrame
        Table of data
    db_table: declarative_base
        Metadata of db table to export to
    drop: boolean, default False
        Drop db-table before appending
    index: boolean, default False
        Write the dataframe index as a column
    if_exists: str, default "append"
        - append: If table exists, insert data. Create if does not exist.
    """
    logger.info("Write table to db")

    # Only take in db table defined columns and dtypes.
    columns = {
        column.key: column.type for column in db_table.__table__.columns
    }

    # Take only the columns defined in class; pandas raises a KeyError
    # if a table-defined column is missing from the dataframe.
    try:
        df = df.loc[:, columns.keys()]
    except KeyError:
        available = df.columns.intersection(columns.keys())
        # Bug fix: report the table columns that are absent from the
        # dataframe. The previous symmetric_difference with df.columns
        # logged the dataframe's *extra* columns instead.
        missing = pd.Index(columns.keys()).difference(df.columns)
        logger.warning(f"Columns: {missing.values} missing!")
        df = df.loc[:, available]

    if drop:
        # Recreate the table from scratch before writing.
        db_table.__table__.drop(bind=engine, checkfirst=True)
        db_table.__table__.create(bind=engine)

    with db.session_scope() as session:
        df.to_sql(
            name=db_table.__table__.name,
            schema=db_table.__table__.schema,
            con=session.connection(),
            if_exists=if_exists,
            index=index,
            method=psql_insert_copy,
            dtype=columns,
        )

0 comments on commit b5081f8

Please sign in to comment.