
Commit

finished weather scripts before pulling them out
Sven Eggimann committed Apr 1, 2019
1 parent 9d3e34f commit 210ef16
Showing 6 changed files with 66 additions and 48 deletions.
14 changes: 9 additions & 5 deletions energy_demand/main.py
@@ -158,14 +158,15 @@ def energy_demand_model(
# ----------------------------------------------------------------------
# Load data
# ----------------------------------------------------------------------
+ region_name_attribute = 'lad_uk_2016'
data['scenario_data'] = defaultdict(dict)
data['enduses'], data['sectors'], data['fuels'], lookup_enduses, lookup_sector_enduses = data_loader.load_fuels(data['paths'])
data['regions'] = read_data.get_region_names(name_region_set)
data['reg_coord'] = basic_functions.get_long_lat_decimal_degrees(read_data.get_region_centroids(name_region_set))
- data['scenario_data']['population'] = data_loader.read_scenario_data(name_population_dataset, region_name='lad_uk_2016', value_name='population')
+ data['scenario_data']['population'] = data_loader.read_scenario_data(name_population_dataset, region_name=region_name_attribute, value_name='population')

- data['scenario_data']['gva_industry'] = data_loader.read_scenario_data_gva(name_gva_dataset, region_name='lad_uk_2016', value_name='gva_per_head', all_dummy_data=False)
- data['scenario_data']['gva_per_head'] = data_loader.read_scenario_data(name_gva_dataset_per_head, region_name='lad_uk_2016', value_name='gva_per_head')
+ data['scenario_data']['gva_industry'] = data_loader.read_scenario_data_gva(name_gva_dataset, region_name=region_name_attribute, value_name='gva_per_head', all_dummy_data=False)
+ data['scenario_data']['gva_per_head'] = data_loader.read_scenario_data(name_gva_dataset_per_head, region_name=region_name_attribute, value_name='gva_per_head')

# -----------------------------
# Assumptions
@@ -272,6 +273,7 @@ def energy_demand_model(
regions=data['regions'],
weather_realisation=weather_realisation,
path_weather_data=path_weather_data,
+ region_name_attribute=region_name_attribute,
same_base_year_weather=False)

# ------------------------------------------------------------
@@ -401,7 +403,6 @@ def energy_demand_model(
# Main model run function
# -----------------------
for sim_yr in data['assumptions'].sim_yrs:

print("Local simulation for year: " + str(sim_yr))
setattr(data['assumptions'], 'curr_yr', sim_yr) # Set current year

@@ -459,7 +460,7 @@ def energy_demand_model(
config['CRITERIA']['plot_crit'])

# -------------------------------------
- # # Generate YAML file with keynames for `sector_model`
+ # Generate outputs
# -------------------------------------
if config['CRITERIA']['mode_constrained']:
supply_results = demand_supply_interaction.constrained_results(
@@ -557,6 +558,9 @@ def energy_demand_model(

print("... Finished writing results to file")

+ for key_name in sim_obj.supply_results.keys():
+     print("KEYNAME " + str(key_name))

print("-------------------------")
print("... Finished running HIRE")
print("-------------------------")
9 changes: 5 additions & 4 deletions energy_demand/read_write/data_loader.py
@@ -886,6 +886,7 @@ def load_temp_data(
regions,
weather_realisation,
path_weather_data,
+ region_name_attribute,
same_base_year_weather=False,
):
"""Read in cleaned temperature and weather station data
@@ -932,11 +933,11 @@
df_timestep_t_max = df_t_max.loc[df_t_max['timestep'] == weather_data_yr]

for region in regions:
- df_timestep_station_t_min = df_timestep_t_min.loc[df_timestep_t_min['region'] == region]
- df_timestep_station_t_max = df_timestep_t_max.loc[df_timestep_t_max['region'] == region]
+ df_timestep_station_t_min = df_timestep_t_min.loc[df_timestep_t_min[region_name_attribute] == region]
+ df_timestep_station_t_max = df_timestep_t_max.loc[df_timestep_t_max[region_name_attribute] == region]

- t_min = list(df_timestep_station_t_min['value'].values)
- t_max = list(df_timestep_station_t_max['value'].values)
+ t_min = list(df_timestep_station_t_min['t_min'].values)
+ t_max = list(df_timestep_station_t_max['t_max'].values)

temp_data_short[sim_yr][region] = {
't_min': np.array(t_min),
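
For illustration (not part of the commit): the selection pattern the load_temp_data change above implements, on a toy long-format frame — filter on a configurable region column, then pull the renamed 't_min' value column. The 'region_id' and 'timestep' column names and the LAD codes are made up for the sketch:

import numpy as np
import pandas as pd

region_name_attribute = 'region_id'
df_t_min = pd.DataFrame({
    'timestep': [2015, 2015, 2015],
    'region_id': ['E06000001', 'E06000001', 'E06000002'],
    't_min': [1.2, 0.8, 2.1]})

# Keep one weather year, then slice out each region's daily series
df_timestep_t_min = df_t_min.loc[df_t_min['timestep'] == 2015]
for region in ['E06000001', 'E06000002']:
    df_region = df_timestep_t_min.loc[df_timestep_t_min[region_name_attribute] == region]
    t_min = np.array(df_region['t_min'].values)
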
(next changed file)
@@ -4,7 +4,23 @@
import pandas as pd
import numpy as np

- from energy_demand.basic import basic_functions
+ def create_folder(path_folder, name_subfolder=None):
+     """Creates folder or subfolder
+     Arguments
+     ----------
+     path_folder : str
+         Path to folder
+     name_subfolder : str, default=None
+         Name of subfolder to create
+     """
+     if not name_subfolder:
+         if not os.path.exists(path_folder):
+             os.makedirs(path_folder)
+     else:
+         path_result_subolder = os.path.join(path_folder, name_subfolder)
+         if not os.path.exists(path_result_subolder):
+             os.makedirs(path_result_subolder)

def remap_year(year):
"""Remap year"""
@@ -23,6 +39,7 @@ def generate_weather_at_home_realisation(
path_stiching_table,
base_yr_remapped_weather_path,
scenarios=range(100),
+ attributes=['t_min', 't_max', 'rsds', 'wss'],
years=range(2015, 2051)
):
"""
@@ -31,7 +48,7 @@
# Create result path
result_path_realizations = os.path.join(path_results, "_realizations")

- basic_functions.create_folder(result_path_realizations)
+ create_folder(result_path_realizations)
result_path_realizations = "C:/AAA"
# Read in stiching table
df_path_stiching_table = pd.read_table(path_stiching_table, sep=" ")
@@ -44,8 +61,6 @@

for scenario_nr in scenarios:
realisation = realisations[scenario_nr]

- attributes = ['t_min', 't_max']#, 'rsds', 'wss']
for attribute in attributes:
columns = ['timestep', 'station_id', 'longitude', 'latitude', 'yearday', attribute]
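
A side note on the new attributes default above: a list default is created once and shared across calls. The lines shown here never mutate it, so this is a style observation rather than a bug fix; a short sketch of the more defensive None-sentinel idiom, with a hypothetical function name:

def generate_realisation_sketch(scenarios=range(100), attributes=None):
    # Avoid a shared mutable default; build the list per call instead
    if attributes is None:
        attributes = ['t_min', 't_max', 'rsds', 'wss']
    for scenario_nr in scenarios:
        for attribute in attributes:
            pass  # process one attribute of one realisation here
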

(next changed file)
@@ -5,11 +5,27 @@
import pandas as pd
from haversine import haversine

- from energy_demand.basic import basic_functions
+ def create_folder(path_folder, name_subfolder=None):
+     """Creates folder or subfolder
+     Arguments
+     ----------
+     path_folder : str
+         Path to folder
+     name_subfolder : str, default=None
+         Name of subfolder to create
+     """
+     if not name_subfolder:
+         if not os.path.exists(path_folder):
+             os.makedirs(path_folder)
+     else:
+         path_result_subolder = os.path.join(path_folder, name_subfolder)
+         if not os.path.exists(path_result_subolder):
+             os.makedirs(path_result_subolder)

def spatially_map_data(
path_results,
- result_folder,
+ result_out_path,
path_weather_at_home_stations,
path_input_coordinates,
attributes,
@@ -19,9 +35,7 @@ def spatially_map_data(

# Path to scenarios
path_to_scenario_data = os.path.join(path_results, '_realizations')
- result_out_path = os.path.join(path_results, result_folder)
- basic_functions.create_folder(result_out_path)
- result_out_path = "C:/AAA/energy_demand"
+ create_folder(result_out_path)

# Read in input stations and coordinages to map
stations_to_map_to = pd.read_csv(path_input_coordinates)
@@ -40,8 +54,7 @@ def spatially_map_data(
for index in stations_grid_cells.index:
stations_grid_cells_dict[index] = {
'longitude': stations_grid_cells.loc[index, 'longitude'],
- 'latitude': stations_grid_cells.loc[index, 'latitude']
- }
+ 'latitude': stations_grid_cells.loc[index, 'latitude']}

weather_stations_per_attribute[attribute] = stations_grid_cells_dict

@@ -64,7 +77,7 @@ def spatially_map_data(
closest_weather_ids[index][attribute] = closest_marius_station

# ----------------------------------------
- # Temperature data
+ # Get data
# ----------------------------------------
for scenario_nr in scenarios:
scenario_name = scenario_names[scenario_nr]
@@ -87,7 +100,7 @@ def spatially_map_data(
data_yr = data.loc[data['timestep'] == year]

for index in stations_to_map_to.index:
- region_name = stations_to_map_to.loc[index, 'region_name']
+ region_name = stations_to_map_to.loc[index, 'region_id']
closest_weather_station_id = closest_weather_ids[index][attribute]
closest_data = data_yr.loc[closest_weather_station_id]
closest_data_list = list(closest_data.values)
@@ -100,20 +113,6 @@ def spatially_map_data(
year,
int(row[position_yearday])])

- '''for yearday in range(0, 365):
- values_row = closest_data.loc[closest_data['yearday'] == yearday]
- value = values_row[attribute].values[0]
- #print(" ... value: {} attribute: {} {} {}".format(value, attribute, index, closest_weather_station_id), flush=True)
- list_entry = (
- index,
- scenario_nr,
- attribute,
- value,
- year,
- yearday)
- stations_to_map_to_list.append(list_entry)'''

# ----------------------------------------------------------
# Write out data
# ----------------------------------------------------------
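
For illustration (not part of the commit): how a closest weather grid cell can be picked per target station with the haversine package imported above — this mirrors the closest_weather_ids matching, but the station ids and coordinates below are invented:

from haversine import haversine  # haversine((lat, lon), (lat, lon)) returns km

weather_stations = {
    101: {'latitude': 51.5, 'longitude': -0.1},
    102: {'latitude': 53.5, 'longitude': -2.2}}

def closest_station(lat, lon, stations):
    """Return the station id with the smallest great-circle distance."""
    return min(
        stations,
        key=lambda sid: haversine((lat, lon), (stations[sid]['latitude'], stations[sid]['longitude'])))

closest_id = closest_station(52.2, 0.1, weather_stations)  # 101 for these toy coordinates
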
21 changes: 10 additions & 11 deletions energy_demand/scripts/weather_at_home_data_processing/workflow.py
@@ -39,14 +39,13 @@
path_results = "X:/nismod/data/energy_supply/weather_files" # Path to store results
base_yr_remapped_weather_path = "X:/nismod/data/energy_demand/J-MARIUS_data/_weather_data_cleaned/2015_remapped"

- #
- result_folder ='_spatially_mapped_supply_data'
+ # Energy supply data
+ result_folder = "C:/AAA/energy_supply"
path_input_coordinates = os.path.abspath("X:/nismod/data/energy_supply/regions_input_supply_model.csv") # Path to file with coordinates to map on to

- #
- result_folder ='_spatially_mapped_demand_data'
- path_input_coordinates = os.path.abspath("X:/nismod/data/energy_supply/regions_energy_demand_model.csv") # Path to file with coordinates to map on to

+ # Energy demand data
+ #result_folder ='_spatially_mapped_demand_data'
+ #path_input_coordinates = os.path.abspath("X:/nismod/data/energy_supply/regions_energy_demand_model.csv") # Path to file with coordinates to map on to

extract_data = False
stich_together = False
@@ -71,8 +70,8 @@
path_results=path_results,
path_stiching_table=path_stiching_table,
base_yr_remapped_weather_path=base_yr_remapped_weather_path,
- scenarios=[27])#range(47, 61))
- #scenarios=range(28, 62))
+ attributes=['t_min', 't_max', 'rsds', 'wss'],
+ scenarios=range(0, 100))
print("... finished creating realisations")

if append_closest_weather_data:
@@ -81,9 +80,9 @@
# =================================
map_weather_data.spatially_map_data(
path_results=path_results,
- result_folder=result_folder,
+ result_out_path=result_folder,
path_weather_at_home_stations=os.path.join(path_results, "_cleaned_csv"),
path_input_coordinates=path_input_coordinates,
- attributes=['t_min', 't_max'],# 'wss','rsds'], #['wss','rsds'], #
- scenarios=range(10))
+ attributes=['t_min', 't_max', 'wss', 'rsds'],
+ scenarios=range(0, 100))
print("... append closest weather information")
2 changes: 1 addition & 1 deletion local_run_config_file.ini
@@ -12,7 +12,7 @@ user_defined_weather_by = 2015

[CRITERIA]
cluster_calc = False
- mode_constrained = True
+ mode_constrained = False
virtual_building_stock_criteria = True
write_out_national = False
reg_selection = False
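
If local_run_config_file.ini is parsed with the standard-library configparser — an assumption; the repository may convert option types in its own loader — flags such as mode_constrained arrive as strings, so boolean reads should go through getboolean(). A minimal sketch:

import configparser

config = configparser.ConfigParser()
config.read('local_run_config_file.ini')

# config['CRITERIA']['mode_constrained'] would be the string 'False' (which is truthy);
# getboolean() converts it to an actual bool.
mode_constrained = config['CRITERIA'].getboolean('mode_constrained')
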
