updated temperature scripts
Sven Eggimann committed Mar 28, 2019
1 parent 170a237 commit 532ae59
Showing 6 changed files with 125 additions and 37 deletions.
1 change: 0 additions & 1 deletion energy_demand/geography/weather_region.py
@@ -676,6 +676,5 @@ def get_weather_station_selection(
# Not enough stations to select position in list
station_id = False
all_weather_stations_out = []
logging.debug("... no weather station found")

return all_weather_stations_out, station_id
10 changes: 10 additions & 0 deletions energy_demand/main.py
@@ -165,6 +165,16 @@ def energy_demand_model(
data['regions'] = read_data.get_region_names(name_region_set)
data['reg_coord'] = basic_functions.get_long_lat_decimal_degrees(read_data.get_region_centroids(name_region_set))
data['scenario_data']['population'] = data_loader.read_scenario_data(name_population_dataset, region_name='lad_uk_2016', value_name='population')

# Write out coordinates
statistics_to_print = []
for i, j in data['reg_coord'].items():
statistics_to_print.append("{},{},{}".format(i,j['latitude'], j['longitude']))
# Write info to txt
write_data.write_list_to_txt(
os.path.join("C:/AAA/_test.txt"),
statistics_to_print)

data['scenario_data']['gva_industry'] = data_loader.read_scenario_data_gva(name_gva_dataset, region_name='lad_uk_2016', value_name='gva_per_head', all_dummy_data=False)
data['scenario_data']['gva_per_head'] = data_loader.read_scenario_data(name_gva_dataset_per_head, region_name='lad_uk_2016', value_name='gva_per_head')
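The block added above dumps one "region,latitude,longitude" line per region to a hard-coded debug path via write_data.write_list_to_txt. A minimal sketch of reading that dump back for a quick check; that the file has no header row is an assumption based on the helper being fed plain strings:

import pandas as pd

# Hypothetical check of the coordinate dump written above (assumes one line per region, no header)
coords = pd.read_csv(
    "C:/AAA/_test.txt",
    names=["region_id", "latitude", "longitude"])
print(coords.head())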

@@ -30,7 +30,7 @@ def generate_weather_at_home_realisation(
"""
# Create result path
result_path_realizations = os.path.join(path_results, "_realizations")
result_path_realizations = "C:/AAA"
#result_path_realizations = "C:/AAA"
basic_functions.create_folder(result_path_realizations)

# Read in stiching table
@@ -42,11 +42,11 @@
# Realisations
realisations = list(df_path_stiching_table.columns)

attributes = ['rsds', 'wss']

for scenario_nr in scenarios:
realisation = realisations[scenario_nr]

attributes = ['t_min', 't_max'] #['rsds', 'wss']
#attributes = ['rsds', 'wss']
for attribute in attributes:
columns = ['timestep', 'station_id', 'longitude', 'latitude', 'yearday', attribute]

@@ -56,9 +56,9 @@
for sim_yr in years:
#print(" ... year: " + str(sim_yr), flush=True)
year = remap_year(sim_yr)

stiching_name = df_path_stiching_table[realisation][year]
path_weather_data = os.path.join(path_results, '_cleaned_csv', str(year), stiching_name)

path_attribute = os.path.join(path_weather_data, "{}.npy".format(attribute))
path_attribute_stations = os.path.join(path_results, '_cleaned_csv', "stations_{}.csv".format(attribute))
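For orientation, the loop above resolves each simulation year to a stitching entry and from there to per-attribute files on disk. A small self-contained sketch of the resulting paths for one year, with placeholder values standing in for the loop variables:

import os

# Sketch only; placeholders stand in for the loop variables above
path_results = "path/to/results"
year = 2035                       # after remap_year()
stiching_name = "example_entry"   # df_path_stiching_table[realisation][year]

path_weather_data = os.path.join(path_results, '_cleaned_csv', str(year), stiching_name)
path_t_min = os.path.join(path_weather_data, "t_min.npy")
path_t_min_stations = os.path.join(path_results, '_cleaned_csv', "stations_t_min.csv")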

@@ -6,6 +6,53 @@
import numpy as np
import pandas as pd

def convert_to_celcius(df, attribute_to_convert):
"""# Convert Kelvin to Celsius (# Kelvin to Celsius)
"""
df[attribute_to_convert] = df[attribute_to_convert].apply(pytemperature.k2c)
return df
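A minimal usage sketch for this helper, assuming pytemperature is available in the module (its k2c function converts Kelvin to Celsius):

import pandas as pd

# 273.15 K maps to 0 degrees C, 300 K to roughly 26.85 degrees C
df_example = pd.DataFrame({'tasmin': [273.15, 300.0]})
df_example = convert_to_celcius(df_example, 'tasmin')
print(df_example['tasmin'].tolist())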

def write_weather_data(data_list):
"""Write weather data to array
data_list : list
[[lat, long, yearday365, value]]
"""
station_coordinates = []

assert not len(data_list) % 365 #Check if dividable by 365
nr_stations = int(len(data_list) / 365)

stations_data = np.zeros((nr_stations, 365))
station_data = np.zeros((365))

station_id_cnt = 0
cnt = 0

for row in data_list:
station_data[cnt] = row[3]

if cnt == 364:

# 365 day data for weather station
stations_data[station_id_cnt] = station_data

# Weather station metadata
station_lat = row[0]
station_lon = row[1]

station_id = "station_id_{}".format(station_id_cnt)

#ID, latitude, longitude
station_coordinates.append([station_id, station_lat, station_lon])

# Reset
station_data = np.zeros((365))
station_id_cnt += 1
cnt = -1
cnt += 1

return station_coordinates, stations_data
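The function above assumes the flat input is ordered station by station, with exactly 365 consecutive day rows per station; a small self-contained sanity check of that contract:

# Two fake stations, 365 daily [lat, long, yearday365, value] rows each
fake_rows = [[50.0, -1.0, d, float(d)] for d in range(365)]
fake_rows += [[51.0, -2.0, d, 2.0 * d] for d in range(365)]

coords, values = write_weather_data(fake_rows)
assert values.shape == (2, 365)
assert coords == [['station_id_0', 50.0, -1.0], ['station_id_1', 51.0, -2.0]]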

def create_folder(path_folder, name_subfolder=None):
"""Creates folder or subfolder
@@ -43,32 +90,35 @@ def weather_dat_prepare(
path_realization = os.path.join(path_year, realization_name)

if os.path.exists(path_realization):
pass #already extracted
else:
#remove##pass #already extracted
#remove##else:
print("... processing {} {}".format(str(year), str(realization_name)), flush=True)
create_folder(path_realization)
#remove##create_folder(path_realization)

# ------------------------
# Original data to extract
# ------------------------

# Daily mean wind speed at 10 m above ground
path_wind = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_wss_daily_g2_{}.nc'.format(realization_name, year))
# Paths
#remove##path_wind = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_wss_daily_g2_{}.nc'.format(realization_name, year))
#remove##path_rsds = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_rsds_daily_g2_{}.nc'.format(realization_name, year))
path_tasmin = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_tasmin_daily_g2_{}.nc'.format(realization_name, year))
path_tasmax = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_tasmax_daily_g2_{}.nc'.format(realization_name, year))

#Daily mean incoming shortwave radiation at the surface
path_rsds = os.path.join(path_realizations, realization_name, 'daily', 'WAH_{}_rsds_daily_g2_{}.nc'.format(realization_name, year))
#remove##wss = get_temp_data_from_nc(path_wind, 'wss')
#remove##rsds = get_temp_data_from_nc(path_rsds, 'rsds')
df_min = get_temp_data_from_nc(path_tasmin, 'tasmin')
df_max = get_temp_data_from_nc(path_tasmax, 'tasmax')

# Load data
print(" ..load data", flush=True)
wss = get_temp_data_from_nc(path_wind, 'wss')
rsds = get_temp_data_from_nc(path_rsds, 'rsds')

# Write out weather stations
station_coordinates_wss = write_weather_stations(wss)
station_coordinates_rsrds = write_weather_stations(rsds)
#remove##station_coordinates_wss = write_weather_stations(wss)
#remove##station_coordinates_rsrds = write_weather_stations(rsds)
station_coordinates_tmin = write_weather_stations(df_min)
station_coordinates_tmax = write_weather_stations(df_max)

# Write weather coordinates
path_station_coordinates_wss = os.path.join(folder_results, "stations_wss.csv")
'''path_station_coordinates_wss = os.path.join(folder_results, "stations_wss.csv")
if os.path.exists(path_station_coordinates_wss):
pass
else:
@@ -81,20 +131,44 @@
else:
df = pd.DataFrame(station_coordinates_rsrds, columns=['station_id', 'latitude', 'longitude'])
df.to_csv(path_station_coordinates_rsrds, index=False)

'''
path_station_coordinates_t_min = os.path.join(folder_results, "stations_t_min.csv")
if os.path.exists(path_station_coordinates_t_min):
pass
else:
df = pd.DataFrame(station_coordinates_tmin, columns=['station_id', 'latitude', 'longitude'])
df.to_csv(path_station_coordinates_t_min, index=False)

path_station_coordinates_t_max = os.path.join(folder_results, "stations_t_max.csv")
if os.path.exists(path_station_coordinates_t_max):
pass
else:
df = pd.DataFrame(station_coordinates_tmax, columns=['station_id', 'latitude', 'longitude'])
df.to_csv(path_station_coordinates_t_max, index=False)

# Convert Kelvin to Celsius
df_min = convert_to_celcius(df_min, 'tasmin')
df_max = convert_to_celcius(df_max, 'tasmax')

# Convert 360 day to 365 days
print(" ..extend day", flush=True)
list_wss = extend_360_day_to_365(wss, 'wss')
list_rsds = extend_360_day_to_365(rsds, 'rsds')
#remove##list_wss = extend_360_day_to_365(wss, 'wss')
#remove##list_rsds = extend_360_day_to_365(rsds, 'rsds')
list_min = extend_360_day_to_365(df_min, 'tasmin')
list_max = extend_360_day_to_365(df_max, 'tasmax')

# Write out single weather stations as numpy array
print(" ..write out", flush=True)
data_wss = write_weather_data(list_wss)
data_rsds = write_weather_data(list_rsds)

#remove##data_wss = write_weather_data(list_wss)
#remove##data_rsds = write_weather_data(list_rsds)
t_min = write_weather_data(list_min)
t_max = write_weather_data(list_max)

# Write to csv
np.save(os.path.join(path_realization, "wss.npy"), data_wss)
np.save(os.path.join(path_realization, "rsds.npy"), data_rsds)
#remove##np.save(os.path.join(path_realization, "wss.npy"), data_wss)
#remove##np.save(os.path.join(path_realization, "rsds.npy"), data_rsds)
np.save(os.path.join(path_realization, "t_min.npy"), t_min)
np.save(os.path.join(path_realization, "t_max.npy"), t_max)

print("... finished cleaning weather data")

@@ -45,7 +45,6 @@ def spatially_map_data(
# Iterate geography and assign closest weather station data
closest_weather_ids = {}
for index in stations_to_map_to.index:

closest_weather_ids[index] = {}

# Marius weather station
@@ -77,16 +76,20 @@
data = data.set_index("station_id")

for year in range(2015, 2051):

print(" ... {}".format(year), flush=True)
data_yr = data.loc[data['timestep'] == year]

for index in stations_to_map_to.index:
#print(" ... {} {}".format(index, closest_weather_station_id), flush=True)
closest_weather_station_id = closest_weather_ids[index][name_attribute]
closest_weather_station_id = "station_id_0"
closest_data = data_yr.loc[closest_weather_station_id]

for yearday in range(1, 366):
value = closest_data.loc[closest_data['yearday'] == yearday][attribute]
for yearday in range(0, 365):
values_row = closest_data.loc[closest_data['yearday'] == yearday]
value = values_row[attribute].values[0]

#print(" ... value: {} attribute: {} {} {}".format(value, attribute, index, closest_weather_station_id), flush=True)
list_entry = (
stations_to_map_to.loc[index, 'region_id'],
stations_to_map_to.loc[index, 'Latitude'],
Expand All @@ -96,7 +99,7 @@ def spatially_map_data(
name_attribute,
value,
year,
yearday)
yearday + 1)

stations_to_map_to_list.append(list_entry)

@@ -114,7 +117,7 @@
'day'
])

result_file = os.path.join(result_out_path, "remapped_and_append_weather_data__{}.csv".format(scenario_nr))
result_file = os.path.join(result_out_path, "remapped_wind_and_solar__{}.csv".format(scenario_nr))
stations_to_map_to_out.to_csv(result_file, index=False)
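The reworked loop above iterates the 0-based yearday values stored in the realisation files while keeping the written day column 1-based via yearday + 1. A toy sketch of that lookup and offset (column names are illustrative):

import pandas as pd

# Toy frame mimicking part of one station-year from the realisation CSV
data_yr = pd.DataFrame({
    'station_id': ['station_id_0'] * 3,
    'yearday': [0, 1, 2],        # stored 0-based (0..364 in the real data)
    'wss': [4.2, 3.9, 5.1],
}).set_index('station_id')

closest_data = data_yr.loc['station_id_0']
yearday = 1
value = closest_data.loc[closest_data['yearday'] == yearday]['wss'].values[0]
day_out = yearday + 1            # the output 'day' column stays 1..365
print(value, day_out)            # 3.9 2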


@@ -33,8 +33,8 @@
path_input_coordinates = os.path.abspath("X:/nismod/data/energy_supply/regions_input.csv") # Path to file with coordinates to map on to

extract_data = False
stich_together = False
append_closest_weather_data = True
stich_together = True
append_closest_weather_data = False
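These three booleans act as a manual switch between pipeline stages that build on each other's outputs in path_results: extracting the raw weather@home data, stitching per-scenario realisations, and remapping onto the target regions. A sketch of running the first two stages back to back instead of toggling flags; the calls mirror the blocks below, and the third stage's arguments are not visible in this diff:

# extract_weather_data and create_realisation are imported as in the original script

# 1) Clean the raw weather@home files
extract_weather_data.weather_dat_prepare(
    path_extracted_files,
    path_results,
    years=range(2046, 2051))

# 2) Build the stitched realisations
create_realisation.generate_weather_at_home_realisation(
    path_results=path_results,
    path_stiching_table=path_stiching_table,
    scenarios=range(0, 21))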

if extract_data:
# =================================
Expand All @@ -43,7 +43,8 @@
# =================================
extract_weather_data.weather_dat_prepare(
path_extracted_files,
path_results)
path_results,
years=range(2046, 2051))
print("... finished extracting data")

if stich_together:
@@ -53,7 +54,8 @@
create_realisation.generate_weather_at_home_realisation(
path_results=path_results,
path_stiching_table=path_stiching_table,
scenarios=range(80, 101))
scenarios=range(0, 21))
#scenarios=range(28, 62))
print("... finished creating realisations")

if append_closest_weather_data:
