Multiple Changes. Working on Technological stock, assumptions....
eggimasv authored and eggimasv committed Mar 9, 2017
1 parent 057d15f commit cc2e6f6
Showing 11 changed files with 682 additions and 215 deletions.
Binary file not shown.
49 changes: 49 additions & 0 deletions docs/residential_notes.txt
@@ -0,0 +1,49 @@


# Vessels for yearly data calculation.


1. Yearly Fuel Data Array [YEAR_FUEL_ARRAY]
---------------------------
For every end use, create the following array and store it in the yearly dict:
array (Fuel_Type, reg, End_Use, YEARLYDATA)
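
A minimal numpy sketch of this container (one possible reading; names and sizes are illustrative, not taken from the model):

import numpy as np
n_fuel_types, n_regions = 3, 2
yearly_dict = {}                                                     # one array per end use
yearly_dict['space_heating'] = np.zeros((n_fuel_types, n_regions))   # yearly totals per fuel type and region
yearly_dict['space_heating'][1, 0] = 1234.5                          # e.g. yearly gas demand of region 0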


2. Yearly & Hourly Load Shape for every end_use [LOAD_SHAPE_REGULAR_YEARLY]
---------------------------
For every end use create the following array:

array: (yearday, hour, % of tot)

--> Assign an energy demand to every day (this also works if only monthly data are available)
--> Sum over the full year
--> Calculate each hour's percentage of the yearly total
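
A minimal numpy sketch of these three steps (the daily input data are illustrative only):

import numpy as np
daily_demand = np.random.rand(365, 24)      # assigned energy demand per yearday and hour
yearly_total = daily_demand.sum()           # sum over the full year
load_shape = daily_demand / yearly_total    # array (yearday, hour, % of tot); entries sum to 1.0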

3. Peak hourly load shape [LOAD_SHAPE_DAILY_PEAK]
---------------------------
# PEAK: Hourly Load Shape for every end_use for peak day:

array(hour, % of day)

--> Calculate the maximum daily demand (peak day) relative to the total yearly regular demand
--> The ratio between the total yearly demand and the peak day allows deriving the peak from any total demand
--> Use the hourly distribution of the peak day
--> This way, the daily peak load can be derived.
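
A minimal numpy sketch of the peak-day calculation (illustrative input data):

import numpy as np
daily_demand = np.random.rand(365, 24)                               # regular demand per yearday and hour
peak_day = daily_demand.sum(axis=1).argmax()                         # yearday with the maximum daily demand
peak_day_share = daily_demand[peak_day].sum() / daily_demand.sum()   # peak day as share of the yearly total
peak_shape = daily_demand[peak_day] / daily_demand[peak_day].sum()   # array (hour, % of day)
# For any yearly total: peak-day demand = total * peak_day_share, distributed over hours with peak_shape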

4. Resulting Array from every Sub-Module
----------------------------------------

array: (fuel_type, reg, enduse, yearday, hour)

A. Adapt YEAR_FUEL_ARRAY with the scenario drivers
B. Iterate over







# a.shape --> (3, 2, 2, 2, 24)
iterate over all fuels: a[:, 0, 0, 0]
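
A runnable version of this snippet (the dimensions are illustrative only):

import numpy as np
a = np.zeros((3, 2, 2, 2, 24))              # (fuel_type, reg, enduse, yearday, hour)
print(a.shape)                              # (3, 2, 2, 2, 24)
print(a[:, 0, 0, 0].shape)                  # (3, 24): all fuel types for reg 0, enduse 0, yearday 0
for fuel_type in range(a.shape[0]):         # iterate over all fuels
    print(fuel_type, a[fuel_type, 0, 0, 0].sum())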

136 changes: 104 additions & 32 deletions energy_demand/assumptions.py
@@ -13,41 +13,118 @@ def load_assumptions(data):
-----
"""
-assumptions_dict = {}
+assump_dict = {}


# Load assumptions from csv files
dwtype_floorarea = data['dwtype_floorarea']


# ============================================================
-# Assumptions Technological stock
+# Assumptions Technological Stock
# ============================================================

# eff_by: Efficiencies of technologies in the base year
# eff_ey: Efficiencies of technologies in the end year

# -----------------
# Efficiencies
# -----------------

## Efficiencies residential, base year
eff_by = {
'boiler_A' : 0.7,
'boiler_B' : 0.8,
'new_tech_A': 0.1,
'tech_A' : 0.5,
'tech_B' : 0.5,
'tech_C': 0.0,
'tech_D' : 0.5,
'tech_E' : 0.5,
'tech_F': 0.0
}

# Efficiencies residential, end year
eff_ey = {
'boiler_A' : 0.7,
'boiler_B' : 0.9,
'new_tech_A': 0.2,
'tech_A' : 0.5,
'tech_B' : 0.5,
'tech_C': 0.0,
'tech_D' : 0.5,
'tech_E' : 0.5,
'tech_F': 0.0
}
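
# Illustration only (hypothetical, not stored in assump_dict): a higher end-year
# efficiency means less fuel is needed to deliver the same useful energy, e.g. for 'boiler_B':
example_fuel_scaling_boiler_B = eff_by['boiler_B'] / eff_ey['boiler_B']   # 0.8 / 0.9, roughly 0.89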

# Share of fuel types (base year could also be calculated and loaded)
fuel_type_p_by = {
'space_heating_solid' : 0.0182,
'space_heating_gas' : 0.7633,
'space_heating_elec' : 0.0791,
'space_heating_oil' : 0.0811,
'space_heating_HEATSOLD' : 0.0,
'space_heating_renew' : 0.0581,
'space_heating_hydro' : 0.0,
}

fuel_type_p_ey = {
'space_heating_solid' : 0.0182,
'space_heating_gas' : 0.7633,
'space_heating_elec' : 0.0791,
'space_heating_oil' : 0.0811,
'space_heating_HEATSOLD' : 0.0,
'space_heating_renew' : 0.0581,
'space_heating_hydro' : 0.0,
}


# ----------------------------------
# Fraction of technologies
# ----------------------------------
# p_tech_by : Share of each technology in the base year (fraction, sums to 1.0)
# p_tech_ey : Share of each technology in the end year (fraction, sums to 1.0)

# Residential, base year
p_tech_by = {
'boiler_A' : 0.5,
'boiler_B' : 0.5,
'new_tech_A': 0.0
}

tech_market_year = {
'new_tech_A': 2000
}

tech_saturation_year = {
'new_tech_A': 2017
}


# Check that the base-year technology shares sum to 1.0 (100 %)
assert p_tech_by['boiler_A'] + p_tech_by['boiler_B'] == 1.0

# Residential, end year
p_tech_ey = {
'boiler_A' : 0.4,
'boiler_B' : 0.5,
'new_tech_A' : 0.1
}
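
# Illustration only (hypothetical, not stored in assump_dict): the share of a new
# technology in a simulation year could, for example, be interpolated linearly
# between its market introduction year and its saturation year.
example_sim_year = 2010
example_frac = (example_sim_year - tech_market_year['new_tech_A']) / float(tech_saturation_year['new_tech_A'] - tech_market_year['new_tech_A'])
example_frac = min(max(example_frac, 0.0), 1.0)
example_share_new_tech_A = p_tech_by['new_tech_A'] + example_frac * (p_tech_ey['new_tech_A'] - p_tech_by['new_tech_A'])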


# Add dictionaries to assumptions
assump_dict['eff_by'] = eff_by
assump_dict['eff_ey'] = eff_ey
assump_dict['p_tech_by'] = p_tech_by
assump_dict['p_tech_ey'] = p_tech_ey
assump_dict['tech_market_year'] = tech_market_year
assump_dict['tech_saturation_year'] = tech_saturation_year
assump_dict['fuel_type_p_by'] = fuel_type_p_by
assump_dict['fuel_type_p_ey'] = fuel_type_p_ey

-# -- Efficiencies
-
-# Residential - Appliances
-eff_boiler_A_by = 0.5 # Efficiency of technology
-
-# -- Share of technology in base year [in %]
-distr_boiler_A_by = 0.5 # [%]
-
-# Share of technology in the year 2060
-distr_boiler_A_by =
-
-assumptions_dict['distr_e_boiler_A'] = generate_distr(eff_boiler_A_by,

# ============================================================
-# Residential model
+# Assumptions Residential Building Stock
# ============================================================

# Building stock related
@@ -57,14 +134,9 @@ def load_assumptions(data):








# Add to dictionary
-assumptions_dict['assump_change_floorarea_pp'] = assump_change_floorarea_pp
-assumptions_dict['assump_dwtype_distr_ey'] = assump_dwtype_distr_ey
-assumptions_dict['assump_dwtype_floorarea'] = assump_dwtype_floorarea
+assump_dict['assump_change_floorarea_pp'] = assump_change_floorarea_pp
+assump_dict['assump_dwtype_distr_ey'] = assump_dwtype_distr_ey
+assump_dict['assump_dwtype_floorarea'] = assump_dwtype_floorarea

-return assumptions_dict
+return assump_dict
99 changes: 97 additions & 2 deletions energy_demand/load_profiles_generator.py
@@ -8,7 +8,76 @@
import unittest
import matplotlib.pyplot as plt

# Yearly

# Daily load profiles: (dtype, hour, data)



'''
def get_max_daily_load(main_dict_dayyear):
    """Return, for each daytype, the daily load curve that contains the highest hourly demand."""
    result_max = {0: None, 1: None}
    # Iterate days to find the day with the maximum hourly demand per daytype
    for daytype in main_dict_dayyear:
        max_p_demand_of_an_hour = 0
        daytype_max = None
        for day in main_dict_dayyear[daytype]:
            daily_max = max(main_dict_dayyear[daytype][day])
            if daily_max > max_p_demand_of_an_hour:
                max_p_demand_of_an_hour = daily_max
                daytype_max = day
        if daytype_max is not None:
            result_max[daytype] = main_dict_dayyear[daytype][daytype_max]
    return result_max
'''
'''
def ABSOLUTE(main_dict_dayyear_absolute, hourly_gas_shape_wkday, hourly_gas_shape_wkend, heating_demand_correlation):
    """
    This function creates the shape of the base year heating demand over the full year
    """
    # The hourly distributions (Sansom data) and the heating demand correlation are passed in as arguments
    year_days, hours = 365, 24

    # Initialise array with every day and hour
    hd_data = np.zeros((year_days, hours), dtype=float)

    for yearday in main_dict_dayyear_absolute:
        _info = main_dict_dayyear_absolute[yearday].timetuple() # Get date
        yearday_python = _info[7] - 1 # -1 because in _info, 1 Jan = 1
        weekday = _info[6] # 0: Monday

        # Distribute daily demand into hourly demand (weekend vs. weekday gas shape)
        if weekday == 5 or weekday == 6:
            hd_data[yearday_python] = hourly_gas_shape_wkend * heating_demand_correlation
        else:
            hd_data[yearday_python] = hourly_gas_shape_wkday * heating_demand_correlation

    # Convert yearly data into percentages (create shape): assign every hour its
    # share of the total yearly heating demand
    total_y_hd = hd_data.sum() # Yearly total demand over all days and hours
    shape_hd = (1.0 / total_y_hd) * hd_data
    return shape_hd
'''

def read_in_non_residential_load_curves():
"""
@@ -23,8 +92,8 @@ def read_in_non_residential_load_curves():
# out_dict_av: Every daily measurement is taken from all files and averaged
# out_dict_not_average: Every measurement of every file is plotted

-folder_path = r'C:\Users\cenv0553\Dropbox\00-Office_oxford\07-Data\09_Carbon_Trust_advanced_metering_trial_(owen)\_all_gas' #Community
-#folder_path = r'C:\Users\cenv0553\Dropbox\00-Office_oxford\07-Data\09_Carbon_Trust_advanced_metering_trial_(owen)\__OWN_SEWAGE' #Community
+#folder_path = r'C:\Users\cenv0553\Dropbox\00-Office_oxford\07-Data\09_Carbon_Trust_advanced_metering_trial_(owen)\_all_gas' #Community
+folder_path = r'C:\Users\cenv0553\Dropbox\00-Office_oxford\07-Data\09_Carbon_Trust_advanced_metering_trial_(owen)\__OWN_SEWAGE' #Community

all_csv_in_folder = os.listdir(folder_path) # Get all files in folder

@@ -37,6 +106,14 @@ def read_in_non_residential_load_curves():
month_dict[f] = day_dict
main_dict[i] = month_dict

# Initialise yearday dict
main_dict_dayyear_absolute = {}
for f in range(365):
day_dict_h = {k: [] for k in range(24)}
main_dict_dayyear_absolute[f] = day_dict_h



# Dict
dict_result = {0: {}, 1: {}}

@@ -61,6 +138,9 @@ def read_in_non_residential_load_curves():
# Test if file has correct form
if len(row) != 49: # Skip row
continue

# Test if file has 365 entries:


#try:
# Convert all values except date into float values
@@ -89,6 +169,10 @@ def read_in_non_residential_load_curves():
# Daytype
daytype = mf.get_weekday_type(date_row)

# Get Dayyear
_info = date_row.timetuple()
yearday_python = _info[7] - 1 # - 1 because in _info: 1.Jan = 1

# Month Python
month_python = month - 1

@@ -100,6 +184,10 @@ def read_in_non_residential_load_curves():
if cnt == 2:
control_sum += abs(first_half_hour + half_hour_val)
main_dict[daytype][month_python][h_day].append((first_half_hour + half_hour_val) * (100 / daily_sum)) # Calc percent of total daily demand

# Add to day_dict absolute numbers
#main_dict_dayyear_absolute[yearday_python][h_day].append((first_half_hour + half_hour_val)) # Absolute value (not a percentage)

cnt = 0
h_day += 1

@@ -109,6 +197,13 @@ def read_in_non_residential_load_curves():
assertions = unittest.TestCase('__init__')
assertions.assertAlmostEqual(control_sum, daily_sum, places=7, msg=None, delta=None)


# Get min and max daily load curve over the full year
#(main_dict_dayyear_absolute)

#max_loads = get_max_daily_load(main_dict_dayyear) # Does not work yet
#print(max_loads)
#prnt("..")
# -----------------------------------------------
# Calculate average and add to overall dictionary
# -----------------------------------------------
