Commit

Merge branch 'release/publication'
samuelduchesne committed Jun 8, 2020
2 parents ab7d4ab + 9ea590f commit 1c38713
Showing 16 changed files with 1,283 additions and 436 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -48,6 +48,6 @@ before_install:
- conda activate test-env
- python --version
install:
- pip install --upgrade pip
# - pip install --upgrade pip
- pip install --upgrade setuptools
- pip install .[dev]
2 changes: 1 addition & 1 deletion archetypal/__init__.py
@@ -6,7 +6,7 @@
################################################################################

# Version of the package
__version__ = "1.3.1"
__version__ = "1.3.2"

# warn if a newer version of archetypal is available
from outdated import warn_if_outdated
31 changes: 20 additions & 11 deletions archetypal/dataportal.py
@@ -337,12 +337,12 @@ def _resolve_codecountry(code_country):
Args:
code_country:
"""
if len(code_country) == 2:
if isinstance(code_country, int):
code_country = pycountry.countries.get(numeric=str(code_country))
elif len(code_country) == 2:
code_country = pycountry.countries.get(alpha_2=code_country)
elif len(code_country) == 3:
code_country = pycountry.countries.get(alpha_3=code_country)
elif isinstance(code_country, int):
code_country = pycountry.countries.get(numeric=str(code_country))
else:
code_country = pycountry.countries.get(name=code_country)
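
Note on the reordering above: the isinstance(code_country, int) branch has to run before any len() test, because calling len() on an integer raises TypeError. A minimal standalone sketch of the corrected lookup order (just the pattern, not the library function itself):

import pycountry

def resolve_country(code):
    # Numeric ISO 3166-1 codes must be intercepted before len() is called;
    # len(124) would raise "TypeError: object of type 'int' has no len()".
    if isinstance(code, int):
        return pycountry.countries.get(numeric=str(code))
    elif len(code) == 2:
        return pycountry.countries.get(alpha_2=code)
    elif len(code) == 3:
        return pycountry.countries.get(alpha_3=code)
    return pycountry.countries.get(name=code)

resolve_country("CA"), resolve_country("CAN"), resolve_country(124)  # all resolve to Canada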

@@ -484,6 +484,15 @@ def nrel_api_cbr_request(data):
response_json["remark"], level=lg.WARNING
)
)
elif "error" in response_json:
log(
"Server at {} returned status code {} meaning {}.".format(
domain, response.status_code, response_json["error"]["code"]
),
level=lg.ERROR,
)
else:
pass
save_to_cache(prepared_url, response_json)
except Exception:
# deal with response satus_code here
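
The new elif branch above logs API error payloads instead of falling through silently. A hedged sketch of the same pattern using the standard logging module (the helper name and domain value are illustrative, not the project's actual code):

import logging

def report_api_response(response_json, status_code, domain="bcl.nrel.gov"):
    # Mirror the branch added above: a "remark" is informational, an "error" is logged as such.
    if "remark" in response_json:
        logging.warning('Server remark: "%s"', response_json["remark"])
    elif "error" in response_json:
        logging.error(
            "Server at %s returned status code %s meaning %s.",
            domain,
            status_code,
            response_json["error"]["code"],
        )
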
@@ -545,11 +554,7 @@ def nrel_bcl_api_request(data):
# json safely
response_json = response.json()
if "remark" in response_json:
log(
'Server remark: "{}"'.format(
response_json["remark"], level=lg.WARNING
)
)
log('Server remark: "{}"'.format(response_json["remark"], level=lg.WARNING))
save_to_cache(prepared_url, response_json)
return response_json

@@ -826,10 +831,14 @@ def download_bld_window(
for info in z.infolist():
if info.filename.endswith(extension):
z.extract(info, path=output_folder)
results.append(os.path.join(settings.data_folder, info.filename))
results.append(
os.path.join(settings.data_folder, info.filename)
)
return results
else:
return response["result"]
else:
raise ValueError("Could not download window from NREL Building Components "
"Library. An error occurred with the nrel_api_request")
raise ValueError(
"Could not download window from NREL Building Components "
"Library. An error occurred with the nrel_api_request"
)
5 changes: 3 additions & 2 deletions archetypal/idfclass.py
@@ -889,7 +889,7 @@ def save_idf_object_to_cache(idf_object, idf_file, output_folder=None, how=None)
output_folder (Path): temporary output directory (default:
settings.cache_folder)
how (str, optional): How the pickling is done. Choices are 'json' or
'pickle'. json dump doen't quite work yet. 'pickle' will save to a
'pickle'. json dump does not quite work yet. 'pickle' will save to a
gzip'ed file instead of a regular binary file (.dat).
Returns:
@@ -1867,7 +1867,8 @@ def upgraded_file(eplus_file, output_directory):
eplus_file:
output_directory:
"""
eplus_file = next(iter(output_directory.glob("*.idf")), eplus_file)
if settings.use_cache:
eplus_file = next(iter(output_directory.glob("*.idf")), eplus_file)
return eplus_file


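With the change to upgraded_file above, a transitioned IDF found in output_directory is only substituted when caching is enabled; next(iter(...), default) falls back to the original file when the glob yields nothing. A standalone sketch of the idiom (use_cache is passed explicitly here instead of read from settings):

from pathlib import Path

def pick_upgraded(eplus_file, output_directory, use_cache=True):
    # Substitute the first *.idf found in the output directory, if any; otherwise
    # keep the original file. Skipped entirely when caching is disabled.
    if use_cache:
        eplus_file = next(iter(Path(output_directory).glob("*.idf")), eplus_file)
    return eplus_file
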
2 changes: 1 addition & 1 deletion archetypal/settings.py
@@ -120,7 +120,7 @@
unique_schedules = []

# TRNSYS default location
trnsys_default_folder = r"C:\TRNSYS18"
trnsys_default_folder = Path(r"C:\TRNSYS18")

# region read template - use io.BytesIO(settings.template) in code
import pkg_resources
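
Storing trnsys_default_folder as a Path rather than a raw string is what lets the trnsys.py hunk further down build the trnsidf.exe location with the / operator. A small sketch using the standard pathlib (the project may use a different Path class, so treat this as an approximation):

from pathlib import Path

trnsys_default_folder = Path(r"C:\TRNSYS18")
# The "/" operator joins path segments; it is not available on plain strings.
trnsidf_exe = trnsys_default_folder / "Building" / "trnsIDF" / "trnsidf.exe"
print(trnsidf_exe)  # C:\TRNSYS18\Building\trnsIDF\trnsidf.exe on Windows
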
105 changes: 83 additions & 22 deletions archetypal/trnsys.py
@@ -116,7 +116,14 @@ def convert_idf_to_trnbuild(
"""

# Assert all path needed exist
idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template = _assert_files(
(
idf_file,
weather_file,
window_lib,
output_folder,
trnsidf_exe,
template,
) = _assert_files(
idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template
)

@@ -182,16 +189,46 @@
lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines()

# Get objects from IDF file
buildingSurfs, buildings, constructions, equipments, fenestrationSurfs, globGeomRules, lights, locations, materialAirGap, materialNoMass, materials, peoples, versions, zones, zonelists = get_idf_objects(
idf_2
)
(
buildingSurfs,
buildings,
constructions,
equipments,
fenestrationSurfs,
globGeomRules,
lights,
locations,
materialAirGap,
materialNoMass,
materials,
peoples,
versions,
zones,
zonelists,
) = get_idf_objects(idf_2)

# Get all construction EXCEPT fenestration ones
constr_list = _get_constr_list(buildingSurfs)

# If ordered=True, ordering idf objects
ordered = kwargs.get("ordered", False)
buildingSurfs, buildings, constr_list, constructions, equipments, fenestrationSurfs, globGeomRules, lights, locations, materialAirGap, materialNoMass, materials, peoples, zones, zonelists = _order_objects(
(
buildingSurfs,
buildings,
constr_list,
constructions,
equipments,
fenestrationSurfs,
globGeomRules,
lights,
locations,
materialAirGap,
materialNoMass,
materials,
peoples,
zones,
zonelists,
) = _order_objects(
buildingSurfs,
buildings,
constr_list,
@@ -622,7 +659,11 @@ def _write_heat_cool_to_b18(list_dict, old_new_names, zone, b18_lines, string):
f_count = checkStr(b18_lines, "Z o n e " + zone.Name)
regimeNum = checkStr(b18_lines, "REGIME", f_count)
# Write
b18_lines.insert(regimeNum, string + " = " + list_dict[key][0] + "\n")
if not isinstance(list_dict[key], list):
value = list_dict[key]
else:
value = list_dict[key][0]
b18_lines.insert(regimeNum, string + " = " + value + "\n")


def zone_where_gain_is(gains, zones, zonelists):
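
The guard added in _write_heat_cool_to_b18 above accepts either a bare value or a list for each entry of list_dict. A toy illustration of that pattern (the helper name is hypothetical):

def first_or_value(entry):
    # A bare value is used as-is; a list contributes its first element.
    if not isinstance(entry, list):
        return entry
    return entry[0]

assert first_or_value("HEATING_SCHEDULE") == "HEATING_SCHEDULE"
assert first_or_value(["HEATING_SCHEDULE", "COOLING_SCHEDULE"]) == "HEATING_SCHEDULE"
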
@@ -929,20 +970,29 @@ def _assert_files(
trnsidf_exe (str): Path to *trnsidf.exe*.
template (str): Path to d18 template file.
"""
if not os.path.isfile(idf_file):
raise IOError("idf_file file not found")
if isinstance(idf_file, str):
if not os.path.isfile(idf_file):
raise IOError("idf_file file not found")
else:
raise IOError("idf_file file is not a string (path)")

if not os.path.isfile(weather_file):
raise IOError("idf_file file not found")
if isinstance(weather_file, str):
if not os.path.isfile(weather_file):
raise IOError("weather file not found")
else:
raise IOError("weather file is not a string (path)")

if window_lib:
if not os.path.isfile(window_lib):
raise IOError("window_lib file not found")
if isinstance(window_lib, str):
if not os.path.isfile(window_lib):
raise IOError("window_lib file not found")
else:
raise IOError("window_lib file is not a string (path)")

if not output_folder:
output_folder = os.path.relpath(settings.data_folder)
if not os.path.exists(output_folder):
os.mkdir(output_folder)
if not os.path.exists(output_folder):
os.mkdir(output_folder)

if not template:
template = settings.path_template_d18
@@ -951,8 +1001,8 @@
raise IOError("template file not found")

if not trnsidf_exe:
trnsidf_exe = os.path.join(
settings.trnsys_default_folder, r"Building\trnsIDF\trnsidf.exe"
trnsidf_exe = settings.trnsys_default_folder / Path(
r"Building\trnsIDF\trnsidf.exe"
)

if not os.path.isfile(trnsidf_exe):
@@ -1010,7 +1060,7 @@ def _add_change_adj_surf(buildingSurfs, idf):
break
# If boundary surface does not exist, append the list of surface
# to create
if not adj_surfs_to_change:
if not buildingSurf.Name in adj_surfs_to_change:
if not buildingSurf.Name in adj_surfs_to_make:
adj_surfs_to_make.append(buildingSurf.Name)
# If adjacent surface found, check if Outside boundary
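
The corrected condition in _add_change_adj_surf checks whether this specific surface is already scheduled for change, rather than whether the whole collection is empty, so every missing adjacency gets collected. The underlying pattern is a simple membership-guarded append (illustrative names only):

adj_surfs_to_change = {}   # surfaces whose boundary object needs fixing (dict, as in the original)
adj_surfs_to_make = []     # surfaces whose boundary object is missing entirely

def register_missing(surface_name):
    # Only queue the surface for creation if it is neither being changed nor already queued.
    if surface_name not in adj_surfs_to_change:
        if surface_name not in adj_surfs_to_make:
            adj_surfs_to_make.append(surface_name)
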
@@ -1431,7 +1481,18 @@ def choose_window(u_value, shgc, t_vis, tolerance, window_lib_path):
)
.idxmin()
)
win_id, description, design, u_win, shgc_win, t_sol_win, rf_sol_win, t_vis_win, lay_win, width = df_windows.loc[
(
win_id,
description,
design,
u_win,
shgc_win,
t_sol_win,
rf_sol_win,
t_vis_win,
lay_win,
width,
) = df_windows.loc[
best_window_index,
[
"WinID",
@@ -1809,9 +1870,9 @@ def _write_zone_buildingSurf_fenestrationSurf(
)

# Round vertex to 4 decimal digit max
zone.X_Origin = round(zone.X_Origin, 4)
zone.Y_Origin = round(zone.Y_Origin, 4)
zone.Z_Origin = round(zone.Z_Origin, 4)
zone.X_Origin = round(zone_origin(zone)[0], 4)
zone.Y_Origin = round(zone_origin(zone)[1], 4)
zone.Z_Origin = round(zone_origin(zone)[2], 4)

lines.insert(variableDictNum + 2, zone)
return win_slope_dict
@@ -2126,7 +2187,7 @@ def _write_schedules(lines, schedule_names, schedules, schedule_as_input, idf_fi
# Get annual hourly values of schedules
arr = schedules[schedule_name]["all values"]
# Find the hours where hourly values change
hours_list, = np.where(np.roll(arr, 1) != arr)
(hours_list,) = np.where(np.roll(arr, 1) != arr)
# if hours_list is empty, give it hour 0
if hours_list.size == 0:
hours_list = np.array([0])
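
In _write_schedules, np.where returns a tuple of index arrays, hence the single-element unpacking; comparing the array against np.roll(arr, 1) flags the hours where the schedule value changes. A small worked example with a made-up schedule:

import numpy as np

arr = np.array([0, 0, 1, 1, 1, 0])  # toy hourly values
(hours_list,) = np.where(np.roll(arr, 1) != arr)
print(hours_list)  # [2 5] -> the value changes at hours 2 and 5
if hours_list.size == 0:
    hours_list = np.array([0])  # constant schedules still get an entry at hour 0
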
6 changes: 3 additions & 3 deletions archetypal/utils.py
@@ -60,10 +60,10 @@ def config(
data_folder (str): where to save and load data files.
logs_folder (str): where to write the log files.
imgs_folder (str): where to save figures.
cache_folder (str): where to save the simluation results.
cache_folder (str): where to save the simulation results.
use_cache (bool): if True, use a local cache to save/retrieve many of
archetypal outputs such as EnergyPlus simulation results. This can
save a lot of time by not calling the simulation and dataportal APIs
save a lot of time by not calling the simulation and DataPortal APIs
repetitively for the same requests.
log_file (bool): if true, save log output to a log file in logs_folder.
log_console (bool): if true, print log output to the console.
@@ -123,7 +123,7 @@ def validate_trnsys_folder(trnsys_default_folder):
trnsys_default_folder
)
)
return None
return trnsys_default_folder
else:
return trnsys_default_folder
