diff --git a/.travis.yml b/.travis.yml index 9fafabb5..c71a9996 100644 --- a/.travis.yml +++ b/.travis.yml @@ -48,6 +48,6 @@ before_install: - conda activate test-env - python --version install: -- pip install --upgrade pip +# - pip install --upgrade pip - pip install --upgrade setuptools - pip install .[dev] \ No newline at end of file diff --git a/archetypal/__init__.py b/archetypal/__init__.py index 74f2cfe7..0265c56e 100644 --- a/archetypal/__init__.py +++ b/archetypal/__init__.py @@ -6,7 +6,7 @@ ################################################################################ # Version of the package -__version__ = "1.3.1" +__version__ = "1.3.2" # warn if a newer version of archetypal is available from outdated import warn_if_outdated diff --git a/archetypal/dataportal.py b/archetypal/dataportal.py index 0e3386fd..8e29f88a 100644 --- a/archetypal/dataportal.py +++ b/archetypal/dataportal.py @@ -337,12 +337,12 @@ def _resolve_codecountry(code_country): Args: code_country: """ - if len(code_country) == 2: + if isinstance(code_country, int): + code_country = pycountry.countries.get(numeric=str(code_country)) + elif len(code_country) == 2: code_country = pycountry.countries.get(alpha_2=code_country) elif len(code_country) == 3: code_country = pycountry.countries.get(alpha_3=code_country) - elif isinstance(code_country, int): - code_country = pycountry.countries.get(numeric=str(code_country)) else: code_country = pycountry.countries.get(name=code_country) @@ -484,6 +484,15 @@ def nrel_api_cbr_request(data): response_json["remark"], level=lg.WARNING ) ) + elif "error" in response_json: + log( + "Server at {} returned status code {} meaning {}.".format( + domain, response.status_code, response_json["error"]["code"] + ), + level=lg.ERROR, + ) + else: + pass save_to_cache(prepared_url, response_json) except Exception: # deal with response satus_code here @@ -545,11 +554,7 @@ def nrel_bcl_api_request(data): # json safely response_json = response.json() if "remark" in 
response_json: - log( - 'Server remark: "{}"'.format( - response_json["remark"], level=lg.WARNING - ) - ) + log('Server remark: "{}"'.format(response_json["remark"], level=lg.WARNING)) save_to_cache(prepared_url, response_json) return response_json @@ -826,10 +831,14 @@ def download_bld_window( for info in z.infolist(): if info.filename.endswith(extension): z.extract(info, path=output_folder) - results.append(os.path.join(settings.data_folder, info.filename)) + results.append( + os.path.join(settings.data_folder, info.filename) + ) return results else: return response["result"] else: - raise ValueError("Could not download window from NREL Building Components " - "Library. An error occurred with the nrel_api_request") + raise ValueError( + "Could not download window from NREL Building Components " + "Library. An error occurred with the nrel_api_request" + ) diff --git a/archetypal/idfclass.py b/archetypal/idfclass.py index 7a8597c9..15c935d1 100644 --- a/archetypal/idfclass.py +++ b/archetypal/idfclass.py @@ -889,7 +889,7 @@ def save_idf_object_to_cache(idf_object, idf_file, output_folder=None, how=None) output_folder (Path): temporary output directory (default: settings.cache_folder) how (str, optional): How the pickling is done. Choices are 'json' or - 'pickle'. json dump doen't quite work yet. 'pickle' will save to a + 'pickle'. json dump does not quite work yet. 'pickle' will save to a gzip'ed file instead of a regular binary file (.dat). 
Returns: @@ -1867,7 +1867,8 @@ def upgraded_file(eplus_file, output_directory): eplus_file: output_directory: """ - eplus_file = next(iter(output_directory.glob("*.idf")), eplus_file) + if settings.use_cache: + eplus_file = next(iter(output_directory.glob("*.idf")), eplus_file) return eplus_file diff --git a/archetypal/settings.py b/archetypal/settings.py index 6d031b36..c042c74f 100644 --- a/archetypal/settings.py +++ b/archetypal/settings.py @@ -120,7 +120,7 @@ unique_schedules = [] # TRNSYS default location -trnsys_default_folder = r"C:\TRNSYS18" +trnsys_default_folder = Path(r"C:\TRNSYS18") # region read template - use io.BytesIO(settings.template) in code import pkg_resources diff --git a/archetypal/trnsys.py b/archetypal/trnsys.py index 6cb86c88..1def4ee4 100644 --- a/archetypal/trnsys.py +++ b/archetypal/trnsys.py @@ -116,7 +116,14 @@ def convert_idf_to_trnbuild( """ # Assert all path needed exist - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template = _assert_files( + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files( idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template ) @@ -182,16 +189,46 @@ def convert_idf_to_trnbuild( lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() # Get objects from IDF file - buildingSurfs, buildings, constructions, equipments, fenestrationSurfs, globGeomRules, lights, locations, materialAirGap, materialNoMass, materials, peoples, versions, zones, zonelists = get_idf_objects( - idf_2 - ) + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) # Get all construction EXCEPT fenestration ones constr_list = _get_constr_list(buildingSurfs) # If ordered=True, ordering idf objects ordered = kwargs.get("ordered", False) - 
buildingSurfs, buildings, constr_list, constructions, equipments, fenestrationSurfs, globGeomRules, lights, locations, materialAirGap, materialNoMass, materials, peoples, zones, zonelists = _order_objects( + ( + buildingSurfs, + buildings, + constr_list, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + zones, + zonelists, + ) = _order_objects( buildingSurfs, buildings, constr_list, @@ -622,7 +659,11 @@ def _write_heat_cool_to_b18(list_dict, old_new_names, zone, b18_lines, string): f_count = checkStr(b18_lines, "Z o n e " + zone.Name) regimeNum = checkStr(b18_lines, "REGIME", f_count) # Write - b18_lines.insert(regimeNum, string + " = " + list_dict[key][0] + "\n") + if not isinstance(list_dict[key], list): + value = list_dict[key] + else: + value = list_dict[key][0] + b18_lines.insert(regimeNum, string + " = " + value + "\n") def zone_where_gain_is(gains, zones, zonelists): @@ -929,20 +970,29 @@ def _assert_files( trnsidf_exe (str): Path to *trnsidf.exe*. template (str): Path to d18 template file. 
""" - if not os.path.isfile(idf_file): - raise IOError("idf_file file not found") + if isinstance(idf_file, str): + if not os.path.isfile(idf_file): + raise IOError("idf_file file not found") + else: + raise IOError("idf_file file is not a string (path)") - if not os.path.isfile(weather_file): - raise IOError("idf_file file not found") + if isinstance(weather_file, str): + if not os.path.isfile(weather_file): + raise IOError("weather file not found") + else: + raise IOError("weather file is not a string (path)") if window_lib: - if not os.path.isfile(window_lib): - raise IOError("window_lib file not found") + if isinstance(window_lib, str): + if not os.path.isfile(window_lib): + raise IOError("window_lib file not found") + else: + raise IOError("window_lib file is not a string (path)") if not output_folder: output_folder = os.path.relpath(settings.data_folder) - if not os.path.exists(output_folder): - os.mkdir(output_folder) + if not os.path.exists(output_folder): + os.mkdir(output_folder) if not template: template = settings.path_template_d18 @@ -951,8 +1001,8 @@ def _assert_files( raise IOError("template file not found") if not trnsidf_exe: - trnsidf_exe = os.path.join( - settings.trnsys_default_folder, r"Building\trnsIDF\trnsidf.exe" + trnsidf_exe = settings.trnsys_default_folder / Path( + r"Building\trnsIDF\trnsidf.exe" ) if not os.path.isfile(trnsidf_exe): @@ -1010,7 +1060,7 @@ def _add_change_adj_surf(buildingSurfs, idf): break # If boundary surface does not exist, append the list of surface # to create - if not adj_surfs_to_change: + if not buildingSurf.Name in adj_surfs_to_change: if not buildingSurf.Name in adj_surfs_to_make: adj_surfs_to_make.append(buildingSurf.Name) # If adjacent surface found, check if Outside boundary @@ -1431,7 +1481,18 @@ def choose_window(u_value, shgc, t_vis, tolerance, window_lib_path): ) .idxmin() ) - win_id, description, design, u_win, shgc_win, t_sol_win, rf_sol_win, t_vis_win, lay_win, width = df_windows.loc[ + ( + win_id, + 
description, + design, + u_win, + shgc_win, + t_sol_win, + rf_sol_win, + t_vis_win, + lay_win, + width, + ) = df_windows.loc[ best_window_index, [ "WinID", @@ -1809,9 +1870,9 @@ def _write_zone_buildingSurf_fenestrationSurf( ) # Round vertex to 4 decimal digit max - zone.X_Origin = round(zone.X_Origin, 4) - zone.Y_Origin = round(zone.Y_Origin, 4) - zone.Z_Origin = round(zone.Z_Origin, 4) + zone.X_Origin = round(zone_origin(zone)[0], 4) + zone.Y_Origin = round(zone_origin(zone)[1], 4) + zone.Z_Origin = round(zone_origin(zone)[2], 4) lines.insert(variableDictNum + 2, zone) return win_slope_dict @@ -2126,7 +2187,7 @@ def _write_schedules(lines, schedule_names, schedules, schedule_as_input, idf_fi # Get annual hourly values of schedules arr = schedules[schedule_name]["all values"] # Find the hours where hourly values change - hours_list, = np.where(np.roll(arr, 1) != arr) + (hours_list,) = np.where(np.roll(arr, 1) != arr) # if hours_list is empty, give it hour 0 if hours_list.size == 0: hours_list = np.array([0]) diff --git a/archetypal/utils.py b/archetypal/utils.py index 6464acbb..d5ba4d9a 100644 --- a/archetypal/utils.py +++ b/archetypal/utils.py @@ -60,10 +60,10 @@ def config( data_folder (str): where to save and load data files. logs_folder (str): where to write the log files. imgs_folder (str): where to save figures. - cache_folder (str): where to save the simluation results. + cache_folder (str): where to save the simulation results. use_cache (bool): if True, use a local cache to save/retrieve many of archetypal outputs such as EnergyPlus simulation results. This can - save a lot of time by not calling the simulation and dataportal APIs + save a lot of time by not calling the simulation and DataPortal APIs repetitively for the same requests. log_file (bool): if true, save log output to a log file in logs_folder. log_console (bool): if true, print log output to the console. 
@@ -123,7 +123,7 @@ def validate_trnsys_folder(trnsys_default_folder): trnsys_default_folder ) ) - return None + return trnsys_default_folder else: return trnsys_default_folder diff --git a/docs/caching.rst b/docs/caching.rst new file mode 100644 index 00000000..83f2aea9 --- /dev/null +++ b/docs/caching.rst @@ -0,0 +1,135 @@ +Caching +======= + +Archetypal features a caching api aimed at accelerating reproducible workflows using EnergyPlus simulations by reducing +unnecessary calls to the EnergyPlus executable or transitioning programs. Concretely, caching an IDF model means that, +for instance, if an older version model (less than 9.2) is ran, archetypal will transition a copy of that file to +version 9.2 (making a copy beforehand) and run the simulation with the matching EnergyPlus executable. The next time the +:func:`archetypal.idfclass.run_eplus` or the :func:`archetypal.idfclass.load_idf` method is called, the cached +(transitioned) file will be readily available and used; This helps to save time especially with reproducible workflows +since transitioning files can take a while to complete. + +As for simulation results, after :func:`archetypal.idfclass run_eplus` is called, the EnergyPlus outputs (.csv, sqlite, +mtd, .mdd, etc.) are cached in a folder structure than is identified according to the simulation parameters; those +parameters include the content of the IDF file itself (if the file has changed, a new simulation is required), whether +an annual or design day simulation is executed, etc. This means that if run_eplus is called a second time (let us say +after restarting a Jupyter Notebook kernel), the run_eplus will bypass the EnergyPlus executable and retrieve the cached +simulation results instead. This has two advantages, the first one being a quicker workflow and the second one making +sure that whatever `run_eplus` returns fits the parameters used with the executable. Let us use this in a real world +example. 
First, caching is enabled using the `config` method: + +Enabling caching +---------------- + +Caching is enabled by passing the `use_cache=True` attribute to the :func:`archetypal.utils.config` method. The +configuration of archetypal settings are not persistent and must be called whenever a python session is started. It is +recommended to put the `config` method at the beginning of a script or in the first cells of a Jupyter Notebook +(after the import statements). + +.. code-block:: python + + import archetypal as ar + ar.config(use_cache=True, log_console=True) + +Example +------- + +In a Jupyter Notebook, one would typically do the following: + +.. code-block:: python + + _, idf, results = ar.run_eplus( + eplus_file=ar.utils.get_eplus_dirs("8-9-0") / "ExampleFiles" / "BasicsFiles/AdultEducationCenter.idf", + weather_file=ar.utils.get_eplus_dirs("8-9-0") / "WeatherData" / "USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw", + design_day=True, + return_files=True, + annual=False, + return_idf=True, + expandobjects=True, + prep_outputs=True, + ) + +Since the file is a version 8.0 IDF file, archetypal is going to transition the file to EnergyPlus 9.2 (or any other +version specified with the ep_version parameter) and execute EnergyPlus for the `design_day` only. + +The command above yields a list of output files thanks to the `return_files=True` parameter. These will be located +inside a cache folder specified by the settings.cache_folder variable (this folder path can be changed using the config +method). + +.. 
code-block:: python + + [None, , + [Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4tbl.csv'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.end'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/AdultEducationCenter.idf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.dxf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.eso'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.mtd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.bnd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.sql'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.mdd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4tbl.htm'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.shd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.expidf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.err'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/eplus_run_AdultEducationCenter.idf_2020_02_27.log'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.mtr'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/sqlite.err'), + 
Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.audit'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.eio'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/d04795a50b4ff172da72fec54c6991e4/d04795a50b4ff172da72fec54c6991e4out.rdd')]] + +Now, if the command above is modified with `annual=True` and set `design_day=False`, then run_eplus should return the +annual simulation results (which do not exist yet). + +.. code-block:: python + + _, idf, results = ar.run_eplus( + eplus_file=ar.utils.get_eplus_dirs("8-9-0") / "ExampleFiles" / "BasicsFiles/AdultEducationCenter.idf", + weather_file=ar.utils.get_eplus_dirs("8-9-0") / "WeatherData" / "USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw", + design_day=False, + return_files=True, + annual=True, + return_idf=True, + expandobjects=True, + prep_outputs=True, + ) + +Now, since the original IDF file (the version 8.9 one) has not changed, archetypal is going to look for the transitioned +file that resides in the cache folder and use that one instead of retransitioning the original file a second time. On +the other hand, since the parameters of run_eplus have changed (annual instead of design_day), it is going to execute +EnergyPlus using the annual method and return the annual results (see that the second-level folder id has changed from +d04795a50b4ff172da72fec54c6991e4 to 9efc05f6e6cde990685b8ef61e326d94; *these ids may be different on your computer*): + +.. 
code-block:: python + + [None, , + [Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/AdultEducationCenter.idf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.mdd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.shd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94tbl.htm'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.audit'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.mtr'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.err'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.rdd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.expidf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.eio'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.dxf'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.end'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94tbl.csv'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.eso'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.bnd'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.mtd'), + 
Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/sqlite.err'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/9efc05f6e6cde990685b8ef61e326d94out.sql'), + Path('cache/e8f4fb7e50ecaaf2cf2c9d4e4d159605/9efc05f6e6cde990685b8ef61e326d94/USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw')]] + +If we were to rerun the first code block (annual simulation) then it would return the cached results instantly from +the cache: + +.. code-block:: shell + + Successfully parsed cached idf run in 0.00 seconds + +Profiling this simple script shows an 8x speedup. \ No newline at end of file diff --git a/docs/converter_bui.rst b/docs/converter_bui.rst index b0092a4e..7c702721 100644 --- a/docs/converter_bui.rst +++ b/docs/converter_bui.rst @@ -106,7 +106,7 @@ Users can chose to return a combination of flags .. code-block:: python - archetypal convert --window-lib "/Users/Documents/W74-lib.dat" "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" + archetypal convert --window_lib "/Users/Documents/W74-lib.dat" "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" - ``--trnsidf_exe`` is the path of the trnsidf.exe executable. Usually located in the TRNSYS18 folder under "Building/trnsIDF/trnsidf.exe". @@ -114,7 +114,7 @@ Users can chose to return a combination of flags .. 
code-block:: python - archetypal convert --trnsidf-exe "C:TRNSYS18\\Building\\trnsIDF\\trnsidf.exe" "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" + archetypal convert --trnsidf_exe "C:TRNSYS18\\Building\\trnsIDF\\trnsidf.exe" "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" - ``--template`` is the path of the .d18 template file (usually in the same directory as the `trnsidf.exe` executable). If nothing is passed, the following path will be used : "C:TRNSYS18\\Building\\trnsIDF\\NewFileTemplate.d18". @@ -128,7 +128,7 @@ Users can chose to return a combination of flags .. code-block:: python - archetypal convert --log-clear-names "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" + archetypal convert --log_clear_names "/Users/Documents/NECB 2011 - Warehouse.idf" "/Users/Documents/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" "/Users/Documents/WIP" - ``--window`` specifies the window properties . If nothing is passed, the following values will be used : 2.2 0.65 0.8 0.05 0.15 8.17 diff --git a/docs/index.rst b/docs/index.rst index ea6c4283..864993cb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -35,6 +35,7 @@ independently. Installation For MacOS/Linux users + Caching .. toctree:: diff --git a/docs/install.rst b/docs/install.rst index c1217273..90d9bcbe 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -15,9 +15,72 @@ MacOS that would be in `/Applications/EnergyPlus-9-2-0`. It is also recommended that the older transition programs be installed as well. These programs allow older IDF files (versions 7.2 and below) to be upgraded to version 9-2-0. Since these, don't come by default with EnergyPlus, they -need to be installed by hand. A script has been created for windows (see `Installation from scratch (Windows)`_). 
For +need to be installed by hand. A script has been created for windows (see `Installation from scratch`_). For macOS, refer to the `supplementary conversion programs`_. +Installation from scratch +------------------------- + +This first step should be helpful for users that are not familiar with python environments. If you already have python +installed and think that you can manage the installation a new package using `pip`, then you can skip to the next +section. + +Download & Install MiniConda (or the full Anaconda) +................................................... + +found at the following URL: https://docs.conda.io/en/latest/miniconda.html + +Launch the executable and select the following settings: + +- InstallationType=JustMe +- AddToPath=Yes +- RegisterPython=Yes +- Installation path=%UserProfile%\Miniconda3 + +Check if everything is ok by running `conda list` in the command line (make sure to open a new command line window just +in case). You should see something like this: + +.. code-block:: doscon + + C:\Users\archetypal>conda list + # packages in environment at C:\ProgramData\Miniconda3: + # + # Name Version Build Channel + asn1crypto 1.2.0 py37_0 + ca-certificates 2019.10.16 0 + certifi 2019.9.11 py37_0 + ... + win_inet_pton 1.1.0 py37_0 + wincertstore 0.2 py37_0 + yaml 0.1.7 hc54c509_2 + +Install EnergyPlus & Conversion Programs +........................................ + +EnergyPlus is a prerequisite of archetypal. It must be installed beforehand. Moreover, archetypal contains routines that +may download IDF components that are coded in earlier versions of EnergyPlus (e.g., 7.1). For this reason, users should +also download the `supplementary conversion programs`_, and install the content in the EnergyPlus installation folder: + +- On Windows: `C:\\EnergyPlusV9-2-0\\PreProcess\\IDFVersionUpdater` (For Windows, see automated procedure below). 
+- On MacOS: `/Applications/EnergyPlus-9-2-0/PreProcess/IDFVersionUpdater` + +On Windows, this installation procedure can be automated with the following `script`_ which will download and installEnergyPlus as +well as the supplementary conversion programs. + +To use the script, follow the next steps. First git must be installed beforehand with default installation parameters. +See https://git-scm.com/downloads to download git. Then the following commands will change the current directory to the +user's Downloads folder. Then the script will be downloaded using the `git clone` command. Finally the script will be executed. +Copy the whole code block below in Command Prompt and Hit :guilabel:`&Enter:⏎`. + +.. code-block:: doscon + + cd %USERPROFILE%\Downloads + git clone https://gist.github.com/aef233396167e0f961df3d62a193573e.git + cd aef233396167e0f961df3d62a193573e + install_eplus_script.cmd + +To install *archetypal*, follow the steps detailed below in `Installing using pip`_ + Installing using ``pip`` ------------------------ @@ -110,70 +173,6 @@ will work well to create a new environment using a specific dependency file in o conda activate archetypal pip install archetypal - -Installation from scratch (Windows) ------------------------------------ - -This first step should be helpful for users that are not familiar with python environments. If you already have python -installed and think that you can manage the installation a new package using `pip`, then you can skip to the next -section. - -Download & Install MiniConda (or the full Anaconda) -................................................... 
- -found at the following URL: https://docs.conda.io/en/latest/miniconda.html - -Launch the executable and select the following settings: - -- InstallationType=JustMe -- AddToPath=Yes -- RegisterPython=Yes -- Installation path=%UserProfile%\Miniconda3 - -Check if everything is ok by running `conda list` in the command line (make sure to open a new command line window just -in case). You should see something like this: - -.. code-block:: doscon - - C:\Users\archetypal>conda list - # packages in environment at C:\ProgramData\Miniconda3: - # - # Name Version Build Channel - asn1crypto 1.2.0 py37_0 - ca-certificates 2019.10.16 0 - certifi 2019.9.11 py37_0 - ... - win_inet_pton 1.1.0 py37_0 - wincertstore 0.2 py37_0 - yaml 0.1.7 hc54c509_2 - -Install EnergyPlus & Conversion Programs -........................................ - -Note: To follow this procedure, git must be installed beforehand with default installation parameters. -See https://git-scm.com/downloads to download git. - -EnergyPlus is a prerequisite of archetypal. It must be installed beforehand. Moreover, archetypal contains routines that -may download IDF components that are coded in earlier versions of EnergyPlus (e.g., 7.1). For this reason, users should -also download the `supplementary conversion programs`_. - -This installation procedure can be automated with the following `script`_ which will download and installEnergyPlus as -well as the supplementary conversion programs. - -To use the script, follow the next steps. Theses commands will change the current directory to the user's Downloads -folder. Then the script will be downloaded using the `git clone` command. Finally the script will be executed. Copy the -whole code block below in Command Prompt and Hit :guilabel:`&Enter:⏎`. - -.. 
code-block:: doscon - - cd %USERPROFILE%\Downloads - git clone https://gist.github.com/aef233396167e0f961df3d62a193573e.git - cd aef233396167e0f961df3d62a193573e - install_eplus_script.cmd - -To install *archetypal*, follow the steps detailed above in `Installing using pip`_ - - .. _start a jupyter notebook: https://jupyter.readthedocs.io/en/latest/running.html#starting-the-notebook-server .. _jupyter notebook: https://jupyter-notebook.readthedocs.io/en/stable/# .. _Angelo Basile: https://anbasile.github.io/programming/2017/06/25/jupyter-venv/ diff --git a/paper/paper.md b/paper/paper.md index 58912848..b0dfb7f0 100644 --- a/paper/paper.md +++ b/paper/paper.md @@ -35,7 +35,7 @@ EnergyPlus to enable building energy modeling at the urban scale. The three tool many advantages in their respective fields, but all suffer from the same flaw: creating building archetypes for any platform is a time-consuming, tedious and error-prone process. `archetypal` is a Python package that helps handling collections of such archetypes and to -enabling the interoperability between these energy simulation platforms to accelerate the +enable the interoperability between these energy simulation platforms to accelerate the creation of reliable urban building energy models. This package offers three major capabilities for researchers and practitioners: @@ -53,12 +53,12 @@ results as time-series DataFrames and typical building energy profiles such as t heating, space cooling and domestic hot water profiles are accessible by default. Other output names can be specified by the user. -Furthermore, for a drastic workflow speed gain, especially when larger IDF files can take -several minutes to complete, `archetypal` features a caching method that handles -simulation results. This is particularly useful for reproducible workflows such as the -Jupyter Notebook programming environment. 
Reopening a closed notebook and running a cell -containing the `run_eplus` command will use the cached simulation results instead of -executing EnergyPlus again. +Furthermore, for a drastic workflow speed gain, especially with multiple and/or larger IDF +files (which can take several minutes to transition and simulate), `archetypal` features a +caching API. This is particularly useful for reproducible workflows such as the Jupyter +Notebook programming environment. Rerunning cells (even after a kernel restart) will use +the cached IDF models and their simulation results instead of executing EnergyPlus again. +Speedups of up to 8x have been measured. ## EnergyPlus to UMI Template File Conversion diff --git a/tests/conftest.py b/tests/conftest.py index a8434d98..1a51f025 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -52,7 +52,7 @@ def config(): use_cache=True, log_file=True, log_console=True, - umitemplate="tests/input_data/umi_samples" "/BostonTemplateLibrary_2.json", + umitemplate="tests/input_data/umi_samples/BostonTemplateLibrary_2.json", ) diff --git a/tests/input_data/trnsys/T3D_simple_2_zone.b18 b/tests/input_data/trnsys/T3D_simple_2_zone.b18 index 28e1a372..162993db 100644 --- a/tests/input_data/trnsys/T3D_simple_2_zone.b18 +++ b/tests/input_data/trnsys/T3D_simple_2_zone.b18 @@ -182,8 +182,6 @@ GAIN p_000000 *----------------------------------------------------------------------------------------------------------------------- * I n f i l t r a t i o n *----------------------------------------------------------------------------------------------------------------------- -INFILTRATION Constant -AIRCHANGE=0.5 *----------------------------------------------------------------------------------------------------------------------- * V e n t i l a t i o n *----------------------------------------------------------------------------------------------------------------------- @@ -239,11 +237,6 @@ FLOOR =c_000006 : SURF= 4 : AREA= 50.000 : 
BOUNDARY=SCHEDULE ROOF =c_000001 : SURF= 5 : AREA= 50.000 : EXTERNAL : ORI=H_0_0 : FSKY=1.00 WALL =c_000003 : SURF= 6 : AREA= 12.192 : EXTERNAL : ORI=E_270_90 : FSKY=0.50 : GEOSURF=0.066667 REGIME - GAIN= ee_000001 : SCALE= SCHEDULE 1*sy_000004 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - GAIN= l_000001 : SCALE= SCHEDULE 1*sy_000004 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - GAIN= p_000001 : SCALE= SCHEDULE 1*sy_000006 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - INFILTRATION = Constant - HEATING = HEAT_z2 CAPACITANCE= 146.304 : VOLUME= 121.920 : REFAREA= 50.000: TINITIAL= 20.0 : PHINITIAL= 50 : WCAPR= 1 *----------------------------------------------------------------------------------------------------------------------- @@ -263,11 +256,6 @@ WINDOW=EXT_WINDOW1 : SURF= 13 : AREA= 2.250 : EXTERNAL : ORI=N_ WALL =c_000012 : SURF= 11 : AREA= 24.384 : ADJACENT=z_000001 : ADJ_SURF=3 : BACK : GEOSURF=0.133333 WALL =c_000003 : SURF= 12 : AREA= 12.192 : EXTERNAL : ORI=W_90_90 : FSKY=0.50 : GEOSURF=0.066667 REGIME - GAIN= ee_000000 : SCALE= SCHEDULE 1*sy_000004 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - GAIN= l_000000 : SCALE= SCHEDULE 1*sy_000003 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - GAIN= p_000000 : SCALE= SCHEDULE 1*sy_000006 : GEOPOS=0 : SCALE2= 1 : FRAC_REFAREA= 1 - INFILTRATION = Constant - HEATING = HEAT_z1 CAPACITANCE= 146.304 : VOLUME= 121.920 : REFAREA= 50.000: TINITIAL= 20.0 : PHINITIAL= 50 : WCAPR= 1 *----------------------------------------------------------------------------------------------------------------------- diff --git a/tests/test_dataportals.py b/tests/test_dataportals.py index 40e4c575..88ae0e34 100644 --- a/tests/test_dataportals.py +++ b/tests/test_dataportals.py @@ -12,11 +12,31 @@ def test_tabula_available_country(config, scratch_then_cache): # First, let's try the API call data = {"code_country": "FR"} cc_res = ar.dataportal.tabula_api_request(data, table="all-country") + # Makes sure data is not empty + assert cc_res["data"] # Then let's 
use the user-friendly call. Since it is the second call to the # same function, the response should be read from the cache. code_country = "FR" cc_cache = ar.dataportal.tabula_available_buildings(code_country) + # Makes sure result is not empty + assert list(cc_cache["id"]) + + +def test_tabula_api_request_valueerror(config, scratch_then_cache): + # Gives "wrong_string" as table + data = {"code_country": "FR"} + with pytest.raises(ValueError): + cc_res = ar.dataportal.tabula_api_request(data, table="wrong_string") + # Makes sure cc_res not in locals + assert "cc_res" not in locals() + + # Gives "wrong_string" as country + data = {"code_country": "wrong_string"} + with pytest.raises(ValueError): + cc_res = ar.dataportal.tabula_api_request(data, table="all-country") + # Makes sure cc_res not in locals + assert "cc_res" not in locals() def test_tabula_notavailable_country(config, scratch_then_cache): @@ -26,6 +46,101 @@ def test_tabula_notavailable_country(config, scratch_then_cache): def test_tabula_building_sheet(config, scratch_then_cache): sheet = ar.tabula_building_details_sheet(code_country="Austria") + # Makes sure result is not empty + assert list(sheet["val"]) + + +def test_tabula_building_sheet_code_building(config, scratch_then_cache): + # Test with code_building not None + sheet = ar.tabula_building_details_sheet( + code_building="AT.MT.AB.02.Gen.ReEx.001.001", code_country="Austria" + ) + + # Makes sure result is not empty + assert list(sheet["val"]) + # Make sure code_building is right + assert sheet["val"][0] == "AT.MT.AB.02.Gen.ReEx.001.001" + + +def test_tabula_building_sheet_valueerror(config, scratch_then_cache): + # Test with wrong code_building + with pytest.raises(ValueError): + sheet = ar.tabula_building_details_sheet( + code_building="wrong_string", code_country="Austria" + ) + # Makes sure sheet not in locals + assert "sheet" not in locals() + + # Test with wrong code_buildingsizeclass + with pytest.raises(ValueError): + sheet = 
ar.tabula_building_details_sheet( + code_buildingsizeclass="wrong_string", code_country="Austria" + ) + # Makes sure sheet not in locals + assert "sheet" not in locals() + + # Test with wrong code_country + with pytest.raises(ValueError): + sheet = ar.tabula_building_details_sheet(code_country="wrong_string",) + # Makes sure sheet not in locals + assert "sheet" not in locals() + + +def test_tabula_system(config, scratch_then_cache): + res = ar.dataportal.tabula_system(code_country="FR") + + # Makes sure result is not empty + assert list(res["data"]) + # Makes sure code_country is right + assert res["data"][0] == "FR" + + +def test_tabula_system_valueerror(config, scratch_then_cache): + # Test with wrong code_boundarycond + with pytest.raises(ValueError): + res = ar.dataportal.tabula_system( + code_country="FR", code_boundarycond="wrong_string" + ) + # Makes sure res not in locals + assert "res" not in locals() + + +def test_resolve_codecountry(config, scratch_then_cache): + # Tests with country string length == 3 + res = ar.dataportal._resolve_codecountry("USA") + # Makes sure code_country is right + assert res == "US" + + # Tests with country number (integer) + res = ar.dataportal._resolve_codecountry(533) + # Makes sure code_country is right + assert res == "AW" + + +def test_openei_api_request(config, scratch_then_cache): + data = {"code_country": "FR"} + res = ar.dataportal.openei_api_request(data) + + # Makes sure result is None (no cache data found) + assert res is None + + +def test_nrel_api_cbr_request(config, scratch_then_cache): + data = {"code_country": "FR"} + res = ar.dataportal.nrel_api_cbr_request(data) + + # Makes sure result returns an error "API_KEY_MISSING" + assert res["error"]["code"] == "API_KEY_MISSING" + + +def test_nrel_api_cbr_request_exception(config, scratch_then_cache): + # Test with wrong code_country + data = {"code_country": "wrong_string"} + res = ar.dataportal.nrel_api_cbr_request(data) + + # Makes sure result returns an error 
"API_KEY_MISSING" + assert res["error"]["code"] == "API_KEY_MISSING" + def test_tabula_multiple(config, scratch_then_cache): country_code = "FR" @@ -43,6 +158,9 @@ def test_tabula_multiple(config, scratch_then_cache): keys=ab.code_buildingtype_column1 + "." + ab.suffix_building_column1, ) + # Makes sure result is not empty + assert list(archetypes["val"]) + @pytest.mark.xfail( condition=os.environ.get("NREL_CONSUMER_KEY") is None, @@ -102,15 +220,39 @@ def test_download_and_load_bld_window(clean_config): assert ws -def test_statcan(config): +def test_statcan(config, scratch_then_cache): data = dict(type="json", lang="E", dguid="2016A000011124", topic=5, notes=0) + response = ar.dataportal.stat_can_request(**data) + print(response) + # Makes sure result is not empty + assert response + + +def test_statcan_error(config, scratch_then_cache): + # Tests statcan with error in inputs + data = dict(type="json", lang="E", dguid="wrong_string", topic=5, notes=0) response = ar.dataportal.stat_can_request(**data) print(response) + # Makes sure result is None (wrong function input "dguid") + assert response is None -def test_statcan_geo(config): + +def test_statcan_geo(config, scratch_then_cache): data = dict(type="json", lang="E", geos="PR", cpt="00") + response = ar.dataportal.stat_can_geo_request(**data) + print(response) + # Makes sure result is not empty + assert response + + +def test_statcan_geo_error(config, scratch_then_cache): + # Tests statcan_geo with error in inputs + data = dict(type="json", lang="E", geos="wrong_string", cpt="00") response = ar.dataportal.stat_can_geo_request(**data) print(response) + + # Makes sure result is not empty + assert response diff --git a/tests/test_trnsys.py b/tests/test_trnsys.py index 8012438f..1673e2ba 100644 --- a/tests/test_trnsys.py +++ b/tests/test_trnsys.py @@ -1,8 +1,12 @@ import io import os +import glob +import shutil import pytest +import archetypal as ar + import pandas as pd from path import Path @@ -56,21 +60,17 @@ 
conditioning_to_b18, adds_sch_ground, adds_sch_setpoint, + t_initial_to_b18, + closest_coords, ) from tests.conftest import get_platform @pytest.fixture( - scope="class", - params=[ - "RefBldgWarehouseNew2004_Chicago.idf", - "ASHRAE9012016_Warehouse_Denver.idf", - "ASHRAE9012016_ApartmentMidRise_Denver.idf", - "5ZoneGeometryTransform.idf", - ], + scope="class", params=["tests/input_data/trnsys/simple_2_zone_sched.idf",], ) -def converttest(request): - file = get_eplus_dirs(settings.ep_version) / "ExampleFiles" / request.param +def converttesteasy(request): + file = request.param window_file = "W74-lib.dat" template_dir = os.path.join("archetypal", "ressources") window_filepath = os.path.join(template_dir, window_file) @@ -92,20 +92,76 @@ def converttest(request): "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" ) - yield idf, file, weather_file, window_filepath, trnsidf_exe, template_d18, kwargs_dict + output_folder = os.path.relpath(settings.data_folder) + + yield idf, file, weather_file, window_filepath, trnsidf_exe, template_d18, output_folder, kwargs_dict del idf -class TestConvert: +class TestConvertEasy: - """Tests convert_idf_to_trnbuild()""" + """Tests functions of trnsys.py using 1 simple/small IDF file""" - def test_get_save_write_schedules_as_input(self, config, converttest): - output_folder = None - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() - try: + def test_assert_files(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Tests assertion if idf_file is None + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(None, None, None, None, None, None) + + # Tests 
assertion if weather file is None + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, None, None, None, None, None) + + # Tests assertion if window_lib is None + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, weather_file, None, None, None, None) + + # Tests assertion if output_folder is None + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, weather_file, window_lib, None, None, None) + + # Tests assertion if trnsidf_exe and template_d18 is None + with pytest.raises(IOError): ( idf_file, weather_file, @@ -114,23 +170,68 @@ def test_get_save_write_schedules_as_input(self, config, converttest): trnsidf_exe, template, ) = _assert_files( - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template + idf_file, weather_file, window_lib, output_folder, None, None ) - except: - output_folder = os.path.relpath(settings.data_folder) - print("Could not assert all paths exist - OK for this test") - schedule_names, schedules = _get_schedules(idf) - _yearlySched_to_csv(idf_file, output_folder, schedule_names, schedules) - schedule_as_input = True - schedules_not_written = _write_schedules( - lines, schedule_names, schedules, schedule_as_input, idf_file - ) - def test_get_save_write_schedules_as_sched(self, config, converttest): - output_folder = None - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() - try: + # Tests assertion if idf_file is a string but not a path + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files("wrong_string", None, None, None, None, None) + + # Tests 
assertion if weather_file is a string but not a path + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, "wrong_string", None, None, None, None) + + # Tests assertion if window_lib is a string but not a path + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, weather_file, "wrong_string", None, None, None) + + # Tests assertion if window_lib is not a string AND not None + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files(idf_file, weather_file, 2, None, None, None) + + # Tests assertion if trnsidf_exe is a string but not a path + with pytest.raises(IOError): + ( + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + template, + ) = _assert_files( + idf_file, weather_file, window_lib, output_folder, "wrong_string", None + ) + + # Tests assertion if template_d18 is a string but not a path + with pytest.raises(IOError): ( idf_file, weather_file, @@ -139,42 +240,595 @@ def test_get_save_write_schedules_as_sched(self, config, converttest): trnsidf_exe, template, ) = _assert_files( - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template + idf_file, + weather_file, + window_lib, + output_folder, + trnsidf_exe, + "wrong_string", ) - except: - output_folder = os.path.relpath(settings.data_folder) - print("Could not assert all paths exist - OK for this test") + + assert output_folder == os.path.relpath(settings.data_folder) + + def test_get_save_write_schedules_as_sched(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Read IDF_T3D template and write lines in variable + 
lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Gets all schedule from IDF + schedule_names, schedules = _get_schedules(idf_2) + # Save schedules in a csv file + _yearlySched_to_csv(idf_file, output_folder, schedule_names, schedules) + # Write schedules directly in T3D file (in lines) + schedule_as_input = False + schedules_not_written = _write_schedules( + lines, schedule_names, schedules, schedule_as_input, idf_file + ) + + # Asserts csv with schedules exists and schedules are written in lines + assert os.path.exists(glob.glob(settings.data_folder + "/*.csv")[0]) + assert "!-SCHEDULE " + schedule_names[0] + "\n" in lines + + def test_write_version_and_building(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Read IDF_T3D template and write lines in variable + lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() + + # Write VERSION and BUILDING info from IDF to lines (T3D) + _write_version(lines, versions) + _write_building(buildings, lines) + + # Asserts version and building information written in lines + assert "Version," + settings.ep_version.replace("-", ".")[:3] + ";\n" in lines + assert buildings[0] in lines + + def test_write_material(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + 
idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Read IDF_T3D template and write lines in variable + lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Write LAYER from IDF to lines (T3D) + _write_materials(lines, materialAirGap, materialNoMass, materials) + + # Asserts materials (material, AirGap, NoMass, etc.) are written in lines + assert "!-LAYER " + materialAirGap[0].Name + "\n" in lines + assert "!-LAYER " + materialNoMass[0].Name + "\n" in lines + assert "!-LAYER " + materials[0].Name + "\n" in lines + + def test_relative_to_absolute(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Clean names of idf objects (e.g. 
'MATERIAL') + log_clear_names = False + clear_name_idf_objects(idf_2, log_clear_names) + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Getting surface to test, by copying it (like that object stay unchanged) + # And can be used after for assertion + surface_init = deepcopy(buildingSurfs[0]) + + # Transform relative coords of a surface to absolute coords + _relative_to_absolute(buildingSurfs[0], 1, 2, 3) + + # Asserts relative coords converted to absolute ones + assert ( + buildingSurfs[0]["Vertex_" + str(1) + "_Xcoordinate"] + == surface_init["Vertex_" + str(1) + "_Xcoordinate"] + 1 + ) + assert ( + buildingSurfs[0]["Vertex_" + str(1) + "_Ycoordinate"] + == surface_init["Vertex_" + str(1) + "_Ycoordinate"] + 2 + ) + assert ( + buildingSurfs[0]["Vertex_" + str(1) + "_Zcoordinate"] + == surface_init["Vertex_" + str(1) + "_Zcoordinate"] + 3 + ) + + def test_save_t3d(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Read IDF_T3D template and write lines in variable + lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() + + # Save T3D file at output_folder + output_folder, t3d_path = _save_t3d(idf_file, lines, output_folder) + + # Asserts path to T3D file exists + assert t3d_path == glob.glob(settings.data_folder + "/*.idf")[0] + + def test_t_initial_to_b18(self, config, converttesteasy): + # Deletes cache folder + if os.path.exists(settings.cache_folder): + shutil.rmtree(settings.cache_folder) + + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + 
template, + output_folder, + kwargs, + ) = converttesteasy + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Clean names of idf objects (e.g. 'MATERIAL') + log_clear_names = False + clear_name_idf_objects(idf_2, log_clear_names) + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Read a b18 file and write lines in variable (b18_lines) + b18_path = "tests/input_data/trnsys/T3D_simple_2_zone.b18" + with open(b18_path) as b18_file: + b18_lines = b18_file.readlines() + + # Creates a constant schedule setpoint over the year + schedules = {"sch_h_setpoint_" + zones[0].Name: {"all values": [18] * 8760}} + zones = [zones[0]] + + # Writes initial temperature of zone in b18_lines (b18 file) + t_initial_to_b18(b18_lines, zones, schedules) + + # Asserts initial temperature is written in b18_lines + assert any("TINITIAL= 18" in mystring for mystring in b18_lines[200:]) + + def test_closest_coords(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + kwargs, + ) = converttesteasy + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Find closest surface to origin (0,0,0) + x, y, z = closest_coords(buildingSurfs, to=[0, 0, 0]) + + # Asserts closest coords + assert x == -5 + assert y == 215 + assert z == 0 + + def 
test_write_to_b18(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + kwargs, + ) = converttesteasy + + # Runs EnergyPlus Simulation + res = run_eplus( + idf_file, + weather_file, + output_directory=None, + ep_version=None, + output_report="htm", + prep_outputs=True, + design_day=True, + ) + + # Copy IDF object, making sure we don't change/overwrite original IDF file + idf_2 = deepcopy(idf) + + # Clean names of idf objects (e.g. 'MATERIAL') + log_clear_names = False + clear_name_idf_objects(idf_2, log_clear_names) + + # Get old:new names equivalence + old_new_names = pd.read_csv( + os.path.join( + settings.data_folder, + Path(idf_file).basename().stripext() + "_old_new_names_equivalence.csv", + ) + ).to_dict() + + # Get objects from IDF + ( + buildingSurfs, + buildings, + constructions, + equipments, + fenestrationSurfs, + globGeomRules, + lights, + locations, + materialAirGap, + materialNoMass, + materials, + peoples, + versions, + zones, + zonelists, + ) = get_idf_objects(idf_2) + + # Read a b18 file and write lines in variable (b18_lines) + b18_path = "tests/input_data/trnsys/T3D_simple_2_zone.b18" + with open(b18_path) as b18_file: + b18_lines = b18_file.readlines() + + # initialize variable + schedules_not_written = [] + + # Gets conditioning (heating and cooling) info from simulation results + heat_name = {} + for i in range(0, len(res["Zone Sensible Heating"])): + key = res["Zone Sensible Heating"].iloc[i, 0] + name = "HEAT_z" + str(res["Zone Sensible Heating"].iloc[i].name) + heat_name[key] = name + cool_name = {} + for i in range(0, len(res["Zone Sensible Cooling"])): + key = res["Zone Sensible Cooling"].iloc[i, 0] + name = "HEAT_z" + str(res["Zone Sensible Cooling"].iloc[i].name) + cool_name[key] = name + + # Selects only 2 first zones + zones = zones[0:2] + peoples = peoples[0:2] + equipments = 
equipments[0:2] + lights = lights[0:2] + + # Writes infiltration in b18_lines (b18 file) + infilt_to_b18(b18_lines, zones, res) + + # Tests both cases, whether schedules are taken as inputs or written in b18_lines + for cond in [True, False]: + schedule_as_input = cond + gains_to_b18( + b18_lines, + zones, + zonelists, + peoples, + lights, + equipments, + schedules_not_written, + res, + old_new_names, + schedule_as_input, + ) + + # Writes conditioning (heating and cooling) in b18_lines (b18 file) + conditioning_to_b18(b18_lines, heat_name, cool_name, zones, old_new_names) + + # Asserts infiltration, internal gains and conditioning are written in b18_lines + assert "INFILTRATION Constant" + "\n" in b18_lines + assert " INFILTRATION = Constant" + "\n" in b18_lines + assert any(peoples[0].Name in mystring for mystring in b18_lines[200:]) + assert any(lights[0].Name in mystring for mystring in b18_lines[200:]) + assert any(equipments[0].Name in mystring for mystring in b18_lines[200:]) + assert any( + heat_name[old_new_names[zones[0].Name.upper()][0]] in mystring + for mystring in b18_lines[200:] + ) + + def test_load_idf_file_and_clean_names(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttesteasy + + # Clean names of idf objects (e.g. 
'MATERIAL') + log_clear_names = False + idf_2 = load_idf_file_and_clean_names(idf_file, log_clear_names) + + # Makes sure material names are unique and are 8 characters long + name = None + unique = False + length = False + for liste in idf_2.idfobjects["MATERIAL"].list2: + if liste[1] != name: + unique = True + name = liste[1] + else: + unique = False + if len(liste[1]) == 8: + length = True + else: + length = False + + assert isinstance(idf_2, ar.idfclass.IDF) + assert unique + assert length + + def test_add_object_and_run_ep(self, config, converttesteasy): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + kwargs, + ) = converttesteasy + + ep_version = None + # Adds Output variable in IDF + outputs = [ + { + "ep_object": "Output:Variable".upper(), + "kwargs": dict( + Variable_Name="Zone Thermostat Heating Setpoint Temperature", + Reporting_Frequency="hourly", + save=True, + ), + }, + { + "ep_object": "Output:Variable".upper(), + "kwargs": dict( + Variable_Name="Zone Thermostat Cooling Setpoint Temperature", + Reporting_Frequency="hourly", + save=True, + ), + }, + ] + + # Runs EnergyPlus Simulation + _, idf = run_eplus( + idf_file, + weather_file, + output_directory=None, + ep_version=ep_version, + output_report=None, + prep_outputs=outputs, + design_day=False, + annual=True, + expandobjects=True, + return_idf=True, + ) + + # Makes sure idf variable is an IDF + assert isinstance(idf, ar.idfclass.IDF) + + +@pytest.fixture( + scope="class", params=["5ZoneGeometryTransform.idf",], +) +def converttest(request): + file = get_eplus_dirs(settings.ep_version) / "ExampleFiles" / request.param + # file = request.param + window_file = "W74-lib.dat" + template_dir = os.path.join("archetypal", "ressources") + window_filepath = os.path.join(template_dir, window_file) + template_d18 = "tests/input_data/trnsys/NewFileTemplate.d18" + trnsidf_exe =
"docker/trnsidf/trnsidf.exe" # 'docker/trnsidf/trnsidf.exe' + + # prepare args (key=value). Key is a unique id for the runs (here the + # file basename is used). Value is a dict of the function arguments + kwargs_dict = { + "u_value": 2.5, + "shgc": 0.6, + "t_vis": 0.78, + "tolerance": 0.05, + "ordered": True, + } + idf = load_idf(file) + + weather_file = os.path.join( + "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" + ) + + output_folder = os.path.relpath(settings.data_folder) + + yield idf, file, weather_file, window_filepath, trnsidf_exe, template_d18, output_folder, kwargs_dict + + del idf + + +class TestConvert: + + """Tests convert_idf_to_trnbuild() with several files""" + + def test_get_save_write_schedules_as_input(self, config, converttest): + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttest + + # Read IDF_T3D template and write lines in variable + lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() + + # Gets all schedule from IDF schedule_names, schedules = _get_schedules(idf) + # Save schedules in a csv file _yearlySched_to_csv(idf_file, output_folder, schedule_names, schedules) - schedule_as_input = False + # Write schedules as inputs in T3D file (in lines) + schedule_as_input = True schedules_not_written = _write_schedules( lines, schedule_names, schedules, schedule_as_input, idf_file ) - def test_write_version_and_building(self, config, converttest): - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - ( - buildingSurfs, - buildings, - constructions, - equipments, - fenestrationSurfs, - globGeomRules, - lights, - locations, - materialAirGap, - materialNoMass, - materials, - peoples, - versions, - zones, - zonelists, - ) = get_idf_objects(idf) - lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() - _write_version(lines, 
versions) - _write_building(buildings, lines) - def test_write_idf_objects(self, config, converttest): + # Gets from fixture paths to files and IDF object to be used in test ( idf, idf_file, @@ -182,6 +836,7 @@ def test_write_idf_objects(self, config, converttest): window_lib, trnsidf_exe, template, + output_folder, kwargs, ) = converttest @@ -207,6 +862,35 @@ def test_write_idf_objects(self, config, converttest): zonelists, ) = get_idf_objects(idf) + # Creates low thermal resistance construction and materials to be deleted + # To improve coverage of test + idf.newidfobject( + "MATERIAL", + Name="low_res_mat", + Roughness="Smooth", + Thickness=0.0008, + Conductivity=45.28, + Density=7824, + Specific_Heat=500, + Thermal_Absorptance=0.7, + Solar_Absorptance=0.7, + Visible_Absorptance=0.7, + ) + idf.newidfobject( + "CONSTRUCTION", Name="low_res_constr", Outside_Layer="low_res_mat" + ) + + # Changes Outside boundary of surface to adiabatic + # To improve coverage of test + buildingSurfs[0].Outside_Boundary_Condition = "Adiabatic" + + # Changes coords of zone + # To improve coverage of test + zones[0].X_Origin = "" + zones[0].Y_Origin = "" + zones[0].Z_Origin = "" + zones[0].Multiplier = "" + # Get all construction EXCEPT fenestration ones constr_list = _get_constr_list(buildingSurfs) @@ -247,14 +931,37 @@ def test_write_idf_objects(self, config, converttest): ordered, ) + # Removes low conductivity material and constructions mat_name = _remove_low_conductivity(constructions, idf, materials) + + # Determine if coordsSystem is "World" (all zones at (0,0,0)) + coordSys = _is_coordSys_world("Relative", zones) + + # Changes Geom Rule to "Relative" + # To improve coverage of test + globGeomRules[0].Coordinate_System = "Relative" + globGeomRules[0].Daylighting_Reference_Point_Coordinate_System = "Relative" + globGeomRules[0].Rectangular_Surface_Coordinate_System = "Relative" + + # Change Outside boundary condition of surface to itself + # To improve coverage of test + 
buildingSurfs[5].Outside_Boundary_Condition_Object = "C5-1" + + # Change Outside boundary condition of surface to Zone and adjacent to Outdoors + # To improve coverage of test + buildingSurfs[0].Outside_Boundary_Condition = "Zone" + buildingSurfs[0].Outside_Boundary_Condition_Object = buildingSurfs[6].Zone_Name + buildingSurfs[6].Outside_Boundary_Condition = "Outdoors" + + # Change Outside boundary condition of surface to Zone and adjacent to Zone.Name + # To improve coverage of test + buildingSurfs[1].Outside_Boundary_Condition = "Zone" + buildingSurfs[1].Outside_Boundary_Condition_Object = "SPACE3-1" + # Write LOCATION and GLOBALGEOMETRYRULES from IDF to lines (T3D) and # define if coordinate system is "Relative" coordSys = _write_location_geomrules(globGeomRules, lines, locations) - # Determine if coordsSystem is "World" (all zones at (0,0,0)) - coordSys = _is_coordSys_world(coordSys, zones) - # Change coordinates from relative to absolute for building surfaces _change_relative_coords(buildingSurfs, coordSys, idf) @@ -262,9 +969,6 @@ def test_write_idf_objects(self, config, converttest): _add_change_adj_surf(buildingSurfs, idf) buildingSurfs = idf.idfobjects["BUILDINGSURFACE:DETAILED"] - # region Write VARIABLEDICTONARY (Zone, BuildingSurf, FenestrationSurf) - # from IDF to lines (T3D) - # Get all surfaces having Outside boundary condition with the ground. 
# To be used to find the window's slopes n_ground = _get_ground_vertex(buildingSurfs) @@ -292,9 +996,7 @@ def test_write_idf_objects(self, config, converttest): # lines (T3D) # Get window from library # window = (win_id, description, design, u_win, shgc_win, t_sol_win, - # rf_sol, - # t_vis_win, lay_win, width, window_bunches[win_id], - # and maybe tolerance) + # rf_sol, t_vis_win, lay_win, width, window_bunches[win_id], and maybe tolerance) win_u_value = kwargs.get("u_value", 2.2) win_shgc = kwargs.get("shgc", 0.64) win_tvis = kwargs.get("t_vis", 0.8) @@ -309,39 +1011,23 @@ def test_write_idf_objects(self, config, converttest): # Write window pool in lines _write_winPool(lines, window) - def test_write_material(self, config, converttest): - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - - # Read IDF_T3D template and write lines in variable - lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() - - # Get objects from IDF file - ( - buildingSurfs, - buildings, - constructions, - equipments, - fenestrationSurfs, - globGeomRules, - lights, - locations, - materialAirGap, - materialNoMass, - materials, - peoples, - versions, - zones, - zonelists, - ) = get_idf_objects(idf) - - # Write LAYER from IDF to lines (T3D) - _write_materials(lines, materialAirGap, materialNoMass, materials) - def test_write_gains_conditioning(self, config, converttest): - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest + # Gets from fixture paths to files and IDF object to be used in test + ( + idf, + idf_file, + weather_file, + window_lib, + trnsidf_exe, + template, + output_folder, + _, + ) = converttest - # Run EnergyPlus Simulation + # Gets EnergyPlus version ep_version = settings.ep_version + + # Adds Output variable in IDF outputs = [ { "ep_object": "Output:Variable".upper(), @@ -360,6 +1046,8 @@ def test_write_gains_conditioning(self, config, converttest): ), }, ] + + # Run EnergyPlus Simulation _, idf = 
run_eplus( idf_file, weather_file, @@ -373,7 +1061,7 @@ def test_write_gains_conditioning(self, config, converttest): return_idf=True, ) - # Outpout reports + # Output reports htm = idf.htm sql = idf.sql sql_file = idf.sql_file @@ -440,201 +1128,12 @@ def test_write_gains_conditioning(self, config, converttest): zones, df_cooling_setpoint, old_new_names, schedule_names, schedules, "c" ) + # Writes conditioning in lines schedule_as_input = True heat_dict, cool_dict = _write_conditioning( htm, lines, schedules, old_new_names, schedule_as_input ) - def test_relative_to_absolute(self, config, converttest): - output_folder = None - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - try: - ( - idf_file, - weather_file, - window_lib, - output_folder, - trnsidf_exe, - template, - ) = _assert_files( - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template - ) - except: - output_folder = os.path.relpath(settings.data_folder) - print("Could not assert all paths exist - OK for this test") - - # Check if cache exists - log_clear_names = False - idf = load_idf(idf_file) - - # Clean names of idf objects (e.g. 
'MATERIAL') - idf_2 = deepcopy(idf) - clear_name_idf_objects(idf_2, log_clear_names) - - # Get objects from IDF file - ( - buildingSurfs, - buildings, - constructions, - equipments, - fenestrationSurfs, - globGeomRules, - lights, - locations, - materialAirGap, - materialNoMass, - materials, - peoples, - versions, - zones, - zonelists, - ) = get_idf_objects(idf_2) - - _relative_to_absolute(buildingSurfs[0], 1, 2, 3) - - def test_save_t3d(self, config, converttest): - output_folder = None - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - try: - ( - idf_file, - weather_file, - window_lib, - output_folder, - trnsidf_exe, - template, - ) = _assert_files( - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template - ) - except: - output_folder = os.path.relpath(settings.data_folder) - print("Could not assert all paths exist - OK for this test") - - # Read IDF_T3D template and write lines in variable - lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines() - - # Save T3D file at output_folder - output_folder, t3d_path = _save_t3d(idf_file, lines, output_folder) - - def test_write_to_b18(self, config, converttest): - output_folder = None - ( - idf, - idf_file, - weather_file, - window_lib, - trnsidf_exe, - template, - kwargs, - ) = converttest - try: - ( - idf_file, - weather_file, - window_lib, - output_folder, - trnsidf_exe, - template, - ) = _assert_files( - idf_file, weather_file, window_lib, output_folder, trnsidf_exe, template - ) - except: - output_folder = os.path.relpath(settings.data_folder) - print("Could not assert all paths exist - OK for this test") - - # Run EnergyPlus Simulation - res = run_eplus( - idf_file, - weather_file, - output_directory=None, - ep_version=None, - output_report="htm", - prep_outputs=True, - design_day=True, - ) - - # Check if cache exists - log_clear_names = False - idf = load_idf(idf_file) - - # Clean names of idf objects (e.g. 
'MATERIAL') - idf_2 = deepcopy(idf) - clear_name_idf_objects(idf_2, log_clear_names) - - # Get old:new names equivalence - old_new_names = pd.read_csv( - os.path.join( - settings.data_folder, - Path(idf_file).basename().stripext() + "_old_new_names_equivalence.csv", - ) - ).to_dict() - - # Get objects from IDF file - ( - buildingSurfs, - buildings, - constructions, - equipments, - fenestrationSurfs, - globGeomRules, - lights, - locations, - materialAirGap, - materialNoMass, - materials, - peoples, - versions, - zones, - zonelists, - ) = get_idf_objects(idf_2) - - b18_path = "tests/input_data/trnsys/T3D_simple_2_zone.b18" - - schedules_not_written = [] - - heat_name = {} - for i in range(0, len(res["Zone Sensible Heating"])): - key = res["Zone Sensible Heating"].iloc[i, 0] - name = "HEAT_z" + str(res["Zone Sensible Heating"].iloc[i].name) - heat_name[key] = name - - cool_name = {} - for i in range(0, len(res["Zone Sensible Cooling"])): - key = res["Zone Sensible Cooling"].iloc[i, 0] - name = "HEAT_z" + str(res["Zone Sensible Cooling"].iloc[i].name) - cool_name[key] = name - - with open(b18_path) as b18_file: - b18_lines = b18_file.readlines() - - zones = zones[0:2] - peoples = peoples[0:2] - equipments = equipments[0:2] - lights = lights[0:2] - - infilt_to_b18(b18_lines, zones, res) - - schedule_as_input = True - gains_to_b18( - b18_lines, - zones, - zonelists, - peoples, - lights, - equipments, - schedules_not_written, - res, - old_new_names, - schedule_as_input, - ) - - conditioning_to_b18(b18_lines, heat_name, cool_name, zones, old_new_names) - - def test_load_idf_file_and_clean_names(self, config, converttest): - idf, idf_file, weather_file, window_lib, trnsidf_exe, template, _ = converttest - log_clear_names = False - idf_2 = load_idf_file_and_clean_names(idf_file, log_clear_names) - @pytest.fixture( params=[ @@ -661,8 +1160,7 @@ def trnbuild_file(config, request): ) class TestTrnBuild: def test_trnbuild_from_idf(self, config, trnbuild_file): - # List files 
here - + # Gets file paths/names window_file = "W74-lib.dat" template_dir = os.path.join("archetypal", "ressources") window_filepath = os.path.join(template_dir, window_file) @@ -670,8 +1168,7 @@ def test_trnbuild_from_idf(self, config, trnbuild_file): "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" ) - # prepare args (key=value). Key is a unique id for the runs (here the - # file basename is used). Value is a dict of the function arguments + # prepare args (key=value)f or EnergyPlus version to use, windows parameters,etc. kwargs_dict = { "ep_version": settings.ep_version, "u_value": 2.5, @@ -683,7 +1180,10 @@ def test_trnbuild_from_idf(self, config, trnbuild_file): "ordered": True, } + # Gets IDF file path from fixture file = trnbuild_file + + # Convert IDF to BUI file convert_idf_to_trnbuild( idf_file=file, weather_file=weather_file, @@ -695,13 +1195,10 @@ def test_trnbuild_from_idf(self, config, trnbuild_file): @pytest.mark.win32 def test_trnbuild_from_idf_parallel(self, config, trnbuild_file): - # All IDF files - # List files here + # Gets IDF file path from fixture files = trnbuild_file - # window_file = 'W74-lib.dat' - # window_filepath = os.path.join(file_upper_path, window_file) - + # Path to weather file weather_file = os.path.join( "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" ) @@ -713,6 +1210,7 @@ def test_trnbuild_from_idf_parallel(self, config, trnbuild_file): for file in files } + # Convert IDF files to BUI ones usinf parallel process result = parallel_process(in_dict, convert_idf_to_trnbuild, 4, use_kwargs=True) assert not any(isinstance(a, Exception) for a in result.values()) @@ -720,11 +1218,12 @@ def test_trnbuild_from_idf_parallel(self, config, trnbuild_file): @pytest.mark.darwin @pytest.mark.linux def test_trnbuild_from_idf_parallel_darwin_or_linux(self, config): - # All IDF files - # List files here + # Path to EnergyPlus example files file_upper_path = os.path.join( get_eplus_dirs(settings.ep_version), 
"ExampleFiles" ) + + # IDF file names files = [ "RefBldgWarehouseNew2004_Chicago.idf", "ASHRAE9012016_Warehouse_Denver.idf", @@ -732,6 +1231,7 @@ def test_trnbuild_from_idf_parallel_darwin_or_linux(self, config): "5ZoneGeometryTransform.idf", ] + # Path to weather file weather_file = os.path.join( "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" ) @@ -748,14 +1248,21 @@ def test_trnbuild_from_idf_parallel_darwin_or_linux(self, config): for file in files } + # Convert IDF files to BUI ones usinf parallel process result = parallel_process(in_dict, convert_idf_to_trnbuild, 4, use_kwargs=True) + + # Print results [print(a) for a in result.values() if isinstance(a, Exception)] + assert not any(isinstance(a, Exception) for a in result.values()) @pytest.mark.win32 def test_trnbuild_idf_win32(self, config): + # Paths to T3D and B18 template files idf_file = "tests/input_data/trnsys/Building.idf" template = "tests/input_data/trnsys/NewFileTemplate.d18" + + # Convert T3D file to BUI file res = trnbuild_idf(idf_file, template=template, nonum=True) assert res @@ -768,9 +1275,12 @@ def test_trnbuild_idf_win32(self, config): "trnsidf.exe is copied in ./docker/trnsidf", ) def test_trnbuild_idf_darwin_or_linux(self, config): + # Paths to T3D, B18 template and trnsidf.exe files idf_file = "tests/input_data/trnsys/Building.idf" template = "tests/input_data/trnsys/NewFileTemplate.d18" trnsidf_exe = "docker/trnsidf/trnsidf.exe" + + # Convert T3D file to BUI file res = trnbuild_idf( idf_file, template=template, @@ -785,8 +1295,7 @@ def test_trnbuild_idf_darwin_or_linux(self, config): assert res def test_trnbuild_from_simple_idf(self, config): - # List files here - + # Path to weather file, window library and T3D template window_file = "W74-lib.dat" template_dir = os.path.join("archetypal", "ressources") window_filepath = os.path.join(template_dir, window_file) @@ -794,8 +1303,7 @@ def test_trnbuild_from_simple_idf(self, config): "tests", "input_data", 
"CAN_QC_Montreal-McTavish.716120_CWEC2016.epw" ) - # prepare args (key=value). Key is a unique id for the runs (here the - # file basename is used). Value is a dict of the function arguments + # prepare args (key=value)f or EnergyPlus version to use, windows parameters,etc. # WINDOW = 2-WSV_#3_Air kwargs_dict = { "ep_version": "9-2-0", @@ -808,7 +1316,10 @@ def test_trnbuild_from_simple_idf(self, config): "ordered": True, } + # Path to IDF file file = os.path.join("tests", "input_data", "trnsys", "simple_2_zone.idf") + + # Converts IDF to BUI convert_idf_to_trnbuild( idf_file=file, weather_file=weather_file,