From 02a2f91e802185fd6b31929f99e31fecbb41c499 Mon Sep 17 00:00:00 2001
From: scottrp <45947939+scottrp@users.noreply.github.com>
Date: Mon, 18 Dec 2023 08:48:52 -0800
Subject: [PATCH] feat(set all data external options): additional parameters added (#2041)

* feat(set all data external options): parameters added

"binary" and "base_name" parameters added to set_all_data_external

* feat(set all external binary): doc update

* feat(set all ext options)
---
 .docs/Notebooks/mf6_data_tutorial08.py | 15 ++++--
 autotest/regression/test_mf6.py        | 27 +++++++++++
 flopy/mf6/data/mfdata.py               | 12 ++---
 flopy/mf6/data/mfdataarray.py          | 42 +++++++++--------
 flopy/mf6/data/mfdatalist.py           | 32 ++++++-------
 flopy/mf6/data/mfdataplist.py          | 63 +++++++++++++-------------
 flopy/mf6/data/mfdatascalar.py         | 10 ++--
 flopy/mf6/mfmodel.py                   | 17 ++++++-
 flopy/mf6/mfpackage.py                 | 57 ++++++++++++++++++-----
 flopy/mf6/mfsimbase.py                 | 38 ++++++++++++++--
 flopy/mf6/utils/model_splitter.py      |  4 +-
 11 files changed, 212 insertions(+), 105 deletions(-)

diff --git a/.docs/Notebooks/mf6_data_tutorial08.py b/.docs/Notebooks/mf6_data_tutorial08.py
index c26578d0f..c88dd0b10 100644
--- a/.docs/Notebooks/mf6_data_tutorial08.py
+++ b/.docs/Notebooks/mf6_data_tutorial08.py
@@ -1,20 +1,21 @@
 # ---
 # jupyter:
 #   jupytext:
+#     notebook_metadata_filter: metadata
 #     text_representation:
 #       extension: .py
 #       format_name: light
-#       format_version: "1.5"
-#       jupytext_version: 1.5.1
+#       format_version: '1.5'
+#       jupytext_version: 1.14.4
 #   kernelspec:
-#     display_name: Python 3
+#     display_name: Python 3 (ipykernel)
 #     language: python
 #     name: python3
 #   metadata:
 #     section: mf6
 # ---

-# # MODFLOW 6: Data Storage Information and Performance Optimization
+# # MODFLOW 6: External Files, Binary Data, and Performance Optimization
 #
 # This tutorial shows the different options for storing MODFLOW data in FloPy.
 # Interaction with a FloPy MODFLOW 6 model is different from other models,
@@ -36,7 +37,7 @@
 # This tutorial focuses on the different storage options for MODFLOW data and
 # how to optimize data storage read/write speed.

-# ## Introduction to Data Storage Information
+# ## Introduction to Data Storage Options
 # MODFLOW array and list data can either be stored internally or externally in
 # text or binary files. Additionally array data can have a factor applied to
 # them and can have a format flag/code to define how these data will be
@@ -215,6 +216,10 @@
 print(f"New binary flag for stress period 1: {spd_record[0]['binary']}")
 print(f"New filename for stress period 2: {spd_record[1]['filename']}")

+# An alternative to individually setting each file to external is to call the set_all_data_external method (there is also a set_all_data_internal method to do the opposite). While this requires less code, it does not give you the ability to set the names of each individual external file. By setting the binary parameter to True, FloPy will store data to binary files wherever possible.
+ +sim.set_all_data_external(binary=True) + # ## Optimizing FloPy Performance # # By default FloPy will perform a number of verification checks on your data diff --git a/autotest/regression/test_mf6.py b/autotest/regression/test_mf6.py index 2dfa44f8b..9cc7db187 100644 --- a/autotest/regression/test_mf6.py +++ b/autotest/regression/test_mf6.py @@ -4240,7 +4240,34 @@ def test045_lake1ss_table(function_tmpdir, example_data_path): save_folder = function_tmpdir / "save" save_folder.mkdir() sim.set_sim_path(save_folder) + sim.set_all_data_external( + external_data_folder="test_folder", + base_name="ext_file", + binary=True, + ) sim.write_simulation() + # verify external files were written + ext_folder = os.path.join(save_folder, "test_folder") + files_to_check = [ + "ext_file_lakeex1b.dis_botm_layer1.bin", + "ext_file_lakeex1b.dis_botm_layer2.bin", + "ext_file_lakeex1b.dis_botm_layer3.bin", + "ext_file_lakeex1b.dis_botm_layer4.bin", + "ext_file_lakeex1b.dis_botm_layer5.bin", + "ext_file_lakeex1b.npf_k_layer1.bin", + "ext_file_lakeex1b.npf_k_layer5.bin", + "ext_file_lakeex1b.chd_stress_period_data_1.bin", + "ext_file_lakeex1b.lak_connectiondata.txt", + "ext_file_lakeex1b.lak_packagedata.txt", + "ext_file_lakeex1b.lak_perioddata_1.txt", + "ext_file_lakeex1b_table.ref_table.txt", + "ext_file_lakeex1b.evt_depth_1.bin", + "ext_file_lakeex1b.evt_rate_1.bin", + "ext_file_lakeex1b.evt_surface_1.bin", + ] + for file in files_to_check: + data_file_path = os.path.join(ext_folder, file) + assert os.path.exists(data_file_path) # run simulation success, buff = sim.run_simulation() diff --git a/flopy/mf6/data/mfdata.py b/flopy/mf6/data/mfdata.py index 2aafe39f0..2f992d8a8 100644 --- a/flopy/mf6/data/mfdata.py +++ b/flopy/mf6/data/mfdata.py @@ -251,7 +251,7 @@ def __init__( self._data_type = structure.type self._keyword = "" if self._simulation_data is not None: - self._data_dimensions = DataDimensions(dimensions, structure) + self.data_dimensions = DataDimensions(dimensions, structure) # build a unique path in the simulation dictionary self._org_path = self._path index = 0 @@ -380,13 +380,13 @@ def layer_shape(self): layers = [] layer_dims = self.structure.data_item_structures[0].layer_dims if len(layer_dims) == 1: - layers.append(self._data_dimensions.get_model_grid().num_layers()) + layers.append(self.data_dimensions.get_model_grid().num_layers()) else: for layer in layer_dims: if layer == "nlay": # get the layer size from the model grid try: - model_grid = self._data_dimensions.get_model_grid() + model_grid = self.data_dimensions.get_model_grid() except Exception as ex: type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -521,13 +521,13 @@ def _get_constant_formatting_string( const_val, data_type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, verify_data=self._simulation_data.verify_data, ) return f"{sim_data.indent_string.join(const_format)}{suffix}" def _get_aux_var_name(self, aux_var_index): - aux_var_names = self._data_dimensions.package_dim.get_aux_variables() + aux_var_names = self.data_dimensions.package_dim.get_aux_variables() # TODO: Verify that this works for multi-dimensional layering return aux_var_names[0][aux_var_index[0] + 1] @@ -608,7 +608,7 @@ def _get_external_formatting_str( self, fname, factor, binary, iprn, data_type, ext_file_action ): file_mgmt = self._simulation_data.mfpath - model_name = self._data_dimensions.package_dim.model_dim[0].model_name + model_name = self.data_dimensions.package_dim.model_dim[0].model_name ext_file_path = 
file_mgmt.get_updated_path( fname, model_name, ext_file_action ) diff --git a/flopy/mf6/data/mfdataarray.py b/flopy/mf6/data/mfdataarray.py index 51f47bf0c..2825747a3 100644 --- a/flopy/mf6/data/mfdataarray.py +++ b/flopy/mf6/data/mfdataarray.py @@ -348,7 +348,7 @@ def supports_layered(self): """ try: - model_grid = self._data_dimensions.get_model_grid() + model_grid = self.data_dimensions.get_model_grid() except Exception as ex: type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -381,7 +381,7 @@ def set_layered_data(self, layered_data): """ if layered_data is True and self.structure.layered is False: if ( - self._data_dimensions.get_model_grid().grid_type() + self.data_dimensions.get_model_grid().grid_type() == DiscretizationType.DISU ): comment = f"Layered option not available for unstructured grid. {self._path}" @@ -430,7 +430,7 @@ def make_layered(self): ) else: if ( - self._data_dimensions.get_model_grid().grid_type() + self.data_dimensions.get_model_grid().grid_type() == DiscretizationType.DISU ): comment = f"Layered option not available for unstructured grid. {self._path}" @@ -482,6 +482,7 @@ def store_as_external_file( Whether to replace an existing external file. check_data : bool Verify data prior to storing + """ storage = self._get_storage_obj() if storage is None: @@ -861,11 +862,11 @@ def _set_record(self, data_record): ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "setting record", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -933,7 +934,7 @@ def _set_data( # handle special case of aux variables in an array self.layered = True aux_var_names = ( - self._data_dimensions.package_dim.get_aux_variables() + self.data_dimensions.package_dim.get_aux_variables() ) if len(aux_data) == len(aux_var_names[0]) - 1: for layer, aux_var_data in enumerate(aux_data): @@ -980,11 +981,11 @@ def _set_data( ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "setting aux variables", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -1064,7 +1065,7 @@ def load( self._resync() if self.structure.layered: try: - model_grid = self._data_dimensions.get_model_grid() + model_grid = self.data_dimensions.get_model_grid() except Exception as ex: type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -1101,7 +1102,7 @@ def load( else: file_access = MFFileAccessArray( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -1272,7 +1273,7 @@ def _new_storage( return DataStorage( self._simulation_data, self._model_or_sim, - self._data_dimensions, + self.data_dimensions, self._get_file_entry, DataStorageType.internal_array, DataStructureType.ndarray, @@ -1284,7 +1285,7 @@ def _new_storage( return DataStorage( self._simulation_data, self._model_or_sim, - self._data_dimensions, + self.data_dimensions, 
self._get_file_entry, DataStorageType.internal_array, DataStructureType.ndarray, @@ -1402,7 +1403,7 @@ def _get_file_entry_layer( self._simulation_data.debug, ex, ) - package_dim = self._data_dimensions.package_dim + package_dim = self.data_dimensions.package_dim model_name = package_dim.model_dim[0].model_name self._simulation_data.mfpath.add_ext_file(file_path, model_name) return file_entry @@ -1430,7 +1431,7 @@ def _get_data_layer_string(self, layer, data_indent): ) file_access = MFFileAccessArray( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -1683,6 +1684,7 @@ def store_as_external_file( Whether to replace an existing external file. check_data : bool Verify data prior to storing + """ # store each stress period in separate file(s) for sp in self._data_storage.keys(): @@ -1803,7 +1805,7 @@ def get_record(self, key=None): """ if self._data_storage is not None and len(self._data_storage) > 0: if key is None: - sim_time = self._data_dimensions.package_dim.model_dim[ + sim_time = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time num_sp = sim_time.get_num_stress_periods() @@ -1825,7 +1827,7 @@ def get_data(self, key=None, apply_mult=True, **kwargs): """ if self._data_storage is not None and len(self._data_storage) > 0: if key is None: - sim_time = self._data_dimensions.package_dim.model_dim[ + sim_time = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time num_sp = sim_time.get_num_stress_periods() diff --git a/flopy/mf6/data/mfdatalist.py b/flopy/mf6/data/mfdatalist.py index efb545bc5..d1e4e123b 100644 --- a/flopy/mf6/data/mfdatalist.py +++ b/flopy/mf6/data/mfdatalist.py @@ -142,7 +142,7 @@ def to_array(self, kper=0, mask=False): for a selected stress period. 
The dictionary keys are the MFDataList dtype names for the stress period data.""" sarr = self.get_data(key=kper) - model_grid = self._data_dimensions.get_model_grid() + model_grid = self.data_dimensions.get_model_grid() return list_to_array(sarr, model_grid, kper, mask) def new_simulation(self, sim_data): @@ -761,7 +761,7 @@ def _get_file_entry( ): try: # freeze model grid to boost performance - self._data_dimensions.lock() + self.data_dimensions.lock() # init indent = self._simulation_data.indent_string file_entry = [] @@ -872,7 +872,7 @@ def _get_file_entry( self._crnt_line_num += 1 # unfreeze model grid - self._data_dimensions.unlock() + self.data_dimensions.unlock() return "".join(file_entry) def _get_file_entry_record( @@ -913,7 +913,7 @@ def _get_file_entry_record( ex, ) else: - data_dim = self._data_dimensions + data_dim = self.data_dimensions data_line = data_complete[mflist_line] for data_item in data_set.data_item_structures: if data_item.is_aux: @@ -931,7 +931,7 @@ def _get_file_entry_record( data_val, data_item.type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, data_item.is_cellid, data_item.possible_cellid, data_item, @@ -1028,7 +1028,7 @@ def _get_file_entry_record( model_num = DatumUtil.cellid_model_num( data_item, self.structure.model_data, - self._data_dimensions.package_dim.model_dim, + self.data_dimensions.package_dim.model_dim, ) model_grid = data_dim.get_model_grid(model_num) cellid_size = ( @@ -1180,7 +1180,7 @@ def _get_file_entry_record( data_line[data_index], k_data_item.type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, k_data_item.is_cellid, k_data_item.possible_cellid, k_data_item, @@ -1244,7 +1244,7 @@ def _get_file_entry_record( data_val, DatumType.string, self._simulation_data, - self._data_dimensions, + self.data_dimensions, False, data_item=data_item, verify_data=self._simulation_data.verify_data, @@ -1257,7 +1257,7 @@ def _get_file_entry_record( data_val, data_item.type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, data_item.is_cellid, data_item.possible_cellid, data_item, @@ -1369,7 +1369,7 @@ def load( else: file_access = MFFileAccessList( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -1383,7 +1383,7 @@ def _new_storage(self, stress_period=0): return DataStorage( self._simulation_data, self._model_or_sim, - self._data_dimensions, + self.data_dimensions, self._get_file_entry, DataStorageType.internal_array, DataStructureType.recarray, @@ -1546,8 +1546,8 @@ def data(self): @property def masked_4D_arrays(self): """Returns list data as a masked 4D array.""" - model_grid = self._data_dimensions.get_model_grid() - nper = self._data_dimensions.package_dim.model_dim[ + model_grid = self.data_dimensions.get_model_grid() + nper = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time.get_num_stress_periods() # get the first kper @@ -1593,8 +1593,8 @@ def masked_4D_arrays(self): def masked_4D_arrays_itr(self): """Returns list data as an iterator of a masked 4D array.""" - model_grid = self._data_dimensions.get_model_grid() - nper = self._data_dimensions.package_dim.model_dim[ + model_grid = self.data_dimensions.get_model_grid() + nper = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time.get_num_stress_periods() # get the first kper @@ -1796,7 +1796,7 @@ def get_data(self, key=None, apply_mult=False, **kwargs): if key is None: if "array" in kwargs: output = [] - sim_time = 
self._data_dimensions.package_dim.model_dim[ + sim_time = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time num_sp = sim_time.get_num_stress_periods() diff --git a/flopy/mf6/data/mfdataplist.py b/flopy/mf6/data/mfdataplist.py index 508c3412f..95460bf28 100644 --- a/flopy/mf6/data/mfdataplist.py +++ b/flopy/mf6/data/mfdataplist.py @@ -222,7 +222,7 @@ def _process_open_close_line(self, arr_line, store=True): Process open/close line extracting the multiplier, print format, binary flag, data file path, and any comments """ - data_dim = self._data_dimensions + data_dim = self.data_dimensions ( multiplier, print_format, @@ -310,10 +310,10 @@ def _get_cellid_size(self, data_item_name): """get the number of spatial coordinates used in the cellid""" model_num = datautil.DatumUtil.cellid_model_num( data_item_name, - self._data_dimensions.structure.model_data, - self._data_dimensions.package_dim.model_dim, + self.data_dimensions.structure.model_data, + self.data_dimensions.package_dim.model_dim, ) - model_grid = self._data_dimensions.get_model_grid(model_num=model_num) + model_grid = self.data_dimensions.get_model_grid(model_num=model_num) return model_grid.get_num_spatial_coordinates() def _build_data_header(self): @@ -330,7 +330,7 @@ def _build_data_header(self): s_type = pandas.StringDtype f_type = np.float64 i_type = np.int64 - data_dim = self._data_dimensions + data_dim = self.data_dimensions # loop through data structure definition information for data_item, index in zip( self.structure.data_item_structures, @@ -651,11 +651,11 @@ def set_data(self, data, autofill=False, check_data=True, append=False): ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "setting list data", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -692,11 +692,11 @@ def set_data(self, data, autofill=False, check_data=True, append=False): ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "setting list data", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -713,11 +713,11 @@ def set_data(self, data, autofill=False, check_data=True, append=False): ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "setting list data", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -772,7 +772,7 @@ def to_array(self, kper=0, mask=False): for a selected stress period. 
The dictionary keys are the MFDataList dtype names for the stress period data.""" sarr = self.get_data(key=kper) - model_grid = self._data_dimensions.get_model_grid() + model_grid = self.data_dimensions.get_model_grid() return list_to_array(sarr, model_grid, kper, mask) def set_record(self, record, autofill=False, check_data=True): @@ -798,20 +798,20 @@ def set_record(self, record, autofill=False, check_data=True): if "binary" in record: if ( record["binary"] - and self._data_dimensions.package_dim.boundnames() + and self.data_dimensions.package_dim.boundnames() ): message = ( "Unable to store list data ({}) to a binary " "file when using boundnames" - ".".format(self._data_dimensions.structure.name) + ".".format(self.data_dimensions.structure.name) ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( - self._data_dimensions.structure.get_model(), - self._data_dimensions.structure.get_package(), - self._data_dimensions.structure.path, + self.data_dimensions.structure.get_model(), + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.path, "writing list data to binary file", - self._data_dimensions.structure.name, + self.data_dimensions.structure.name, inspect.stack()[0][3], type_, value_, @@ -1159,7 +1159,7 @@ def _read_text_data(self, fd_data_file, first_line, external_file=False): None, True, self.path, - self._data_dimensions.package_dim, + self.data_dimensions.package_dim, self._package, self._block, ) @@ -1181,7 +1181,7 @@ def _save_binary_data(self, fd_data_file, data): # write file_access = MFFileAccessList( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -1224,7 +1224,7 @@ def _load_external_data(self, data_storage): if data_storage.binary: file_access = MFFileAccessList( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -1441,7 +1441,7 @@ def _resolve_ext_file_path(self, data_storage): returned the resolved relative path of external file in "data_storage" """ # pathing to external file - data_dim = self._data_dimensions + data_dim = self.data_dimensions model_name = data_dim.package_dim.model_dim[0].model_name fp_relative = data_storage.fname if model_name is not None and fp_relative is not None: @@ -1985,7 +1985,6 @@ def store_as_external_file( Whether to replace an existing external file. 
check_data : bool Verify data prior to storing - """ self._cache_model_grid = True for sp in self._data_storage.keys(): @@ -2125,7 +2124,7 @@ def get_data(self, key=None, apply_mult=False, dataframe=False, **kwargs): if key is None: if "array" in kwargs: output = [] - sim_time = self._data_dimensions.package_dim.model_dim[ + sim_time = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time num_sp = sim_time.get_num_stress_periods() @@ -2201,8 +2200,8 @@ def set_data(self, data, key=None, autofill=False): def masked_4D_arrays_itr(self): """Returns list data as an iterator of a masked 4D array.""" - model_grid = self._data_dimensions.get_model_grid() - nper = self._data_dimensions.package_dim.model_dim[ + model_grid = self.data_dimensions.get_model_grid() + nper = self.data_dimensions.package_dim.model_dim[ 0 ].simulation_time.get_num_stress_periods() # get the first kper diff --git a/flopy/mf6/data/mfdatascalar.py b/flopy/mf6/data/mfdatascalar.py index 88db11d06..ca6e7e63a 100644 --- a/flopy/mf6/data/mfdatascalar.py +++ b/flopy/mf6/data/mfdatascalar.py @@ -191,7 +191,7 @@ def set_data(self, data): data_struct = self.structure.data_item_structures[0] try: converted_data = convert_data( - data, self._data_dimensions, self._data_type, data_struct + data, self.data_dimensions, self._data_type, data_struct ) except Exception as ex: type_, value_, traceback_ = sys.exc_info() @@ -479,7 +479,7 @@ def get_file_entry( current_data, self._data_type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, data_item=data_item, ) ) @@ -555,7 +555,7 @@ def get_file_entry( data, self._data_type, self._simulation_data, - self._data_dimensions, + self.data_dimensions, data_item=data_item, verify_data=self._simulation_data.verify_data, ) @@ -635,7 +635,7 @@ def load( self._resync() file_access = MFFileAccessScalar( self.structure, - self._data_dimensions, + self.data_dimensions, self._simulation_data, self._path, self._current_key, @@ -653,7 +653,7 @@ def _new_storage(self, stress_period=0): return DataStorage( self._simulation_data, self._model_or_sim, - self._data_dimensions, + self.data_dimensions, self.get_file_entry, DataStorageType.internal_array, DataStructureType.scalar, diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index 54ce7f9e8..00e9d260d 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -1601,7 +1601,11 @@ def rename_all_packages(self, name): ) def set_all_data_external( - self, check_data=True, external_data_folder=None + self, + check_data=True, + external_data_folder=None, + base_name=None, + binary=False, ): """Sets the model's list and array data to be stored externally. @@ -1614,10 +1618,19 @@ def set_all_data_external( Folder, relative to the simulation path or model relative path (see use_model_relative_path parameter), where external data will be stored + base_name: str + Base file name prefix for all files + binary: bool + Whether file will be stored as binary """ for package in self.packagelist: - package.set_all_data_external(check_data, external_data_folder) + package.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) def set_all_data_internal(self, check_data=True): """Sets the model's list and array data to be stored externally. 
diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index d7dcaabce..6f3ed087f 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -1314,7 +1314,11 @@ def header_exists(self, key, data_path=None): return False def set_all_data_external( - self, base_name, check_data=True, external_data_folder=None + self, + base_name, + check_data=True, + external_data_folder=None, + binary=False, ): """Sets the block's list and array data to be stored externally, base_name is external file name's prefix, check_data determines @@ -1328,21 +1332,31 @@ def set_all_data_external( Whether to do data error checking. external_data_folder Folder where external data will be stored + binary: bool + Whether file will be stored as binary """ for key, dataset in self.datasets.items(): + lst_data = isinstance(dataset, mfdatalist.MFList) or isinstance( + dataset, mfdataplist.MFPandasList + ) if ( isinstance(dataset, mfdataarray.MFArray) - or ( - ( - isinstance(dataset, mfdatalist.MFList) - or isinstance(dataset, mfdataplist.MFPandasList) - ) - and dataset.structure.type == DatumType.recarray - ) + or (lst_data and dataset.structure.type == DatumType.recarray) and dataset.enabled ): - file_path = f"{base_name}_{dataset.structure.name}.txt" + if not binary or ( + lst_data + and ( + dataset.data_dimensions.package_dim.boundnames() + or not dataset.structure.basic_item + ) + ): + ext = "txt" + binary = False + else: + ext = "bin" + file_path = f"{base_name}_{dataset.structure.name}.{ext}" replace_existing_external = False if external_data_folder is not None: # get simulation root path @@ -1367,6 +1381,7 @@ def set_all_data_external( file_path, replace_existing_external=replace_existing_external, check_data=check_data, + binary=binary, ) def set_all_data_internal(self, check_data=True): @@ -2687,7 +2702,11 @@ def set_model_relative_path(self, model_ws): package.set_model_relative_path(model_ws) def set_all_data_external( - self, check_data=True, external_data_folder=None + self, + check_data=True, + external_data_folder=None, + base_name=None, + binary=False, ): """Sets the package's list and array data to be stored externally. @@ -2697,16 +2716,30 @@ def set_all_data_external( Determine if data error checking is enabled external_data_folder Folder where external data will be stored + base_name: str + Base file name prefix for all files + binary: bool + Whether file will be stored as binary """ # set blocks for key, block in self.blocks.items(): file_name = os.path.split(self.filename)[1] + if base_name is not None: + file_name = f"{base_name}_{file_name}" block.set_all_data_external( - file_name, check_data, external_data_folder + file_name, + check_data, + external_data_folder, + binary, ) # set sub-packages for package in self._packagelist: - package.set_all_data_external(check_data, external_data_folder) + package.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) def set_all_data_internal(self, check_data=True): """Sets the package's list and array data to be stored internally. diff --git a/flopy/mf6/mfsimbase.py b/flopy/mf6/mfsimbase.py index f5f806d70..898e42836 100644 --- a/flopy/mf6/mfsimbase.py +++ b/flopy/mf6/mfsimbase.py @@ -1445,7 +1445,11 @@ def rename_all_packages(self, name): model.rename_all_packages(name) def set_all_data_external( - self, check_data=True, external_data_folder=None + self, + check_data=True, + external_data_folder=None, + base_name=None, + binary=False, ): """Sets the simulation's list and array data to be stored externally. 
@@ -1458,20 +1462,44 @@ def set_all_data_external( Path relative to the simulation path or model relative path (see use_model_relative_path parameter), where external data will be stored + base_name: str + Base file name prefix for all files + binary: bool + Whether file will be stored as binary """ # copy any files whose paths have changed self.simulation_data.mfpath.copy_files() # set data external for all packages in all models for model in self._models.values(): - model.set_all_data_external(check_data, external_data_folder) + model.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) # set data external for solution packages for package in self._solution_files.values(): - package.set_all_data_external(check_data, external_data_folder) + package.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) # set data external for other packages for package in self._other_files.values(): - package.set_all_data_external(check_data, external_data_folder) + package.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) for package in self._exchange_files.values(): - package.set_all_data_external(check_data, external_data_folder) + package.set_all_data_external( + check_data, + external_data_folder, + base_name, + binary, + ) def set_all_data_internal(self, check_data=True): # set data external for all packages in all models diff --git a/flopy/mf6/utils/model_splitter.py b/flopy/mf6/utils/model_splitter.py index 9777a7d88..f3b3bfd37 100644 --- a/flopy/mf6/utils/model_splitter.py +++ b/flopy/mf6/utils/model_splitter.py @@ -2920,7 +2920,7 @@ def _remap_package(self, package, ismvr=False): value.structure, True, value.path, - value._data_dimensions.package_dim, + value.data_dimensions.package_dim, value._package, value._block, ) @@ -2941,7 +2941,7 @@ def _remap_package(self, package, ismvr=False): None, True, value.path, - value._data_dimensions.package_dim, + value.data_dimensions.package_dim, value._package, value._block, )
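
Usage sketch of the options added by this patch (not part of the diff): the snippet below assumes an existing MODFLOW 6 simulation in a hypothetical workspace "mf6_model"; the folder name "external_data" and prefix "ext" are likewise placeholders, while the keyword arguments follow the set_all_data_external signature introduced above.

    import flopy

    # load an existing MODFLOW 6 simulation (workspace path is hypothetical)
    sim = flopy.mf6.MFSimulation.load(sim_ws="mf6_model")

    # store every supported list/array dataset in an external file under
    # "external_data", prefix each file name with "ext", and use binary files
    # where possible (FloPy falls back to text files for data that cannot be
    # written in binary form, e.g. list data that use boundnames)
    sim.set_all_data_external(
        check_data=True,
        external_data_folder="external_data",
        base_name="ext",
        binary=True,
    )

    # rewrite the input files so the external files are created on disk
    sim.write_simulation()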