diff --git a/autotest/test_model_splitter.py b/autotest/test_model_splitter.py index 4f43905061..895a87aca9 100644 --- a/autotest/test_model_splitter.py +++ b/autotest/test_model_splitter.py @@ -221,7 +221,10 @@ def test_metis_splitting_with_lak_sfr(function_tmpdir): @requires_exe("mf6") @requires_pkg("pymetis") @requires_pkg("h5py") +# @requires_pkg("sklearn") def test_save_load_node_mapping_structured(function_tmpdir): + import pymetis + sim_path = get_example_data_path() / "mf6-freyberg" new_sim_path = function_tmpdir / "mf6-freyberg/split_model" hdf_file = new_sim_path / "node_map.hdf5" @@ -235,7 +238,9 @@ def test_save_load_node_mapping_structured(function_tmpdir): original_heads = sim.get_model().output.head().get_alldata()[-1] mfsplit = Mf6Splitter(sim) - array = mfsplit.optimize_splitting_mask(nparts=nparts) + array = mfsplit.optimize_splitting_mask( + nparts=nparts, active_only=True, options=pymetis.Options(seed=42, contig=1) + ) new_sim = mfsplit.split_model(array) new_sim.set_sim_path(new_sim_path) new_sim.write_simulation() diff --git a/etc/environment.yml b/etc/environment.yml index bcb8a08e3a..13597bb10d 100644 --- a/etc/environment.yml +++ b/etc/environment.yml @@ -64,3 +64,4 @@ dependencies: - vtk - xmipy - h5py + - scikit-learn diff --git a/flopy/discretization/grid.py b/flopy/discretization/grid.py index f71524c8a8..1d59801b05 100644 --- a/flopy/discretization/grid.py +++ b/flopy/discretization/grid.py @@ -71,7 +71,7 @@ class Grid: The value can be anything accepted by :meth:`pyproj.CRS.from_user_input() `, such as an authority string (eg "EPSG:26916") or a WKT string. - prjfile : str or PathLike, optional if `crs` is specified + prjfile : str or pathlike, optional if `crs` is specified ESRI-style projection file with well-known text defining the CRS for the model grid (must be projected; geographic CRS are not supported). xoff : float @@ -648,7 +648,7 @@ def _set_neighbors(self, reset=False, method="rook"): ---------- reset : bool flag to recalculate neighbors - method: str + method : str "rook" for shared edges and "queen" for shared vertex Returns @@ -722,6 +722,7 @@ def neighbors(self, node=None, **kwargs): """ method = kwargs.pop("method", None) reset = kwargs.pop("reset", False) + if method is None: self._set_neighbors(reset=reset) else: @@ -744,6 +745,32 @@ def neighbors(self, node=None, **kwargs): return self._neighbors + def get_shared_edge(self, node0, node1, iverts=True): + """ + Method to get the shared iverts or vertices between two cells. The shared + edge defines the shared cell face. 
+
+        Parameters
+        ----------
+        node0 : int
+        node1 : int
+        iverts : bool
+            boolean flag to return shared iverts (True) or shared vertices (False)
+
+        Returns
+        -------
+        tuple : iverts or vertices that define the shared face
+        """
+        iv0 = set(self.iverts[node0])
+        iv1 = set(self.iverts[node1])
+        shared = tuple(iv0 & iv1)
+
+        if iverts:
+            return tuple(shared)
+        else:
+            verts = [self.verts[shared[0]], self.verts[shared[1]]]
+            return tuple(verts)
+
     def remove_confining_beds(self, array):
         """
         Method to remove confining bed layers from an array
diff --git a/flopy/discretization/structuredgrid.py b/flopy/discretization/structuredgrid.py
index a96aac8241..d132485ee2 100644
--- a/flopy/discretization/structuredgrid.py
+++ b/flopy/discretization/structuredgrid.py
@@ -590,15 +590,15 @@ def is_regular_z(self):
                 0, 0, 0
             ]
             failed = np.abs(rel_diff_thick0) > rel_tol
-            is_regular_z = np.count_nonzero(failed) == 0
+            _is_regular_z = np.count_nonzero(failed) == 0
             for k in range(1, self.nlay):
                 rel_diff_zk = (self.delz[k, :, :] - self.delz[0, :, :]) / self.delz[
                     0, :, :
                 ]
                 failed = np.abs(rel_diff_zk) > rel_tol
-                is_regular_z = is_regular_z and np.count_nonzero(failed) == 0
+                _is_regular_z = _is_regular_z and np.count_nonzero(failed) == 0
 
-            self._cache_dict[cache_index] = CachedData(is_regular_z)
+            self._cache_dict[cache_index] = CachedData(_is_regular_z)
         if self._copy_cache:
             return self._cache_dict[cache_index].data
         else:
@@ -801,7 +801,7 @@ def convert_grid(self, factor):
             raise AssertionError("Grid is not complete and cannot be converted")
 
     ###############
-    # Methods
+    ### Methods ###
     ###############
     def neighbors(self, *args, **kwargs):
         """
@@ -835,6 +835,7 @@ def neighbors(self, *args, **kwargs):
         """
         nn = None
         as_nodes = kwargs.pop("as_nodes", False)
+        fast = kwargs.pop("fast", False)
 
         if kwargs:
             if "node" in kwargs:
@@ -864,12 +865,105 @@ def neighbors(self, *args, **kwargs):
             else:
                 as_nodes = True
 
-        neighbors = super().neighbors(nn, **kwargs)
-        if not as_nodes:
-            neighbors = self.get_lrc(neighbors)
+        if fast:
+            neighbors = self._fast_neighbors(**kwargs)
+        else:
+            neighbors = super().neighbors(nn, **kwargs)
+            if not as_nodes:
+                neighbors = self.get_lrc(neighbors)
 
         return neighbors
 
+    def _fast_neighbors(self, **kwargs):
+        """
+        Memory intensive, but very fast, method to get cell neighbors for
+        structured grids.
+
+        Returns
+        -------
+        dict
+        """
+        # the neighbor mapping is built once and cached on the grid object
+
+        if self._neighbors is None or kwargs.pop("reset", False):
+            nrow = self.nrow
+            ncol = self.ncol
+
+            ncpl = nrow * ncol
+
+            arr = np.arange(ncpl, dtype=np.int32).reshape((nrow, ncol))
+
+            # general case: interior cells with up, right, down, left neighbors
+            arr2 = arr[1:-1, 1:-1].ravel()
+            neighs2 = np.empty((4, (nrow - 2) * (ncol - 2)), dtype=np.int32)
+            neighs2[0] = arr2 - ncol  # u
+            neighs2[1] = arr2 + 1  # r
+            neighs2[2] = arr2 + ncol  # d
+            neighs2[3] = arr2 - 1  # l
+            neighs2 = neighs2.T.tolist()
+            neighbors = {v: neighs2[ix] for ix, v in enumerate(arr2)}
+
+            # edge case 1: top row (no up neighbor)
+            arr2 = arr[0, 1:-1].ravel()
+            neighs2 = np.empty((3, ncol - 2), dtype=np.int32)
+            # no up
+            neighs2[0] = arr2 + 1  # r
+            neighs2[1] = arr2 + ncol  # d
+            neighs2[2] = arr2 - 1  # l
+            neighs2 = neighs2.T.tolist()
+
+            for ix, v in enumerate(arr2):
+                neighbors[v] = neighs2[ix]
+
+            # edge case 2: bottom row (no down neighbor)
+            arr2 = arr[-1, 1:-1].ravel()
+            neighs2 = np.empty((3, ncol - 2), dtype=np.int32)
+            neighs2[0] = arr2 - ncol  # u
+            neighs2[1] = arr2 + 1  # r
+            # no down
+            neighs2[2] = arr2 - 1  # l
+            neighs2 = neighs2.T.tolist()
+
+            for ix, v in enumerate(arr2):
+                neighbors[v] = neighs2[ix]
+
+            # edge case 3: left column (no left neighbor)
+            arr2 = arr[1:-1, 0].ravel()
+            neighs2 = np.empty((3, nrow - 2), dtype=np.int32)
+            neighs2[0] = arr2 - ncol  # u
+            neighs2[1] = arr2 + 1  # r
+            neighs2[2] = arr2 + ncol  # d
+            # no left
+            neighs2 = neighs2.T.tolist()
+
+            for ix, v in enumerate(arr2):
+                neighbors[v] = neighs2[ix]
+
+            # edge case 4: right column (no right neighbor)
+            arr2 = arr[1:-1, -1].ravel()
+            neighs2 = np.empty((3, nrow - 2), dtype=np.int32)
+            neighs2[0] = arr2 - ncol  # u
+            # no right
+            neighs2[1] = arr2 + ncol  # d
+            neighs2[2] = arr2 - 1  # l
+
+            neighs2 = neighs2.T.tolist()
+
+            for ix, v in enumerate(arr2):
+                neighbors[v] = neighs2[ix]
+
+            del arr
+
+            # corners: upper left, upper right, lower left, lower right
+            neighbors[0] = [1, ncol]  # r, d
+            neighbors[ncol - 1] = [(ncol - 1) + ncol, ncol - 2]  # d, l
+            neighbors[ncpl - ncol] = [(ncpl - ncol) - ncol, (ncpl - ncol) + 1]  # u, r
+            neighbors[ncpl - 1] = [(ncpl - 1) - ncol, ncpl - 2]  # u, l
+
+            self._neighbors = neighbors
+
+        return self._neighbors
+
     def intersect(self, x, y, z=None, local=False, forgive=False):
         """
         Get the row and column of a point with coordinates x and y
@@ -1682,11 +1776,15 @@ def _set_structured_iverts(self):
             pair for each vertex
 
         """
-        iverts = []
-        for i in range(self.nrow):
-            for j in range(self.ncol):
-                iverts.append(self._build_structured_iverts(i, j))
-        self._iverts = iverts
+        rowarr = np.repeat(np.arange(self.nrow, dtype=int), self.ncol)
+        colarr = np.tile(np.arange(self.ncol, dtype=int), self.nrow)
+
+        iverts = np.empty((4, self.ncpl), dtype=int)
+        iverts[0] = rowarr * (self.ncol + 1) + colarr
+        iverts[1] = rowarr * (self.ncol + 1) + colarr + 1
+        iverts[2] = (rowarr + 1) * (self.ncol + 1) + colarr + 1
+        iverts[3] = (rowarr + 1) * (self.ncol + 1) + colarr
+        self._iverts = iverts.T.tolist()
         return
 
     def _build_structured_iverts(self, i, j):
diff --git a/flopy/mf6/utils/model_splitter.py b/flopy/mf6/utils/model_splitter.py
index 2af4c9135b..b0130d2345 100644
--- a/flopy/mf6/utils/model_splitter.py
+++ b/flopy/mf6/utils/model_splitter.py
@@ -143,8 +143,12 @@ def __init__(self, sim, modelname=None):
         if self._model_type.endswith("6"):
             self._model_type = self._model_type[:-1]
         self._modelgrid = self._model.modelgrid
+        self._fast_neighbors = False
         if self._modelgrid.grid_type in ("structured", "vertex"):
             self._ncpl = self._modelgrid.ncpl
+            if self._modelgrid.grid_type == "structured":
+                if self._modelgrid.nrow > 2 and self._modelgrid.ncol > 2:
+                    self._fast_neighbors = True
         else:
             self._ncpl = self._modelgrid.nnodes
         self._shape = self._modelgrid.shape
@@ -159,7 +163,6 @@ def __init__(self, sim, modelname=None):
         self._uconnection = None
         self._usg_metadata = None
         self._has_angldegx = False
-        self._connection_ivert = None
         self._model_dict = None
         self._ivert_vert_remap = None
         self._sfr_mover_connections = []
@@ -229,7 +232,6 @@ def switch_models(self, modelname, remap_nodes=False):
         self._uconnection = None
         self._usg_metadata = None
         self._has_angldegx = False
-        self._connection_ivert = None
         self._ivert_vert_remap = None
         self._sfr_mover_connections = []
         self._mover = False
@@ -602,7 +604,7 @@ def construct_modelgrid(f, name, grid_type):
         mfs._allow_splitting = False
         return mfs
 
-    def optimize_splitting_mask(self, nparts):
+    def optimize_splitting_mask(self, nparts, active_only=False, options=None, verbose=False):
         """
         Method to create a splitting array with a balanced number of active
         cells per model. This method uses the program METIS and pymetis to
@@ -611,24 +613,35 @@
         Parameters
         ----------
         nparts: int
-
+            number of parts to split the model into
+        active_only : bool
+            only consider active cells when building the adjacency graph. Default is False
+        options : None or pymetis.Options
+            optional pymetis.Options instance that is passed through to the
+            pymetis.part_graph() function. Example:
+            `options=pymetis.Options(seed=42, contig=1)`
+        verbose : bool
+            flag to print progress statements. Default is False
         Returns
         -------
             np.ndarray
         """
+        if active_only:
+            import_optional_dependency("sklearn")
+            from sklearn.neighbors import NearestNeighbors
+
         pymetis = import_optional_dependency(
             "pymetis",
             "please install pymetis using: "
             "conda install -c conda-forge pymetis",
         )
         # create graph of nodes
-        graph = []
-        weights = []
         nlay = self._modelgrid.nlay
         if self._modelgrid.grid_type in ("structured", "vertex"):
             ncpl = self._modelgrid.ncpl
             shape = self._modelgrid.shape[1:]
         else:
+            nlay = 1
             ncpl = self._modelgrid.nnodes
             shape = self._modelgrid.shape
         idomain = self._modelgrid.idomain
@@ -678,17 +691,60 @@
                     laks += [i for i in np.unique(lakenos)]
            else:
                adv_pkg_weights[nodes] += 1
+        if verbose:
+            print("Mapping neighbors")
+        neighbors = self._modelgrid.neighbors(reset=True, fast=self._fast_neighbors)
+
+        if active_only:
+            if verbose:
+                print("Filtering inactive cells")
+            # filter out inactive cells to avoid complicating the neighbor graph
+            iact = np.where(np.sum(idomain, axis=0), 1, 0)
+            # use a dict because key lookup is O(1) vs O(n) for lists
+            inactive = dict.fromkeys([int(i) for i in np.where(iact == 0)[0]])
+            for k in inactive:
+                neighbors.pop(k)
+            node_map = {i: ix for ix, i in enumerate(np.where(iact > 0)[0])}
+            neighbors = {
+                k: [node_map[i] for i in v if i not in inactive] for k, v in neighbors.items()
+            }
 
-        for nn, neighbors in self._modelgrid.neighbors().items():
-            weight = np.count_nonzero(idomain[:, nn])
-            adv_weight = adv_pkg_weights[nn]
-            weights.append(weight + adv_weight)
-            graph.append(np.array(neighbors, dtype=int))
+        if verbose:
+            print("Creating graph and weights")
+        neighbors = dict(sorted(neighbors.items()))
+        weights = [np.count_nonzero(idomain[:, nn]) + adv_pkg_weights[nn] for nn in neighbors.keys()]
+        graph = [np.array(neigh, dtype=int) for neigh in neighbors.values()]
 
+        if verbose:
+            print("Running METIS")
         n_cuts, membership = pymetis.part_graph(
-            nparts, adjacency=graph, vweights=weights
+            nparts, adjacency=graph, vweights=weights, options=options
         )
         membership = np.array(membership, dtype=int)
+
+        if active_only:
+            # remap everything to the original model domain
+            if verbose:
+                print("Remapping inactive to model domains")
+            if len(inactive) > 0:
+                xc = self._modelgrid.xcellcenters.ravel()
+                yc = self._modelgrid.ycellcenters.ravel()
+                axc = xc[list(node_map.keys())]
+                ayc = yc[list(node_map.keys())]
+                fit_points = np.array(list(zip(axc, ayc)))
+                ixc = xc[list(inactive.keys())]
+                iyc = yc[list(inactive.keys())]
+                pred_points = np.array(list(zip(ixc, iyc)))
+
+                nn = NearestNeighbors(n_neighbors=1, algorithm="auto")
+                nn.fit(fit_points)
+                ind = nn.kneighbors(pred_points, return_distance=False).ravel()
+                data = membership[ind]
+                member_array = np.full((ncpl,), -1)
+                member_array[list(node_map.keys())] = membership[list(node_map.values())]
+                member_array[list(inactive.keys())] = data
+                membership = member_array
+
         if laks:
             for lak in laks:
                 idx = np.asarray(lak_array == lak).nonzero()[0]
@@ -892,7 +948,10 @@ def _get_nlay_shape_models(self, model, array):
         tuple : (nlay, grid_shape)
         """
         if array.size == model.modelgrid.size:
-            nlay = model.modelgrid.nlay
+            if self._modelgrid.grid_type in ("structured", "vertex"):
+                nlay = model.modelgrid.nlay
+            else:
+                nlay = 1
             shape = self._shape
 
         elif array.size == model.modelgrid.ncpl:
@@ -939,9 +998,8 @@ def _remap_nodes(self, array):
                 self._map_iac_ja_connections()
             else:
                 self._connection = self._modelgrid.neighbors(
-                    reset=True, method="rook"
+                    reset=True, method="rook", fast=self._fast_neighbors
                 )
-                self._connection_ivert = self._modelgrid._edge_set
 
         grid_info = {}
         if self._modelgrid.grid_type == "structured":
@@ -1059,7 +1117,7 @@ def _remap_nodes(self, array):
                             exchange_meta[mdl][nnode][cnnode] = [
                                 node,
                                 cnode,
-                                self._connection_ivert[node][ix],
+                                self._modelgrid.get_shared_edge(node, cnode),
                             ]
                         else:
                             exchange_meta[mdl][nnode][cnnode] = [
@@ -1078,7 +1136,7 @@ def _remap_nodes(self, array):
                                 cnnode: [
                                     node,
                                     cnode,
-                                    self._connection_ivert[node][ix],
+                                    self._modelgrid.get_shared_edge(node, cnode),
                                 ]
                             }
                         else:
diff --git a/pyproject.toml b/pyproject.toml
index 6e1f29aede..bf6b704a0b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -84,6 +84,7 @@ optional = [
     "vtk >=9.4.0",
     "xmipy",
     "h5py",
+    "scikit-learn",
 ]
 doc = [
     "flopy[optional]",
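
Example usage of the new splitting options introduced in this diff (a minimal sketch, not part of the patch: the workspace paths and part count are placeholders, and the pymetis.Options values mirror the ones used in the updated test):

# Usage sketch for the new optimize_splitting_mask keywords and the
# fast structured-grid neighbor path added above. Paths and nparts
# are hypothetical placeholders.
import flopy
import pymetis
from flopy.mf6.utils import Mf6Splitter

sim = flopy.mf6.MFSimulation.load(sim_ws="path/to/model")
mfsplit = Mf6Splitter(sim)

# build the METIS adjacency graph from active cells only; the Options
# instance is passed straight through to pymetis.part_graph()
array = mfsplit.optimize_splitting_mask(
    nparts=4,
    active_only=True,
    options=pymetis.Options(seed=42, contig=1),
    verbose=True,
)

new_sim = mfsplit.split_model(array)
new_sim.set_sim_path("path/to/split_model")
new_sim.write_simulation()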