From e6e778ad4745d5d4ceee81722768c2b903ce6a11 Mon Sep 17 00:00:00 2001 From: "David A. Ham" Date: Fri, 20 May 2022 16:10:22 +1000 Subject: [PATCH] Merge pull request #2437 from firedrakeproject/periodic_vertexonlymesh Transfer responsibility for computing cell inclusion for vertices from Plex to Firedrake. This enables periodic meshes to be supported in VertexOnlyMesh. --- firedrake/cython/dmcommon.pyx | 135 ----------- firedrake/mesh.py | 213 ++++++++++++------ .../test_interpolation_from_parent.py | 21 +- tests/vertexonly/test_swarm.py | 31 ++- tests/vertexonly/test_vertex_only_fs.py | 2 +- .../test_vertex_only_mesh_generation.py | 2 +- 6 files changed, 183 insertions(+), 221 deletions(-) diff --git a/firedrake/cython/dmcommon.pyx b/firedrake/cython/dmcommon.pyx index e49d55b1db..3cd30790e4 100644 --- a/firedrake/cython/dmcommon.pyx +++ b/firedrake/cython/dmcommon.pyx @@ -2845,141 +2845,6 @@ def clear_adjacency_callback(PETSc.DM dm not None): CHKERR(DMPlexSetAdjacencyUser(dm.dm, NULL, NULL)) -@cython.boundscheck(False) -@cython.wraparound(False) -def remove_ghosts_pic(PETSc.DM swarm, PETSc.DM plex): - """Remove DMSwarm PICs which are in ghost cells of a distributed - DMPlex. - - :arg swarm: The DMSWARM which has been associated with the input - DMPlex `plex` using PETSc `DMSwarmSetCellDM`. - :arg plex: The DMPlex which is associated with the input DMSWARM - `swarm` - """ - cdef: - PetscInt cStart, cEnd, ncells, i, npics - PETSc.SF sf - PetscInt nroots, nleaves - const PetscInt *ilocal = NULL - const PetscSFNode *iremote = NULL - np.ndarray[PetscInt, ndim=1, mode="c"] pic_cell_indices - np.ndarray[PetscInt, ndim=1, mode="c"] ghost_cell_indices - - if type(plex) is not PETSc.DMPlex: - raise ValueError("plex must be a DMPlex") - - if type(swarm) is not PETSc.DMSwarm: - raise ValueError("swarm must be a DMSwarm") - - if plex.handle != swarm.getCellDM().handle: - raise ValueError("plex is not the swarm's CellDM") - - if plex.comm.size > 1: - get_height_stratum(plex.dm, 0, &cStart, &cEnd) - ncells = cEnd - cStart - - # Get full list of cell indices for particles - pic_cell_indices = np.copy(swarm.getField("DMSwarm_cellid")) - swarm.restoreField("DMSwarm_cellid") - npics = len(pic_cell_indices) - - # Initialise with zeros since these can't be valid ranks or cell ids - ghost_cell_indices = np.full(ncells, -1, dtype=IntType) - - # Search for ghost cell indices (spooky!) - sf = plex.getPointSF() - CHKERR(PetscSFGetGraph(sf.sf, &nroots, &nleaves, &ilocal, &iremote)) - for i in range(nleaves): - if cStart <= ilocal[i] < cEnd: - # NOTE need to check this is correct index. Can I check the labels some how? - ghost_cell_indices[ilocal[i] - cStart] = ilocal[i] - - # trim -1's and make into set to reduce searching needed - ghost_cell_indices_set = set(ghost_cell_indices[ghost_cell_indices != -1]) - - # remove swarm pic parent cell indices which match ghost cell indices - for i in range(npics-1, -1, -1): - if pic_cell_indices[i] in ghost_cell_indices_set: - # removePointAtIndex shift cell numbers down by 1 - swarm.removePointAtIndex(i) - - -@cython.boundscheck(False) -@cython.wraparound(False) -def label_pic_parent_cell_info(PETSc.DM swarm, parentmesh): - """ - For each PIC in the input swarm, label its `parentcellnum` field with - the relevant cell number from the `parentmesh` in which is it emersed - and the `refcoord` field with the relevant cell reference coordinate. - This information is given by the - `parentmesh.locate_cell_and_reference_coordinate` method. 
- - For a swarm with N PICs emersed in `parentmesh` - the `parentcellnum` field is N long and - the `refcoord` field is N*parentmesh.topological_dimension() long. - - :arg swarm: The DMSWARM which contains the PICs immersed in - `parentmesh` - :arg parentmesh: The mesh within with the `swarm` PICs are immersed. - - ..note:: All PICs must be within the parentmesh or this will try to - assign `None` (returned by - `parentmesh.locate_cell_and_reference_coordinate`) to the - `parentcellnum` or `refcoord` fields. - - - """ - cdef: - PetscInt num_vertices, i, gdim, tdim - PetscInt parent_cell_num - np.ndarray[PetscReal, ndim=2, mode="c"] swarm_coords - np.ndarray[PetscInt, ndim=1, mode="c"] parent_cell_nums - np.ndarray[PetscReal, ndim=2, mode="c"] reference_coords - np.ndarray[PetscReal, ndim=1, mode="c"] reference_coord - - gdim = parentmesh.geometric_dimension() - tdim = parentmesh.topological_dimension() - - num_vertices = swarm.getLocalSize() - - # Check size of biggest num_vertices so - # locate_cell can be called on every processor - comm = swarm.comm.tompi4py() - max_num_vertices = comm.allreduce(num_vertices, op=MPI.MAX) - - # Create an out of mesh point to use in locate_cell when needed - out_of_mesh_point = np.full((1, gdim), np.inf) - - # get fields - NOTE this isn't copied so make sure - # swarm.restoreField is called for each field too! - swarm_coords = swarm.getField("DMSwarmPIC_coor").reshape((num_vertices, gdim)) - parent_cell_nums = swarm.getField("parentcellnum") - reference_coords = swarm.getField("refcoord").reshape((num_vertices, tdim)) - - # find parent cell numbers - # TODO We should be able to do this for all the points in in one call to - # the parent mesh's _c_locator. - # SUGGESTED API 1: - # parent_cell_nums, reference_coords = parentmesh.locate_cell_and_reference_coordinates(swarm_coords) - # with second call for collectivity. - # SUGGESTED API 2: - # parent_cell_nums, reference_coords = parentmesh.locate_cell_and_reference_coordinates(swarm_coords, local_num_vertices) - # with behaviour changing inside locate_cell_and_reference_coordinates to - # ensure collectivity. - for i in range(max_num_vertices): - if i < num_vertices: - parent_cell_num, reference_coord = parentmesh.locate_cell_and_reference_coordinate(swarm_coords[i]) - parent_cell_nums[i] = parent_cell_num - reference_coords[i] = reference_coord - else: - parentmesh.locate_cell(out_of_mesh_point) # should return None - - # have to restore fields once accessed to allow access again - swarm.restoreField("refcoord") - swarm.restoreField("parentcellnum") - swarm.restoreField("DMSwarmPIC_coor") - - @cython.boundscheck(False) @cython.wraparound(False) def fill_reference_coordinates_function(reference_coordinates_f): diff --git a/firedrake/mesh.py b/firedrake/mesh.py index ee339ed7ad..94191bbefd 100644 --- a/firedrake/mesh.py +++ b/firedrake/mesh.py @@ -2093,18 +2093,18 @@ def ExtrudedMesh(mesh, layers, layer_height=None, extrusion_type='uniform', kern @PETSc.Log.EventDecorator() def VertexOnlyMesh(mesh, vertexcoords, missing_points_behaviour=None): """ - Create a vertex only mesh, immersed in a given mesh, with vertices - defined by a list of coordinates. + Create a vertex only mesh, immersed in a given mesh, with vertices defined + by a list of coordinates. - :arg mesh: The unstructured mesh in which to immerse the vertex only - mesh. + :arg mesh: The unstructured mesh in which to immerse the vertex only mesh. :arg vertexcoords: A list of coordinate tuples which defines the vertices. 
:kwarg missing_points_behaviour: optional string argument for what to do
-    when vertices which are outside of the mesh are discarded. If ``'warn'``,
-    will print a warning. If ``'error'`` will raise a ValueError. Note that
-    setting this will cause all MPI ranks to check that they have the same
-    list of vertices (else the test is not possible): this operation scales
-    with number of vertices and number of ranks.
+    when vertices which are outside of the mesh are discarded. If
+    ``'warn'``, will print a warning. If ``'error'``, will raise a
+    ValueError. Note that setting this will cause all MPI ranks to check
+    that they have the same list of vertices (else the test is not
+    possible): this operation scales with number of vertices and number of
+    ranks.
 
     .. note::
 
@@ -2112,15 +2112,13 @@ def VertexOnlyMesh(mesh, vertexcoords, missing_points_behaviour=None):
 
     .. note::
 
-       Meshes created from a coordinates :py:class:`~.Function` and immersed
-       manifold meshes are not yet supported.
+       Extruded and immersed manifold meshes are not yet supported.
 
     .. note::
 
-       This should also only be used for meshes which have not had their
-       coordinates field modified as, at present, this does not update the
-       coordinates field of the underlying DMPlex. Such meshes may cause
-       unexpected behavoir or hangs when running in parallel.
+       Modifying the coordinates of the parent mesh is not currently
+       supported. Doing so will cause interpolation to Functions defined on
+       the VertexOnlyMesh to return the wrong values.
 
     .. note::
        When running in parallel, ``vertexcoords`` are strictly confined
@@ -2134,8 +2132,8 @@ def VertexOnlyMesh(mesh, vertexcoords, missing_points_behaviour=None):
 
        #. making sure that all ranks are supplied with the same
          ``vertexcoords`` or by
-       #. ensuring that ``vertexcoords`` are already found in cells
-          owned by the ``mesh`` partition of the given rank.
+       #. ensuring that ``vertexcoords`` are already found in cells owned by
+          the ``mesh`` partition of the given rank.
 
       For more see `this github issue `_.
 
@@ -2158,28 +2156,26 @@ def VertexOnlyMesh(mesh, vertexcoords, missing_points_behaviour=None):
     if gdim != tdim:
         raise NotImplementedError("Immersed manifold meshes are not supported")
 
-    # TODO Some better method of matching points to cells will need to
-    # be used for bendy meshes since our PETSc DMPlex implementation
-    # only supports straight-edged mesh topologies and meshes made from
-    # coordinate fields.
-    # We can hopefully update the coordinates field correctly so that
-    # the DMSwarm PIC can immerse itself in the DMPlex.
-    # We can also hopefully provide a callback for PETSc to use to find
-    # the parent cell id. We would add `DMLocatePoints` as an `op` to
-    # `DMShell` types and do `DMSwarmSetCellDM(yourdmshell)` which has
-    # `DMLocatePoints_Shell` implemented.
-    # Whether one or both of these is needed is unclear.
-
+    # Bendy meshes require a smarter bounding box algorithm at partition and
+    # (especially) cell level. Projecting coordinates to Bernstein may be
+    # sufficient.
     if mesh.coordinates.function_space().ufl_element().degree() > 1:
         raise NotImplementedError("Only straight edged meshes are supported")
 
-    if hasattr(mesh, "_made_from_coordinates") and mesh._made_from_coordinates:
-        raise NotImplementedError("Meshes made from coordinate fields are not yet supported")
+    # Currently we take responsibility for locating the mesh cells in which the
+    # vertices lie.
+    #
+    # In the future we hope to update the coordinates field correctly so that
+    # the DMSwarm PIC can immerse itself in the DMPlex. We can also hopefully
+    # provide a callback for PETSc to use to find the parent cell id. We would
+    # add `DMLocatePoints` as an `op` to `DMShell` types and do
+    # `DMSwarmSetCellDM(yourdmshell)` which has `DMLocatePoints_Shell`
+    # implemented. Whether one or both of these is needed is unclear.
 
     if pdim != gdim:
         raise ValueError(f"Mesh geometric dimension {gdim} must match point list dimension {pdim}")
 
-    swarm = _pic_swarm_in_plex(mesh.topology.topology_dm, vertexcoords, fields=[("parentcellnum", 1, IntType), ("refcoord", tdim, RealType)])
+    swarm = _pic_swarm_in_mesh(mesh, vertexcoords)
 
     if missing_points_behaviour:
 
@@ -2217,8 +2213,6 @@ def compare_arrays(x, y, datatype):
         else:
             raise ValueError("missing_points_behaviour must be None, 'error' or 'warn'")
 
-    dmcommon.label_pic_parent_cell_info(swarm, mesh)
-
     # Topology
     topology = VertexOnlyMeshTopology(swarm, mesh.topology, name="swarmmesh", reorder=False)
 
@@ -2249,48 +2243,45 @@ def compare_arrays(x, y, datatype):
 
     # Save vertex reference coordinate (within reference cell) in function
     reference_coordinates_fs = functionspace.VectorFunctionSpace(vmesh, "DG", 0, dim=tdim)
-    vmesh.reference_coordinates = dmcommon.fill_reference_coordinates_function(function.Function(reference_coordinates_fs))
+    vmesh.reference_coordinates = \
+        dmcommon.fill_reference_coordinates_function(function.Function(reference_coordinates_fs))
 
     return vmesh
 
 
-def _pic_swarm_in_plex(plex, coords, fields=[]):
-    """
-    Create a Particle In Cell (PIC) DMSwarm, immersed in a DMPlex
-    at given point coordinates.
+def _pic_swarm_in_mesh(parent_mesh, coords, fields=None):
+    """Create a Particle In Cell (PIC) DMSwarm immersed in a Mesh
 
-    This should only by used for dmplexes associated with meshes with
-    straight edges. If not, the particles may be placed in the wrong
-    cells.
+    This should only be used for meshes with straight edges. If not, the
+    particles may be placed in the wrong cells.
 
-    :arg plex: the DMPlex within with the DMSwarm should be
+    :arg parent_mesh: the :class:`Mesh` within which the DMSwarm should be
         immersed.
     :arg coords: an ``ndarray`` of (npoints, coordsdim) shape.
-    :kwarg fields: An optional list of named data which can be stored
-        for each point in the DMSwarm. The format should be::
+    :kwarg fields: An optional list of named data which can be stored for each
+        point in the DMSwarm. The format should be::
 
        [(fieldname1, blocksize1, dtype1),
         ...,
        (fieldnameN, blocksizeN, dtypeN)]
 
-        For example, the swarm coordinates themselves are stored in a
-        field named ``DMSwarmPIC_coor`` which, were it not created
-        automatically, would be initialised with
-        ``fields = [("DMSwarmPIC_coor", coordsdim, RealType)]``.
-        All fields must have the same number of points. For more
+        For example, the swarm coordinates themselves are stored in a field
+        named ``DMSwarmPIC_coor`` which, were it not created automatically,
+        would be initialised with ``fields = [("DMSwarmPIC_coor", coordsdim,
+        RealType)]``. All fields must have the same number of points. For more
        information see `the DMSWARM API reference _.
 
    :return: the immersed DMSwarm
 
    .. note::
 
-       The created DMSwarm uses the communicator of the input DMPlex.
+       The created DMSwarm uses the communicator of the input Mesh.
 
    .. note::
 
-       In complex mode the "DMSwarmPIC_coor" field is still saved as a
-       real number unlike the coordinates of a DMPlex which become
-       complex (though usually with zeroed imaginary parts).
+       In complex mode the "DMSwarmPIC_coor" field is still saved as a real
+       number unlike the coordinates of a DMPlex which become complex (though
+       usually with zeroed imaginary parts).
 
    .. note::
       When running in parallel, ``coords`` are strictly confined to
@@ -2304,8 +2295,8 @@ def _pic_swarm_in_plex(plex, coords, fields=[]):
 
       #. making sure that all ranks are supplied with the same list of
         ``coords`` or by
-       #. ensuring that ``coords`` are already localised for to the
-          DMPlex cells of the given rank.
+       #. ensuring that ``coords`` are already localised to the DMPlex
+          cells of the given rank.
 
      For more see `this github issue `_.
 
    """
    # Check coords
    coords = np.asarray(coords, dtype=RealType)
+
+    plex = parent_mesh.topology.topology_dm
+    tdim = parent_mesh.topological_dimension()
+    gdim = parent_mesh.geometric_dimension()
+
+    if fields is None:
+        fields = []
+    fields += [("parentcellnum", 1, IntType), ("refcoord", tdim, RealType)]
+
+    coords, reference_coords, parent_cell_nums = \
+        _parent_mesh_embedding(coords, parent_mesh)
+    # mesh.topology.cell_closure[:, -1] maps Firedrake cell numbers to plex numbers.
+    plex_parent_cell_nums = parent_mesh.topology.cell_closure[parent_cell_nums, -1]
+
    _, coordsdim = coords.shape
 
    # Create a DMSWARM
@@ -2338,23 +2343,33 @@ def _pic_swarm_in_plex(plex, coords, fields=[]):
    for name, size, dtype in fields:
        swarm.registerField(name, size, dtype=dtype)
    swarm.finalizeFieldRegister()
-    # Note that no new fields can now be associated with the DMSWARM.
-    # Add point coordinates - note we set redundant mode to False
-    # because we allow different MPI ranks to be given the overlapping
-    # lists of coordinates. The cell DM (`plex`) will then attempt to
-    # locate the coordinates within its rank-local sub domain and
-    # disregard those which are outside it. See https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/DMSWARM/DMSwarmSetPointCoordinates.html
-    # for more information. The result is that all DMPlex cells,
-    # including ghost cells on distributed meshes, have the relevent PIC
-    # coordinates associated with them. The DMPlex cell id associated
-    # with each PIC in the DMSwarm is accessed with the `DMSwarm_cellid`
-    # field.
-    swarm.setPointCoordinates(coords, redundant=False, mode=PETSc.InsertMode.INSERT_VALUES)
-
-    # Remove PICs which have been placed into ghost cells of a distributed DMPlex
-    dmcommon.remove_ghosts_pic(swarm, plex)
+    num_vertices = len(coords)
+    swarm.setLocalSizes(num_vertices, -1)
+
+    # Add point coordinates. This amounts to our own implementation of
+    # DMSwarmSetPointCoordinates because Firedrake's mesh coordinate model
+    # doesn't always exactly coincide with that of DMPlex.
+
+    # NOTE ensure that swarm.restoreField is called for each field too!
+    swarm_coords = swarm.getField("DMSwarmPIC_coor").reshape((num_vertices, gdim))
+    # Once we support extruded meshes, the plex cell ID won't coincide with the
+    # Firedrake cell ID so the following fields will differ.
+    swarm_parent_cell_nums = swarm.getField("DMSwarm_cellid")
+    field_parent_cell_nums = swarm.getField("parentcellnum")
+    field_reference_coords = swarm.getField("refcoord").reshape((num_vertices, tdim))
+
+    swarm_coords[...] = coords
+    swarm_parent_cell_nums[...] = plex_parent_cell_nums
+    field_parent_cell_nums[...] = parent_cell_nums
+    field_reference_coords[...] = reference_coords
+
+    # have to restore fields once accessed to allow access again
+    swarm.restoreField("refcoord")
+    swarm.restoreField("parentcellnum")
+    swarm.restoreField("DMSwarmPIC_coor")
+    swarm.restoreField("DMSwarm_cellid")
 
     # Set the `SF` graph to advertises no shared points (since the halo
     # is now empty) by setting the leaves to an empty list
@@ -2366,6 +2381,62 @@ def _pic_swarm_in_plex(plex, coords, fields=[]):
 
     return swarm
 
 
+def _parent_mesh_embedding(vertex_coords, parent_mesh):
+    """Find the parent cells and local coords for vertices on this rank.
+
+    Vertices not located in cells owned by this rank are discarded.
+    """
+    if len(vertex_coords) > 0:
+        vertex_coords = _on_rank_vertices(vertex_coords, parent_mesh)
+
+    num_vertices = len(vertex_coords)
+    max_num_vertices = parent_mesh.comm.allreduce(num_vertices, op=MPI.MAX)
+
+    gdim = parent_mesh.geometric_dimension()
+    tdim = parent_mesh.topological_dimension()
+
+    parent_cell_nums = np.empty(num_vertices, dtype=IntType)
+    reference_coords = np.empty((num_vertices, tdim), dtype=RealType)
+    valid = np.full(num_vertices, False)
+
+    # Create an out of mesh point to use in locate_cell when needed
+    out_of_mesh_point = np.full((1, gdim), np.inf)
+
+    for i in range(max_num_vertices):
+        if i < num_vertices:
+            parent_cell_num, reference_coord = \
+                parent_mesh.locate_cell_and_reference_coordinate(vertex_coords[i])
+            # parent_cell_num >= parent_mesh.cell_set.size means the vertex is in the halo
+            # and is to be discarded.
+            if parent_cell_num is not None and parent_cell_num < parent_mesh.cell_set.size:
+                valid[i] = True
+                parent_cell_nums[i] = parent_cell_num
+                reference_coords[i] = reference_coord
+        else:
+            parent_mesh.locate_cell(out_of_mesh_point)  # should return None
+
+    vertex_coords = np.compress(valid, vertex_coords, axis=0)
+    reference_coords = np.compress(valid, reference_coords, axis=0)
+    parent_cell_nums = np.compress(valid, parent_cell_nums, axis=0)
+
+    return vertex_coords, reference_coords, parent_cell_nums
+
+
+def _on_rank_vertices(vertex_coords, parent_mesh):
+    """Discard those vertices that are definitely not on this MPI rank."""
+    bounding_box_min = parent_mesh.coordinates.dat.data_ro_with_halos.min(axis=0, initial=np.inf)
+    bounding_box_max = parent_mesh.coordinates.dat.data_ro_with_halos.max(axis=0, initial=-np.inf)
+    length_scale = (bounding_box_max - bounding_box_min).max()
+    # This is basically to avoid roundoff, so 1% is very conservative.
+    bounding_box_min -= 0.01 * length_scale
+    bounding_box_max += 0.01 * length_scale
+
+    on_rank = (vertex_coords < bounding_box_max).all(axis=1) \
+        & (vertex_coords > bounding_box_min).all(axis=1)
+
+    return np.compress(on_rank, vertex_coords, axis=0)
+
+
 @PETSc.Log.EventDecorator()
 def SubDomainData(geometric_expr):
     """Creates a subdomain data object from a boolean-valued UFL expression.
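A remark on collectivity: ``_parent_mesh_embedding`` relies on ``locate_cell``
and ``locate_cell_and_reference_coordinate`` being collective over the mesh
communicator, so every rank must issue the same number of calls; ranks with
fewer local vertices pad with a point guaranteed to lie outside the mesh. The
following minimal sketch of that pattern is illustrative and not part of the
patch: it assumes only a ``locate_cell`` callable that returns a cell number,
or None on a miss, matching the usage above::

    import numpy as np
    from mpi4py import MPI

    def locate_points_collectively(points, locate_cell, gdim,
                                   comm=MPI.COMM_WORLD):
        # All ranks must agree on the number of locate_cell calls, so
        # take the maximum local point count over the communicator.
        num_local = len(points)
        max_local = comm.allreduce(num_local, op=MPI.MAX)
        out_of_mesh_point = np.full((1, gdim), np.inf)
        cells = []
        for i in range(max_local):
            if i < num_local:
                cells.append(locate_cell(points[i]))  # None if not found
            else:
                # Dummy call on an unlocatable point keeps the call
                # collective without affecting the result.
                locate_cell(out_of_mesh_point)
        return cells

This trades a few wasted point queries on the shorter ranks for deadlock-free
parallel execution.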
diff --git a/tests/vertexonly/test_interpolation_from_parent.py b/tests/vertexonly/test_interpolation_from_parent.py
index 1b988f9c4b..0cb13682c4 100644
--- a/tests/vertexonly/test_interpolation_from_parent.py
+++ b/tests/vertexonly/test_interpolation_from_parent.py
@@ -18,9 +18,7 @@
 # CalledProcessError is so the parallel tests correctly xfail
                  marks=pytest.mark.xfail(raises=(subprocess.CalledProcessError, NotImplementedError),
                                          reason="immersed parent meshes not supported")),
-    pytest.param("periodicrectangle",
-                 marks=pytest.mark.xfail(raises=(subprocess.CalledProcessError, NotImplementedError),
-                                         reason="meshes made from coordinate fields are not supported")),
+    pytest.param("periodicrectangle"),
     pytest.param("shiftedmesh",
                  marks=pytest.mark.skip(reason="meshes with modified coordinate fields are not supported"))],
     ids=lambda x: f"{x}-mesh")
@@ -133,6 +131,13 @@ def test_scalar_function_interpolation(parentmesh, vertexcoords, fs):
     vm = VertexOnlyMesh(parentmesh, vertexcoords)
     vertexcoords = vm.coordinates.dat.data_ro.reshape(-1, parentmesh.geometric_dimension())
     fs_fam, fs_deg, fs_typ = fs
+    if (
+        parentmesh.coordinates.function_space().ufl_element().family()
+        == "Discontinuous Lagrange"
+        and fs_fam == "CG"
+    ):
+        pytest.skip(f"Interpolating into {fs_fam} on a periodic mesh is not well-defined.")
+
     V = fs_typ(parentmesh, fs_fam, fs_deg)
     W = FunctionSpace(vm, "DG", 0)
     expr = reduce(add, SpatialCoordinate(parentmesh))
@@ -163,6 +168,11 @@ def test_vector_function_interpolation(parentmesh, vertexcoords, vfs):
     vfs_fam, vfs_deg, vfs_typ = vfs
     vm = VertexOnlyMesh(parentmesh, vertexcoords)
     vertexcoords = vm.coordinates.dat.data_ro
+    if (
+        parentmesh.coordinates.function_space().ufl_element().family()
+        == "Discontinuous Lagrange"
+    ):
+        pytest.skip(f"Interpolating into {vfs_fam} on a periodic mesh is not well-defined.")
     V = vfs_typ(parentmesh, vfs_fam, vfs_deg)
     W = VectorFunctionSpace(vm, "DG", 0)
     expr = 2 * SpatialCoordinate(parentmesh)
@@ -200,6 +210,11 @@ def test_tensor_function_interpolation(parentmesh, vertexcoords, tfs):
     tfs_fam, tfs_deg, tfs_typ = tfs
     vm = VertexOnlyMesh(parentmesh, vertexcoords)
     vertexcoords = vm.coordinates.dat.data_ro
+    if (
+        parentmesh.coordinates.function_space().ufl_element().family()
+        == "Discontinuous Lagrange"
+    ):
+        pytest.skip(f"Interpolating into {tfs_fam} on a periodic mesh is not well-defined.")
     V = tfs_typ(parentmesh, tfs_fam, tfs_deg)
     W = TensorFunctionSpace(vm, "DG", 0)
     x = SpatialCoordinate(parentmesh)
diff --git a/tests/vertexonly/test_swarm.py b/tests/vertexonly/test_swarm.py
index 74b470575d..152eef3b55 100644
--- a/tests/vertexonly/test_swarm.py
+++ b/tests/vertexonly/test_swarm.py
@@ -42,7 +42,7 @@ def cell_midpoints(m):
     "cube",
     "tetrahedron",
     pytest.param("immersedsphere", marks=pytest.mark.skip(reason="immersed parent meshes not supported and will segfault PETSc when creating the DMSwarm")),
-    pytest.param("periodicrectangle", marks=pytest.mark.skip(reason="meshes made from coordinate fields are not supported")),
+    pytest.param("periodicrectangle"),
     pytest.param("shiftedmesh", marks=pytest.mark.skip(reason="meshes with modified coordinate fields are not supported"))])
 def parentmesh(request):
     if request.param == "interval":
@@ -67,7 +67,7 @@ def parentmesh(request):
 
 
 # pic swarm tests
 
-def test_pic_swarm_in_plex(parentmesh):
+def test_pic_swarm_in_mesh(parentmesh):
     """Generate points in cell midpoints of mesh `parentmesh` and check correct
     swarm is created in plex."""
@@ -78,7 +78,7 @@
     plex = parentmesh.topology.topology_dm
     from firedrake.petsc import PETSc
     fields = [("fieldA", 1, PETSc.IntType), ("fieldB", 2, PETSc.ScalarType)]
-    swarm = mesh._pic_swarm_in_plex(plex, inputpointcoords, fields=fields)
+    swarm = mesh._pic_swarm_in_mesh(parentmesh, inputpointcoords, fields=fields)
     # Get point coords on current MPI rank
     localpointcoords = np.copy(swarm.getField("DMSwarmPIC_coor"))
     swarm.restoreField("DMSwarmPIC_coor")
@@ -103,6 +103,7 @@ def test_pic_swarm_in_plex(parentmesh):
         swarm.restoreField(name)
     # Check comm sizes match
     assert plex.comm.size == swarm.comm.size
+    # Check coordinate list and parent cell indices match
     assert len(localpointcoords) == len(localparentcellindices)
     # check local points are found in list of input points
@@ -110,7 +111,8 @@ def test_pic_swarm_in_plex(parentmesh):
         assert np.any(np.isclose(p, inputpointcoords))
     # check local points are correct local points given mesh
     # partitioning (but don't require ordering to be maintained)
-    assert np.allclose(np.sort(inputlocalpointcoords), np.sort(localpointcoords))
+    assert np.allclose(np.sort(inputlocalpointcoords, axis=0),
+                       np.sort(localpointcoords, axis=0))
     # Check methods for checking number of points on current MPI rank
     assert len(localpointcoords) == swarm.getLocalSize()
     # Check there are as many local points as there are local cells
@@ -125,17 +127,26 @@ def test_pic_swarm_in_plex(parentmesh):
     for index in localparentcellindices:
         assert np.any(index == cell_indexes)
 
+    # Now have DMPlex compute the cell IDs in cases where it can:
+    if parentmesh.coordinates.ufl_element().family() != "Discontinuous Lagrange":
+        swarm.setPointCoordinates(localpointcoords, redundant=False,
+                                  mode=PETSc.InsertMode.INSERT_VALUES)
+        petsclocalparentcellindices = np.copy(swarm.getField("DMSwarm_cellid"))
+        swarm.restoreField("DMSwarm_cellid")
+        # Check that we agree with PETSc
+        assert np.all(petsclocalparentcellindices == localparentcellindices)
 
 
 @pytest.mark.parallel
-def test_pic_swarm_in_plex_parallel(parentmesh):
-    test_pic_swarm_in_plex(parentmesh)
+def test_pic_swarm_in_mesh_parallel(parentmesh):
+    test_pic_swarm_in_mesh(parentmesh)
 
 
 @pytest.mark.parallel(nprocs=2)  # nprocs == total number of mesh cells
-def test_pic_swarm_in_plex_2d_2procs():
-    test_pic_swarm_in_plex(UnitSquareMesh(1, 1))
+def test_pic_swarm_in_mesh_2d_2procs():
+    test_pic_swarm_in_mesh(UnitSquareMesh(1, 1))
 
 
 @pytest.mark.parallel(nprocs=3)  # nprocs > total number of mesh cells
-def test_pic_swarm_in_plex_2d_3procs():
-    test_pic_swarm_in_plex(UnitSquareMesh(1, 1))
+def test_pic_swarm_in_mesh_2d_3procs():
+    test_pic_swarm_in_mesh(UnitSquareMesh(1, 1))
diff --git a/tests/vertexonly/test_vertex_only_fs.py b/tests/vertexonly/test_vertex_only_fs.py
index 76f2462ab4..4701594213 100644
--- a/tests/vertexonly/test_vertex_only_fs.py
+++ b/tests/vertexonly/test_vertex_only_fs.py
@@ -12,7 +12,7 @@
     "cube",
     "tetrahedron",
     pytest.param("immersedsphere", marks=pytest.mark.xfail(reason="immersed parent meshes not supported")),
-    pytest.param("periodicrectangle", marks=pytest.mark.xfail(reason="meshes made from coordinate fields are not supported")),
+    pytest.param("periodicrectangle"),
     pytest.param("shiftedmesh", marks=pytest.mark.skip(reason="meshes with modified coordinate fields are not supported"))])
 def parentmesh(request):
     if request.param == "interval":
diff --git a/tests/vertexonly/test_vertex_only_mesh_generation.py b/tests/vertexonly/test_vertex_only_mesh_generation.py
index b68d97bd02..b10bfe7e27 100644
--- a/tests/vertexonly/test_vertex_only_mesh_generation.py
+++ b/tests/vertexonly/test_vertex_only_mesh_generation.py
@@ -42,7 +42,7 @@ def cell_midpoints(m):
     "cube",
     "tetrahedron",
     pytest.param("immersedsphere", marks=pytest.mark.xfail(reason="immersed parent meshes not supported")),
-    pytest.param("periodicrectangle", marks=pytest.mark.xfail(reason="meshes made from coordinate fields are not supported")),
+    pytest.param("periodicrectangle"),
     pytest.param("shiftedmesh", marks=pytest.mark.skip(reason="meshes with modified coordinate fields are not supported"))])
 def parentmesh(request):
     if request.param == "interval":
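A remark on the bounding-box prefilter: ``_on_rank_vertices`` keeps a vertex
only if it lies inside the rank's padded bounding box, which is the
conjunction of being below the padded maximum and above the padded minimum in
every coordinate direction, hence the ``&`` combining the two ``all(axis=1)``
tests. A standalone sketch of the same filter follows; it is illustrative
only, and the names ``points`` and ``local_coords`` are assumptions standing
in for the candidate vertices and the rank-local coordinate array, both NumPy
arrays of shape ``(npoints, gdim)`` and ``(nnodes, gdim)`` respectively::

    import numpy as np

    def prefilter_points(points, local_coords, pad_fraction=0.01):
        # Bounding box of the rank-local coordinates (including halos).
        box_min = local_coords.min(axis=0, initial=np.inf)
        box_max = local_coords.max(axis=0, initial=-np.inf)
        # Pad by a fraction of the largest box edge to absorb roundoff.
        pad = pad_fraction * (box_max - box_min).max()
        inside = ((points >= box_min - pad).all(axis=1)
                  & (points <= box_max + pad).all(axis=1))
        return np.compress(inside, points, axis=0)

The filter is purely a local optimisation ahead of the exact, and more
expensive, cell location step: a point discarded here can still be claimed by
whichever rank's bounding box does contain it.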