Add VecDohp, MPI vectors with VecGhost closure (includes test)
Changed reference output suffix to .refout

This is progress towards issue gh-4
jedbrown committed Apr 24, 2009
1 parent a1782ce commit fba64d1
Showing 7 changed files with 371 additions and 1 deletion.
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -61,7 +61,7 @@ macro (dohp_link_executable name source)
target_link_libraries (${name} dohp)
endmacro ()
macro (dohp_add_test name np)
add_test ("${name}" "${Dohp_TestWithReference}" "${PETSC_MPIEXEC}" 2 "${CMAKE_CURRENT_SOURCE_DIR}/${name}.out" ${ARGN})
add_test ("${name}" "${Dohp_TestWithReference}" "${PETSC_MPIEXEC}" 2 "${CMAKE_CURRENT_SOURCE_DIR}/${name}.refout" ${ARGN})
endmacro()
set (Dohp_TestWithReference "${Dohp_SOURCE_DIR}/TestWithReference.sh")

17 changes: 17 additions & 0 deletions include/dohpvec.h
@@ -0,0 +1,17 @@
#ifndef _DOHPVEC_H
#define _DOHPVEC_H

#include "petscvec.h"
#include "dohptype.h"

PETSC_EXTERN_CXX_BEGIN

#define VECDOHP "dohp"
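
/* A sketch of what the declarations below provide (see src/vec/vecd.c in this
* commit): VecCreateDohp(comm,bs,n,nc,nghosts,ghosts,&v) creates a VECDOHP
* vector with block size bs, n owned blocks in the unclosed form, nc owned
* blocks in the closed form, and nghosts ghost blocks given by global block
* index.  VecDohpGetClosure returns a referenced closed (ghosted) form and
* VecDohpRestoreClosure drops that reference. */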

EXTERN dErr VecDohpGetClosure(Vec,Vec*);
EXTERN dErr VecDohpRestoreClosure(Vec,Vec*);
EXTERN dErr VecCreateDohp(MPI_Comm,dInt,dInt,dInt,dInt,const dInt[],Vec*);

PETSC_EXTERN_CXX_END

#endif
5 changes: 5 additions & 0 deletions src/CMakeLists.txt
@@ -29,6 +29,10 @@ list (APPEND Dohp_SRCS
fs/mesh/impls/pack/pack.c
fs/mesh/impls/serial/serial.c)

# ADD_SUBDIRECTORY (fs/vec)
list (APPEND Dohp_SRCS
vec/vecd.c)

list (APPEND Dohp_SRCS
util/util.c)

@@ -41,4 +45,5 @@ install (TARGETS dohp
if (Dohp_BUILD_TESTS)
add_subdirectory (fs/tests)
add_subdirectory (jacobi/tests)
add_subdirectory (vec/tests)
endif (Dohp_BUILD_TESTS)
3 changes: 3 additions & 0 deletions src/vec/tests/CMakeLists.txt
@@ -0,0 +1,3 @@
dohp_link_executable (vec-ex1 vec-ex1.c)

dohp_add_test(vec-ex1 2 vec-ex1)
87 changes: 87 additions & 0 deletions src/vec/tests/vec-ex1.c
@@ -0,0 +1,87 @@
static const char help[] = "Test vector closure\n\n";

#include "dohpvec.h"

int main(int argc,char *argv[])
{
MPI_Comm comm;
dMPIInt rank,size;
PetscViewer viewer;
Vec x,xc,xl,y,yc,z;
IS isg;
VecScatter scatter;
dScalar *a;
dInt nghost = 2,ghosts[2],n=3,bs=2,xn,xbs;
dErr err;

err = PetscInitialize(&argc,&argv,0,help);dCHK(err);
comm = PETSC_COMM_WORLD;
err = MPI_Comm_size(comm,&size);dCHK(err);
err = MPI_Comm_rank(comm,&rank);dCHK(err);
if (size != 2) dERROR(1,"This example must be run with 2 processes");
err = PetscViewerASCIIGetStdout(comm,&viewer);dCHK(err);

ghosts[0] = n*((size+rank-1)%size)+1; /* second block of left neighbor, periodically */
ghosts[1] = n*((size+rank+1)%size)+1; /* second block of right neighbor, periodically */
err = PetscSynchronizedPrintf(comm,"ghosts %d %d\n",ghosts[0],ghosts[1]);dCHK(err);
err = PetscSynchronizedFlush(comm);dCHK(err);

err = VecCreateDohp(comm,bs,n-1,n,nghost,ghosts,&x);dCHK(err);
err = VecGetLocalSize(x,&xn);dCHK(err);
if (xn != (n-1)*bs) dERROR(1,"local size %d, expected %d",xn,(n-1)*bs);
err = VecGetBlockSize(x,&xbs);dCHK(err);
if (xbs != bs) dERROR(1,"block size %d, expected %d",xbs,bs);

err = PetscPrintf(comm,"Empty vector\n");
err = VecView(x,viewer);dCHK(err);

err = VecGetArray(x,&a);dCHK(err);
for (dInt i=0; i<xn; i++) a[i] = 1000+rank*100+i;
err = VecRestoreArray(x,&a);dCHK(err);
err = PetscPrintf(comm,"Global unclosed form\n");dCHK(err);
err = VecView(x,viewer);dCHK(err);
err = VecDohpGetClosure(x,&xc);dCHK(err);
err = PetscPrintf(comm,"Global closed form\n");dCHK(err);
err = VecView(xc,viewer);dCHK(err);
err = VecGhostGetLocalForm(xc,&xl);dCHK(err);

/* Create a global vector that will hold the local values on each process, and a scatter from that to the local form.
* This makes viewing deterministic, whereas using PETSC_VIEWER_STDOUT_SELF requires manual synchronization. */
err = VecCreateMPI(comm,(n+nghost)*bs,PETSC_DECIDE,&z);dCHK(err);
err = ISCreateStride(comm,(n+nghost)*bs,(n+nghost)*bs*rank,1,&isg);dCHK(err);
err = VecScatterCreate(z,isg,xl,NULL,&scatter);dCHK(err);
err = ISDestroy(isg);dCHK(err);
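/* SCATTER_REVERSE runs the scatter from xl back into z, depositing each
* process's local (ghosted) form into its contiguous slice of z so that a
* single parallel VecView shows all local forms in a deterministic order. */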
err = VecScatterBegin(scatter,xl,z,INSERT_VALUES,SCATTER_REVERSE);dCHK(err);
err = VecScatterEnd(scatter,xl,z,INSERT_VALUES,SCATTER_REVERSE);dCHK(err);
err = PetscPrintf(comm,"Before VecGhostUpdateBegin/End");dCHK(err);
err = VecView(z,viewer);dCHK(err);
err = VecGhostUpdateBegin(xc,INSERT_VALUES,SCATTER_FORWARD);dCHK(err);
err = VecGhostUpdateEnd(xc,INSERT_VALUES,SCATTER_FORWARD);dCHK(err);
err = VecScatterBegin(scatter,xl,z,INSERT_VALUES,SCATTER_REVERSE);dCHK(err);
err = VecScatterEnd(scatter,xl,z,INSERT_VALUES,SCATTER_REVERSE);dCHK(err);
err = PetscPrintf(comm,"After VecGhostUpdateBegin/End");dCHK(err);
err = VecView(z,viewer);dCHK(err);
err = VecScatterDestroy(scatter);dCHK(err);
err = VecDestroy(z);dCHK(err);
err = VecGhostRestoreLocalForm(xc,&xl);dCHK(err);

err = VecDuplicate(x,&y);dCHK(err);
err = VecCopy(x,y);dCHK(err);
err = PetscPrintf(comm,"Global unclosed form of y\n");dCHK(err);
err = VecView(y,viewer);dCHK(err);
err = VecDohpGetClosure(y,&yc);dCHK(err);
err = PetscPrintf(comm,"Global closed form of y\n");dCHK(err);
err = VecView(yc,viewer);dCHK(err);
err = VecZeroEntries(yc);dCHK(err);
err = VecShift(xc,1000);dCHK(err);
err = VecCopy(xc,yc);dCHK(err);
err = PetscPrintf(comm,"Global closed form of y, after copy of shifted closure\n");dCHK(err);
err = VecView(yc,viewer);dCHK(err);

err = VecDohpRestoreClosure(x,&xc);dCHK(err);
err = VecDohpRestoreClosure(y,&yc);dCHK(err);
err = VecDestroy(x);dCHK(err);
err = VecDestroy(y);dCHK(err);
err = PetscFinalize();dCHK(err);
return 0;
}
124 changes: 124 additions & 0 deletions src/vec/tests/vec-ex1.refout
@@ -0,0 +1,124 @@
ghosts 4 4
ghosts 1 1
Empty vector
Process [0]
0
0
0
0
Process [1]
0
0
0
0
Global unclosed form
Process [0]
1000
1001
1002
1003
Process [1]
1100
1101
1102
1103
Global closed form
Process [0]
1000
1001
1002
1003
0
0
Process [1]
1100
1101
1102
1103
0
0
Before VecGhostUpdateBegin/End
Process [0]
1000
1001
1002
1003
0
0
0
0
0
0
Process [1]
1100
1101
1102
1103
0
0
0
0
0
0
After VecGhostUpdateBegin/End
Process [0]
1000
1001
1002
1003
0
0
1102
1103
1102
1103
Process [1]
1100
1101
1102
1103
0
0
1002
1003
1002
1003
Global unclosed form of y
Process [0]
1000
1001
1002
1003
Process [1]
1100
1101
1102
1103
Global closed form of y
Process [0]
1000
1001
1002
1003
0
0
Process [1]
1100
1101
1102
1103
0
0
Global closed form of y, after copy of shifted closure
Process [0]
2000
2001
2002
2003
1000
1000
Process [1]
2100
2101
2102
2103
1000
1000
134 changes: 134 additions & 0 deletions src/vec/vecd.c
@@ -0,0 +1,134 @@
#include "dohpvec.h"
#include "../src/vec/vec/impls/mpi/pvecimpl.h" /* To have access to Vec_MPI (.localrep) and VecCreate_MPI_Private */

/** Get the closed form of a Dohp vector
*
* @note Dohp vectors are basically just MPI vectors; the only difference is that instead of a local form, we have a
* closed form. We subvert .localrep to mean the closed form.
*
**/
dErr VecDohpGetClosure(Vec v,Vec *c)
{
Vec_MPI *vmpi;
dTruth isdohp;
dErr err;

dFunctionBegin;
err = PetscTypeCompare((dObject)v,VECDOHP,&isdohp);dCHK(err);
if (!isdohp) dERROR(1,"Vector type %s does not have closure",((dObject)v)->type_name);
vmpi = v->data;
if (!vmpi->localrep) dERROR(1,"Vector has no closure");
*c = vmpi->localrep;
err = PetscObjectReference((dObject)*c);dCHK(err);
dFunctionReturn(0);
}

dErr VecDohpRestoreClosure(Vec v,Vec *c)
{
dErr err;

dFunctionBegin;
if (*c != ((Vec_MPI*)v->data)->localrep) dERROR(1,"attempting to restore incorrect closure");
err = PetscObjectDereference((dObject)*c);dCHK(err);
dFunctionReturn(0);
}

static dErr VecDuplicate_Dohp(Vec x,Vec *iny)
{
Vec y,xc,yc;
Vec_MPI *ympi;
dScalar *a;
dErr err;

dFunctionBegin;
dValidHeader(x,VEC_COOKIE,1);
dValidPointer(iny,2);
*iny = 0;
err = VecDohpGetClosure(x,&xc);dCHK(err);
err = VecDuplicate(xc,&yc);dCHK(err);
err = VecDohpRestoreClosure(x,&xc);dCHK(err);

/* The rest is mostly the same as VecDuplicate_MPI, but we can't call that because it allocates memory.
* Unfortunately, this is fragile if the VecMPI implementation changes. I think this part of PETSc is quite stable and
* I will be sufficiently involved to notice changes here. Famous last words. */
err = VecCreate(((dObject)x)->comm,&y);dCHK(err);

err = PetscMapDestroy(y->map);dCHK(err);
y->map = x->map;
y->map->refcnt++;

err = VecGetArray(yc,&a);dCHK(err);
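/* PETSC_FALSE asks VecCreate_MPI_Private not to allocate storage; y instead
* uses the array obtained from its closure yc, so the owned part of y aliases
* the leading entries of yc, just as in VecCreateDohp below. */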
err = VecCreate_MPI_Private(y,PETSC_FALSE,0,a);dCHK(err);
err = VecRestoreArray(yc,&a);dCHK(err);
ympi = y->data;
err = dMemcpy(y->ops,x->ops,sizeof(struct _VecOps));dCHK(err);

ympi->localrep = yc; /* subverting .localrep to mean closed form */

y->stash.donotstash = x->stash.donotstash;
y->stash.ignorenegidx = x->stash.ignorenegidx;

err = PetscOListDuplicate(((dObject)x)->olist,&((dObject)y)->olist);dCHK(err);
err = PetscFListDuplicate(((dObject)x)->qlist,&((dObject)y)->qlist);dCHK(err);
if (x->mapping) {
err = PetscObjectReference((dObject)x->mapping);dCHK(err);
y->mapping = x->mapping;
}
if (x->bmapping) {
err = PetscObjectReference((dObject)x->bmapping);dCHK(err);
y->bmapping = x->bmapping;
}
y->map->bs = x->map->bs;
y->bstash.bs = x->bstash.bs;

err = PetscObjectChangeTypeName((dObject)y,VECDOHP);dCHK(err);
*iny = y;
dFunctionReturn(0);
}

#if 0
static dErr VecDestroy_Dohp(Vec x)
{
dErr err;

dFunctionBegin;
err = PetscObjectChangeTypeName((dObject)x,VECMPI);dCHK(err);
dFunctionReturn(0);
}
#endif

dErr VecCreateDohp(MPI_Comm comm,dInt bs,dInt n,dInt nc,dInt nghosts,const dInt ghosts[],Vec *v)
{
Vec_MPI *vmpi;
dInt *sghosts;
Vec vc,vg;
dScalar *a;
dErr err;

dFunctionBegin;
dValidPointer(v,7);
*v = 0;
if (bs > 1) {
err = dMallocA(nghosts,&sghosts);dCHK(err);
for (dInt i=0; i<nghosts; i++) sghosts[i] = ghosts[i]*bs; /* Index ghosts by scalar offset instead of blocks */
} else {
sghosts = 0;
}
err = VecCreateGhostBlock(comm,bs,nc*bs,PETSC_DECIDE,nghosts,sghosts?sghosts:ghosts,&vc);dCHK(err);
err = dFree(sghosts);dCHK(err);
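/* The unclosed vector vg shares storage with its closed form vc: vg is created
* with VecCreateMPIWithArray over the first n*bs entries of vc's array, so the
* owned part of the Dohp vector and the leading part of its closure are the
* same memory. */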
err = VecGetArray(vc,&a);dCHK(err);
err = VecCreateMPIWithArray(comm,n*bs,PETSC_DECIDE,a,&vg);dCHK(err);
err = VecRestoreArray(vc,&a);dCHK(err);
err = VecSetBlockSize(vg,bs);dCHK(err);
vmpi = vg->data;
if (vmpi->localrep) dERROR(1,"Vector has localrep, expected no localrep");
vmpi->localrep = vc; /* subvert this field to mean closed rep */
/* Since we subvert .localrep, VecDestroy_MPI will automatically destroy the closed form */
vg->ops->duplicate = VecDuplicate_Dohp;
//vg->ops->destroy = VecDestroy_Dohp;
/* It might be useful to set the (block) LocalToGlobal mapping here, but in the use case I have in mind, the user is
* always working with the closed form anyway (in function evaluation). The \e matrix does need a customized
* LocalToGlobal mapping.
*/
err = PetscObjectChangeTypeName((dObject)vg,VECDOHP);dCHK(err);
*v = vg;
dFunctionReturn(0);
}
