
Commit

Merge pull request #1797 from wenqing/petsc_fixing
Fixed a bug in PETScVector
wenqing committed Jun 9, 2017
2 parents 3b6ed38 + b17a217 commit e203766
Showing 19 changed files with 287 additions and 96 deletions.
9 changes: 9 additions & 0 deletions MathLib/LinAlg/LinAlg.cpp
@@ -22,6 +22,11 @@ namespace MathLib { namespace LinAlg

// Vector

void setLocalAccessibleVector(PETScVector const& x)
{
x.setLocalAccessibleVector();
}

void set(PETScVector& x, double const a)
{
VecSet(x.getRawVector(), a);
@@ -177,6 +182,10 @@ namespace MathLib { namespace LinAlg

// Vector

void setLocalAccessibleVector(EigenVector const& /*x*/)
{
}

void set(EigenVector& x, double const a)
{
x.getRawVector().setConstant(a);
14 changes: 14 additions & 0 deletions MathLib/LinAlg/LinAlg.h
@@ -148,6 +148,10 @@ namespace LinAlg

// Vector

/// Set the locally accessible vector for read access to its entries.
/// Call this before calling operator[] or get(...) of x.
void setLocalAccessibleVector(PETScVector const& x);

void set(PETScVector& x, double const a);

void copy(PETScVector const& x, PETScVector& y);
@@ -206,6 +210,16 @@ namespace LinAlg

// Vector

/**
   Set the locally accessible vector for read access to its entries.
   Call this before calling operator[] or get(...) of x.
   The function only performs work if DDC (domain decomposition,
   i.e. parallel computing) is enabled. So far, Eigen vectors are not
   used as global vectors in parallel computing, so this function is
   currently empty.
*/
void setLocalAccessibleVector(EigenVector const& x);

void set(EigenVector& x, double const a);

void copy(EigenVector const& x, EigenVector& y);
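A minimal usage sketch of the new accessor pattern (the helper function, header paths, and context below are illustrative assumptions, not part of this commit): setLocalAccessibleVector caches the vector entries locally and must run after assembly and before any read through get() or operator[].

```cpp
// Sketch only: assumes an assembled MathLib::PETScVector.
#include "MathLib/LinAlg/LinAlg.h"
#include "MathLib/LinAlg/PETSc/PETScVector.h"

void readEntries(MathLib::PETScVector const& x)
{
    // Cache a locally accessible copy of the entries first;
    // reading through get() before this call would miss the cache.
    MathLib::LinAlg::setLocalAccessibleVector(x);

    // Now single entries can be read on every rank.
    PetscScalar const v0 = x.get(0);
    (void)v0;  // use v0 ...
}
```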
2 changes: 1 addition & 1 deletion MathLib/LinAlg/PETSc/PETScLinearSolver.cpp
@@ -83,7 +83,7 @@ bool PETScLinearSolver::solve(PETScMatrix& A, PETScVector &b, PETScVector &x)
KSPSetOperators(_solver, A.getRawMatrix(), A.getRawMatrix(), DIFFERENT_NONZERO_PATTERN);
#endif

KSPSolve(_solver, b.getData(), x.getData());
KSPSolve(_solver, b.getRawVector(), x.getRawVector());

KSPConvergedReason reason;
KSPGetConvergedReason(_solver, &reason);
122 changes: 104 additions & 18 deletions MathLib/LinAlg/PETSc/PETScVector.cpp
@@ -28,10 +28,13 @@ namespace MathLib
{
PETScVector::PETScVector(const PetscInt vec_size, const bool is_global_size)
{
if( is_global_size ) {
if( is_global_size )
{
VecCreate(PETSC_COMM_WORLD, &_v);
VecSetSizes(_v, PETSC_DECIDE, vec_size);
} else {
}
else
{
// Fix size partitioning
// the size can be associated to specific memory allocation of a matrix
VecCreateMPI(PETSC_COMM_WORLD, vec_size, PETSC_DECIDE, &_v);
@@ -43,8 +46,8 @@ PETScVector::PETScVector(const PetscInt vec_size, const bool is_global_size)
PETScVector::PETScVector(const PetscInt vec_size,
const std::vector<PetscInt>& ghost_ids,
const bool is_global_size)
: _size_ghosts{static_cast<PetscInt>(ghost_ids.size())}
, _has_ghost_id{true}
: _size_ghosts {static_cast<PetscInt>(ghost_ids.size())},
_has_ghost_id {true}
{
PetscInt nghosts = static_cast<PetscInt>( ghost_ids.size() );
if ( is_global_size )
@@ -61,6 +64,10 @@ PETScVector::PETScVector(const PetscInt vec_size,
}

config();

for (PetscInt i=0; i<nghosts; i++)
_global_ids2local_ids_ghost.emplace(ghost_ids[i],
_size_loc + i);
}
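The emplace() loop just added above records, for every ghost, where its global ID lands in the rank-local storage: owned entries occupy positions [0, _size_loc), and ghost i is appended at _size_loc + i. A standalone sketch with invented numbers (assuming the member behaves like a std::map):

```cpp
// Standalone illustration of the ghost-index bookkeeping (invented values).
#include <cassert>
#include <cstddef>
#include <map>
#include <vector>

int main()
{
    const int size_loc = 4;                     // entries owned by this rank
    const std::vector<int> ghost_ids{7, 2, 9};  // global IDs of the ghosts

    // Ghost i is stored right behind the owned entries, at size_loc + i,
    // mirroring the emplace() loop in the constructor above.
    std::map<int, int> global_ids2local_ids_ghost;
    for (std::size_t i = 0; i < ghost_ids.size(); i++)
        global_ids2local_ids_ghost.emplace(ghost_ids[i],
                                           size_loc + static_cast<int>(i));

    assert(global_ids2local_ids_ghost.at(7) == 4);
    assert(global_ids2local_ids_ghost.at(2) == 5);
    assert(global_ids2local_ids_ghost.at(9) == 6);
    return 0;
}
```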

PETScVector::PETScVector(const PETScVector &existing_vec, const bool deep_copy)
@@ -75,14 +82,15 @@ PETScVector::PETScVector(const PETScVector &existing_vec, const bool deep_copy)
}

PETScVector::PETScVector(PETScVector &&other)
: _v{std::move(other._v)}
, _v_loc{std::move(other._v_loc)}
, _start_rank{other._start_rank}
, _end_rank{other._end_rank}
, _size{other._size}
, _size_loc{other._size_loc}
, _size_ghosts{other._size_ghosts}
, _has_ghost_id{other._has_ghost_id}
: _v {std::move(other._v)},
_v_loc {std::move(other._v_loc)},
_start_rank {other._start_rank},
_end_rank {other._end_rank},
_size {other._size},
_size_loc {other._size_loc},
_size_ghosts {other._size_ghosts},
_has_ghost_id {other._has_ghost_id},
_global_ids2local_ids_ghost {other._global_ids2local_ids_ghost}
{}

void PETScVector::config()
@@ -104,7 +112,7 @@ void PETScVector::finalizeAssembly()
}

void PETScVector::gatherLocalVectors( PetscScalar local_array[],
PetscScalar global_array[])
PetscScalar global_array[]) const
{
// Collect vectors from processors.
int size_rank;
@@ -130,18 +138,20 @@ void PETScVector::gatherLocalVectors( PetscScalar local_array[],

}

void PETScVector::getGlobalVector(PetscScalar u[])
void PETScVector::getGlobalVector(std::vector<PetscScalar>& u) const
{

#ifdef TEST_MEM_PETSC
PetscLogDouble mem1, mem2;
PetscMemoryGetCurrentUsage(&mem1);
#endif

assert(u.size() == _size);

PetscScalar *xp = nullptr;
VecGetArray(_v, &xp);

gatherLocalVectors(xp, u);
gatherLocalVectors(xp, u.data());

// The following line may be needed later on
// for communication load balancing:
@@ -156,15 +166,77 @@ void PETScVector::getGlobalVector(PetscScalar u[])
#endif
}

void PETScVector::copyValues(std::vector<double>& u) const
void PETScVector::setLocalAccessibleVector() const
{
if (_entry_array.empty())
{
const PetscInt array_size
= _global_ids2local_ids_ghost.size() > 0 ?
_size_loc + _size_ghosts: _size;
_entry_array.resize(array_size);
}

if ( !_global_ids2local_ids_ghost.empty() )
{
PetscScalar* loc_x = getLocalVector();
std::copy_n(loc_x, _size_loc + _size_ghosts,
_entry_array.begin());
restoreArray(loc_x);
}
else
getGlobalVector(_entry_array);
}

void PETScVector::copyValues(std::vector<PetscScalar>& u) const
{
assert(u.size() == (std::size_t) (getLocalSize() + getGhostSize()));

double* loc_x = getLocalVector();
PetscScalar* loc_x = getLocalVector();
std::copy_n(loc_x, getLocalSize() + getGhostSize(), u.begin());
restoreArray(loc_x);
}
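A short usage sketch for copyValues (x, an assembled MathLib::PETScVector, is an assumed context): the caller supplies a buffer sized for the owned plus ghost entries, matching the assert above.

```cpp
// Sketch only: x is an assembled MathLib::PETScVector.
std::vector<PetscScalar> u(x.getLocalSize() + x.getGhostSize());
x.copyValues(u);  // owned entries first, then the ghost entries
```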

PetscScalar PETScVector::get(const PetscInt idx) const
{
if (_global_ids2local_ids_ghost.size() > 0)
{
return _entry_array[getLocalIndex(idx)];
}

// A negative index marks a ghost entry; an original global index
// of 0 is encoded as -_size.
const PetscInt id_p = (idx == -_size) ? 0 : std::abs(idx);
return _entry_array[id_p];
}


std::vector<PetscScalar> PETScVector::get(std::vector<IndexType> const& indices) const
{
std::vector<PetscScalar> local_x(indices.size());
// If VecGetValues can get values from different processors,
// use VecGetValues(_v, indices.size(), indices.data(),
// local_x.data());

if (_global_ids2local_ids_ghost.size() > 0)
{
for (std::size_t i=0; i<indices.size(); i++)
{
local_x[i] = _entry_array[getLocalIndex(indices[i])];
}
}
else
{
for (std::size_t i=0; i<indices.size(); i++)
{
// A negative index marks a ghost entry; an original global
// index of 0 is encoded as -_size.
const IndexType id_p = (indices[i] == -_size)
? 0 : std::abs(indices[i]);
local_x[i] = _entry_array[id_p];
}
}

return local_x;
}
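And a matching sketch for the batch getter, again assuming an assembled vector x on which setLocalAccessibleVector has been called, and that IndexType is the class's public index typedef:

```cpp
// Sketch only: read several entries at once after caching them locally.
MathLib::LinAlg::setLocalAccessibleVector(x);
std::vector<MathLib::PETScVector::IndexType> const ids{0, 3, 5};
std::vector<PetscScalar> const values = x.get(ids);
```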

PetscScalar* PETScVector::getLocalVector() const
{
PetscScalar *loc_array;
@@ -176,10 +248,23 @@ PetscScalar* PETScVector::getLocalVector() const
VecGetArray(_v_loc, &loc_array);
}
else
VecGetArray(_v, &loc_array);
VecGetArray(_v, &loc_array);
return loc_array;
}

PetscInt PETScVector::getLocalIndex(const PetscInt global_index) const
{
if (global_index >= 0) // non-ghost entry.
return global_index - _start_rank;

// A special case for a ghost location with global index equal to
// the size of the local vector:
PetscInt real_global_index =
(-global_index == _size) ? 0 : -global_index;

return _global_ids2local_ids_ghost.at(real_global_index);
}
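The sign convention here benefits from a worked example (all numbers invented): non-negative indices are owned entries shifted by the rank offset; negative indices are ghosts; and because -0 cannot be represented, a ghost with global index 0 is encoded as -_size. A standalone sketch:

```cpp
// Standalone illustration of getLocalIndex()'s sign convention.
#include <cassert>
#include <map>

int main()
{
    const int size = 10;       // plays the role of _size
    const int start_rank = 4;  // plays the role of _start_rank
    // plays the role of _global_ids2local_ids_ghost:
    const std::map<int, int> ghosts{{0, 6}, {7, 5}};

    auto getLocalIndex = [&](int global_index) {
        if (global_index >= 0)  // non-ghost entry
            return global_index - start_rank;
        // A ghost with original global index 0 is encoded as -size.
        const int real = (-global_index == size) ? 0 : -global_index;
        return ghosts.at(real);
    };

    assert(getLocalIndex(5) == 1);    // owned entry: 5 - 4
    assert(getLocalIndex(-7) == 5);   // ghost entry with global ID 7
    assert(getLocalIndex(-10) == 6);  // ghost entry with global ID 0
    return 0;
}
```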

void PETScVector::restoreArray(PetscScalar* array) const
{
if (_has_ghost_id)
@@ -221,6 +306,7 @@ void PETScVector::shallowCopy(const PETScVector &v)
_size_loc = v._size_loc;
_size_ghosts = v._size_ghosts;
_has_ghost_id = v._has_ghost_id;
_global_ids2local_ids_ghost = v._global_ids2local_ids_ghost;

VecSetOption(_v, VEC_IGNORE_NEGATIVE_INDICES, PETSC_TRUE);
}