Commit e9d19eb1 authored by Boris Martin

Routine for summed RHS contribs

parent ec4019af
Pipeline #12901 failed
@@ -61,6 +61,16 @@ namespace gmshfem::system
      return _distributedExtendedVector;
#endif
    }
    // Sum the duplicated (overlapping) entries of an extended RHS vector
    // into a freshly created global vector; the caller owns the result.
    PetscErrorCode sumDistributedRHS(Vec localRHSdistributed, Vec* out) const
    {
      PetscFunctionBeginUser;
      Vec res;
      PetscCall(VecDuplicate(_distributedVector, &res));
      PetscCall(VecZeroEntries(res)); // start from zero before accumulating
      // Debug output: print the extended RHS before summing.
      PetscCall(VecView(localRHSdistributed, PETSC_VIEWER_STDOUT_WORLD));
      // Reverse scatter with ADD_VALUES: entries shared by several ranks
      // are accumulated into their unique global location.
      PetscCall(VecScatterBegin(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
      *out = res;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    ~DistributedContextImpl() {
#ifdef HAVE_PETSC
      VecDestroy(&_distributedExtendedVector);
@@ -125,6 +135,16 @@ namespace gmshfem::system
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  template< class T_Scalar >
  PetscErrorCode DistributedContext< T_Scalar >::sumDistributedRHS(Vec localRHSdistributed, Vec *out) const
  {
    PetscFunctionBeginUser;
    if (!_impl)
      throw common::Exception("Uninitialized DistributedContextImpl");
    PetscCall(_impl->sumDistributedRHS(localRHSdistributed, out));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#endif

  template <class T_Scalar>
...
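For context on the pattern used in sumDistributedRHS: a VecScatter created to map a global (non-overlapping) vector onto an extended (overlapping) one can be run backwards with ADD_VALUES, which accumulates every duplicated entry into its unique global slot. Below is a minimal, self-contained sketch of that idiom (hypothetical, not part of this commit): two ranks share global dof 1 of a 3-dof vector, each contributes 1.0 per local entry, and the reverse scatter yields [1, 2, 1].

// Minimal PETSc sketch (not from this commit): sum overlapping RHS
// contributions with a reverse scatter. Run with: mpiexec -n 2 ./sum_rhs
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));

  PetscMPIInt rank, size;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "needs exactly 2 ranks");

  // Global RHS: 3 dofs, each stored on exactly one rank.
  Vec global;
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 3, &global));
  PetscCall(VecZeroEntries(global));

  // Extended RHS: 2 entries per rank; global dof 1 is duplicated on both.
  Vec extended;
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 2, PETSC_DETERMINE, &extended));
  PetscCall(VecSet(extended, 1.0)); // each rank contributes 1.0 per local dof

  // Scatter created as global -> extended; its reverse sums duplicates back.
  const PetscInt toGlobal[2][2] = {{0, 1}, {1, 2}}; // local dof -> global dof
  IS isGlobal, isExtended;
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 2, toGlobal[rank], PETSC_COPY_VALUES, &isGlobal));
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, 2, 2 * rank, 1, &isExtended));
  VecScatter scatter;
  PetscCall(VecScatterCreate(global, isGlobal, extended, isExtended, &scatter));

  // Reverse scatter with ADD_VALUES: the shared dof accumulates both
  // contributions, so the global vector becomes [1, 2, 1].
  PetscCall(VecScatterBegin(scatter, extended, global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, extended, global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecView(global, PETSC_VIEWER_STDOUT_WORLD));

  PetscCall(VecScatterDestroy(&scatter));
  PetscCall(ISDestroy(&isGlobal));
  PetscCall(ISDestroy(&isExtended));
  PetscCall(VecDestroy(&extended));
  PetscCall(VecDestroy(&global));
  PetscCall(PetscFinalize());
  return 0;
}

This is exactly the role _scatter plays in the routine above, with _distributedVector providing the global layout for the duplicated output vector.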
@@ -45,9 +45,12 @@ class DistributedContext {
  const Indices& localToGlobal() const;
  const Indices& localIDofOwned() const;
  const Indices& localIDofNonOwned() const;
  // Vec is a distributed vector. We scatter it to an extended vector (with duplicated overlaps), then set the local values.
  void readScatteredData(std::vector<T_Scalar>& values, Vec sol) const;
#ifdef HAVE_PETSC
  PetscErrorCode getSubdomainsIS(IS* is) const;
  // Sum an extended vector's duplicated entries into a global vector. The output is a standalone vector owned by the caller.
  PetscErrorCode sumDistributedRHS(Vec localRHSdistributed, Vec* out) const;
#endif
  DistributedContext(Indices&& localToGlobal, Indices&& owned, Indices&& nonOwned);
  ~DistributedContext();
...
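A hypothetical call site (ctx, extendedRHS and summedRHS are illustrative names, not from the commit), showing that the caller receives, and is responsible for destroying, the standalone output vector:

// ctx: an initialized DistributedContext<T_Scalar>; extendedRHS: the
// extended (overlapping) local RHS built for this context.
Vec summedRHS = nullptr;
PetscCall(ctx.sumDistributedRHS(extendedRHS, &summedRHS));
// ... use summedRHS as the assembled global right-hand side ...
PetscCall(VecDestroy(&summedRHS)); // the output is standalone: the caller owns it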
@@ -59,6 +59,7 @@ namespace gmshfem::system
    Vec getPetsc() const;
#ifdef HAVE_MPI
    Vec getPetscDistributed(const DistributedContext<T_Scalar>& distributedContext) const;
    PetscErrorCode getPetscDistributedSummedRHS(const DistributedContext<T_Scalar>& distributedContext, Vec* out) const;
    // Returns a PetscScalar*, but that type is not declared at this point, hence void*.
    void* generateRawLocalData(const DistributedContext<T_Scalar>& distributedContext) const;
...
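The body of getPetscDistributedSummedRHS is not shown in this diff; a plausible sketch (an assumption, not the actual implementation, and assuming the enclosing class is the Vector<T_Scalar> declared in this header) would simply compose the two routines declared above:

template <class T_Scalar>
PetscErrorCode Vector<T_Scalar>::getPetscDistributedSummedRHS(const DistributedContext<T_Scalar>& distributedContext, Vec* out) const
{
  PetscFunctionBeginUser;
  // Sketch: build the extended (overlapping) vector, then let the context
  // sum the duplicated entries into a fresh global vector.
  Vec extended = getPetscDistributed(distributedContext);
  PetscCall(distributedContext.sumDistributedRHS(extended, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}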