Commit 4b2dbbf7 authored by Boris Martin

Working sumRHS

parent ada3fd69
@@ -1745,7 +1745,7 @@ namespace gmshfem::problem
       throw common::Exception("Missing distributed context");
     if (!_solver)
       throw common::Exception("The solver is not set");
-    _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner);
+    _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner, this->sumRhsInDistributed);
   }
   catch(const std::exception &exc) {
     msg::error << "Unable to solve the system" << msg::endl;
@@ -88,6 +88,8 @@ namespace gmshfem::problem
     unsigned _currentRHS = 0;
     unsigned _numRHS = 1;
     std::vector<std::vector<T_Scalar>> _solutions;
+    bool sumRhsInDistributed = false;
   private:
     std::unique_ptr<system::DistributedContext<T_Scalar>> _distributedContext;
@@ -241,6 +243,9 @@ namespace gmshfem::problem
     algebra::Vector< T_Scalar > &rawSolution, unsigned rhsIdx = 0); // same as above, but exposes raw solution vector
     virtual common::Timer solveForPETSc(Vec x, Vec y, const bool reusePreconditioner = false) const; // solve A x = b without copying data
     virtual common::Timer solveForPETSc(Mat x, Mat y, const bool reusePreconditioner = false) const; // solve A X = B without copying data
+    void setSumDistributedRHS(bool sum) {
+      sumRhsInDistributed = sum;
+    }
     Mat getAllRHSasPETSc() const;
     Mat getAllDistributedRHSasPETSc() const;
     Vec getDistributedRHSasPETSc() const;
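The two header hunks above add the sumRhsInDistributed flag and its setSumDistributedRHS() setter; the first hunk forwards the flag to Solver::solveDistributed. A hedged usage sketch, assuming the enclosing class is gmshfem::problem::Formulation and that its public header is gmshfem/Formulation.h; only setSumDistributedRHS() itself comes from this commit:

#include <gmshfem/Formulation.h>
#include <complex>

// Illustrative helper (not part of this commit): ask the next distributed solve
// to sum the overlapping RHS contributions of neighbouring subdomains.
void enableSummedRHS(gmshfem::problem::Formulation< std::complex< double > > &formulation)
{
  formulation.setSumDistributedRHS(true);
  // A later distributed solve on `formulation` now forwards
  // this->sumRhsInDistributed == true down to Solver::solveDistributed.
}

The flag defaults to false, so existing distributed solves keep their current behaviour.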
@@ -65,7 +65,6 @@ namespace gmshfem::system
     PetscFunctionBeginUser;
     Vec res;
     PetscCall(VecDuplicate(_distributedVector, &res));
-    VecView(localRHSdistributed, PETSC_VIEWER_STDOUT_WORLD);
     PetscCall(VecScatterBegin(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
     PetscCall(VecScatterEnd(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
     *out = res;
@@ -135,6 +134,21 @@ namespace gmshfem::system
     PetscFunctionReturn(PETSC_SUCCESS);
   }
+  template< class T_Scalar >
+  PetscErrorCode DistributedContext< T_Scalar >::createExtendedVector(const std::vector< T_Scalar > &values, Vec *out) const
+  {
+    PetscFunctionBeginUser;
+    if (values.size() != localToGlobal().size())
+      PetscFunctionReturn(PETSC_ERR_ARG_SIZ);
+    Vec newVec;
+    if constexpr (std::is_same<T_Scalar, PetscScalar>::value)
+      PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, localToGlobal().size(), PETSC_DETERMINE, values.data(), &newVec));
+    else
+      PetscFunctionReturn(PETSC_ERR_SUP);
+    *out = newVec;
+    PetscFunctionReturn(PETSC_SUCCESS);
+  }
   template< class T_Scalar >
   PetscErrorCode DistributedContext< T_Scalar >::sumDistributedRHS(Vec localRHSdistributed, Vec *out) const
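createExtendedVector() above wraps the caller's std::vector storage in a PETSc Vec through VecCreateMPIWithArray(), so no data is copied and the vector must outlive the Vec. A minimal standalone sketch of the same pattern, with illustrative names and assuming T_Scalar is PetscScalar:

#include <petscvec.h>
#include <vector>

// Wrap an existing buffer as an MPI Vec (shared storage, no copy). The caller must
// keep `values` alive while *out is in use and destroy *out before `values`.
PetscErrorCode wrapLocalValues(const std::vector< PetscScalar > &values, Vec *out)
{
  PetscFunctionBeginUser;
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, (PetscInt)values.size(),
                                  PETSC_DETERMINE, values.data(), out));
  PetscFunctionReturn(PETSC_SUCCESS);
}

VecDestroy() on such a Vec releases only the wrapper, not the underlying std::vector storage.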
@@ -49,6 +49,8 @@ class DistributedContext {
   void readScatteredData(std::vector<T_Scalar>& values, Vec sol) const;
 #ifdef HAVE_PETSC
   PetscErrorCode getSubdomainsIS(IS* is) const;
+  // Create a lightweight vector with the local dofs, without copying data
+  PetscErrorCode createExtendedVector(const std::vector< T_Scalar >& values, Vec* out) const;
   // Map an extended vector into a global vector with sums. Output becomes a standalone vector
   PetscErrorCode sumDistributedRHS(Vec localRHSdistributed, Vec* out) const;
 #endif
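The two declarations above are the building blocks of the summed-RHS path. A hedged composition sketch of how they could be chained; names and the header path are illustrative, and the actual chaining inside VectorFactory::getPetscDistributedSummedRHS is only partially visible in the last hunk below:

#include <petscvec.h>
#include <vector>
#include <gmshfem/DistributedContext.h> // assumed header path

// Turn one subdomain's locally assembled values into a globally summed RHS vector.
template< class T_Scalar >
PetscErrorCode buildSummedRHS(const gmshfem::system::DistributedContext< T_Scalar > &ctx,
                              const std::vector< T_Scalar > &localValues, Vec *summed)
{
  PetscFunctionBeginUser;
  Vec extended;
  PetscCall(ctx.createExtendedVector(localValues, &extended)); // zero-copy wrapper around localValues
  PetscCall(ctx.sumDistributedRHS(extended, summed));          // scatter-add into the owned layout
  PetscCall(VecDestroy(&extended));                            // the summed vector is standalone
  PetscFunctionReturn(PETSC_SUCCESS);
}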
@@ -520,7 +520,7 @@ template< class T_Scalar >
 #endif
 template< class T_Scalar >
-void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner)
+void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner, bool sumRHS)
 {
   // Unpack distributed Context
   const std::vector< unsigned long long > &localIdOfOwned = distributedContext.localIDofOwned();
@@ -591,7 +591,11 @@ template< class T_Scalar >
   Vec sol;
   Vec b;
+  if (!sumRHS) {
     _b.at(0).getPetscDistributed(distributedContext, &b);
+  } else {
+    _b.at(0).getPetscDistributedSummedRHS(distributedContext, &b);
+  }
   VecScale(b, -1.0);
   // Create ghosted vector
@@ -88,7 +88,7 @@ public:
   void solve(std::vector< T_Scalar > &values, const bool reusePreconditioner = false, unsigned rhsIdx = 0);
   void solveAll(std::vector< std::vector< T_Scalar > > &values, const bool reusePreconditioner = false);
-  void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner);
+  void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner, bool sumRHS = false);
   void solveAllDistributed(std::vector< std::vector< T_Scalar > > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner = false);
   void eigensolve(std::vector< scalar::ComplexPrecision< T_Scalar > > &eigenvalues, std::vector< std::vector< scalar::ComplexPrecision< T_Scalar > > > &eigenvectors, const bool computeEigenvectors, const unsigned int numberOfEigenvalues, const scalar::ComplexPrecision< T_Scalar > target);
   PetscErrorCode getPreconditionerORAS(const DistributedContext< T_Scalar > &distributedContext, PC *out) const;
@@ -311,6 +311,9 @@ namespace gmshfem::system
   PetscErrorCode VectorFactory< T_Scalar >::getPetscDistributedSummedRHS(const DistributedContext< T_Scalar > &distributedContext, Vec *out) const
   {
     PetscFunctionBeginUser;
+    if (!_valuesDC.empty() || !_valuesLC.empty()) {
+      gmshfem::msg::warning << "Sum of RHS with Dirichlet or Linked conditions is probably wrong." << gmshfem::msg::endl;
+    }
     Vec VecExtended;
     auto code = distributedContext.createExtendedVector(_vec, &VecExtended);
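getPetscDistributedSummedRHS() builds the extended Vec and then relies on DistributedContext::sumDistributedRHS() to accumulate the interface entries that several subdomains assembled; the ADD_VALUES / SCATTER_REVERSE scatter visible in the first gmshfem::system hunk is the mechanism behind that accumulation. A compact sketch of this scatter idiom, with illustrative argument names (the real scatter object and layouts come from the DistributedContext):

#include <petscvec.h>

// Accumulate duplicated interface entries of an extended (ghost-including) vector into
// the owned layout: ADD_VALUES sums instead of overwriting, and SCATTER_REVERSE maps
// from the extended layout back to the distributed owned layout.
PetscErrorCode accumulateOverlap(VecScatter scatter, Vec extendedLocal, Vec ownedGlobal)
{
  PetscFunctionBeginUser;
  PetscCall(VecScatterBegin(scatter, extendedLocal, ownedGlobal, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, extendedLocal, ownedGlobal, ADD_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}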