From 4b2dbbf794160b350d62426474c7f30f114c4cb7 Mon Sep 17 00:00:00 2001
From: Boris Martin <boris.martin@uliege.be>
Date: Wed, 21 May 2025 15:30:56 +0200
Subject: [PATCH] Add option to sum the distributed RHS

Add a _sumRhsInDistributed flag to Formulation, exposed through
setSumDistributedRHS() and forwarded to Solver::solveDistributed() as a
new sumRHS parameter. When enabled, the solver assembles the right-hand
side with getPetscDistributedSummedRHS() instead of
getPetscDistributed(), built on the new
DistributedContext::createExtendedVector() helper, which wraps the
local dof values in a PETSc Vec without copying. A warning is emitted
when Dirichlet or Linked conditions are present, since the summed RHS
is likely incorrect in that case. Also remove a leftover debug
VecView() call.

---
 src/problem/Formulation.cpp       |  2 +-
 src/problem/Formulation.h         |  5 +++++
 src/system/DistributedContext.cpp | 18 +++++++++++++++++-
 src/system/DistributedContext.h   |  2 ++
 src/system/Solver.cpp             |  8 ++++++--
 src/system/Solver.h               |  2 +-
 src/system/VectorFactory.cpp      |  3 +++
 7 files changed, 35 insertions(+), 5 deletions(-)

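A minimal usage sketch (hedged: "solver", "formulation", "values",
"solveType" and "distributedContext" are the usual objects from an
existing distributed solve; only the sumRHS argument and
setSumDistributedRHS() are introduced by this patch):

    // At the Solver level, request the summed RHS explicitly:
    solver.solveDistributed(values, solveType, distributedContext,
                            /*reusePreconditioner=*/false,
                            /*sumRHS=*/true);

    // Or at the Formulation level, set the flag before its distributed
    // solve; it is then forwarded to Solver::solveDistributed():
    formulation.setSumDistributedRHS(true);
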
diff --git a/src/problem/Formulation.cpp b/src/problem/Formulation.cpp
index 12b67834..6c599b3a 100644
--- a/src/problem/Formulation.cpp
+++ b/src/problem/Formulation.cpp
@@ -1745,7 +1745,7 @@ namespace gmshfem::problem
         throw common::Exception("Missing distributed context");
       if (!_solver)
         throw common::Exception("The solver is not set");
-      _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner);
+      _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner, _sumRhsInDistributed);
     }
     catch(const std::exception &exc) {
       msg::error << "Unable to solve the system" << msg::endl;
diff --git a/src/problem/Formulation.h b/src/problem/Formulation.h
index 1c36da46..ee377eaa 100644
--- a/src/problem/Formulation.h
+++ b/src/problem/Formulation.h
@@ -88,6 +88,8 @@ namespace gmshfem::problem
     unsigned _currentRHS = 0;
     unsigned _numRHS = 1;
     std::vector<std::vector<T_Scalar>> _solutions;
+
+    bool _sumRhsInDistributed = false;
   private:
     std::unique_ptr<system::DistributedContext<T_Scalar>> _distributedContext;
 
@@ -241,6 +243,9 @@ namespace gmshfem::problem
                                 algebra::Vector< T_Scalar > &rawSolution, unsigned rhsIdx = 0); // same as above, but exposes raw solution vector
     virtual common::Timer solveForPETSc(Vec x, Vec y, const bool reusePreconditioner = false) const; // solve A x = b without copying data
     virtual common::Timer solveForPETSc(Mat x, Mat y, const bool reusePreconditioner = false) const; // solve A X = B without copying data
+    void setSumDistributedRHS(bool sum) {
+      _sumRhsInDistributed = sum;
+    }
     Mat getAllRHSasPETSc() const;
     Mat getAllDistributedRHSasPETSc() const;
     Vec getDistributedRHSasPETSc() const;
diff --git a/src/system/DistributedContext.cpp b/src/system/DistributedContext.cpp
index 988a8f3a..3eeebc0f 100644
--- a/src/system/DistributedContext.cpp
+++ b/src/system/DistributedContext.cpp
@@ -65,7 +65,6 @@ namespace gmshfem::system
       PetscFunctionBeginUser;
       Vec res;
       PetscCall(VecDuplicate(_distributedVector, &res));
-      VecView(localRHSdistributed, PETSC_VIEWER_STDOUT_WORLD);
       PetscCall(VecScatterBegin(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
       PetscCall(VecScatterEnd(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
       *out = res;
@@ -135,6 +134,23 @@ namespace gmshfem::system
 
     PetscFunctionReturn(PETSC_SUCCESS);
   }
+  template< class T_Scalar >
+  PetscErrorCode DistributedContext< T_Scalar >::createExtendedVector(const std::vector< T_Scalar > &values, Vec *out) const
+  {
+    PetscFunctionBeginUser;
+    if (values.size() != localToGlobal().size())
+      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "values must have one entry per local dof");
+    Vec newVec;
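+    // The Vec created below aliases values.data() without copying, so the
+    // caller must keep "values" alive for the lifetime of the vector.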
+    if constexpr (std::is_same<T_Scalar, PetscScalar>::value)
+      PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, localToGlobal().size(), PETSC_DETERMINE, values.data(), &newVec));
+    else
+      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "createExtendedVector is only implemented for T_Scalar == PetscScalar");
+    *out = newVec;
+
+    PetscFunctionReturn(PETSC_SUCCESS);
+  }
 
   template< class T_Scalar >
   PetscErrorCode DistributedContext< T_Scalar >::sumDistributedRHS(Vec localRHSdistributed, Vec *out) const
diff --git a/src/system/DistributedContext.h b/src/system/DistributedContext.h
index eff68c99..a61a7468 100644
--- a/src/system/DistributedContext.h
+++ b/src/system/DistributedContext.h
@@ -49,6 +49,8 @@ class DistributedContext {
     void readScatteredData(std::vector<T_Scalar>& values, Vec sol) const;
     #ifdef HAVE_PETSC
     PetscErrorCode getSubdomainsIS(IS* is) const;
+    // Wrap the local dof values in a lightweight PETSc Vec, without copying data
+    PetscErrorCode createExtendedVector(const std::vector< T_Scalar >& values, Vec* out) const;
     // Map an extended vector into a global vector with sums. Output becomes a standalone vector
     PetscErrorCode sumDistributedRHS(Vec localRHSdistributed, Vec* out) const;
     #endif
diff --git a/src/system/Solver.cpp b/src/system/Solver.cpp
index c9daeda7..78561015 100644
--- a/src/system/Solver.cpp
+++ b/src/system/Solver.cpp
@@ -520,7 +520,7 @@ template< class T_Scalar >
 #endif
 
   template< class T_Scalar >
-  void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner)
+  void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner, const bool sumRHS)
   {
     // Unpack distributed Context
     const std::vector< unsigned long long > &localIdOfOwned = distributedContext.localIDofOwned();
@@ -591,7 +591,11 @@ template< class T_Scalar >
 
       Vec sol;
       Vec b;
-       _b.at(0).getPetscDistributed(distributedContext, &b);
+      if (sumRHS) {
+        _b.at(0).getPetscDistributedSummedRHS(distributedContext, &b);
+      } else {
+        _b.at(0).getPetscDistributed(distributedContext, &b);
+      }
       VecScale(b, -1.0);
 
       // Create ghosted vector
diff --git a/src/system/Solver.h b/src/system/Solver.h
index c9076874..635904fe 100644
--- a/src/system/Solver.h
+++ b/src/system/Solver.h
@@ -88,7 +88,7 @@ public:
 
     void solve(std::vector< T_Scalar > &values, const bool reusePreconditioner = false, unsigned rhsIdx = 0);
     void solveAll(std::vector< std::vector< T_Scalar > > &values, const bool reusePreconditioner = false);
-    void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner);
+    void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner, const bool sumRHS = false);
     void solveAllDistributed(std::vector< std::vector< T_Scalar > > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner = false);
     void eigensolve(std::vector< scalar::ComplexPrecision< T_Scalar > > &eigenvalues, std::vector< std::vector< scalar::ComplexPrecision< T_Scalar > > > &eigenvectors, const bool computeEigenvectors, const unsigned int numberOfEigenvalues, const scalar::ComplexPrecision< T_Scalar > target);
     PetscErrorCode getPreconditionerORAS(const DistributedContext< T_Scalar > &distributedContext, PC *out) const;
diff --git a/src/system/VectorFactory.cpp b/src/system/VectorFactory.cpp
index 89aca4c0..01853906 100644
--- a/src/system/VectorFactory.cpp
+++ b/src/system/VectorFactory.cpp
@@ -311,6 +311,9 @@ namespace gmshfem::system
   PetscErrorCode VectorFactory< T_Scalar >::getPetscDistributedSummedRHS(const DistributedContext< T_Scalar > &distributedContext, Vec *out) const
   {
     PetscFunctionBeginUser;
+    if (!_valuesDC.empty() || !_valuesLC.empty()) {
+      gmshfem::msg::warning << "Summing the distributed RHS with Dirichlet or Linked conditions is likely incorrect." << gmshfem::msg::endl;
+    }
     
     Vec VecExtended;
     auto code = distributedContext.createExtendedVector(_vec, &VecExtended);
-- 
GitLab