diff --git a/src/problem/Formulation.cpp b/src/problem/Formulation.cpp
index 12b67834d9553423ffa94317268631881162cb48..6c599b3ab49accea55e31050f11c12cc95750a52 100644
--- a/src/problem/Formulation.cpp
+++ b/src/problem/Formulation.cpp
@@ -1745,7 +1745,7 @@ namespace gmshfem::problem
         throw common::Exception("Missing distributed context");
       if (!_solver)
         throw common::Exception("The solver is not set");
-      _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner);
+      _solver->solveDistributed(values, type, *_distributedContext, reusePreconditioner, this->sumRhsInDistributed);
     }
     catch(const std::exception &exc) {
       msg::error << "Unable to solve the system" << msg::endl;
diff --git a/src/problem/Formulation.h b/src/problem/Formulation.h
index 1c36da46937337e7a25ef1beed75183b0eaf0c88..ee377eaa33a080960aeea3e376f73a225c065013 100644
--- a/src/problem/Formulation.h
+++ b/src/problem/Formulation.h
@@ -88,6 +88,8 @@ namespace gmshfem::problem
     unsigned _currentRHS = 0;
     unsigned _numRHS = 1;
     std::vector<std::vector<T_Scalar>> _solutions;
+
+    bool sumRhsInDistributed = false; // when true, solveDistributed sums shared-dof RHS contributions across ranks (see setSumDistributedRHS)
   private:
     std::unique_ptr<system::DistributedContext<T_Scalar>> _distributedContext;
 
@@ -241,6 +243,9 @@ namespace gmshfem::problem
                                 algebra::Vector< T_Scalar > &rawSolution, unsigned rhsIdx = 0); // same as above, but exposes raw solution vector
     virtual common::Timer solveForPETSc(Vec x, Vec y, const bool reusePreconditioner = false) const; // solve A x = b without copying data
     virtual common::Timer solveForPETSc(Mat x, Mat y, const bool reusePreconditioner = false) const; // solve A X = B without copying data
+    void setSumDistributedRHS(bool sum) {
+      sumRhsInDistributed = sum;
+    }
     Mat getAllRHSasPETSc() const;
     Mat getAllDistributedRHSasPETSc() const;
     Vec getDistributedRHSasPETSc() const;
diff --git a/src/system/DistributedContext.cpp b/src/system/DistributedContext.cpp
index 988a8f3aad29cede3aa51bb4b83d8ebe0103a6eb..3eeebc0ff541f4cb44de829a0f1c170d1b427114 100644
--- a/src/system/DistributedContext.cpp
+++ b/src/system/DistributedContext.cpp
@@ -65,7 +65,6 @@ namespace gmshfem::system
       PetscFunctionBeginUser;
       Vec res;
       PetscCall(VecDuplicate(_distributedVector, &res));
-      VecView(localRHSdistributed, PETSC_VIEWER_STDOUT_WORLD);
       PetscCall(VecScatterBegin(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
       PetscCall(VecScatterEnd(_scatter, localRHSdistributed, res, ADD_VALUES, SCATTER_REVERSE));
       *out = res;
@@ -135,6 +134,21 @@ namespace gmshfem::system
 
     PetscFunctionReturn(PETSC_SUCCESS);
   }
+  template< class T_Scalar >
+  PetscErrorCode DistributedContext< T_Scalar >::createExtendedVector(const std::vector< T_Scalar > &values, Vec *out) const
+  {
+    PetscFunctionBeginUser;
+    if (values.size() != localToGlobal().size())
+      PetscFunctionReturn(PETSC_ERR_ARG_SIZ);
+    Vec newVec;
+    if constexpr (std::is_same<T_Scalar, PetscScalar>::value)
+      PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, localToGlobal().size(), PETSC_DETERMINE, values.data(), &newVec));
+    else
+      PetscFunctionReturn(PETSC_ERR_SUP);
+    *out = newVec;
+
+    PetscFunctionReturn(PETSC_SUCCESS);
+  }
 
   template< class T_Scalar >
   PetscErrorCode DistributedContext< T_Scalar >::sumDistributedRHS(Vec localRHSdistributed, Vec *out) const
diff --git a/src/system/DistributedContext.h b/src/system/DistributedContext.h
index eff68c99f6c975f4ee08311a4da25b045e8768ab..a61a7468e0d4471f7bf9d1a43ed832c8636e50b9 100644
--- a/src/system/DistributedContext.h
+++ b/src/system/DistributedContext.h
@@ -49,6 +49,8 @@ class DistributedContext {
     void readScatteredData(std::vector<T_Scalar>& values, Vec sol) const;
     #ifdef HAVE_PETSC
     PetscErrorCode getSubdomainsIS(IS* is) const;
+    // Create a lightweight vector viewing the local dofs without copying data; 'values' must outlive the returned Vec (VecCreateMPIWithArray does not copy)
+    PetscErrorCode createExtendedVector(const std::vector< T_Scalar >& values, Vec* out) const;
     // Map an extended vector into a global vector with sums. Output becomes a standalone vector
     PetscErrorCode sumDistributedRHS(Vec localRHSdistributed, Vec* out) const;
     #endif
diff --git a/src/system/Solver.cpp b/src/system/Solver.cpp
index c9daeda79623b9c4c99f0cb22be1a0976b014b1a..7856101589a84823563ba8c980949c7726c22ff4 100644
--- a/src/system/Solver.cpp
+++ b/src/system/Solver.cpp
@@ -520,7 +520,7 @@ template< class T_Scalar >
 #endif
 
   template< class T_Scalar >
-  void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner)
+  void Solver< T_Scalar >::solveDistributed(std::vector< T_Scalar > &values, system::DISTRIBUTED_SOLVE_TYPE solveType, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner, bool sumRHS)
   {
     // Unpack distributed Context
     const std::vector< unsigned long long > &localIdOfOwned = distributedContext.localIDofOwned();
@@ -591,7 +591,11 @@ template< class T_Scalar >
 
       Vec sol;
       Vec b;
-       _b.at(0).getPetscDistributed(distributedContext, &b);
+      if (!sumRHS) {
+        _b.at(0).getPetscDistributed(distributedContext, &b);
+      } else {
+        _b.at(0).getPetscDistributedSummedRHS(distributedContext, &b);
+      }
       VecScale(b, -1.0);
 
       // Create ghosted vector
diff --git a/src/system/Solver.h b/src/system/Solver.h
index c9076874aa4a40e22403d9f5ffd86829876648d8..635904fe7b8d36f9824740b4bd5ad29eb6718542 100644
--- a/src/system/Solver.h
+++ b/src/system/Solver.h
@@ -88,7 +88,7 @@ public:
 
     void solve(std::vector< T_Scalar > &values, const bool reusePreconditioner = false, unsigned rhsIdx = 0);
     void solveAll(std::vector< std::vector< T_Scalar > > &values, const bool reusePreconditioner = false);
-    void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner);
+    void solveDistributed(std::vector< T_Scalar > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext< T_Scalar > &distributedContext, const bool reusePreconditioner, bool sumRHS = false);
     void solveAllDistributed(std::vector< std::vector< T_Scalar > > &values, DISTRIBUTED_SOLVE_TYPE, const DistributedContext<T_Scalar>& distributedContext, const bool reusePreconditioner = false);
     void eigensolve(std::vector< scalar::ComplexPrecision< T_Scalar > > &eigenvalues, std::vector< std::vector< scalar::ComplexPrecision< T_Scalar > > > &eigenvectors, const bool computeEigenvectors, const unsigned int numberOfEigenvalues, const scalar::ComplexPrecision< T_Scalar > target);
     PetscErrorCode getPreconditionerORAS(const DistributedContext< T_Scalar > &distributedContext, PC *out) const;
diff --git a/src/system/VectorFactory.cpp b/src/system/VectorFactory.cpp
index 89aca4c0fbc44ecde82a339718dfbc13df47e010..0185390670f8e50f0276453af8069472b37eec10 100644
--- a/src/system/VectorFactory.cpp
+++ b/src/system/VectorFactory.cpp
@@ -311,6 +311,9 @@ namespace gmshfem::system
   PetscErrorCode VectorFactory< T_Scalar >::getPetscDistributedSummedRHS(const DistributedContext< T_Scalar > &distributedContext, Vec *out) const
   {
     PetscFunctionBeginUser;
+    if (!_valuesDC.empty() || !_valuesLC.empty()) {
+      gmshfem::msg::warning << "Sum of RHS with Dirichlet or Linked conditions is probably wrong." << gmshfem::msg::endl;
+    }
     
     Vec VecExtended;
     auto code = distributedContext.createExtendedVector(_vec, &VecExtended);