From f62308edc7e23937eec47947e03cd91adc9a5078 Mon Sep 17 00:00:00 2001
From: Van Dung Nguyen <vandung.nguyen@ulg.ac.be>
Date: Wed, 12 Dec 2012 16:51:20 +0000
Subject: [PATCH] fix row-ownership and size computation when using
 PETSC_COMM_SELF with Msg::GetCommSize() > 1

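When a linearSystemPETSc is built on a single-rank communicator (e.g.
PETSC_COMM_SELF) while Gmsh itself runs on several MPI processes, the
rank-chained MPI_Recv/MPI_Send exchange of row offsets and the
MPI_Allreduce on MPI_COMM_WORLD should be skipped. Query the size of
the communicator attached to the system and only perform that exchange
when it has more than one rank; otherwise set _localRowStart,
_localRowEnd and _globalSize directly from nbRows.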
---
 Solver/linearSystemPETSc.hpp | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/Solver/linearSystemPETSc.hpp b/Solver/linearSystemPETSc.hpp
index f66ea1f849..48922da960 100644
--- a/Solver/linearSystemPETSc.hpp
+++ b/Solver/linearSystemPETSc.hpp
@@ -117,16 +117,25 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
   //since PETSc 3.3 GetOwnershipRange and MatGetSize cannot be called before MatXXXSetPreallocation
   _localSize = nbRows;
   #ifdef HAVE_MPI
-  _localRowStart = 0;
-  if (Msg::GetCommRank() != 0) {
-    MPI_Status status;
-    MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+  PetscMPIInt commSize;
+  MPI_Comm_size(_comm,&commSize);
+  if (commSize>1){
+    _localRowStart = 0;
+    if (Msg::GetCommRank() != 0) {
+      MPI_Status status;
+      MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+    }
+    _localRowEnd = _localRowStart + nbRows;
+    if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+      MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+    }
+    MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
   }
-  _localRowEnd = _localRowStart + nbRows;
-  if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
-    MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+  else{
+    _localRowStart = 0;
+    _localRowEnd = nbRows;
+    _globalSize = _localSize;
   }
-  MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
   #else
   _localRowStart = 0;
   _localRowEnd = nbRows;
-- 
GitLab
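
Note: the following is an illustrative sketch, not part of the commit above.
It shows the row-ownership computation that the guarded hunk performs,
assuming MPI is already initialized; computeOwnership(), localStart, localEnd
and globalSize are hypothetical names, and the MPI_Exscan prefix sum is an
equivalent alternative to the MPI_Recv/MPI_Send chain used in the actual code.

#include <mpi.h>

// Computes the contiguous block of rows [localStart, localEnd) owned by this
// rank on 'comm', and the total number of rows 'globalSize' over 'comm'.
void computeOwnership(MPI_Comm comm, int nbRows,
                      int &localStart, int &localEnd, int &globalSize)
{
  int commSize, rank;
  MPI_Comm_size(comm, &commSize);
  MPI_Comm_rank(comm, &rank);
  if (commSize > 1) {
    // Exclusive prefix sum of the local sizes gives the first owned row;
    // the result is undefined on rank 0, so reset it there.
    MPI_Exscan(&nbRows, &localStart, 1, MPI_INT, MPI_SUM, comm);
    if (rank == 0) localStart = 0;
    localEnd = localStart + nbRows;
    // Total number of rows across all ranks of 'comm'.
    MPI_Allreduce(&nbRows, &globalSize, 1, MPI_INT, MPI_SUM, comm);
  }
  else {
    // Single-rank communicator (e.g. PETSC_COMM_SELF): no communication,
    // the local block is the whole system.  This is the case the patch adds.
    localStart = 0;
    localEnd = nbRows;
    globalSize = nbRows;
  }
}

With a single-rank communicator such as PETSC_COMM_SELF the else branch
applies and no collective call is issued, which is the situation the patch
handles explicitly.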