From 043da9e1a0138facaa47430e79be0ec69db972f5 Mon Sep 17 00:00:00 2001
From: Jonathan Lambrechts <jonathan.lambrechts@uclouvain.be>
Date: Mon, 10 Dec 2012 10:39:26 +0000
Subject: [PATCH] update to PETSc 3.3 (MatGetOwnershipRange/MatGetSize cannot
 be called before MatXXXSetPreallocation)

---
 Solver/linearSystemPETSc.cpp | 26 ++++++++++++++++++++------
 Solver/linearSystemPETSc.hpp | 22 ++++++++++++++++++----
 2 files changed, 38 insertions(+), 10 deletions(-)

diff --git a/Solver/linearSystemPETSc.cpp b/Solver/linearSystemPETSc.cpp
index dad32f0721..f0904a3fed 100644
--- a/Solver/linearSystemPETSc.cpp
+++ b/Solver/linearSystemPETSc.cpp
@@ -118,6 +118,7 @@ void linearSystemPETScBlockDouble::getFromSolution(int row, fullMatrix<double> &
   }
 }
 
+
 void linearSystemPETScBlockDouble::allocate(int nbRows)
 {
   MPI_Comm comm = _sequential ? PETSC_COMM_SELF: PETSC_COMM_WORLD;
@@ -138,12 +139,25 @@ void linearSystemPETScBlockDouble::allocate(int nbRows)
   if (_parameters.count("petscPrefix"))
     MatAppendOptionsPrefix(_a, _parameters["petscPrefix"].c_str());
   MatSetFromOptions(_a);
-  MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd);
-  MatGetSize(_a, &_globalSize, &_localSize);
-  _globalSize /= _blockSize;
-  _localSize /= _blockSize;
-  _localRowStart /= _blockSize;
-  _localRowEnd /= _blockSize;
+  // since PETSc 3.3, MatGetOwnershipRange() and MatGetSize() cannot be called before MatXXXSetPreallocation()
+  _localSize = nbRows;
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+  #ifdef HAVE_MPI
+  if (!_sequential) {
+    _localRowStart = 0;
+    if (Msg::GetCommRank() != 0) {
+      MPI_Status status;
+      MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+    }
+    _localRowEnd = _localRowStart + nbRows;
+    if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+      MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+    }
+    MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+  }
+  #endif
   // override the default options with the ones from the option
   // database (if any)
   VecCreate(comm, &_x);
diff --git a/Solver/linearSystemPETSc.hpp b/Solver/linearSystemPETSc.hpp
index 4d26303719..f66ea1f849 100644
--- a/Solver/linearSystemPETSc.hpp
+++ b/Solver/linearSystemPETSc.hpp
@@ -114,10 +114,24 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
   if (this->_parameters.count("petscPrefix"))
     _try(MatAppendOptionsPrefix(_a, this->_parameters["petscPrefix"].c_str()));
   _try(MatSetFromOptions(_a));
-  _try(MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd));
-  int nbColumns;
-  _localSize = _localRowEnd - _localRowStart;
-  _try(MatGetSize(_a, &_globalSize, &nbColumns));
+  // since PETSc 3.3, MatGetOwnershipRange() and MatGetSize() cannot be called before MatXXXSetPreallocation()
+  _localSize = nbRows;
+  #ifdef HAVE_MPI
+  _localRowStart = 0;
+  if (Msg::GetCommRank() != 0) {
+    MPI_Status status;
+    MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+  }
+  _localRowEnd = _localRowStart + nbRows;
+  if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+    MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+  }
+  MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+  #else
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+  #endif
   // preallocation option must be set after other options
   _try(VecCreate(_comm, &_x));
   _try(VecSetSizes(_x, nbRows, PETSC_DETERMINE));
-- 
GitLab
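
Both hunks replace the MatGetOwnershipRange()/MatGetSize() queries with a hand-rolled
computation of the ownership range: each rank receives its starting row from the
previous rank, forwards its ending row to the next, and an MPI_Allreduce sums the
per-rank sizes into the global size. The listing below is a minimal standalone
sketch of that pattern only; the variable names (localRows, rowStart, ...) are
illustrative and not taken from the patch.

// Sketch of the prefix-sum style ownership-range computation used in the patch.
#include <mpi.h>
#include <cstdio>

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);
  int rank, size;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  int localRows = 10 + rank; // per-rank row count (arbitrary example value)
  int rowStart = 0, rowEnd, globalRows;

  // receive the running row offset from the previous rank (rank 0 starts at 0)
  if (rank != 0) {
    MPI_Status status;
    MPI_Recv(&rowStart, 1, MPI_INT, rank - 1, 1, MPI_COMM_WORLD, &status);
  }
  rowEnd = rowStart + localRows;
  // forward the updated offset to the next rank
  if (rank != size - 1) {
    MPI_Send(&rowEnd, 1, MPI_INT, rank + 1, 1, MPI_COMM_WORLD);
  }
  // sum the per-rank sizes to obtain the global number of rows
  MPI_Allreduce(&localRows, &globalRows, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);

  printf("rank %d owns rows [%d, %d) of %d\n", rank, rowStart, rowEnd, globalRows);
  MPI_Finalize();
  return 0;
}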