From 1ff8f483d45cf8f9c170e052e5f1c9dffdedc48f Mon Sep 17 00:00:00 2001
From: Jonathan Lambrechts <jonathan.lambrechts@uclouvain.be>
Date: Wed, 24 Jul 2013 12:11:11 +0000
Subject: [PATCH] petsc: MatXAIJSetPreallocation is not available in petsc < 3.3

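MatXAIJSetPreallocation only appeared in petsc 3.3, so dispatch to the
type-specific preallocation routines by hand: MatSeqAIJSetPreallocation
or MatSeqBAIJSetPreallocation on a single process, and the MatMPIAIJ /
MatMPIBAIJ variants otherwise. The HAVE_MPI guards around MPI_Comm_size
can be dropped: petsc always ships an mpi.h (its sequential MPIUNI stub
when built without MPI), so the call compiles everywhere and reports a
communicator size of 1 in serial builds.

For reference, a sketch of the alternative not taken here: keep the
generic call behind the version macros from petscversion.h. The guard
below is illustrative only and is not part of this patch:

    #include <petscversion.h>
    #if (PETSC_VERSION_MAJOR > 3) || \
        (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 3)
      /* petsc >= 3.3: the generic routine exists */
      _try(MatXAIJSetPreallocation(_a, blockSize, &nByRowDiag[0],
                                   &nByRowOffDiag[0], NULL, NULL));
    #else
      /* petsc < 3.3: explicit dispatch, as implemented below */
    #endif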
---
 Solver/linearSystemPETSc.hpp | 27 ++++++++++++++++++---------
 1 file changed, 18 insertions(+), 9 deletions(-)

diff --git a/Solver/linearSystemPETSc.hpp b/Solver/linearSystemPETSc.hpp
index 5f7098f2a7..9fe2ae310f 100644
--- a/Solver/linearSystemPETSc.hpp
+++ b/Solver/linearSystemPETSc.hpp
@@ -111,29 +111,38 @@ void linearSystemPETSc<scalar>::preAllocateEntries()
     }
     _sparsity.clear();
   }
-  _try(MatXAIJSetPreallocation(_a, blockSize, &nByRowDiag[0], &nByRowOffDiag[0], NULL, NULL));
+  // MatXAIJSetPreallocation is not available in petsc < 3.3; use the type-specific routines
+  int commSize = 1;
+  MPI_Comm_size(_comm, &commSize);
+  if (commSize == 1){
+    if (blockSize == 1)
+      _try(MatSeqAIJSetPreallocation(_a, 0, &nByRowDiag[0]));
+    else
+      _try(MatSeqBAIJSetPreallocation(_a, blockSize, 0, &nByRowDiag[0]));
+  }
+  else {
+    if (blockSize == 1)
+      _try(MatMPIAIJSetPreallocation(_a, 0, &nByRowDiag[0], 0, &nByRowOffDiag[0]));
+    else
+      _try(MatMPIBAIJSetPreallocation(_a, blockSize, 0, &nByRowDiag[0], 0, &nByRowOffDiag[0]));
+  }
   _entriesPreAllocated = true;
 }
 
 template <class scalar>
 void linearSystemPETSc<scalar>::allocate(int nbRows)
 {
-  #ifdef HAVE_MPI
-  PetscMPIInt commSize;
-  MPI_Comm_size(_comm,&commSize);
-  #endif
+  int commSize;
+  MPI_Comm_size(_comm, &commSize);
   int blockSize = _getBlockSizeFromParameters();
   clear();
   _try(MatCreate(_comm, &_a));
   _try(MatSetSizes(_a, blockSize * nbRows, blockSize * nbRows, PETSC_DETERMINE, PETSC_DETERMINE));
   if (blockSize > 1) {
-    #ifdef HAVE_MPI
     if (commSize > 1) {
       MatSetType(_a, MATMPIBAIJ);
     }
-    else
-    #endif
-    {
+    else {
       MatSetType(_a, MATSEQBAIJ);
     }
   }
-- 
GitLab