diff --git a/Solver/linearSystemPETSc.cpp b/Solver/linearSystemPETSc.cpp
index dad32f072100b375577f7b3fab599b8bbc2167cc..f0904a3fedbdcd5cc617cd93d5d72e97f933d59b 100644
--- a/Solver/linearSystemPETSc.cpp
+++ b/Solver/linearSystemPETSc.cpp
@@ -118,6 +118,7 @@ void linearSystemPETScBlockDouble::getFromSolution(int row, fullMatrix<double> &
   }
 }
 
+
 void linearSystemPETScBlockDouble::allocate(int nbRows)
 {
   MPI_Comm comm = _sequential ? PETSC_COMM_SELF: PETSC_COMM_WORLD;
@@ -138,12 +139,25 @@ void linearSystemPETScBlockDouble::allocate(int nbRows)
   if (_parameters.count("petscPrefix"))
     MatAppendOptionsPrefix(_a, _parameters["petscPrefix"].c_str());
   MatSetFromOptions(_a);
-  MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd);
-  MatGetSize(_a, &_globalSize, &_localSize);
-  _globalSize /= _blockSize;
-  _localSize /= _blockSize;
-  _localRowStart /= _blockSize;
-  _localRowEnd /= _blockSize;
+  //since PETSc 3.3 GetOwnershipRange and MatGetSize() cannot be called before SetPreallocation
+  _localSize = nbRows;
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+  #ifdef HAVE_MPI
+  if (!_sequential) {
+    _localRowStart = 0;
+    if (Msg::GetCommRank() != 0) {
+      MPI_Status status;
+      MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+    }
+    _localRowEnd = _localRowStart + nbRows;
+    if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+      MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+    }
+    MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+  }
+  #endif
   // override the default options with the ones from the option
   // database (if any)
   VecCreate(comm, &_x);
diff --git a/Solver/linearSystemPETSc.hpp b/Solver/linearSystemPETSc.hpp
index 4d263037194d8dfa4de39d8692e5bf78bcd20fcd..f66ea1f849478b0c2e2d19dacbd59c0b9a8dabad 100644
--- a/Solver/linearSystemPETSc.hpp
+++ b/Solver/linearSystemPETSc.hpp
@@ -114,10 +114,24 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
   if (this->_parameters.count("petscPrefix"))
     _try(MatAppendOptionsPrefix(_a, this->_parameters["petscPrefix"].c_str()));
   _try(MatSetFromOptions(_a));
-  _try(MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd));
-  int nbColumns;
-  _localSize = _localRowEnd - _localRowStart;
-  _try(MatGetSize(_a, &_globalSize, &nbColumns));
+  //since PETSc 3.3 GetOwnershipRange and MatGetSize cannot be called before MatXXXSetPreallocation
+  _localSize = nbRows;
+  #ifdef HAVE_MPI
+  _localRowStart = 0;
+  if (Msg::GetCommRank() != 0) {
+    MPI_Status status;
+    MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+  }
+  _localRowEnd = _localRowStart + nbRows;
+  if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+    MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+  }
+  MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+  #else
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+  #endif
   // preallocation option must be set after other options
   _try(VecCreate(_comm, &_x));
   _try(VecSetSizes(_x, nbRows, PETSC_DETERMINE));
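
Note: both hunks replace the removed MatGetOwnershipRange/MatGetSize calls with the same hand-rolled ownership-range computation. Below is a minimal standalone sketch of that pattern, assuming an MPI build; the variable names (localSize, localRowStart, etc.) are illustrative only and not part of Gmsh or PETSc. Each rank receives its row offset from the previous rank, adds its own row count, and forwards the result to the next rank, which is a serialized exclusive prefix sum over the per-rank sizes; MPI_Allreduce then sums the local sizes into the global size.

#include <mpi.h>
#include <cstdio>

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);
  int rank, size;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  int localSize = 100;      // rows owned by this rank (any per-rank value works)
  int localRowStart = 0;

  // Chain from the patch: wait for the predecessor's row end ...
  if (rank != 0)
    MPI_Recv(&localRowStart, 1, MPI_INT, rank - 1, 1, MPI_COMM_WORLD,
             MPI_STATUS_IGNORE);
  int localRowEnd = localRowStart + localSize;
  // ... then hand our own row end to the successor.
  if (rank != size - 1)
    MPI_Send(&localRowEnd, 1, MPI_INT, rank + 1, 1, MPI_COMM_WORLD);

  // Total number of rows across all ranks.
  int globalSize = 0;
  MPI_Allreduce(&localSize, &globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);

  printf("rank %d owns rows [%d, %d) of %d\n",
         rank, localRowStart, localRowEnd, globalSize);
  MPI_Finalize();
  return 0;
}

The same offsets could be computed collectively with MPI_Exscan(&localSize, &localRowStart, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD), keeping localRowStart at 0 on rank 0 (where MPI_Exscan leaves the receive buffer undefined), which avoids serializing the ranks; the point-to-point chain above simply mirrors what the patch does.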