Commit 043da9e1 authored by Jonathan Lambrechts

update to PETSc 3.3 (MatGetOwnershipRange/MatGetSize cannot be called before MatXXXSetPreallocation)
parent 2795acfe
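
The fix replaces the MatGetOwnershipRange/MatGetSize queries with a manual computation of the row partition: each rank receives the previous rank's range end, adds its own row count, forwards the result to the next rank, and an MPI_Allreduce sums the local sizes into the global size. A minimal standalone sketch of that pattern, assuming plain MPI_COMM_WORLD and illustrative names rather than the Gmsh members used in the diff below:

#include <mpi.h>

// Derive the ownership range [rowStart, rowEnd) and the global size from the
// local row count, without querying the (not yet preallocated) Mat object.
static void computeOwnership(int nbRows, int &rowStart, int &rowEnd, int &globalSize)
{
  int rank, size;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);
  rowStart = 0;
  if (rank != 0) {
    // the previous rank tells us where its range ends, i.e. where ours starts
    MPI_Status status;
    MPI_Recv(&rowStart, 1, MPI_INT, rank - 1, 1, MPI_COMM_WORLD, &status);
  }
  rowEnd = rowStart + nbRows;
  if (rank != size - 1) {
    // pass our range end on to the next rank
    MPI_Send(&rowEnd, 1, MPI_INT, rank + 1, 1, MPI_COMM_WORLD);
  }
  // the global size is the sum of all local sizes
  MPI_Allreduce(&nbRows, &globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
}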
@@ -118,6 +118,7 @@ void linearSystemPETScBlockDouble::getFromSolution(int row, fullMatrix<double> &
   }
 }
 void linearSystemPETScBlockDouble::allocate(int nbRows)
 {
   MPI_Comm comm = _sequential ? PETSC_COMM_SELF: PETSC_COMM_WORLD;
@@ -138,12 +139,25 @@ void linearSystemPETScBlockDouble::allocate(nbRows)
   if (_parameters.count("petscPrefix"))
     MatAppendOptionsPrefix(_a, _parameters["petscPrefix"].c_str());
   MatSetFromOptions(_a);
-  MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd);
-  MatGetSize(_a, &_globalSize, &_localSize);
-  _globalSize /= _blockSize;
-  _localSize /= _blockSize;
-  _localRowStart /= _blockSize;
-  _localRowEnd /= _blockSize;
+  //since PETSc 3.3 GetOwnershipRange and MatGetSize() cannot be called before SetPreallocation
+  _localSize = nbRows;
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+#ifdef HAVE_MPI
+  if (!_sequential) {
+    _localRowStart = 0;
+    if (Msg::GetCommRank() != 0) {
+      MPI_Status status;
+      MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+    }
+    _localRowEnd = _localRowStart + nbRows;
+    if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+      MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+    }
+    MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+  }
+#endif
   // override the default options with the ones from the option
   // database (if any)
   VecCreate(comm, &_x);
...
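
The MPI_Recv/MPI_Send chain above is effectively an exclusive prefix sum of the local sizes across the ranks. Assuming an MPI-2 implementation is acceptable, the same three quantities could also be obtained collectively; this is only a sketch of an alternative, not what the commit does:

int rank;
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
int rowStart = 0, rowEnd, globalSize;
// exclusive prefix sum of the local sizes gives the first locally owned row
MPI_Exscan(&nbRows, &rowStart, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
if (rank == 0) rowStart = 0; // MPI_Exscan leaves the receive buffer undefined on rank 0
rowEnd = rowStart + nbRows;
MPI_Allreduce(&nbRows, &globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);

The chained point-to-point version serializes the ranks, but for a few integers exchanged once at setup time the difference is negligible.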
@@ -114,10 +114,24 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
   if (this->_parameters.count("petscPrefix"))
     _try(MatAppendOptionsPrefix(_a, this->_parameters["petscPrefix"].c_str()));
   _try(MatSetFromOptions(_a));
-  _try(MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd));
-  int nbColumns;
-  _localSize = _localRowEnd - _localRowStart;
-  _try(MatGetSize(_a, &_globalSize, &nbColumns));
+  //since PETSc 3.3 GetOwnershipRange and MatGetSize cannot be called before MatXXXSetPreallocation
+  _localSize = nbRows;
+#ifdef HAVE_MPI
+  _localRowStart = 0;
+  if (Msg::GetCommRank() != 0) {
+    MPI_Status status;
+    MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
+  }
+  _localRowEnd = _localRowStart + nbRows;
+  if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
+    MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
+  }
+  MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
+#else
+  _localRowStart = 0;
+  _localRowEnd = nbRows;
+  _globalSize = _localSize;
+#endif
   // preallocation option must be set after other options
   _try(VecCreate(_comm, &_x));
   _try(VecSetSizes(_x, nbRows, PETSC_DETERMINE));
...
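
Since the restriction only holds before preallocation, the manual bookkeeping in the non-block class could be cross-checked once MatXXXSetPreallocation has been called; a hypothetical sanity check (not part of the commit) might look like:

PetscInt rstart, rend, M, N;
_try(MatGetOwnershipRange(_a, &rstart, &rend)); // valid again after preallocation
_try(MatGetSize(_a, &M, &N));
// rstart/rend and M should agree with _localRowStart/_localRowEnd and _globalSize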