Skip to content
Snippets Groups Projects
Commit 043da9e1 authored by Jonathan Lambrechts's avatar Jonathan Lambrechts
Browse files

update to petsc 3.3 (MatGetOwnershipRange/MatGetSize cannot be called

before MatXXXSetPreallocation)
parent 2795acfe
No related branches found
No related tags found
No related merge requests found
...@@ -118,6 +118,7 @@ void linearSystemPETScBlockDouble::getFromSolution(int row, fullMatrix<double> & ...@@ -118,6 +118,7 @@ void linearSystemPETScBlockDouble::getFromSolution(int row, fullMatrix<double> &
} }
} }
void linearSystemPETScBlockDouble::allocate(int nbRows) void linearSystemPETScBlockDouble::allocate(int nbRows)
{ {
MPI_Comm comm = _sequential ? PETSC_COMM_SELF: PETSC_COMM_WORLD; MPI_Comm comm = _sequential ? PETSC_COMM_SELF: PETSC_COMM_WORLD;
...@@ -138,12 +139,25 @@ void linearSystemPETScBlockDouble::allocate(int nbRows) ...@@ -138,12 +139,25 @@ void linearSystemPETScBlockDouble::allocate(int nbRows)
if (_parameters.count("petscPrefix")) if (_parameters.count("petscPrefix"))
MatAppendOptionsPrefix(_a, _parameters["petscPrefix"].c_str()); MatAppendOptionsPrefix(_a, _parameters["petscPrefix"].c_str());
MatSetFromOptions(_a); MatSetFromOptions(_a);
MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd); //since PETSc 3.3 GetOwnershipRange and MatGetSize() cannot be called before SetPreallocation
MatGetSize(_a, &_globalSize, &_localSize); _localSize = nbRows;
_globalSize /= _blockSize; _localRowStart = 0;
_localSize /= _blockSize; _localRowEnd = nbRows;
_localRowStart /= _blockSize; _globalSize = _localSize;
_localRowEnd /= _blockSize; #ifdef HAVE_MPI
if (!_sequential) {
_localRowStart = 0;
if (Msg::GetCommRank() != 0) {
MPI_Status status;
MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
}
_localRowEnd = _localRowStart + nbRows;
if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
}
MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
}
#endif
// override the default options with the ones from the option // override the default options with the ones from the option
// database (if any) // database (if any)
VecCreate(comm, &_x); VecCreate(comm, &_x);
......
...@@ -114,10 +114,24 @@ void linearSystemPETSc<scalar>::allocate(int nbRows) ...@@ -114,10 +114,24 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
if (this->_parameters.count("petscPrefix")) if (this->_parameters.count("petscPrefix"))
_try(MatAppendOptionsPrefix(_a, this->_parameters["petscPrefix"].c_str())); _try(MatAppendOptionsPrefix(_a, this->_parameters["petscPrefix"].c_str()));
_try(MatSetFromOptions(_a)); _try(MatSetFromOptions(_a));
_try(MatGetOwnershipRange(_a, &_localRowStart, &_localRowEnd)); //since PETSc 3.3 GetOwnershipRange and MatGetSize cannot be called before MatXXXSetPreallocation
int nbColumns; _localSize = nbRows;
_localSize = _localRowEnd - _localRowStart; #ifdef HAVE_MPI
_try(MatGetSize(_a, &_globalSize, &nbColumns)); _localRowStart = 0;
if (Msg::GetCommRank() != 0) {
MPI_Status status;
MPI_Recv((void*)&_localRowStart, 1, MPI_INT, Msg::GetCommRank() - 1, 1, MPI_COMM_WORLD, &status);
}
_localRowEnd = _localRowStart + nbRows;
if (Msg::GetCommRank() != Msg::GetCommSize() - 1) {
MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
}
MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
#else
_localRowStart = 0;
_localRowEnd = nbRows;
_globalSize = _localSize;
#endif
// preallocation option must be set after other options // preallocation option must be set after other options
_try(VecCreate(_comm, &_x)); _try(VecCreate(_comm, &_x));
_try(VecSetSizes(_x, nbRows, PETSC_DETERMINE)); _try(VecSetSizes(_x, nbRows, PETSC_DETERMINE));
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment