Skip to content
Snippets Groups Projects
Commit f62308ed authored by Van Dung Nguyen's avatar Van Dung Nguyen
Browse files

Fix a problem when using PETSC_COMM_SELF with Msg::GetCommSize() > 1

parent 579dfe9a
No related branches found
No related tags found
No related merge requests found
@@ -117,6 +117,9 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
//since PETSc 3.3 GetOwnershipRange and MatGetSize cannot be called before MatXXXSetPreallocation
_localSize = nbRows;
#ifdef HAVE_MPI
PetscMPIInt commSize;
MPI_Comm_size(_comm,&commSize);
if (commSize>1){
_localRowStart = 0;
if (Msg::GetCommRank() != 0) {
MPI_Status status;
@@ -127,6 +130,12 @@ void linearSystemPETSc<scalar>::allocate(int nbRows)
MPI_Send((void*)&_localRowEnd, 1, MPI_INT, Msg::GetCommRank() + 1, 1, MPI_COMM_WORLD);
}
MPI_Allreduce((void*)&_localSize, (void*)&_globalSize, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
}
else{
_localRowStart = 0;
_localRowEnd = nbRows;
_globalSize = _localSize;
}
#else
_localRowStart = 0;
_localRowEnd = nbRows;
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment