Commit dfb95f52 authored by vinugholap@gmail.com

Merge remote-tracking branch 'origin/master' into vinayak

parents 462d7cdf f6b735f4
@@ -10,7 +10,7 @@
#SBATCH --time=0:20:0
module load openmpi/2.1.2/GCC-4.9.2-gpu
module load OpenMPI/3.1.1-GCC-7.3.0-2.30
SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylor
echo "subdir=" $SUBDIR
@@ -32,16 +32,18 @@ srun cp $SUBDIR/$mshfile $SCRATCH/ || exit $?
cd $SCRATCH # needed to work in the temporary scratch dir, otherwise the job runs in $HOME!
#ls -artl
export PETSC_DIR=$HOME/local/petsc-3.9.1
export PETSC_ARCH=linux-gnu-c-opt
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
export PATH=$HOME/local/bin:$PATH
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
mpirun python $SCRATCH/$pyfile >& $SCRATCH/output.txt
echo -e "\n"
#!/bin/sh
# script example for dragon2
#SBATCH --job-name taylorMPI
#SBATCH --mail-user=l.noels@ulg.ac.be
#SBATCH --mail-type=ALL
#SBATCH --output="out.txt"
#SBATCH --ntasks=4
#SBATCH --mem-per-cpu=800
#SBATCH --time=0:20:0
module load OpenMPI/3.1.1-GCC-7.3.0-2.30
SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylorMPI
echo "subdir=" $SUBDIR
SCRATCH=$GLOBALSCRATCH/${USER}_$SLURM_JOB_ID
echo "workdir=" $SCRATCH
pyfile=taylorCG.py
mshfile=taylor.msh
echo "node list of job" >> $SUBDIR/output.po$SLURM_JOB_ID
hostname >> $SUBDIR/output.po$SLURM_JOB_ID
cd $GLOBALSCRATCH
mkdir -p ${USER}_$SLURM_JOB_ID || exit $?
cp $SUBDIR/$pyfile $SCRATCH/ || exit $?
cp $SUBDIR/$mshfile $SCRATCH/ || exit $?
cd $SCRATCH # needed to work in the temporary scratch dir, otherwise the job runs in $HOME!
#ls -artl
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
export PATH=$HOME/local/bin:$PATH
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
mpirun -np 4 python $SCRATCH/$pyfile >& $SCRATCH/output.txt
echo -e "\n"
sleep 5
cp -f $SCRATCH/* $SUBDIR/ || exit $?
rm -rf $SCRATCH || exit $?
echo -e "\n"
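For reference, a script like the ones above is submitted to Slurm with sbatch; a minimal sketch of the usual submit-and-monitor commands (the script file name taylorMPI.sh is hypothetical):
sbatch taylorMPI.sh   # submit; prints "Submitted batch job <jobid>"
squeue -u $USER       # state of your pending/running jobs
sacct -j <jobid>      # exit status and accounting after completion (if accounting is enabled)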
#!/bin/sh
# script example for dragon2
#SBATCH --job-name taylorMPI
#SBATCH --mail-user=l.noels@ulg.ac.be
#SBATCH --mail-type=ALL
#SBATCH --output="out.txt"
#SBATCH --ntasks=1
#SBATCH --mem-per-cpu=800
#SBATCH --time=0:20:0
module load OpenMPI/3.1.1-GCC-7.3.0-2.30
SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylor
echo "subdir=" $SUBDIR
SCRATCH=$GLOBALSCRATCH/${USER}_$SLURM_JOB_ID
echo "workdir=" $SCRATCH
pyfile=taylorTetDGDyn.py
mshfile=taylor.msh
echo "node list of job" >> $SUBDIR/output.po$SLURM_JOB_ID
hostname >> $SUBDIR/output.po$SLURM_JOB_ID
cd $GLOBALSCRATCH
mkdir -p ${USER}_$SLURM_JOB_ID || exit $?
cp $SUBDIR/$pyfile $SCRATCH/ || exit $?
cp $SUBDIR/$mshfile $SCRATCH/ || exit $?
cd $SCRATCH # needed to work in the temporary scratch dir, otherwise the job runs in $HOME!
#ls -artl
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
export PATH=$HOME/local/bin:$PATH
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
mpirun python $SCRATCH/$pyfile >& $SCRATCH/output.txt
echo -e "\n"
sleep 5
srun cp -f $SCRATCH/* $SUBDIR/ || exit $?
srun rm -rf $SCRATCH || exit $?
echo -e "\n"
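A portability note on the scripts above: they declare #!/bin/sh but use the bash/csh-style ">&" redirection and "echo -e", which POSIX sh does not guarantee (they work where /bin/sh is bash). A minimal portable sketch of the same redirection:
mpirun python $SCRATCH/$pyfile > $SCRATCH/output.txt 2>&1   # stdout to the file, stderr duplicated onto it
printf '\n'                                                 # portable replacement for echo -e "\n"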
@@ -119,6 +119,8 @@ A) The packages needed to install gmsh are (gmsh has to be installed to use cm3app
4) close and reopen your terminal (to reload your .bashrc)
5) Go to your PETSc installation folder and type the following command (you have to use this configuration for the hmem server):
For 3.13 on dragon1, dragon2:
./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-fpic --with-shared-libraries=1 --with-clanguage=cxx --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
For 3.10, without superlu:
./configure -configModules=PETSc.Configure --optionsModule=config.compilerOptions --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-mpich=yes --with-shared-libraries=yes --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes
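Once configure finishes it prints the exact make commands to run next; for the paths used further in this document they take the following form (a sketch, adjust PETSC_DIR/PETSC_ARCH to your configure output):
make PETSC_DIR=$HOME/local/petsc-3.13.2 PETSC_ARCH=arch-linux-cxx-opt all     # build the libraries
make PETSC_DIR=$HOME/local/petsc-3.13.2 PETSC_ARCH=arch-linux-cxx-opt check   # optional: run the test suite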
@@ -151,13 +153,6 @@ A) The packages needed to install gmsh are (gmsh has to be installed to use cm3app
in case of the error "MPI wrappers do not work", use --with-mpi-compilers=0
for dragon:
version >=3.7:
./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-fpic --with-shared-libraries=1 --with-clanguage=cxx --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
version <3.7:
./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-shared-libraries=0 --with-clanguage=cxx --with-batch --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
on hydra (there is a conflict with metis if it is downloaded from petsc)
- petsc 3.8:
./configure --with-debugging=0 --with-scalapack-lib=$SCALAPACKDIR/lib$SCALAPACKLIB.so --download-mumps=yes --with-blas-lib=$BLASDIR/lib$BLASLIB.so --with-lapack-lib=$LAPACKDIR/lib$LAPACKLIB.so --with-mpi-dir=$MPI_HOME --with-pic --with-shared-libraries=1 --with-clanguage=cxx --with-batch --known-mpi-shared-libraries=1
@@ -185,41 +180,73 @@ Then wait until the job ends (the last line of the created file "petsc-install.ou
function load_module()
{
module purge
module load GCC/4.7.3
module load CMake/2.8.11.1-GCC-4.7.3
module load lapack/gcc/3.4.2
# hack: loading the lapack module exports BLASDIR/BLASLIB, so reuse them for LAPACK
export LAPACKDIR=$BLASDIR
export LAPACKLIB=$BLASLIB
# then load blas
module load blas/gcc/3.4.2
module load openmpi/1.6.1/gnu64-4.7.3
module load GCC/7.3.0-2.30
module load CMake/3.11.4-GCCcore-7.3.0
module load OpenMPI/3.1.1-GCC-7.3.0-2.30
module load Python/2.7.15-GCCcore-7.3.0-bare
export MPI_HOME=/usr/local/Software/.local/easybuild/software/OpenMPI/3.1.1-GCC-7.3.0-2.30/
export MPI_RUN=$MPI_HOME/bin/mpirun
echo "List of loaded modules:"
module list
}
New environment setup on dragon1:
load_module
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
export SLEPC_DIR=$HOME/local/slepc-3.13.3
export SLEPC_ARCH=linux-gnu-c-opt
export BLASDIR=$PETSC_DIR/$PETSC_ARCH/lib
export BLASLIB=fblas
export LAPACKDIR=$PETSC_DIR/$PETSC_ARCH/lib
export LAPACKLIB=flapack
export LD_LIBRARY_PATH=$PETSC_DIR/$PETSC_ARCH/lib:$LD_LIBRARY_PATH
export PATH=$HOME/local/swig/bin:$HOME/local/bin:$PATH
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
unset SSH_ASKPASS
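These exports only affect the current shell; the earlier step 4 (reloading your bashrc) assumes they are stored in ~/.bashrc so that they persist across logins. A sketch for the PETSc variables:
cat >> ~/.bashrc <<'EOF'
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
EOF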
- dragon 2
function load_module()
{
module purge
module load GCC/4.9.2
module load CMake/3.10.2-GCC-4.9.2
#module load lapack/3.5.0/gcc/4.7.3
#module load blas/3.5.0/gcc/4.7.3
module load openmpi/2.1.2/GCC-4.9.2-gpu
module load python/2.7.13
export MPI_HOME=/usr/local/openmpi/2.1.2/GCC-4.9.2
module load GCC/7.3.0-2.30
module load CMake/3.9.6
module load OpenMPI/3.1.1-GCC-7.3.0-2.30
module load Python/2.7.15-GCCcore-7.3.0-bare
export MPI_HOME=/opt/cecisw/arch/easybuild/2018b/software/OpenMPI/3.1.1-GCC-7.3.0-2.30
export MPI_RUN=$MPI_HOME/bin/mpirun
#export BLASDIR=$BLAS
#export BLASLIB=blas
#export LAPACKDIR=$LAPACK
#export LAPACKLIB=lapack
echo "List of loaded modules:"
module list
}
load_module
with dragon, when running ccmake .., you need to specify the locations /home/ulg/cmmm/lnoels/local/petsc-3.9.1/linux-gnu-c-opt/lib/libfblas.a and /home/ulg/cmmm/lnoels/local/petsc-3.9.1/linux-gnu-c-opt/lib/libflapack.a using the toggle;
you also need to compile without gmm and fltk, and without metis
export PETSC_DIR=$HOME/local/petsc-3.13.2
export PETSC_ARCH=arch-linux-cxx-opt
export SLEPC_DIR=$HOME/local/slepc-3.13.3
export SLEPC_ARCH=arch-linux-cxx-opt
export BLASDIR=$PETSC_DIR/$PETSC_ARCH/lib
export BLASLIB=fblas
export LAPACKDIR=$PETSC_DIR/$PETSC_ARCH/lib
export LAPACKLIB=flapack
export LD_LIBRARY_PATH=$PETSC_DIR/$PETSC_ARCH/lib:$LD_LIBRARY_PATH
export PATH=$HOME/local/swig/bin:$PATH
export PATH=$HOME/local/bin:$PATH
export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
unset SSH_ASKPASS
with dragon 1 and dragon 2, when running ccmake .., you need to specify the locations /home/ulg/cmmm/lnoels/local/petsc-3.13.2/arch-linux-cxx-opt/lib/libfblas.a and /home/ulg/cmmm/lnoels/local/petsc-3.13.2/arch-linux-cxx-opt/lib/libflapack.a using the toggle;
you also need to compile without gmm, fltk, and numpy, and without metis (see the sketch below)
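In non-interactive form, those ccmake toggles correspond roughly to the cache variables below; the variable names are assumptions based on common gmsh build options, so verify them against the project's CMakeLists.txt before use:
# option names assumed; check CMakeLists.txt (or the ccmake listing) for the exact spellings
cmake .. \
  -DBLAS_LAPACK_LIBRARIES="$PETSC_DIR/$PETSC_ARCH/lib/libflapack.a;$PETSC_DIR/$PETSC_ARCH/lib/libfblas.a" \
  -DENABLE_GMM=OFF -DENABLE_FLTK=OFF -DENABLE_NUMPY=OFF -DENABLE_METIS=OFF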
#load_module #do not load it if you want to use scratchcopy etc