diff --git a/NonLinearSolver/clusterScript/dragon1taylor.sh b/NonLinearSolver/clusterScript/dragon1taylorSingle.sh
similarity index 89%
rename from NonLinearSolver/clusterScript/dragon1taylor.sh
rename to NonLinearSolver/clusterScript/dragon1taylorSingle.sh
index 2b8af95bc84ff9ef0d6bbe93f78be59c65e9f591..3ae3bd24c16c88717c74e0ce885a16e3aaac1ce7 100644
--- a/NonLinearSolver/clusterScript/dragon1taylor.sh
+++ b/NonLinearSolver/clusterScript/dragon1taylorSingle.sh
@@ -10,7 +10,7 @@
 #SBATCH --time=0:20:0
 
 
-module load openmpi/2.1.2/GCC-4.9.2-gpu
+module load OpenMPI/3.1.1-GCC-7.3.0-2.30
 
 SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylor
 echo "subdir=" $SUBDIR
@@ -32,16 +32,18 @@ srun cp $SUBDIR/$mshfile $SCRATCH/ || exit $?
 cd $SCRATCH # needed to work in the tmp scratch dir, otherwise the job works in the home directory!!
 #ls -artl
 
-export PETSC_DIR=$HOME/local/petsc-3.9.1
-export PETSC_ARCH=linux-gnu-c-opt
+
+export PETSC_DIR=$HOME/local/petsc-3.13.2
+export PETSC_ARCH=arch-linux-cxx-opt
 
 export PATH=$HOME/local/bin:$PATH
-export PATH=$PATH:$HOME/cm3Libraries/dG3D/release
+
 export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
 
 
 export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
 
+
 mpirun python  $SCRATCH/$pyfile >& $SCRATCH/output.txt
 
 echo -e "\n"
diff --git a/NonLinearSolver/clusterScript/dragon2taylorMPI.sh b/NonLinearSolver/clusterScript/dragon2taylorMPI.sh
new file mode 100644
index 0000000000000000000000000000000000000000..accc7bbdc3788cbfea4414341488468337723b74
--- /dev/null
+++ b/NonLinearSolver/clusterScript/dragon2taylorMPI.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# script example for dragon2
+#SBATCH --job-name taylorMPI
+#SBATCH --mail-user=l.noels@ulg.ac.be
+#SBATCH --mail-type=ALL
+#SBATCH --output="out.txt"
+#SBATCH --ntasks=4
+#SBATCH --mem-per-cpu=800
+#SBATCH --time=0:20:0
+
+
+module load OpenMPI/3.1.1-GCC-7.3.0-2.30
+
+SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylorMPI
+echo "subdir=" $SUBDIR
+
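+# per-job working directory on the cluster's global scratch file system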
+SCRATCH=$GLOBALSCRATCH/${USER}_$SLURM_JOB_ID
+echo "workdir=" $SCRATCH
+
+
+pyfile=taylorCG.py
+mshfile=taylor.msh
+echo "node list of job"  >> $SUBDIR/output.po$SLURM_JOB_ID
+
+hostname >> $SUBDIR/output.po$SLURM_JOB_ID
+
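+# create the per-job workspace and stage the input files in it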
+cd $GLOBALSCRATCH
+mkdir -p ${USER}_$SLURM_JOB_ID || exit $?
+cp $SUBDIR/$pyfile $SCRATCH/ || exit $?
+cp $SUBDIR/$mshfile $SCRATCH/ || exit $? 
+
+cd $SCRATCH # needed to work in the tmp scratch dir, otherwise the job works in the home directory!!
+#ls -artl
+
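+# environment for the PETSc 3.13 build and the dG3D/gmsh python modules (see cm3apps/install.txt)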
+export PETSC_DIR=$HOME/local/petsc-3.13.2
+export PETSC_ARCH=arch-linux-cxx-opt
+
+export PATH=$HOME/local/bin:$PATH
+
+export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
+
+
+export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
+
+
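+# run the solver on the 4 allocated tasks; stdout and stderr are collected in output.txt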
+mpirun -np 4 python  $SCRATCH/$pyfile >& $SCRATCH/output.txt
+
+echo -e "\n"
+
+sleep 5
+
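+# copy the results back to the benchmark directory, then remove the scratch workspace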
+cp -f $SCRATCH/* $SUBDIR/  || exit $? 
+rm -rf $SCRATCH ||  exit $? 
+echo -e "\n"
+
+
+
+
diff --git a/NonLinearSolver/clusterScript/dragon2taylorSingle.sh b/NonLinearSolver/clusterScript/dragon2taylorSingle.sh
new file mode 100644
index 0000000000000000000000000000000000000000..795b2054dd02f17486960511467ce0d67f6ab62d
--- /dev/null
+++ b/NonLinearSolver/clusterScript/dragon2taylorSingle.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# script example for dragon2
+#SBATCH --job-name taylorSingle
+#SBATCH --mail-user=l.noels@ulg.ac.be
+#SBATCH --mail-type=ALL
+#SBATCH --output="out.txt"
+#SBATCH --ntasks=1
+#SBATCH --mem-per-cpu=800
+#SBATCH --time=0:20:0
+
+
+module load OpenMPI/3.1.1-GCC-7.3.0-2.30
+
+SUBDIR=$HOME/cm3Libraries/dG3D/benchmarks/taylor
+echo "subdir=" $SUBDIR
+
+SCRATCH=$GLOBALSCRATCH/${USER}_$SLURM_JOB_ID
+echo "workdir=" $SCRATCH
+
+
+pyfile=taylorTetDGDyn.py
+mshfile=taylor.msh
+echo "node list of job"  >> $SUBDIR/output.po$SLURM_JOB_ID
+
+hostname >> $SUBDIR/output.po$SLURM_JOB_ID
+
+cd $GLOBALSCRATCH
+mkdir -p ${USER}_$SLURM_JOB_ID || exit $?
+cp $SUBDIR/$pyfile $SCRATCH/ || exit $?
+cp $SUBDIR/$mshfile $SCRATCH/ || exit $? 
+
+cd $SCRATCH # needed to work in the tmp scratch dir, otherwise the job works in the home directory!!
+#ls -artl
+
+export PETSC_DIR=$HOME/local/petsc-3.13.2
+export PETSC_ARCH=arch-linux-cxx-opt
+
+export PATH=$HOME/local/bin:$PATH
+
+export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
+
+
+export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
+
+
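+# single-task run: mpirun starts one process per allocated slot (here 1); output is collected in output.txt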
+mpirun python  $SCRATCH/$pyfile >& $SCRATCH/output.txt
+
+echo -e "\n"
+
+sleep 5
+
+srun cp -f $SCRATCH/* $SUBDIR/  || exit $? 
+srun rm -rf $SCRATCH ||  exit $? 
+echo -e "\n"
+
+
+
+
diff --git a/cm3apps/install.txt b/cm3apps/install.txt
index 3d170f3446ddf82f1cf43a7d4076f06c87e3739e..8ef26effff5660b5a00783e263d53dc6434c7460 100644
--- a/cm3apps/install.txt
+++ b/cm3apps/install.txt
@@ -119,6 +119,8 @@ A) The packages needed to install gmsh are (gmsh has to be install to use cm3app
         4) close your terminal and open it (to reload your bashrc)
 
         5) Go to your petsc installation folder and type the following command (you have to use this configuration for hmem server):               
+          For 3.13 on dragon1, dragon2:
+          ./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-fpic --with-shared-libraries=1 --with-clanguage=cxx --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
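+          configure ends by printing the exact build commands to run; with the directories used here they typically look like:
+          make PETSC_DIR=$HOME/local/petsc-3.13.2 PETSC_ARCH=arch-linux-cxx-opt all
+          make PETSC_DIR=$HOME/local/petsc-3.13.2 PETSC_ARCH=arch-linux-cxx-opt check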
 
           For 3.10, without superlu
 ./configure -configModules=PETSc.Configure --optionsModule=config.compilerOptions --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-mpich=yes --with-shared-libraries=yes --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes
@@ -151,13 +153,6 @@ A) The packages needed to install gmsh are (gmsh has to be install to use cm3app
 
           in case of the error "MPI wrappers do not work", use --with-mpi-compilers=0
          
-          for dragon
-           version >=3.7
-         ./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-fpic --with-shared-libraries=1 --with-clanguage=cxx --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
-
-          version <3.7
-          ./configure --with-debugging=0 --download-fblaslapack=yes --download-mumps=yes --download-scalapack=yes --download-blacs=yes --with-mpi-dir=$MPI_HOME --with-pic --with-shared-libraries=0 --with-clanguage=cxx --with-batch --known-mpi-shared-libraries=0 --download-parmetis=yes --download-metis=yes --download-superlu_dist=yes
-
           on hydra (there is a conflict with metis if downloaded from petsc)
           -petsc 3.8.
                 ./configure --with-debugging=0 --with-scalapack-lib=$SCALAPACKDIR/lib$SCALAPACKLIB.so --download-mumps=yes --with-blas-lib=$BLASDIR/lib$BLASLIB.so --with-lapack-lib=$LAPACKDIR/lib$LAPACKLIB.so --with-mpi-dir=$MPI_HOME --with-pic --with-shared-libraries=1 --with-clanguage=cxx --with-batch --known-mpi-shared-libraries=1  
@@ -182,44 +177,76 @@ Then wait until the job end (the last line of the created file "petsc-install.ou
         5c) This is a macro that loads all the required modules on a CECI cluster. Be aware that the macro is cluster dependent, as the package versions vary from one cluster to another. "module avail" will list all the available modules (including different compiler and MPI versions), so you may change this macro to fit your needs. You can put it in your .bashrc and then bash; load_module will load the required modules
 
         - dragon1:
-	function load_module()
-	{
-	  module purge
-	  module load GCC/4.7.3
-	  module load CMake/2.8.11.1-GCC-4.7.3
-	  module load lapack/gcc/3.4.2
-	  # hack as the load of module lapack export BLASDIR???
-	  export LAPACKDIR=$BLASDIR
-	  export LAPACKLIB=$BLASLIB
-	  # then load blas
-	  module load blas/gcc/3.4.2
-	  module load openmpi/1.6.1/gnu64-4.7.3
-	  echo "List of loaded modules:"
-	  module list
-	}
-        new one on dragon 1
         function load_module()
         {
           module purge
-          module load GCC/4.9.2
-          module load CMake/3.10.2-GCC-4.9.2
-          #module load lapack/3.5.0/gcc/4.7.3
-          #module load blas/3.5.0/gcc/4.7.3
-          module load openmpi/2.1.2/GCC-4.9.2-gpu
-          module load python/2.7.13
-          export MPI_HOME=/usr/local/openmpi/2.1.2/GCC-4.9.2
+          module load GCC/7.3.0-2.30
+          module load CMake/3.11.4-GCCcore-7.3.0
+          module load OpenMPI/3.1.1-GCC-7.3.0-2.30
+          module load Python/2.7.15-GCCcore-7.3.0-bare
+          export MPI_HOME=/usr/local/Software/.local/easybuild/software/OpenMPI/3.1.1-GCC-7.3.0-2.30/
           export MPI_RUN=$MPI_HOME/bin/mpirun
-          #export BLASDIR=$BLAS
-          #export BLASLIB=blas
-          #export LAPACKDIR=$LAPACK
-          #export LAPACKLIB=lapack
           echo "List of loaded modules:"
           module list
+        }
+        load_module
+
+        export PETSC_DIR=$HOME/local/petsc-3.13.2
+        export PETSC_ARCH=arch-linux-cxx-opt
+        export SLEPC_DIR=$HOME/local/slepc-3.13.3
+        export SLEPC_ARCH=linux-gnu-c-opt
+        export BLASDIR=$PETSC_DIR/$PETSC_ARCH/lib
+        export BLASLIB=fblas
+        export LAPACKDIR=$PETSC_DIR/$PETSC_ARCH/lib
+        export LAPACKLIB=flapack
+        export LD_LIBRARY_PATH=$PETSC_DIR/$PETSC_ARCH/lib:$LD_LIBRARY_PATH
 
+
+        export PATH=$HOME/local/swig/bin:$HOME/local/bin:$PATH
+        export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
+
+        export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
+
+        unset SSH_ASKPASS
+
+
+
+
+        - dragon 2
+        function load_module()
+        {
+          module purge
+          module load GCC/7.3.0-2.30
+          module load CMake/3.9.6
+          module load OpenMPI/3.1.1-GCC-7.3.0-2.30
+          module load Python/2.7.15-GCCcore-7.3.0-bare
+          export MPI_HOME=/opt/cecisw/arch/easybuild/2018b/software/OpenMPI/3.1.1-GCC-7.3.0-2.30
+          export MPI_RUN=$MPI_HOME/bin/mpirun
+          echo "List of loaded modules:"
+          module list
         }
         load_module
-        with dragon, when doing ccmake .., you need to specify the locations /home/ulg/cmmm/lnoels/local/petsc-3.9.1/linux-gnu-c-opt/lib/libfblas.a and /home/ulg/cmmm/lnoels/local/petsc-3.9.1/linux-gnu-c-opt/lib/libflapack.a using toggle
-        also you need to compile without gmm fltk and without metis
+        export PETSC_DIR=$HOME/local/petsc-3.13.2
+        export PETSC_ARCH=arch-linux-cxx-opt
+        export SLEPC_DIR=$HOME/local/slepc-3.13.3
+        export SLEPC_ARCH=arch-linux-cxx-opt
+        export BLASDIR=$PETSC_DIR/$PETSC_ARCH/lib
+        export BLASLIB=fblas
+        export LAPACKDIR=$PETSC_DIR/$PETSC_ARCH/lib
+        export LAPACKLIB=flapack
+        export LD_LIBRARY_PATH=$PETSC_DIR/$PETSC_ARCH/lib:$LD_LIBRARY_PATH
+
+        export PATH=$HOME/local/swig/bin:$PATH
+        export PATH=$HOME/local/bin:$PATH
+        export PATH=$PATH:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh
+        export PYTHONPATH=$PYTHONPATH:$HOME/cm3Libraries/dG3D/release:$HOME/cm3Libraries/dG3D/debug:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/debug/NonLinearSolver/gmsh/utils/wrappers/gmshpy:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers:$HOME/cm3Libraries/dG3D/release/NonLinearSolver/gmsh/utils/wrappers/gmshpy
+
+        unset SSH_ASKPASS
+
+        with dragon 1 and dragon 2, when doing ccmake .., you need to specify the locations /home/ulg/cmmm/lnoels/local/petsc-3.13.2/arch-linux-cxx-opt/lib/libfblas.a and /home/ulg/cmmm/lnoels/local/petsc-3.13.2/arch-linux-cxx-opt/lib/libflapack.a using toggle
+        you also need to compile without gmm, fltk, numpy, and metis
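+        as a minimal sketch, run from the build directory (e.g. $HOME/cm3Libraries/dG3D/release); the ENABLE_* option names below follow gmsh's usual CMake convention and may differ in this tree, so check its CMakeLists:
+          ccmake ..          # press 't' for the advanced view and point the BLAS/LAPACK library entries to the libfblas.a and libflapack.a paths above
+          cmake .. -DENABLE_GMM=OFF -DENABLE_FLTK=OFF -DENABLE_NUMPY=OFF -DENABLE_METIS=OFF
+          make -j 4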
 
         #load_module #do not load it if you want to use scratchcopy etc