diff --git a/NonLinearSolver/modelReduction/ANNUtils.h b/NonLinearSolver/modelReduction/ANNUtils.h
index a113ff1272c40f22c3b51c6731725f9ab140722b..1620de713e6dab6f5e329171d048980db497bf34 100644
--- a/NonLinearSolver/modelReduction/ANNUtils.h
+++ b/NonLinearSolver/modelReduction/ANNUtils.h
@@ -65,14 +65,15 @@ class RectifierActivationFunction: public activationFunction
 {
   protected:
     double _fact;
+    double _offset;
   public:
-    RectifierActivationFunction(double f=10.): activationFunction(),_fact(f){}
+    RectifierActivationFunction(double f=10.): activationFunction(),_fact(f),_offset(0.){}
-    RectifierActivationFunction(const RectifierActivationFunction& src): activationFunction(src){}
+    RectifierActivationFunction(const RectifierActivationFunction& src): activationFunction(src),_fact(src._fact),_offset(src._offset){}
     virtual ~RectifierActivationFunction(){}
     virtual activationFunction::functionType getType() const {return activationFunction::rectifier;};
     virtual std::string getName() const;
-    virtual double getVal(double x) const  {return (log(1.+exp(_fact*x)))/_fact;};
-    virtual double getReciprocalVal(double y) const {return (log(exp(_fact*y)-1.))/_fact;}; // inverse function
+    virtual double getVal(double x) const  {return (_offset+log(1.+exp(_fact*x)))/_fact;};
+    virtual double getReciprocalVal(double y) const {return (log(exp(_fact*y-_offset)-1.))/_fact;}; // inverse function
     virtual double getDiff(double x) const {return exp(_fact*x)/(1.+exp(_fact*x));};
     virtual activationFunction* clone() const {return new RectifierActivationFunction(*this);};
 };
diff --git a/NonLinearSolver/modelReduction/DeepMaterialNetworks.cpp b/NonLinearSolver/modelReduction/DeepMaterialNetworks.cpp
index 84bd3862c8fe106609a3499e0831067c38b8a2a8..07fbc4265ce7d72423706d7cd8549eac24c5aecd 100644
--- a/NonLinearSolver/modelReduction/DeepMaterialNetworks.cpp
+++ b/NonLinearSolver/modelReduction/DeepMaterialNetworks.cpp
@@ -980,9 +980,14 @@ void BimaterialHomogenization::compute(const std::vector<fullMatrix<double> >& C
     {
       DCeffDnormal.resize(3);
     }    
-  };
-  computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff,
+    computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff,
                     stiff,&DCeffDcin[0],&DCeffDcin[1],&DCeffDf[0],&DCeffDf[1],&DCeffDnormal[0],&DCeffDnormal[1],&DCeffDnormal[2]);
+  }
+  else
+  {
+    computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff);
+  };
+  
 };
 
 void LaminateHomogenization::compute(const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, 
@@ -1105,10 +1110,17 @@ void LaminateHomogenization::compute(const std::vector<fullMatrix<double> >& Cin
   }
   else if (numPhase == 2)
   {
-    computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff,
-                    stiff,&DCeffDcin[0],&DCeffDcin[1],
-                    &DCeffDf[0],&DCeffDf[1],&DCeffDnormal[0],
-                    &DCeffDnormal[1],&DCeffDnormal[2]);
+    if (stiff)
+    {
+      computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff,
+                      stiff,&DCeffDcin[0],&DCeffDcin[1],
+                      &DCeffDf[0],&DCeffDf[1],&DCeffDnormal[0],
+                      &DCeffDnormal[1],&DCeffDnormal[2]);
+    }
+    else
+    {
+      computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal,Ceff);
+    }
   }
   else
   {
@@ -1173,7 +1185,14 @@ void LaminateHomogenization::compute(const std::vector<fullMatrix<double> >& Cin
         };
       }
       // two phase
-      computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal,CC[i+1],stiff,&A[i],&B[i],&F0[i],&F1[i],&N0[i],&N1[i],&N2[i]);
+      if (stiff)
+      {
+        computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal,CC[i+1],stiff,&A[i],&B[i],&F0[i],&F1[i],&N0[i],&N1[i],&N2[i]);
+      }
+      else
+      {
+        computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal,CC[i+1]);
+      }
     };
     //
     Ceff = CC[numPhase-1];
@@ -1265,10 +1284,17 @@ void LaminateHomogenization::compute(const std::vector<fullMatrix<double> >& Cin
   }
   else if (numPhase == 2)
   {
-    computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal[0],Ceff,
+    if (stiff)
+    {
+      computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal[0],Ceff,
                     stiff,&DCeffDcin[0],&DCeffDcin[1],
                     &DCeffDf[0],&DCeffDf[1],&DCeffDnormal[0][0],
                     &DCeffDnormal[0][1],&DCeffDnormal[0][2]);
+    }
+    else
+    {
+      computeBimaterial(Cin[0],Cin[1],f[0],f[1],normal[0],Ceff);
+    }
   }
   else
   {
@@ -1333,7 +1359,14 @@ void LaminateHomogenization::compute(const std::vector<fullMatrix<double> >& Cin
         };
       }
       // two phase
-      computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal[i],CC[i+1],stiff,&A[i],&B[i],&F0[i],&F1[i],&N0[i],&N1[i],&N2[i]);
+      if (stiff)
+      {
+        computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal[i],CC[i+1],stiff,&A[i],&B[i],&F0[i],&F1[i],&N0[i],&N1[i],&N2[i]);
+      }
+      else
+      {
+        computeBimaterial(CC[i],Cin[i+1],f0i,f1i,normal[i],CC[i+1]);
+      }
     };
     //
     Ceff = CC[numPhase-1];
@@ -1696,7 +1729,8 @@ void TrainingDeepMaterialNetwork::train(double lr, int maxEpoch, std::string los
   WBest = Wcur;
   Wprev = Wcur;
   g.resize(numDof,true);
-  double costfuncPrev = 0.;
+  double costfuncPrev = evaluateTrainingSet(*lossEval);
+  Msg::Info("costfuncPrev = %e",costfuncPrev);
   
   //OfflineData
   int epoch = 0;
@@ -1705,6 +1739,7 @@ void TrainingDeepMaterialNetwork::train(double lr, int maxEpoch, std::string los
   int Ntrain = _XTrain.size();
   double lrCur = lr;
   int numEpochIncrease = 0;
+  int numEpochLrMin = 0;
   int numEpochDecrease = 0;
   while (true)
   {
@@ -1773,17 +1808,23 @@ void TrainingDeepMaterialNetwork::train(double lr, int maxEpoch, std::string los
     double costfunc = evaluateTrainingSet(*lossEval);
     if (numberBatches==Ntrain)
     {
-      if ((costfunc > costfuncPrev) && (epoch > 0))
+      if (costfunc > costfuncPrev)
       {
         numEpochIncrease ++;
         if (numEpochIncrease == 1)
         {
           numEpochIncrease = 0;
-          Msg::Info("reduce learning rate from %.5e to %.5e",lrCur,0.8*lrCur);
+          Msg::Info("costfunc = %e, reduce learning rate from %.5e to %.5e",costfunc,lrCur,0.8*lrCur);
           lrCur *= 0.8;
           if (lrCur < lrmin)
           {
             lrCur = lrmin;
+            numEpochLrMin ++;
+            if (numEpochLrMin>10)
+            {
+              Msg::Info("maximal number of iterations %d at minimal learning step is reached !!",numEpochLrMin);
+              break;
+            }
           }
           numEpochDecrease = 0;
           Wcur = Wprev;
@@ -1844,8 +1885,10 @@ void TrainingDeepMaterialNetwork::train(double lr, int maxEpoch, std::string los
     
     if (removeZeroContribution)
     {
+      
       if ((epoch+1) %numStepRemove==0)
       {
+        double costfuncPrev_pre = evaluateTrainingSet(*lossEval);
         Msg::Info("tree removing");
         bool removed =  _T->removeLeavesWithZeroContribution(tolRemove);
         if (removed)
@@ -1858,7 +1901,10 @@ void TrainingDeepMaterialNetwork::train(double lr, int maxEpoch, std::string los
           Wprev = Wcur;
           g.resize(numDof,true);
         }
+        costfuncPrev = evaluateTrainingSet(*lossEval);
+        Msg::Info("pre-removing costfuncPrev = %e and after removing costfuncPrev = %e",costfuncPrev_pre,costfuncPrev);
       }
+      
     }
     
     epoch++;
@@ -3158,7 +3204,7 @@ void DeepMaterialNetwork::downscale(const STensor3& Fcur, STensor3& F,  const Tr
     double wChild = getWeight(node);
     int childOrder = node->childOrder;
     //
-    double f = wChild/wParent;
+   //double f = wChild/wParent;
     double fact = 1./wChild;
     
     /*
diff --git a/NonLinearSolver/modelReduction/DeepMaterialNetworks.h b/NonLinearSolver/modelReduction/DeepMaterialNetworks.h
index bf1ead0059c58c8c25b700dc38b7c3769a9988a0..51ce1bf3d9116b9ca7f414b932f11ad09c04786c 100644
--- a/NonLinearSolver/modelReduction/DeepMaterialNetworks.h
+++ b/NonLinearSolver/modelReduction/DeepMaterialNetworks.h
@@ -142,10 +142,10 @@ class BimaterialHomogenization : public Homogenization
           fullMatrix<double>& DNDn2) const;
       virtual void computeBimaterial(const fullMatrix<double>& C1, const fullMatrix<double>& C2, double f1, double f2, const SVector3& normal,
                         fullMatrix<double>& Ceff, 
-                        bool stiff, 
-                        hyperFullMatrix* DCeffDC1, hyperFullMatrix* DCeffDC2,
-                        fullMatrix<double>* DCeffDf1, fullMatrix<double>* DCeffDf2, 
-                        fullMatrix<double>* DCeffDnormal1,fullMatrix<double>* DCeffDnormal2, fullMatrix<double>* DCeffDnormal3) const;
+                        bool stiff=false, 
+                        hyperFullMatrix* DCeffDC1=NULL, hyperFullMatrix* DCeffDC2=NULL,
+                        fullMatrix<double>* DCeffDf1=NULL, fullMatrix<double>* DCeffDf2=NULL, 
+                        fullMatrix<double>* DCeffDnormal1=NULL,fullMatrix<double>* DCeffDnormal2=NULL, fullMatrix<double>* DCeffDnormal3=NULL) const;
     #endif //SWIG
 };
 
diff --git a/NonLinearSolver/modelReduction/Tree.cpp b/NonLinearSolver/modelReduction/Tree.cpp
index af87281f50b70c238cef90e39e9ba6ed117dad84..ed8d0a0e8ed4f125dac7a22c8804d6a51b0f1c53 100644
--- a/NonLinearSolver/modelReduction/Tree.cpp
+++ b/NonLinearSolver/modelReduction/Tree.cpp
@@ -244,7 +244,7 @@ void Tree::createSpecialTripleTree(int numNodes, int numDirVars, const std::stri
   }
 };
 
-void Tree::createRandomTreeSameDepthForNodes(int nbLeaves, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc, bool randomChild)
+void Tree::createRandomTreeSameDepthForNodes(int nbLeaves, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc, bool randomChild, bool randomMat)
 { 
   auto makeGroup = [&](const std::vector<int>& input, std::vector<int>& outPut, bool first)
   {
@@ -269,7 +269,7 @@ void Tree::createRandomTreeSameDepthForNodes(int nbLeaves, int numChildMax, int
       if (first)
       {
         numChilds = leafPerNode;
-        if (randomChild)
+        if (randomMat)
         {
           numChilds =  rand() % (leafPerNode-1) + 2; // number takes a random value from 2 to leafPerNode 
         }
@@ -971,7 +971,7 @@ int Tree::getNumberOfNodes() const
   return allNodes.size();
 };
 
-bool Tree::removeZeroLeaves(double tol)
+bool Tree::removeZeroLeaves(double tol, bool iteration)
 {
   bool ok = false;
   int maxInter = 100;
@@ -1122,6 +1122,11 @@ bool Tree::removeZeroLeaves(double tol)
         Msg::Exit(0);
       }
     };
+    
+    if (!iteration)
+    {
+      break;
+    }
   };
   return false;
 };
@@ -1904,7 +1909,8 @@ void Tree::initialize(bool rand)
   for (int i=0; i< allLeaves.size(); i++)
   {
     TreeNode* n = allLeaves[i];
-    n->weight *= (1./toltalWeightLeaves);
+    double vv = (n->af->getVal(n->weight))/toltalWeightLeaves;
+    n->weight = n->af->getReciprocalVal(vv);
     // propagate data
     double w = n->weight;
     while (n->parent != NULL)
diff --git a/NonLinearSolver/modelReduction/Tree.h b/NonLinearSolver/modelReduction/Tree.h
index c3747732f16f9c2431a1d83e5bcf986dd424c6d0..e5d7abd6622e4ddd268951038980af00e6afd7ac 100644
--- a/NonLinearSolver/modelReduction/Tree.h
+++ b/NonLinearSolver/modelReduction/Tree.h
@@ -53,7 +53,7 @@ class Tree
     void createPerfectTree(int maxDepth, int numChildRegular, int numChildLeaf, int numDirVars, const std::string affunc="relu", int lastRepeat=1);
     void createMixedTree(int maxDepth, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc="relu");
     void createRandomTree(int nbLeaves, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc="relu", bool randomSubdivion=true, bool randomChild=true, bool sameNbChildLayer=true);
-    void createRandomTreeSameDepthForNodes(int nbLeaves, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc="relu", bool randomChild=true);
+    void createRandomTreeSameDepthForNodes(int nbLeaves, int numChildMax, int leafPerNode, int numDirVars,const std::string affunc="relu", bool randomChild=true, bool randomMat=true);
     void createSpecialBinaryTree(int numNodes, int numDirVars,const std::string affunc="relu");
     void createSpecialTripleTree(int numNodes, int numDirVars,const std::string affunc="relu");
     void printTree() const;
@@ -78,7 +78,7 @@ class Tree
     void printNodeFraction() const;
     
     bool removeLeavesWithZeroContribution(double tol=1e-6);
-    bool removeZeroLeaves(double tol);
+    bool removeZeroLeaves(double tol, bool iteration=true);
     bool treeMerging();
     
     #ifndef SWIG