diff --git a/Ccore/ccore.pro b/Ccore/ccore.pro
index d7054f641bccb822ca1e9dbe783e5dff07858b5c..620a35560e3659b20003d1537060b73fcf6a6dda 100644
--- a/Ccore/ccore.pro
+++ b/Ccore/ccore.pro
@@ -37,16 +37,18 @@
   - effectively solve the optimisation problem.
 
   The computation of sensitivities is a delicate step of the optimisation,
-  which can however be one hundred percent validated by finite difference.
+  which can, however, be fully validated by finite differences.
   This tutorial is thus dedicated to both the explanation and verification
   of the direct computation of sensitivities in Magnetostatics.
 
   Communication between the optimisation loop ('shp.py')
   and the Onelab model is done via the Onelab database. 
-  Design variables are defined as Onelab variables whose name
-  has the reserved prefix 'Optimization/Parameters'.
-  Similarly, velocities and sensibilities are also exchanged as 
-  Onelab variables with reserved prefixes 'Optimization/Results/*'.
+  Design variables (here 'D' and 'E') are defined as Onelab variables 
+  whose names have the reserved prefix 'Optimization/Parameters'.
+  Similarly, the unknown field 'a', velocities 'velocity_*'
+  and sensitivities 'dwdtau_*' are also exchanged in memory
+  between the different functions and steps of the model
+  as Onelab variables with the reserved prefix 'Optimization/Results/*'.
 
   Finally, an extra mechanism is implemented 
   to apply a temporary small perturbation of a design parameter 
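
  The exchange mechanism described above can be exercised directly from
  Python through the Onelab database. The following is a minimal sketch,
  assuming the 'onelab' Python module is on the path; the full variable
  names 'Optimization/Parameters/D' and 'Optimization/Results/w' are
  placeholders built from the reserved prefixes, while
  'Optimization/Results/dwdtau_0' is the sensitivity name used in 'shp.py'.

  import onelab
  c = onelab.client(__file__)

  # push a design variable under the reserved 'Optimization/Parameters' prefix
  # (the name 'D' and the value 0.05 are placeholders)
  c.setNumber('Optimization/Parameters/D', value=0.05)

  # ... run the GetDP resolution as a sub-client here ...

  # read back results exposed by the model under 'Optimization/Results/*'
  # ('w' is a placeholder for the objective; 'dwdtau_0' is its sensitivity)
  w = c.getNumber('Optimization/Results/w')
  dwdD = c.getNumber('Optimization/Results/dwdtau_0')
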
diff --git a/Ccore/shp.py b/Ccore/shp.py
index bb23d5508f556ccde20aab6a893ce9027ffe9f36..ed42af8141f6d016d28b9133bf01da744d6da56a 100644
--- a/Ccore/shp.py
+++ b/Ccore/shp.py
@@ -110,7 +110,9 @@ def grad(xFromOpti):
           + ' -setnumber VelocityTag '+str(dv)\
           + ' -solve GetGradient_wrt_dv')
     c.waitOnSubClients()
-    grads = [c.getNumber('Optimization/Results/dwdtau_0'), c.getNumber('Optimization/Results/dwdtau_1')]
+    grads = []
+    for dv in sorted(x):  # one sensitivity per design variable, in index order
+        grads.append(c.getNumber('Optimization/Results/dwdtau_'+str(dv)))
     return np.asarray(grads)
 
 Nfeval = 1
@@ -123,7 +125,7 @@ def test(n):
     tab = np.zeros((n+1,4))
     for step in range(n+1):
         xstep = x[0][2] + (x[0][3]-x[0][2])/float(n)*step
-        f,g,d = fobj([xstep]), grad([xstep]), 0
+        f, g, d = fobj([xstep]), grad([xstep])[0], 0
         tab[step] = [xstep, f, g, d]
         if step >= 2 :
             tab[step-1][3] = (tab[step][1]-tab[step-2][1])/(tab[step][0]-tab[step-2][0])
@@ -151,10 +153,9 @@ designParameters={
 x = {}
 index = int(c.getNumber('Optimization/Sweep on parameter'))-1
 
-if index >= 0: # parameter sweep and optimisation over one design parameter
+if index >= 0: # parameter sweep and optimisation over one selected design parameter
     x[0] = designParameters[index];
     test(int(c.getNumber('Optimization/Steps in range')))
-    #callback=callbackF, 
     res = minimize(fobj, [x[0][1]], jac=grad, bounds=[(x[0][2],x[0][3])],\
                    callback=callbackF, method = 'L-BFGS-B', tol=None,\
                    options={'ftol': 1e-5, 'gtol': 1e-3, 'disp':True} )
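
  As stated in the tutorial header, the analytic sensitivities can be fully
  validated by finite differences; the test() routine above does exactly that
  with a central difference over a parameter sweep. A self-contained sketch of
  the same check, with a purely hypothetical quadratic objective standing in
  for the GetDP resolution and for the 'GetGradient_wrt_dv' sensitivity:

  import numpy as np

  def fobj(x):   # stand-in for the objective evaluated by the model
      return (x[0] - 0.3)**2

  def grad(x):   # stand-in for the analytic sensitivity
      return np.asarray([2.0*(x[0] - 0.3)])

  x0, h = np.asarray([0.05]), 1e-4
  fd = (fobj(x0 + h) - fobj(x0 - h)) / (2.0*h)   # central difference, as in test()
  print('analytic: %g   finite difference: %g' % (grad(x0)[0], fd))
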