]> SALOME platform Git repositories - modules/adao.git/commitdiff
Salome HOME
Adding KalmanFilter and treatment of evolution model
authorJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Wed, 8 Aug 2012 15:06:25 +0000 (17:06 +0200)
committerJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Wed, 8 Aug 2012 15:06:25 +0000 (17:06 +0200)
14 files changed:
resources/ADAOSchemaCatalog.xml
src/daComposant/daAlgorithms/Blue.py
src/daComposant/daAlgorithms/EnsembleBlue.py
src/daComposant/daAlgorithms/KalmanFilter.py [new file with mode: 0644]
src/daComposant/daAlgorithms/LinearLeastSquares.py
src/daComposant/daCore/AssimilationStudy.py
src/daComposant/daCore/BasicObjects.py
src/daComposant/daNumerics/ApproximatedDerivatives.py
src/daEficas/generator_adao.py
src/daSalome/daYacsIntegration/daOptimizerLoop.py
src/daSalome/daYacsIntegration/daStudy.py
src/daSalome/daYacsSchemaCreator/help_methods.py
src/daSalome/daYacsSchemaCreator/infos_daComposant.py
src/daSalome/daYacsSchemaCreator/methods.py

index aa8c2d7e60179e5d5e1e91960efb70fc1ada2236..3c34efe20bda75d357d6970938ff6f8e38e1f14b 100644 (file)
@@ -140,6 +140,17 @@ else:
   assim_study.setObservationErrorStored(ObservationErrorStored)
   assim_study.setObservationError(ObservationError)
 
+# EvolutionError
+try:
+  EvolutionError
+except NameError:
+  pass
+else:
+  logging.debug("CREATE EvolutionError is %s"%EvolutionError)
+  logging.debug("CREATE EvolutionErrorStored is %s"%EvolutionErrorStored)
+  assim_study.setEvolutionErrorStored(EvolutionErrorStored)
+  assim_study.setEvolutionError(EvolutionError)
+
 # ObservationOperator
 ObservationOperatorOk = 0
 try:
@@ -179,6 +190,45 @@ if ObservationOperatorOk == 0:
     assim_study.setObservationOperatorType("Adjoint", "Function")
     assim_study.setObservationOperator("Adjoint", ObservationOperatorAdjoint)
 
+# EvolutionModel
+EvolutionModelOk = 0
+try:
+  EvolutionModel
+except NameError:
+  pass
+else:
+  logging.debug("CREATE EvolutionModel is %s"%EvolutionModel)
+  logging.debug("CREATE EvolutionModelType is %s"%EvolutionModelType)
+  assim_study.setEvolutionModelType("Matrix", EvolutionModelType)
+  assim_study.setEvolutionModel("Matrix", EvolutionModel)
+  EvolutionModelOk = 1
+
+if EvolutionModelOk == 0:
+  try:
+    EvolutionModelDirect
+  except NameError:
+    pass
+  else:
+    logging.debug("CREATE EvolutionModelDirect is %s"%EvolutionModelDirect)
+    assim_study.setEvolutionModelType("Direct", "Function")
+    assim_study.setEvolutionModel("Direct", EvolutionModelDirect)
+  try:
+    EvolutionModelTangent
+  except NameError:
+    pass
+  else:
+    logging.debug("CREATE EvolutionModelTangent is %s"%EvolutionModelTangent)
+    assim_study.setEvolutionModelType("Tangent", "Function")
+    assim_study.setEvolutionModel("Tangent", EvolutionModelTangent)
+  try:
+    EvolutionModelAdjoint
+  except NameError:
+    pass
+  else:
+    logging.debug("CREATE EvolutionModelAdjoint is %s"%EvolutionModelAdjoint)
+    assim_study.setEvolutionModelType("Adjoint", "Function")
+    assim_study.setEvolutionModel("Adjoint", EvolutionModelAdjoint)
+
 # Variables
 for name, size in zip(InputVariablesNames, InputVariablesSizes):
   assim_study.setInputVariable(name, size)
@@ -239,7 +289,8 @@ import os
 filepath = os.path.dirname(script)
 filename = os.path.basename(script)
 module_name = os.path.splitext(filename)[0]
-sys.path.insert(0,filepath)
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+    sys.path.insert(0,filepath)
 
 # Import script
 __import__(module_name)
@@ -285,6 +336,46 @@ if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index
 __import__(module_name)
 user_script_module = sys.modules[module_name]
 
+# Get Data from script
+]]></code></script>
+    <inport name="script" type="string"/>
+    <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
+  </inline>
+
+  <inline name="CreateNumpyVectorSerieFromString">
+    <script><code><![CDATA[
+import numpy, logging
+logging.debug("CREATE Entering in CreateNumpyVectorSerieFromString")
+vector = numpy.matrix(vector_in_string)
+type = "VectorSerie"
+logging.debug("VectorSerie is %s"%vector)
+]]></code></script>
+    <inport name="vector_in_string" type="string"/>
+    <outport name="vector" type="pyobj"/>
+    <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
+  </inline>
+
+  <inline name="CreateNumpyVectorSerieFromScript">
+    <script><code><![CDATA[
+import logging
+logging.debug("CREATE Entering in CreateNumpyVectorSerieFromScript")
+type = "VectorSerie"
+
+# Get file path and filename
+import sys
+import os
+filepath = os.path.dirname(script)
+filename = os.path.basename(script)
+module_name = os.path.splitext(filename)[0]
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+    sys.path.insert(0,filepath)
+
+# Import script
+__import__(module_name)
+user_script_module = sys.modules[module_name]
+
 # Get Data from script
 ]]></code></script>
     <inport name="script" type="string"/>
@@ -361,7 +452,8 @@ import os
 filepath = os.path.dirname(script)
 filename = os.path.basename(script)
 module_name = os.path.splitext(filename)[0]
-sys.path.insert(0,filepath)
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+    sys.path.insert(0,filepath)
 
 # Import script
 __import__(module_name)
@@ -377,10 +469,12 @@ user_script_module = sys.modules[module_name]
     <script><code><![CDATA[
 import logging
 logging.debug("CREATE Entering in ReadForSwitchNode")
+logging.debug("       with input data : "+str(data["specificParameters"]))
 switch_value = -1
 for param in data["specificParameters"]:
   if param["name"] == "switch_value":
     switch_value = int(param["value"])
+logging.debug("       switching to value : "+str(switch_value))
 ]]></code></script>
     <inport name="data" type="SALOME_TYPES/ParametricInput"/>
     <outport name="data" type="SALOME_TYPES/ParametricInput"/>
index 42a2e05a2c127010d3abb2eeb00ea8a357f05e05..98995d57b57536e674b8bda16df6e74c76b6c23d 100644 (file)
@@ -49,8 +49,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Opérateur d'observation
         # -----------------------
-        Hm = H["Direct"].asMatrix()
-        Ha = H["Adjoint"].asMatrix()
+        Hm = H["Tangent"].asMatrix(None)
+        Ha = H["Adjoint"].asMatrix(None)
         #
         # Utilisation éventuelle d'un vecteur H(Xb) précalculé
         # ----------------------------------------------------
index 6ab3effd94cefc65594d66b3a0b99bc1b6f22d96..8bb2341818035a3090c8d641ccf07db8aecfa8e8 100644 (file)
@@ -82,8 +82,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Initialisation des opérateurs d'observation et de la matrice gain
         # -----------------------------------------------------------------
-        Hm = H["Direct"].asMatrix()
-        Ha = H["Adjoint"].asMatrix()
+        Hm = H["Tangent"].asMatrix(None)
+        Ha = H["Adjoint"].asMatrix(None)
         #
         # Calcul de la matrice de gain dans l'espace le plus petit et de l'analyse
         # ------------------------------------------------------------------------
diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py
new file mode 100644 (file)
index 0000000..708d5fe
--- /dev/null
@@ -0,0 +1,107 @@
+#-*-coding:iso-8859-1-*-
+#
+#  Copyright (C) 2008-2012 EDF R&D
+#
+#  This library is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2.1 of the License.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library; if not, write to the Free Software
+#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+#  See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import logging
+from daCore import BasicObjects, PlatformInfo
+m = PlatformInfo.SystemUsage()
+
+# ==============================================================================
+class ElementaryAlgorithm(BasicObjects.Algorithm):
+    def __init__(self):
+        BasicObjects.Algorithm.__init__(self, "KALMANFILTER")
+        self.defineRequiredParameter(
+            name     = "CalculateAPosterioriCovariance",
+            default  = False,
+            typecast = bool,
+            message  = "Calcul de la covariance a posteriori",
+            )
+
+    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
+        """
+        Calcul de l'estimateur du filtre de Kalman
+
+        Remarque : les observations sont exploitées à partir du pas de temps 1,
+        et sont utilisées dans Yo comme rangées selon ces indices. Donc le pas 0
+        n'est pas utilisé puisque la première étape de Kalman passe de 0 à 1
+        avec l'observation du pas 1.
+        """
+        logging.debug("%s Lancement"%self._name)
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        #
+        # Paramètres de pilotage
+        # ----------------------
+        self.setParameters(Parameters)
+        #
+        # Opérateur d'observation
+        # -----------------------
+        Hm = H["Tangent"].asMatrix(None)
+        Ha = H["Adjoint"].asMatrix(None)
+        #
+        if B is None:
+            raise ValueError("Background error covariance matrix has to be properly defined!")
+        if R is None:
+            raise ValueError("Observation error covariance matrix has to be properly defined!")
+        #
+        # Opérateur d'évolution
+        # ---------------------
+        Mm = M["Tangent"].asMatrix(None)
+        Mt = M["Adjoint"].asMatrix(None)
+        #
+        # Nombre de pas du Kalman identique au nombre de pas d'observations
+        # -----------------------------------------------------------------
+        duration = Y.stepnumber()
+        #
+        # Initialisation
+        # --------------
+        Xn = Xb
+        Pn = B
+        self.StoredVariables["Analysis"].store( Xn.A1 )
+        if self._parameters["CalculateAPosterioriCovariance"]:
+            self.StoredVariables["APosterioriCovariance"].store( Pn )
+        #
+        for step in range(duration-1):
+            logging.debug("%s Etape de Kalman %i (i.e. %i->%i) sur un total de %i"%(self._name, step+1, step,step+1, duration-1))
+            #
+            # Etape de prédiction
+            # -------------------
+            Xn_predicted = Mm * Xn
+            Pn_predicted = Mm * Pn * Mt + Q
+            #
+            # Etape de correction
+            # -------------------
+            d  = Y.valueserie(step+1) - Hm * Xn_predicted
+            K  = Pn_predicted * Ha * (Hm * Pn_predicted * Ha + R).I
+            Xn = Xn_predicted + K * d
+            Pn = Pn_predicted - K * Hm * Pn_predicted
+            #
+            self.StoredVariables["Analysis"].store( Xn.A1 )
+            self.StoredVariables["Innovation"].store( d.A1 )
+            if self._parameters["CalculateAPosterioriCovariance"]:
+                self.StoredVariables["APosterioriCovariance"].store( Pn )
+        #
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        logging.debug("%s Terminé"%self._name)
+        #
+        return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+    print '\n AUTODIAGNOSTIC \n'
index 5b8863d63cc07f1ee2cf17c77657a0db0d00de0a..0b07c5b2a016040f5c9620fc6e033a616ecaddaf 100644 (file)
@@ -42,8 +42,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Opérateur d'observation
         # -----------------------
-        Hm = H["Direct"].asMatrix()
-        Ha = H["Adjoint"].asMatrix()
+        Hm = H["Tangent"].asMatrix(None)
+        Ha = H["Adjoint"].asMatrix(None)
         #
         if R is not None:
             RI = R.I
index fbb71569514e9b7dcda1d32c07080278a2f7dc38..3f573de4ebed98d1cf6ab63b9f88fd71bb4cf4a4 100644 (file)
@@ -192,7 +192,13 @@ class AssimilationStudy:
             else:
                 self.__Y = numpy.matrix( asVector,    numpy.float ).T
         elif asPersistentVector is not None:
-            self.__Y = asPersistentVector
+            if type( asPersistentVector ) is list or type( asPersistentVector ) is tuple:
+                from Persistence import OneVector
+                self.__Y = OneVector("Observation", basetype=numpy.array)
+                for y in asPersistentVector:
+                    self.__Y.store( y )
+            else:
+                self.__Y = asPersistentVector
         else:
             raise ValueError("Error: improperly defined observations")
         if toBeStored:
@@ -364,9 +370,9 @@ class AssimilationStudy:
                 centeredDF = asFunction["withCenteredDF"],
                 increment  = asFunction["withIncrement"],
                 dX         = asFunction["withdX"] )
-            self.__H["Direct"]  = Operator( fromMethod = FDA.FunctionH )
-            self.__H["Tangent"] = Operator( fromMethod = FDA.TangentH  )
-            self.__H["Adjoint"] = Operator( fromMethod = FDA.AdjointH  )
+            self.__M["Direct"]  = Operator( fromMethod = FDA.FunctionH )
+            self.__M["Tangent"] = Operator( fromMethod = FDA.TangentH  )
+            self.__M["Adjoint"] = Operator( fromMethod = FDA.AdjointH  )
         elif (type(asFunction) is type({})) and \
                 asFunction.has_key("Tangent") and asFunction.has_key("Adjoint") and \
                 (asFunction["Tangent"] is not None) and (asFunction["Adjoint"] is not None):
index 16d44b35fb90ecf5d3283670473517c3c7b66424..0c15f3d85c320b3727fae67f2c8c9037cb3d3ed6 100644 (file)
@@ -92,13 +92,13 @@ class Operator:
         else:
             return self.__Method( (xNominal, xValue) )
 
-    def asMatrix(self, ValueForMethodForm = None):
+    def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
         """
         Permet de renvoyer l'opérateur sous la forme d'une matrice
         """
         if self.__Matrix is not None:
             return self.__Matrix
-        elif ValueForMethodForm is not None:
+        elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
             return self.__Method( (ValueForMethodForm, None) )
         else:
             raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
index 127ebe342aec36b1b380db12119236ebbea0e012..73160cbb8cf84f967a08228103885b2753ede5a0 100644 (file)
@@ -159,8 +159,8 @@ class FDApproximation:
             #
             # Calcul de la valeur linéarisée de H en X appliqué à dX
             # ------------------------------------------------------
-            dX = numpy.asmatrix(dX).flatten().T
-            HtX = numpy.dot(Jacobienne, dX)
+            _dX = numpy.asmatrix(dX).flatten().T
+            HtX = numpy.dot(Jacobienne, _dX)
             return HtX.A1
 
     # ---------------------------------------------------------
@@ -178,8 +178,8 @@ class FDApproximation:
             #
             # Calcul de la valeur de l'adjoint en X appliqué à Y
             # --------------------------------------------------
-            Y = numpy.asmatrix(Y).flatten().T
-            HaY = numpy.dot(JacobienneT, Y)
+            _Y = numpy.asmatrix(Y).flatten().T
+            HaY = numpy.dot(JacobienneT, _Y)
             return HaY.A1
 
 # ==============================================================================
index 94a8102f20c7ba56e413222571c6d0e7acc87432..c668267f18f0c0173bfb9c8f6c643178eba60918 100644 (file)
@@ -118,7 +118,12 @@ class AdaoGenerator(PythonGenerator):
       self.add_data("ObservationError")
     if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in self.dictMCVal.keys():
       self.add_data("CheckingPoint")
-    self.add_data("ObservationOperator")
+    if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in self.dictMCVal.keys():
+      self.add_data("ObservationOperator")
+    if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in self.dictMCVal.keys():
+      self.add_data("EvolutionModel")
+    if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in self.dictMCVal.keys():
+      self.add_data("EvolutionError")
 
     self.add_variables()
     # Parametres optionnels
@@ -286,7 +291,7 @@ class AdaoGenerator(PythonGenerator):
         self.add_observer_in_dict(observer, observers)
 
     # Write observers in the python command file
-    number = 1
+    number = 2
     self.text_da += "observers = {}\n"
     for observer in observers.keys():
       number += 1
index a9611b49c0c01a25c68f5a14fd52c151dcd6ec31..81830c4fc10fd5457d9d2699a7e2b1c3d4177796 100644 (file)
@@ -31,8 +31,9 @@ from daYacsIntegration import daStudy
 
 class OptimizerHooks:
 
-  def __init__(self, optim_algo):
+  def __init__(self, optim_algo, switch_value=-1):
     self.optim_algo = optim_algo
+    self.switch_value = str(int(switch_value))
 
   def create_sample(self, data, method):
     sample = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/ParametricInput'))
@@ -59,6 +60,11 @@ class OptimizerHooks:
       obs_switch.setEltAtRank("name", "switch_value")
       obs_switch.setEltAtRank("value", "1")
       specificParameters.pushBack(obs_switch)
+    if self.optim_algo.has_evolution_model:
+      obs_switch = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+      obs_switch.setEltAtRank("name", "switch_value")
+      obs_switch.setEltAtRank("value", self.switch_value)
+      specificParameters.pushBack(obs_switch)
     sample.setEltAtRank("specificParameters", specificParameters)
 
     # Les données
@@ -95,7 +101,10 @@ class OptimizerHooks:
     else:
       val_end = self.optim_algo.da_study.OutputVariables[self.optim_algo.da_study.OutputVariablesOrder[elt_list]] # nbr de l'argument courant (-1 == tout)
 
-    it = data.flat
+    if data is None:
+        it = []
+    else:
+        it = data.flat
     for val in it:
       param.pushBack(val)
       val_number += 1
@@ -160,18 +169,18 @@ class OptimizerHooks:
           if sample_id == local_counter:
             # 4: Data is ready
             any_data = self.optim_algo.pool.getOutSample(local_counter)
-            Y = self.get_data_from_any(any_data)
+            Z = self.get_data_from_any(any_data)
 
             # 5: Release lock
             # Have to be done before but need a new implementation
             # of the optimizer loop
             self.optim_algo.counter_lock.release()
-            return Y
+            return Z
     else:
       #print "sync false is not yet implemented"
       self.optim_algo.setError("sync == false not yet implemented")
 
-  def Tangent(self, X, sync = 1):
+  def Tangent(self, (X, dX), sync = 1):
     #print "Call Tangent OptimizerHooks"
     if sync == 1:
       # 1: Get a unique sample number
@@ -180,7 +189,7 @@ class OptimizerHooks:
       local_counter = self.optim_algo.sample_counter
 
       # 2: Put sample in the job pool
-      sample = self.create_sample(X, "Tangent")
+      sample = self.create_sample((X,dX) , "Tangent")
       self.optim_algo.pool.pushInSample(local_counter, sample)
 
       # 3: Wait
@@ -195,13 +204,13 @@ class OptimizerHooks:
           if sample_id == local_counter:
             # 4: Data is ready
             any_data = self.optim_algo.pool.getOutSample(local_counter)
-            Y = self.get_data_from_any(any_data)
+            Z = self.get_data_from_any(any_data)
 
             # 5: Release lock
             # Have to be done before but need a new implementation
             # of the optimizer loop
             self.optim_algo.counter_lock.release()
-            return Y
+            return Z
     else:
       #print "sync false is not yet implemented"
       self.optim_algo.setError("sync == false not yet implemented")
@@ -250,6 +259,7 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync):
     SALOMERuntime.OptimizerAlgASync.__init__(self, None)
     self.runtime = SALOMERuntime.getSALOMERuntime()
 
+    self.has_evolution_model = False
     self.has_observer = False
 
     # Gestion du compteur
@@ -261,6 +271,8 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync):
     self.tout     = self.runtime.getTypeCode("SALOME_TYPES/ParametricOutput")
     self.pyobject = self.runtime.getTypeCode("pyobj")
 
+    # Absolument indispensable de définir ainsi "self.optim_hooks"
+    # (sinon on a une "Unknown Exception" sur l'attribut "finish")
     self.optim_hooks = OptimizerHooks(self)
 
   # input vient du port algoinit, input est un Any YACS !
@@ -282,18 +294,35 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync):
     if self.da_study.getObservationOperatorType("Direct") == "Function" or self.da_study.getObservationOperatorType("Tangent") == "Function" or self.da_study.getObservationOperatorType("Adjoint") == "Function" :
       #print "Set Hooks"
       # Use proxy function for YACS
-      self.hooks = OptimizerHooks(self)
+      self.hooksOO = OptimizerHooks(self, switch_value=1)
       direct = tangent = adjoint = None
       if self.da_study.getObservationOperatorType("Direct") == "Function":
-        direct = self.hooks.Direct
+        direct = self.hooksOO.Direct
       if self.da_study.getObservationOperatorType("Tangent") == "Function" :
-        tangent = self.hooks.Tangent
+        tangent = self.hooksOO.Tangent
       if self.da_study.getObservationOperatorType("Adjoint") == "Function" :
-        adjoint = self.hooks.Adjoint
+        adjoint = self.hooksOO.Adjoint
 
       # Set ObservationOperator
       self.ADD.setObservationOperator(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
 
+    # Check if EvolutionModel is already set
+    if self.da_study.getEvolutionModelType("Direct") == "Function" or self.da_study.getEvolutionModelType("Tangent") == "Function" or self.da_study.getEvolutionModelType("Adjoint") == "Function" :
+      self.has_evolution_model = True
+      #print "Set Hooks"
+      # Use proxy function for YACS
+      self.hooksEM = OptimizerHooks(self, switch_value=2)
+      direct = tangent = adjoint = None
+      if self.da_study.getEvolutionModelType("Direct") == "Function":
+        direct = self.hooksEM.Direct
+      if self.da_study.getEvolutionModelType("Tangent") == "Function" :
+        tangent = self.hooksEM.Tangent
+      if self.da_study.getEvolutionModelType("Adjoint") == "Function" :
+        adjoint = self.hooksEM.Adjoint
+
+      # Set EvolutionModel
+      self.ADD.setEvolutionModel(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
+
     # Set Observers
     for observer_name in self.da_study.observers_dict.keys():
       # print "observers %s found" % observer_name
index 474291981f91e7fe5684bf1c696f080f90325590..c2c59b18b4a24e63338109835d62e09c3949adf3 100644 (file)
@@ -53,9 +53,12 @@ class daStudy:
 
     # Observation Management
     self.ObservationOperatorType = {}
-    self.EvolutionModelType = {}
     self.FunctionObservationOperator = {}
 
+    # Evolution Management
+    self.EvolutionModelType = {}
+    self.FunctionEvolutionModel = {}
+
   #--------------------------------------
 
   def setInputVariable(self, name, size):
@@ -91,7 +94,7 @@ class daStudy:
     if Type == "Vector":
       self.BackgroundType = Type
     else:
-      raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + " Types are : Vector")
+      raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + ". Types are : Vector")
 
   def setBackgroundStored(self, Stored):
     if Stored:
@@ -118,7 +121,7 @@ class daStudy:
     if Type == "Vector":
       self.CheckingPointType = Type
     else:
-      raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + " Types are : Vector")
+      raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + ". Types are : Vector")
 
   def setCheckingPointStored(self, Stored):
     if Stored:
@@ -154,10 +157,10 @@ class daStudy:
   #--------------------------------------
 
   def setObservationType(self, Type):
-    if Type == "Vector":
+    if Type == "Vector" or Type == "VectorSerie":
       self.ObservationType = Type
     else:
-      raise daError("[daStudy::setObservationType] Type is unkown : " + Type + " Types are : Vector")
+      raise daError("[daStudy::setObservationType] Type is unkown : " + Type + ". Types are : Vector, VectorSerie")
 
   def setObservationStored(self, Stored):
     if Stored:
@@ -173,6 +176,8 @@ class daStudy:
       raise daError("[daStudy::setObservation] Type or Storage is not defined !")
     if self.ObservationType == "Vector":
       self.ADD.setObservation(asVector = Observation, toBeStored = self.ObservationStored)
+    if self.ObservationType == "VectorSerie":
+      self.ADD.setObservation(asPersistentVector = Observation, toBeStored = self.ObservationStored)
 
   #--------------------------------------
 
@@ -205,7 +210,7 @@ class daStudy:
     elif Type == "Function":
       self.ObservationOperatorType[Name] = Type
     else:
-      raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + " Types are : Matrix, Function")
+      raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + ". Types are : Matrix, Function")
 
   def setObservationOperator(self, Name, ObservationOperator):
     try:
@@ -219,6 +224,21 @@ class daStudy:
 
   #--------------------------------------
 
+  def setEvolutionErrorStored(self, Stored):
+    if Stored:
+      self.EvolutionErrorStored = True
+    else:
+      self.EvolutionErrorStored = False
+
+  def setEvolutionError(self, EvolutionError):
+    try:
+      self.EvolutionErrorStored
+    except AttributeError:
+      raise daError("[daStudy::setEvolutionError] Storage is not defined !")
+    self.ADD.setEvolutionError(asCovariance = EvolutionError, toBeStored = self.EvolutionErrorStored)
+
+  #--------------------------------------
+
   def getEvolutionModelType(self, Name):
     rtn = None
     try:
@@ -233,7 +253,7 @@ class daStudy:
     elif Type == "Function":
       self.EvolutionModelType[Name] = Type
     else:
-      raise daError("[daStudy::setEvolutionModelType] Type is unkown : " + Type + " Types are : Matrix, Function")
+      raise daError("[daStudy::setEvolutionModelType] Type is unkown : " + Type + ". Types are : Matrix, Function")
 
   def setEvolutionModel(self, Name, EvolutionModel):
     try:
index 96b57c0a8af4142110f5ed5b5f838f880454a3c5..b0781d5beb0bec4ee89b9f7d7bccf2a1c827a023 100644 (file)
@@ -210,7 +210,7 @@ def check_data(data_name, data_config, repertory_check=False, repertory=""):
     sys.exit(1)
   else:
     if data_config[data_name_type] not in AssimType[data_name]:
-      logging.fatal(data_name_type + " defined in the study configuration does not have a correct type : " + str(data_config[data_name_type]) 
+      logging.fatal(data_name_type + " of " + data_name + " defined in the study configuration does not have a correct type : " + str(data_config[data_name_type]) 
                     + "\n You can have : " + str(AssimType[data_name]))
       sys.exit(1)
   if data_name_from not in data_config:
@@ -218,7 +218,7 @@ def check_data(data_name, data_config, repertory_check=False, repertory=""):
     sys.exit(1)
   else:
     if data_config[data_name_from] not in FromNumpyList[data_config[data_name_type]]:
-      logging.fatal(data_name_from + " defined in the study configuration does not have a correct value : " + str(data_config[data_name_from]) 
+      logging.fatal(data_name_from + " of " + data_name + " defined in the study configuration does not have a correct value : " + str(data_config[data_name_from]) 
                     + "\n You can have : " + str(FromNumpyList[data_config[data_name_type]]))
       sys.exit(1)
 
index 04e14af09fbbc81e9234bf51681262e4df291412..55eb7db912e932a61033b16a6e71a03eed3665d3 100644 (file)
@@ -35,29 +35,30 @@ AssimData = ["Background", "BackgroundError",
              ]
 
 AssimType = {}
-AssimType["Background"] = ["Vector"]
-AssimType["BackgroundError"] = ["Matrix"]
-AssimType["Observation"] = ["Vector"]
-AssimType["ObservationError"] = ["Matrix"]
+AssimType["Background"]          = ["Vector"]
+AssimType["BackgroundError"]     = ["Matrix"]
+AssimType["Observation"]         = ["Vector", "VectorSerie"]
+AssimType["ObservationError"]    = ["Matrix"]
 AssimType["ObservationOperator"] = ["Matrix", "Function"]
-AssimType["EvolutionModel"] = ["Matrix", "Function"]
-AssimType["EvolutionError"] = ["Matrix"]
+AssimType["EvolutionModel"]      = ["Matrix", "Function"]
+AssimType["EvolutionError"]      = ["Matrix"]
 AssimType["AlgorithmParameters"] = ["Dict"]
-AssimType["UserDataInit"] = ["Dict"]
-AssimType["CheckingPoint"] = ["Vector"]
+AssimType["UserDataInit"]        = ["Dict"]
+AssimType["CheckingPoint"]       = ["Vector"]
 
 FromNumpyList = {}
-FromNumpyList["Vector"]   = ["String", "Script"]
-FromNumpyList["Matrix"]   = ["String", "Script"]
-FromNumpyList["Function"] = ["FunctionDict"]
-FromNumpyList["Dict"]     = ["Script"]
+FromNumpyList["Vector"]      = ["String", "Script"]
+FromNumpyList["VectorSerie"] = ["String", "Script"]
+FromNumpyList["Matrix"]      = ["String", "Script"]
+FromNumpyList["Function"]    = ["FunctionDict"]
+FromNumpyList["Dict"]        = ["Script"]
 
 # -- Infos from daAlgorithms --
 AssimAlgos = [
     "3DVAR",
     "Blue",
     "EnsembleBlue",
-#     "KalmanFilter", # Removed because EvolutionModel must be available in OptLoop
+    "KalmanFilter",
     "LinearLeastSquares",
     "NonLinearLeastSquares",
     "QuantileRegression",
@@ -83,12 +84,12 @@ AlgoDataRequirements["EnsembleBlue"] = [
     "Observation", "ObservationError",
     "ObservationOperator",
     ]
-AlgoDataRequirements["KalmanFilter"] = [
-    "Background", "BackgroundError",
-    "Observation", "ObservationError",
-    "EvolutionModel", "EvolutionError",
-    "ObservationOperator",
-    ]
+AlgoDataRequirements["KalmanFilter"] = [
+    "Background", "BackgroundError",
+    "Observation", "ObservationError",
+    "EvolutionModel", "EvolutionError",
+    "ObservationOperator",
+    ]
 AlgoDataRequirements["LinearLeastSquares"] = [
     "Observation", "ObservationError",
     "ObservationOperator",
@@ -117,7 +118,7 @@ AlgoType = {}
 AlgoType["3DVAR"] = "Optim"
 AlgoType["Blue"] = "Optim"
 AlgoType["EnsembleBlue"] = "Optim"
-AlgoType["KalmanFilter"] = "Optim"
+AlgoType["KalmanFilter"] = "Optim"
 AlgoType["LinearLeastSquares"] = "Optim"
 AlgoType["NonLinearLeastSquares"] = "Optim"
 AlgoType["QuantileRegression"] = "Optim"
@@ -130,29 +131,31 @@ BasicDataInputs = ["String", "Script", "FunctionDict"]
 
 # Data input dict
 DataTypeDict = {}
-DataTypeDict["Vector"]   = ["String", "Script"]
-DataTypeDict["Matrix"]   = ["String", "Script"]
-DataTypeDict["Function"] = ["FunctionDict"]
-DataTypeDict["Dict"]     = ["Script"]
+DataTypeDict["Vector"]      = ["String", "Script"]
+DataTypeDict["VectorSerie"] = ["String", "Script"]
+DataTypeDict["Matrix"]      = ["String", "Script"]
+DataTypeDict["Function"]    = ["FunctionDict"]
+DataTypeDict["Dict"]        = ["Script"]
 
 DataTypeDefaultDict = {}
-DataTypeDefaultDict["Vector"]   = "Script"
-DataTypeDefaultDict["Matrix"]   = "Script"
-DataTypeDefaultDict["Function"] = "FunctionDict"
-DataTypeDefaultDict["Dict"]     = "Script"
+DataTypeDefaultDict["Vector"]      = "Script"
+DataTypeDefaultDict["VectorSerie"] = "Script"
+DataTypeDefaultDict["Matrix"]      = "Script"
+DataTypeDefaultDict["Function"]    = "FunctionDict"
+DataTypeDefaultDict["Dict"]        = "Script"
 
 # Assimilation data input
 AssimDataDict = {}
-AssimDataDict["Background"] = ["Vector"]
-AssimDataDict["BackgroundError"] = ["Matrix"]
-AssimDataDict["Observation"] = ["Vector"]
-AssimDataDict["ObservationError"] = ["Matrix"]
+AssimDataDict["Background"]          = ["Vector"]
+AssimDataDict["BackgroundError"]     = ["Matrix"]
+AssimDataDict["Observation"]         = ["Vector", "VectorSerie"]
+AssimDataDict["ObservationError"]    = ["Matrix"]
 AssimDataDict["ObservationOperator"] = ["Matrix", "Function"]
-AssimDataDict["EvolutionModel"] = ["Matrix", "Function"]
-AssimDataDict["EvolutionError"] = ["Matrix"]
+AssimDataDict["EvolutionModel"]      = ["Matrix", "Function"]
+AssimDataDict["EvolutionError"]      = ["Matrix"]
 AssimDataDict["AlgorithmParameters"] = ["Dict"]
-AssimDataDict["UserDataInit"] = ["Dict"]
-AssimDataDict["CheckingPoint"] = ["Vector"]
+AssimDataDict["UserDataInit"]        = ["Dict"]
+AssimDataDict["CheckingPoint"]       = ["Vector"]
 
 AssimDataDefaultDict = {}
 AssimDataDefaultDict["Background"]          = "Vector"
@@ -166,7 +169,7 @@ AssimDataDefaultDict["AlgorithmParameters"] = "Dict"
 AssimDataDefaultDict["UserDataInit"]        = "Dict"
 AssimDataDefaultDict["CheckingPoint"]       = "Vector"
 
-StoredAssimData = ["Vector", "Matrix"]
+StoredAssimData = ["Vector", "VectorSerie", "Matrix"]
 
 # Assimilation optional nodes
 OptDict = {}
index 9f148f8b791d83bccff524535b9c86494ecd9b7d..dbc05a872242aeef856ffe5487aab857f8b1483b 100644 (file)
@@ -214,6 +214,57 @@ def create_yacs_proc(study_config):
           ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
         back_node.setScript(back_node_script)
 
+      if data_config["Type"] == "VectorSerie" and data_config["From"] == "String":
+        # Create node
+        factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyVectorSerieFromString")
+        back_node = factory_back_node.cloneNode("Get" + key)
+        back_node.getInputPort("vector_in_string").edInitPy(data_config["Data"])
+        ADAO_Case.edAddChild(back_node)
+        # Connect node with CreateAssimilationStudy
+        CAS_node.edAddInputPort(key, t_pyobj)
+        CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+        back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # Connect node with InitUserData
+        if key in init_config["Target"]:
+          back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script
+          back_node.edAddInputPort("init_data", t_pyobj)
+          ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+        back_node.setScript(back_node_script)
+
+      if data_config["Type"] == "VectorSerie" and data_config["From"] == "Script":
+        # Create node
+        factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyVectorSerieFromScript")
+        back_node = factory_back_node.cloneNode("Get" + key)
+        if repertory:
+          back_node.getInputPort("script").edInitPy(os.path.join(base_repertory, os.path.basename(data_config["Data"])))
+        else:
+          back_node.getInputPort("script").edInitPy(data_config["Data"])
+        back_node.edAddOutputPort(key, t_pyobj)
+        back_node_script = back_node.getScript()
+        back_node_script += key + " = user_script_module." + key + "\n"
+        back_node.setScript(back_node_script)
+        ADAO_Case.edAddChild(back_node)
+        # Connect node with CreateAssimilationStudy
+        CAS_node.edAddInputPort(key, t_pyobj)
+        CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
+        ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+        back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # Connect node with InitUserData
+        if key in init_config["Target"]:
+          back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script
+          back_node.edAddInputPort("init_data", t_pyobj)
+          ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+        back_node.setScript(back_node_script)
+
       if data_config["Type"] == "Matrix" and data_config["From"] == "String":
         # Create node
         factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyMatrixFromString")
@@ -296,9 +347,10 @@ def create_yacs_proc(study_config):
   optimizer_node = runtime.createOptimizerLoop(name, algLib, factoryName, "")
   compute_bloc.edAddChild(optimizer_node)
   ADAO_Case.edAddDFLink(CAS_node.getOutputPort("Study"), optimizer_node.edGetAlgoInitPort())
+
   # Check if we have a python script for OptimizerLoopNode
   data_config = study_config["ObservationOperator"]
-  opt_script_node = None
+  opt_script_nodeOO = None
   if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict":
     # Get script
     FunctionDict = data_config["Data"]
@@ -309,7 +361,7 @@ def create_yacs_proc(study_config):
       break
 
     # We create a new pyscript node
-    opt_script_node = runtime.createScriptNode("", "FunctionNode")
+    opt_script_nodeOO = runtime.createScriptNode("", "FunctionNodeOO")
     if repertory:
       script_filename = os.path.join(base_repertory, os.path.basename(script_filename))
     try:
@@ -324,15 +376,54 @@ def create_yacs_proc(study_config):
       node_script += "filepath = \"" + base_repertory + "\"\n"
     else:
       node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
-    node_script += "if sys.path.count(os.path.dirname(filepath))==0 or (sys.path.count(os.path.dirname(filepath))>0 and sys.path.index(os.path.dirname(filepath),0,1)>0):\n"
-    node_script += "  sys.path.insert(0,os.path.dirname(filepath))\n"
+    node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+    node_script += "  sys.path.insert(0,filepath)\n"
     node_script += script_str.read()
-    opt_script_node.setScript(node_script)
-    opt_script_node.edAddInputPort("computation", t_param_input)
-    opt_script_node.edAddOutputPort("result", t_param_output)
+    opt_script_nodeOO.setScript(node_script)
+    opt_script_nodeOO.edAddInputPort("computation", t_param_input)
+    opt_script_nodeOO.edAddOutputPort("result", t_param_output)
   else:
     factory_opt_script_node = catalogAd.getNodeFromNodeMap("FakeOptimizerLoopNode")
-    opt_script_node = factory_opt_script_node.cloneNode("FakeFunctionNode")
+    opt_script_nodeOO = factory_opt_script_node.cloneNode("FakeFunctionNode")
+
+  # Check if we have a python script for the EvolutionModel's OptimizerLoopNode
+  if "EvolutionModel" in study_config.keys():
+    data_config = study_config["EvolutionModel"]
+    opt_script_nodeEM = None
+    if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict":
+      # Get script
+      FunctionDict = data_config["Data"]
+      script_filename = ""
+      for FunctionName in FunctionDict["Function"]:
+        # We currently support only one file
+        script_filename = FunctionDict["Script"][FunctionName]
+        break
+
+      # We create a new pyscript node
+      opt_script_nodeEM = runtime.createScriptNode("", "FunctionNodeEM")
+      if repertory:
+        script_filename = os.path.join(base_repertory, os.path.basename(script_filename))
+      try:
+        script_str= open(script_filename, 'r')
+      except:
+        logging.fatal("Exception in opening function script file : " + script_filename)
+        traceback.print_exc()
+        sys.exit(1)
+      node_script  = "#-*-coding:iso-8859-1-*-\n"
+      node_script += "import sys, os \n"
+      if base_repertory != "":
+        node_script += "filepath = \"" + base_repertory + "\"\n"
+      else:
+        node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
+      node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+      node_script += "  sys.path.insert(0,filepath)\n"
+      node_script += script_str.read()
+      opt_script_nodeEM.setScript(node_script)
+      opt_script_nodeEM.edAddInputPort("computation", t_param_input)
+      opt_script_nodeEM.edAddOutputPort("result", t_param_output)
+    else:
+      factory_opt_script_node = catalogAd.getNodeFromNodeMap("FakeOptimizerLoopNode")
+      opt_script_nodeEM = factory_opt_script_node.cloneNode("FakeFunctionNode")
 
   # Add computation bloc
   if "Observers" in study_config.keys():
@@ -351,15 +442,24 @@ def create_yacs_proc(study_config):
     # Connect switch
     ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("switch_value"), switch_node.edGetConditionPort())
 
-    # First case: always computation bloc
-    computation_bloc = runtime.createBloc("computation_bloc")
-    computation_bloc.edAddChild(opt_script_node)
-    switch_node.edSetNode(1, computation_bloc)
+    # First case: computation bloc
+    computation_blocOO = runtime.createBloc("computation_blocOO")
+    computation_blocOO.edAddChild(opt_script_nodeOO)
+    switch_node.edSetNode(1, computation_blocOO)
 
-    # We connect Optimizer with the script
-    ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_node.getInputPort("computation"))
-    ADAO_Case.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+    # We connect with the script
+    ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeOO.getInputPort("computation"))
+    ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
 
+    # Second case: evolution bloc
+    if "EvolutionModel" in study_config.keys():
+      computation_blocEM = runtime.createBloc("computation_blocEM")
+      computation_blocEM.edAddChild(opt_script_nodeEM)
+      switch_node.edSetNode(2, computation_blocEM)
+
+      # We connect with the script
+      ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeEM.getInputPort("computation"))
+      ADAO_Case.edAddDFLink(opt_script_nodeEM.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
 
     # For each observer add a new bloc in the switch
     observer_config = study_config["Observers"]
@@ -396,22 +496,57 @@ def create_yacs_proc(study_config):
       observer_bloc.edAddChild(end_observation_node)
       ADAO_Case.edAddCFLink(observation_node, end_observation_node)
       ADAO_Case.edAddDFLink(end_observation_node.getOutputPort("output"), optimizer_node.edGetPortForOutPool())
+
+  elif "EvolutionModel" in study_config.keys():
+    execution_bloc = runtime.createBloc("Execution Bloc")
+    optimizer_node.edSetNode(execution_bloc)
+
+    # Add a node that permits to configure the switch
+    factory_read_for_switch_node = catalogAd.getNodeFromNodeMap("ReadForSwitchNode")
+    read_for_switch_node = factory_read_for_switch_node.cloneNode("ReadForSwitch")
+    execution_bloc.edAddChild(read_for_switch_node)
+    ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), read_for_switch_node.getInputPort("data"))
+
+    # Add a switch
+    switch_node = runtime.createSwitch("Execution Switch")
+    execution_bloc.edAddChild(switch_node)
+    # Connect switch
+    ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("switch_value"), switch_node.edGetConditionPort())
+
+    # First case: computation bloc
+    computation_blocOO = runtime.createBloc("computation_blocOO")
+    computation_blocOO.edAddChild(opt_script_nodeOO)
+    switch_node.edSetNode(1, computation_blocOO)
+
+    # We connect with the script
+    ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeOO.getInputPort("computation"))
+    ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
+    # Second case: evolution bloc
+    computation_blocEM = runtime.createBloc("computation_blocEM")
+    computation_blocEM.edAddChild(opt_script_nodeEM)
+    switch_node.edSetNode(2, computation_blocEM)
+
+    # We connect with the script
+    ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeEM.getInputPort("computation"))
+    ADAO_Case.edAddDFLink(opt_script_nodeEM.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
   else:
-    computation_bloc = runtime.createBloc("computation_bloc")
-    optimizer_node.edSetNode(computation_bloc)
-    computation_bloc.edAddChild(opt_script_node)
+    computation_blocOO = runtime.createBloc("computation_blocOO")
+    optimizer_node.edSetNode(computation_blocOO)
+    computation_blocOO.edAddChild(opt_script_nodeOO)
 
     # We connect Optimizer with the script
-    ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_node.getInputPort("computation"))
-    ADAO_Case.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+    ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_nodeOO.getInputPort("computation"))
+    ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
 
   # Connect node with InitUserData
   if "ObservationOperator" in init_config["Target"]:
-    opt_node_script = opt_script_node.getScript()
+    opt_node_script = opt_script_nodeOO.getScript()
     opt_node_script = "__builtins__[\"init_data\"] = init_data\n" + opt_node_script
-    opt_script_node.setScript(opt_node_script)
-    opt_script_node.edAddInputPort("init_data", t_pyobj)
-    ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_node.getInputPort("init_data"))
+    opt_script_nodeOO.setScript(opt_node_script)
+    opt_script_nodeOO.edAddInputPort("init_data", t_pyobj)
+    ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_nodeOO.getInputPort("init_data"))
 
   # Step 4: create post-processing from user configuration
   if "UserPostAnalysis" in study_config.keys():
@@ -456,8 +591,8 @@ def create_yacs_proc(study_config):
         node_script += "filepath = \"" + base_repertory + "\"\n"
       else:
         node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
-      node_script += "if sys.path.count(os.path.dirname(filepath))==0 or (sys.path.count(os.path.dirname(filepath))>0 and sys.path.index(os.path.dirname(filepath),0,1)>0):\n"
-      node_script += "  sys.path.insert(0,os.path.dirname(filepath))\n"
+      node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+      node_script += "  sys.path.insert(0,filepath)\n"
       node_script += default_script
       node_script += analysis_file.read()
       analysis_node.setScript(node_script)