Salome HOME
- Adding a new test: test017 with OptimizerLoop
author André Ribes <andre.ribes@edf.fr>
Thu, 24 Jun 2010 14:10:34 +0000 (16:10 +0200)
committer André Ribes <andre.ribes@edf.fr>
Thu, 24 Jun 2010 14:10:34 +0000 (16:10 +0200)
14 files changed:
configure.ac
resources/ADAOSchemaCatalog.xml
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daCore/AssimilationStudy.py
src/daSalome/daYacsIntegration/__init__.py
src/daSalome/daYacsIntegration/daOptimizerLoop.py
src/daSalome/daYacsIntegration/daStudy.py
src/daSalome/daYacsSchemaCreator/infos_daComposant.py
src/daSalome/daYacsSchemaCreator/methods.py
src/tests/daComposant/Plateforme/test017_3DVAR_par_fonction.py
src/tests/daSalome/Makefile.am
src/tests/daSalome/test017_3DVAR_function_script.py [new file with mode: 0644]
src/tests/daSalome/test017_3DVAR_init_data.py [new file with mode: 0644]
src/tests/daSalome/test017_3DVAR_par_fonction.py.in [new file with mode: 0644]

index d533c42b106471655795c1ff0f9638391194a967..13625938c75780198605d1ad720350e5c5260b74 100644 (file)
@@ -115,6 +115,7 @@ AC_CONFIG_FILES([
         src/tests/Makefile
         src/tests/daSalome/Makefile
         src/tests/daSalome/test000_Blue_AnalysisFile.py
+        src/tests/daSalome/test017_3DVAR_par_fonction.py
         src/tests/daComposant/Makefile
         src/tests/daComposant/Plateforme/Makefile
         src/tests/daComposant/Plateforme/context.py
index 44f5fc56c2faee1bf92d6f86c2e2f9b26323b3fa..e3ac589dca9a72267d9b74c72cfdb45b809a5c28 100644 (file)
@@ -26,8 +26,8 @@ try:
 except NameError:
   pass
 else:
-  print "Background is", Background
-  print "BackgroundType is", BackgroundType
+  #print "Background is", Background
+  #print "BackgroundType is", BackgroundType
   assim_study.setBackgroundType(BackgroundType)
   assim_study.setBackground(Background)
 
@@ -37,8 +37,8 @@ try:
 except NameError:
   pass
 else:
-  print "BackgroundError is", BackgroundError
-  print "BackgroundErrorType is", BackgroundErrorType
+  #print "BackgroundError is", BackgroundError
+  #print "BackgroundErrorType is", BackgroundErrorType
   assim_study.setBackgroundError(BackgroundError)
 
 # Observation
@@ -47,8 +47,8 @@ try:
 except NameError:
   pass
 else:
-  print "Observation is", Observation
-  print "ObservationType is", ObservationType
+  #print "Observation is", Observation
+  #print "ObservationType is", ObservationType
   assim_study.setObservationType(ObservationType)
   assim_study.setObservation(Observation)
 
@@ -58,20 +58,49 @@ try:
 except NameError:
   pass
 else:
-  print "ObservationError is", ObservationError
-  print "ObservationErrorType is", ObservationErrorType
+  #print "ObservationError is", ObservationError
+  #print "ObservationErrorType is", ObservationErrorType
   assim_study.setObservationError(ObservationError)
 
 # ObservationOperator
+ObservationOperatorOk = 0
 try:
   ObservationOperator
 except NameError:
   pass
 else:
-  print "ObservationOperator is", ObservationOperator
-  print "ObservationOperatorType is", ObservationOperatorType
+  #print "ObservationOperator is", ObservationOperator
+  #print "ObservationOperatorType is", ObservationOperatorType
   assim_study.setObservationOperatorType(ObservationOperatorType)
   assim_study.setObservationOperator(ObservationOperator)
+  ObservationOperatorOk = 1
+
+if ObservationOperatorOk == 0:
+  try:
+    ObservationOperatorDirect
+  except NameError:
+    pass
+  else:
+    #print "ObservationOperatorDirect is", ObservationOperatorDirect
+    assim_study.setObservationOperatorType("Direct", "Function")
+    assim_study.setObservationOperator("Direct", ObservationOperatorDirect)
+  try:
+    ObservationOperatorTangent
+  except NameError:
+    pass
+  else:
+    #print "ObservationOperatorTangent is", ObservationOperatorTangent
+    assim_study.setObservationOperatorType("Tangent", "Function")
+    assim_study.setObservationOperator("Tangent", ObservationOperatorTangent)
+  try:
+    ObservationOperatorAdjoint
+  except NameError:
+    pass
+  else:
+    #print "ObservationOperatorAdjoint is", ObservationOperatorAdjoint
+    assim_study.setObservationOperatorType("Adjoint", "Function")
+    assim_study.setObservationOperator("Adjoint", ObservationOperatorAdjoint)
+
 
 Study = assim_study
 ]]>
@@ -95,6 +124,16 @@ print "Matrix is", matrix
     <outport name="type" type="string"/>
   </inline>
 
+  <inline name="CreateNumpyMatrixFromScript">
+    <script><code><![CDATA[
+print "Entering in CreateNumpyMatrixFromScript"
+execfile(script)
+type = "Matrix"
+]]></code></script>
+    <inport name="script" type="string"/>
+    <outport name="type" type="string"/>
+  </inline>
+
   <inline name="CreateNumpyVectorFromString">
     <script><code><![CDATA[
 print "Entering in CreateNumpyVectorFromString"
@@ -108,6 +147,16 @@ print "Vector is", vector
     <outport name="type" type="string"/>
   </inline>
 
+  <inline name="CreateNumpyVectorFromScript">
+    <script><code><![CDATA[
+print "Entering in CreateNumpyVectorFromScript"
+execfile(script)
+type = "Vector"
+]]></code></script>
+    <inport name="script" type="string"/>
+    <outport name="type" type="string"/>
+  </inline>
+
   <inline name="SimpleExecuteDirectAlgorithm">
     <script><code><![CDATA[
 print "Entering in SimpleExecuteDirectAlgorithm"
@@ -130,4 +179,14 @@ ADD = Study.getAssimilationStudy()
 ]]></code></script>
     <inport name="Study" type="pyobj"/>
   </inline>
+
+  <inline name="FakeOptimizerLoopNode">
+    <script><code><![CDATA[
+print "Entering in FakeOptimizerLoopNode"
+result = None
+]]></code></script>
+    <inport name="computation" type="pyobj"/>
+    <outport name="result" type="pyobj"/>
+  </inline>
+
 </proc>
index d1ad427ad286cfc849184a2c1dc87f40402c6d1e..82eb0586eb074fb7d4f9469ba2175e75df5ab8d8 100644 (file)
@@ -25,9 +25,9 @@ __author__ = "Jean-Philippe ARGAUD - Mars 2009"
 
 import sys ; sys.path.insert(0, "../daCore")
 import logging
-import Persistence
-from BasicObjects import Algorithm
-import PlatformInfo ; m = PlatformInfo.SystemUsage()
+from daCore import Persistence
+from daCore.BasicObjects import Algorithm
+from daCore import PlatformInfo ; m = PlatformInfo.SystemUsage()
 
 import numpy
 import scipy.optimize
index 83b4813e62b6eae9d49c9cf8b0b118a3fc955f14..c830fbccacc4ffd67f661a98892a2f5b83ea9975 100644 (file)
@@ -562,6 +562,11 @@ class AssimilationStudy:
         sys.path = list(set(sys.path)) # Conserve en unique exemplaire chaque chemin
         return 1
 
+    def prepare_to_pickle(self):
+      self.__algorithmFile = None
+      self.__diagnosticFile = None
+      self.__H  = {}
+
 # ==============================================================================
 if __name__ == "__main__":
     print '\n AUTODIAGNOSTIC \n'
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..a1bd4c1d1d878cd065d611b7f45f4e7fbb8c1643 100644 (file)
@@ -0,0 +1 @@
+from daOptimizerLoop import *
index 164217132dbe963da1feaf7628b8f6cb27dec767..65ce9ee37c086b62efda4d6cebc97ae6318a94bf 100644 (file)
@@ -6,7 +6,7 @@ import numpy
 import threading
 
 from daCore.AssimilationStudy import AssimilationStudy
-import daStudy
+from daYacsIntegration import daStudy
 
 class OptimizerHooks:
 
@@ -26,18 +26,23 @@ class OptimizerHooks:
       local_counter = self.sample_counter
 
       # 2: Put sample in the job pool
-      matrix_to_pool = pickle.dumps(X)
-      self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+      computation = {}
+      computation["method"] = "Direct"
+      computation["data"] = X
+      computation = pickle.dumps(computation)
+      self.optim_algo.pool.pushInSample(local_counter, computation)
 
       # 3: Wait
       while 1:
+        print "waiting"
         self.optim_algo.signalMasterAndWait()
+        print "signal"
         if self.optim_algo.isTerminationRequested():
           self.optim_algo.pool.destroyAll()
           return
         else:
           # Get current Id
-          sample_id = self.pool.getCurrentId()
+          sample_id = self.optim_algo.pool.getCurrentId()
           if sample_id == local_counter:
             # 4: Data is ready
             matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
@@ -52,7 +57,7 @@ class OptimizerHooks:
             return Y
     else:
       print "sync false is not yet implemented"
-      raise ValueError("sync == false not yet implemented")
+      raise daStudy.daError("sync == false not yet implemented")
 
   def Tangent(self, X, sync = 1):
     print "Call Tangent OptimizerHooks"
@@ -63,8 +68,11 @@ class OptimizerHooks:
       local_counter = self.sample_counter
 
       # 2: Put sample in the job pool
-      matrix_to_pool = pickle.dumps(X)
-      self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+      computation = {}
+      computation["method"] = "Tangent"
+      computation["data"] = X
+      computation = pickle.dumps(computation)
+      self.optim_algo.pool.pushInSample(local_counter, computation)
 
       # 3: Wait
       while 1:
@@ -74,7 +82,7 @@ class OptimizerHooks:
           return
         else:
           # Get current Id
-          sample_id = self.pool.getCurrentId()
+          sample_id = self.optim_algo.pool.getCurrentId()
           if sample_id == local_counter:
             # 4: Data is ready
             matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
@@ -89,7 +97,7 @@ class OptimizerHooks:
             return Y
     else:
       print "sync false is not yet implemented"
-      raise ValueError("sync == false not yet implemented")
+      raise daStudy.daError("sync == false not yet implemented")
 
   def Adjoint(self, (X, Y), sync = 1):
     print "Call Adjoint OptimizerHooks"
@@ -100,18 +108,23 @@ class OptimizerHooks:
       local_counter = self.sample_counter
 
       # 2: Put sample in the job pool
-      matrix_to_pool = pickle.dumps(Y)
-      self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+      computation = {}
+      computation["method"] = "Adjoint"
+      computation["data"] = (X, Y)
+      computation = pickle.dumps(computation)
+      self.optim_algo.pool.pushInSample(local_counter, computation)
 
       # 3: Wait
       while 1:
+        print "waiting"
         self.optim_algo.signalMasterAndWait()
+        print "signal"
         if self.optim_algo.isTerminationRequested():
           self.optim_algo.pool.destroyAll()
           return
         else:
           # Get current Id
-          sample_id = self.pool.getCurrentId()
+          sample_id = self.optim_algo.pool.getCurrentId()
           if sample_id == local_counter:
             # 4: Data is ready
             matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
@@ -126,9 +139,9 @@ class OptimizerHooks:
             return Z
     else:
       print "sync false is not yet implemented"
-      raise ValueError("sync == false not yet implemented")
+      raise daStudy.daError("sync == false not yet implemented")
 
-class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync):
+class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync):
 
   def __init__(self):
     SALOMERuntime.RuntimeSALOME_setRuntime()
@@ -146,55 +159,44 @@ class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync):
     print "Algorithme initialize"
 
     # get the daStudy
-    print "Input is ", input
+    #print "[Debug] Input is ", input
     str_da_study = input.getStringValue()
-    da_study = pickle.loads(str_da_study)
-    print "da_study is ", da_study
-    da_study.initAlgorithm()
-    self.ADD = da_study.getAssimilationStudy()
+    self.da_study = pickle.loads(str_da_study)
+    #print "[Debug] da_study is ", self.da_study
+    self.da_study.initAlgorithm()
+    self.ADD = self.da_study.getAssimilationStudy()
 
   def startToTakeDecision(self):
     print "Algorithme startToTakeDecision"
 
-    #TODO !!
-
-    precision = 1.e-13
-    dimension = 3
-
-    xt = numpy.matrix(numpy.arange(dimension)).T
-    Eo = numpy.matrix(numpy.zeros((dimension,))).T
-    Eb = numpy.matrix(numpy.zeros((dimension,))).T
-    H  = numpy.matrix(numpy.core.identity(dimension))
-    xb = xt + Eb
-    yo = FunctionH( xt ) + Eo
-    xb = xb.A1
-    yo = yo.A1
-    R  = numpy.matrix(numpy.core.identity(dimension)).T
-    B  = numpy.matrix(numpy.core.identity(dimension)).T
-
-    ADD = AssimilationStudy()
-    ADD.setBackground         (asVector     = xb )
-    ADD.setBackgroundError    (asCovariance = B )
-    ADD.setObservation        (asVector     = yo )
-    ADD.setObservationError   (asCovariance = R )
-    ADD.setObservationOperator(asFunction   = {"Tangent":FunctionH,
-                                               "Adjoint":AdjointH} )
-    ADD.setControls()
-    ADD.setAlgorithm(choice="3DVAR")
-    ADD.analyze()
-
-    xa = numpy.array(ADD.get("Analysis").valueserie(0))
-    d  = numpy.array(ADD.get("Innovation").valueserie(0))
-    if max(abs(xa - xb)) > precision:
-        raise ValueError("Résultat du test erroné (1)")
-    elif max(abs(d)) > precision:
-        raise ValueError("Résultat du test erroné (2)")
-    else:
-        print "    Test correct, erreur maximale inférieure à %s"%precision
-        print
-    # On a fini !
+    # Check if ObservationOperator is already set
+    if self.da_study.getObservationOperatorType("Direct") == "Function" or self.da_study.getObservationOperatorType("Tangent") == "Function" or self.da_study.getObservationOperatorType("Adjoint") == "Function" :
+      # Use proxy function for YACS
+      self.hooks = OptimizerHooks(self)
+      direct = tangent = adjoint = None
+      if self.da_study.getObservationOperatorType("Direct") == "Function":
+        direct = self.hooks.Direct
+      if self.da_study.getObservationOperatorType("Tangent") == "Function" :
+        tangent = self.hooks.Tangent
+      if self.da_study.getObservationOperatorType("Adjoint") == "Function" :
+        adjoint = self.hooks.Adjoint
+
+      # Set ObservationOperator
+      self.ADD.setObservationOperator(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
+
+
+    # Start Assimilation Study
+    self.ADD.analyze()
+
+    # Assimilation Study is finished
     self.pool.destroyAll()
 
+  def getAlgoResult(self):
+    print "getAlgoResult"
+    self.ADD.prepare_to_pickle()
+    result = pickle.dumps(self.da_study)
+    return result
+
   # Obligatoire ???
   def finish(self):
     print "Algorithme finish"
@@ -210,4 +212,6 @@ class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync):
     return self.tout
   def getTCForAlgoInit(self):
     return self.tin
+  def getTCForAlgoResult(self):
+    return self.tout
 
index a54a92881c8f76dce7793a0ad1ef8d611c1736cb..68db0b935e4c77ac23a3f895334eb48ec6b3d416 100644 (file)
@@ -15,8 +15,13 @@ class daStudy:
     self.ADD = AssimilationStudy(name)
     self.ADD.setControls()
     self.algorithm = algorithm
+    self.Background = None
 
-  def initAlgorithm():
+    # Observation Management
+    self.ObservationOperatorType = {}
+    self.FunctionObservationOperator = {}
+
+  def initAlgorithm(self):
     self.ADD.setAlgorithm(choice=self.algorithm)
 
   def getAssimilationStudy(self):
@@ -39,9 +44,14 @@ class daStudy:
     except AttributeError:
       raise daError("[daStudy::setBackground] Type is not defined !")
 
+    self.Background = Background
+
     if self.BackgroundType == "Vector":
       self.ADD.setBackground(asVector = Background)
 
+  def getBackground(self):
+    return self.Background
+
   def setBackgroundError(self, BackgroundError):
 
     self.ADD.setBackgroundError(asCovariance = BackgroundError)
@@ -64,23 +74,33 @@ class daStudy:
       self.ADD.setObservation(asVector = Observation)
 
   def setObservationError(self, ObservationError):
-
     self.ADD.setObservationError(asCovariance = ObservationError)
 
-  def setObservationOperatorType(self, Type):
 
+  def getObservationOperatorType(self, Name):
+    rtn = None
+    try:
+      rtn = self.ObservationOperatorType[Name]
+    except:
+      pass
+    return rtn
+
+  def setObservationOperatorType(self, Name, Type):
     if Type == "Matrix":
-      self.ObservationOperatorType = Type
+      self.ObservationOperatorType[Name] = Type
+    elif Type == "Function":
+      self.ObservationOperatorType[Name] = Type
     else:
       raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + " Types are : Matrix")
 
-  def setObservationOperator(self, ObservationOperator):
-
+  def setObservationOperator(self, Name, ObservationOperator):
     try:
-      self.ObservationOperatorType
+      self.ObservationOperatorType[Name]
     except AttributeError:
       raise daError("[daStudy::setObservationOperator] Type is not defined !")
 
-    if self.ObservationOperatorType == "Matrix":
+    if self.ObservationOperatorType[Name] == "Matrix":
       self.ADD.setObservationOperator(asMatrix = ObservationOperator)
+    elif self.ObservationOperatorType[Name] == "Function":
+      self.FunctionObservationOperator[Name] = ObservationOperator
 
index 5fead375bc3c3ed7cd5b89d39c906684e8a51d2d..beaf528bc9c2989df122fc326f80ebd372139941 100644 (file)
@@ -39,8 +39,8 @@ AssimType["ObservationOperator"] = ["Matrix", "Function"]
 AssimType["ObservationOperatorAppliedToX"] = ["List"]
 
 FromNumpyList = {}
-FromNumpyList["Vector"] = ["String"]
-FromNumpyList["Matrix"] = ["String"]
+FromNumpyList["Vector"] = ["String", "Script"]
+FromNumpyList["Matrix"] = ["String", "Script"]
 FromNumpyList["Function"] = ["Dict"]
 FromNumpyList["List"] = ["List"]
 
@@ -50,5 +50,9 @@ AssimAlgos = ["Blue", "EnsembleBlue", "Kalman", "LinearLeastSquares", "3DVAR"]
 AlgoDataRequirements = {}
 AlgoDataRequirements["Blue"] = ["Background", "BackgroundError",
                                 "Observation", "ObservationOperator", "ObservationError"]
+
+AlgoDataRequirements["3DVAR"] = ["Background", "BackgroundError",
+                                 "Observation", "ObservationOperator", "ObservationError"]
 AlgoType = {}
 AlgoType["Blue"] = "Direct"
+AlgoType["3DVAR"] = "Optim"
index cacbb070cc732cc26fb6a993a6d706ff59250295..17e4ea1b9c11d46745e12f6beda42b5ee58d8331 100644 (file)
@@ -78,6 +78,19 @@ def create_yacs_proc(study_config):
         proc.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
         proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
 
+      if data_config["Type"] == "Vector" and data_config["From"] == "Script":
+        # Create node
+        factory_back_node = catalogAd._nodeMap["CreateNumpyVectorFromScript"]
+        back_node = factory_back_node.cloneNode("Get" + key)
+        back_node.getInputPort("script").edInitPy(data_config["Data"])
+        back_node.edAddOutputPort(key, t_pyobj)
+        proc.edAddChild(back_node)
+        # Connect node with CreateAssimilationStudy
+        CAS_node.edAddInputPort(key, t_pyobj)
+        CAS_node.edAddInputPort(key_type, t_string)
+        proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+        proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+
       if data_config["Type"] == "Matrix" and data_config["From"] == "String":
         # Create node
         factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromString"]
@@ -90,6 +103,25 @@ def create_yacs_proc(study_config):
         proc.edAddDFLink(back_node.getOutputPort("matrix"), CAS_node.getInputPort(key))
         proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
 
+      if data_config["Type"] == "Matrix" and data_config["From"] == "Script":
+        # Create node
+        factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromScript"]
+        back_node = factory_back_node.cloneNode("Get" + key)
+        back_node.getInputPort("script").edInitPy(data_config["Data"])
+        back_node.edAddOutputPort(key, t_pyobj)
+        proc.edAddChild(back_node)
+        # Connect node with CreateAssimilationStudy
+        CAS_node.edAddInputPort(key, t_pyobj)
+        CAS_node.edAddInputPort(key_type, t_string)
+        proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+        proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+
+      if data_config["Type"] == "Function" and data_config["From"] == "Dict" and key == "ObservationOperator":
+         FunctionDict = data_config["Data"]
+         for FunctionName in FunctionDict["Function"]:
+           port_name = "ObservationOperator" + FunctionName
+           CAS_node.edAddInputPort(port_name, t_string)
+           CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName])
 
   # Step 3: create compute bloc
   compute_bloc = runtime.createBloc("compute_bloc")
@@ -103,6 +135,54 @@ def create_yacs_proc(study_config):
     compute_bloc.edAddChild(execute_node)
     proc.edAddDFLink(CAS_node.getOutputPort("Study"), execute_node.getInputPort("Study"))
 
+  if AlgoType[study_config["Algorithm"]] == "Optim":
+    # We use an optimizer loop
+    name = "Execute" + study_config["Algorithm"]
+    algLib = "daYacsIntegration.py"
+    factoryName = "AssimilationAlgorithm_asynch"
+    optimizer_node = runtime.createOptimizerLoop(name, algLib, factoryName, "")
+    compute_bloc.edAddChild(optimizer_node)
+    proc.edAddDFLink(CAS_node.getOutputPort("Study"), optimizer_node.edGetAlgoInitPort())
+
+    # Check if we have a python script for OptimizerLoopNode
+    data_config = study_config["ObservationOperator"]
+    if data_config["Type"] == "Function" and data_config["From"] == "Dict":
+      # Get script
+      FunctionDict = data_config["Data"]
+      script_filename = ""
+      for FunctionName in FunctionDict["Function"]:
+        # We currently support only one file
+        script_filename = FunctionDict["Script"][FunctionName]
+        break
+
+      # We create a new pyscript node
+      opt_script_node = runtime.createScriptNode("", "FunctionNode")
+      if not os.path.exists(script_filename):
+        logging.fatal("Function script source file does not exists ! :" + script_filename)
+        sys.exit(1)
+      try:
+        script_str= open(script_filename, 'r')
+      except:
+        logging.fatal("Exception in opening function script file : " + script_filename)
+        traceback.print_exc()
+        sys.exit(1)
+      opt_script_node.setScript(script_str.read())
+      opt_script_node.edAddInputPort("computation", t_pyobj)
+      opt_script_node.edAddOutputPort("result", t_pyobj)
+
+      # Add it
+      computation_bloc = runtime.createBloc("computation_bloc")
+      optimizer_node.edSetNode(computation_bloc)
+      computation_bloc.edAddChild(opt_script_node)
+
+      # We connect Optimizer with the script
+      proc.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_node.getInputPort("computation"))
+      proc.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
+    else:
+      logging.fatal("Fake optim script node currently not implemented")
+      sys.exit(1)
+
   # Step 4: create post-processing from user configuration
   if "Analysis" in study_config.keys():
     analysis_config = study_config["Analysis"]
@@ -114,7 +194,10 @@ def create_yacs_proc(study_config):
       analysis_node.setScript(final_script)
       proc.edAddChild(analysis_node)
       proc.edAddCFLink(compute_bloc, analysis_node)
-      proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
+      if AlgoType[study_config["Algorithm"]] == "Optim":
+        proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study"))
+      else:
+        proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
 
     elif analysis_config["From"] == "File":
       factory_analysis_node = catalogAd._nodeMap["SimpleUserAnalysis"]
@@ -126,7 +209,7 @@ def create_yacs_proc(study_config):
       try:
         analysis_file = open(analysis_config["Data"], 'r')
       except:
-        logging.fatal("Exception in openng analysis file : " + str(analysis_config["Data"]))
+        logging.fatal("Exception in opening analysis file : " + str(analysis_config["Data"]))
         traceback.print_exc()
         sys.exit(1)
       file_text = analysis_file.read()
@@ -134,7 +217,10 @@ def create_yacs_proc(study_config):
       analysis_node.setScript(final_script)
       proc.edAddChild(analysis_node)
       proc.edAddCFLink(compute_bloc, analysis_node)
-      proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
+      if AlgoType[study_config["Algorithm"]] == "Optim":
+        proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study"))
+      else:
+        proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
 
       pass
 
index 64677d8749cc7eb70cbf9eb11cf637b2f27b942f..2ca1d09ee43a1b6f59335d98a8b7e6c6216ab7f6 100644 (file)
@@ -51,6 +51,12 @@ def test(precision = 1.e-10, dimension = 3):
     # ------------------------------------------------
     R  = numpy.matrix(numpy.core.identity(dimension)).T
     B  = numpy.matrix(numpy.core.identity(dimension)).T
+
+    print "xb", xb
+    print "B", B
+    print "yo", yo
+    print "R", R
+
     #
     # Analyse
     # -------
index c1f4d30e11ed0734caec26f39c40c8d53b27fee0..d76a9dbe3ef22495d7befae5733fa5aab51993a0 100644 (file)
@@ -1,9 +1,11 @@
 include $(top_srcdir)/adm_local/make_common_starter.am
 
 DATA_INST = \
-           test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py
+           test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py \
+           test017_3DVAR_par_fonction.py test017_3DVAR_function_script.py test017_3DVAR_init_data.py
 
 testsdasalome_DATA = ${DATA_INST}
 
-EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py
+EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py \
+            test017_3DVAR_par_fonction.py.in test017_3DVAR_function_script.py test017_3DVAR_init_data.py
 
diff --git a/src/tests/daSalome/test017_3DVAR_function_script.py b/src/tests/daSalome/test017_3DVAR_function_script.py
new file mode 100644 (file)
index 0000000..6272b5d
--- /dev/null
@@ -0,0 +1,25 @@
+import numpy
+import pickle
+
+print computation["method"]
+
+dimension = 300
+
+H  = numpy.matrix(numpy.core.identity(dimension))
+
+def FunctionH( X ):
+    return H * X
+
+def AdjointH( (X, Y) ):
+    return H.T * Y
+
+if computation["method"] == "Direct":
+  result = FunctionH(computation["data"])
+
+if computation["method"] == "Tangent":
+  result = FunctionH(computation["data"])
+
+if computation["method"] == "Adjoint":
+  result = AdjointH(computation["data"])
+
+print "Computation end"
diff --git a/src/tests/daSalome/test017_3DVAR_init_data.py b/src/tests/daSalome/test017_3DVAR_init_data.py
new file mode 100644 (file)
index 0000000..c8ccc24
--- /dev/null
@@ -0,0 +1,24 @@
+import numpy
+
+numpy.random.seed(1000)
+dimension = 300
+
+xt = numpy.matrix(numpy.arange(dimension)).T
+Eo = numpy.matrix(numpy.zeros((dimension,))).T
+Eb = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,))).T
+H  = numpy.matrix(numpy.core.identity(dimension))
+B = numpy.matrix(numpy.core.identity(dimension)).T
+R = numpy.matrix(numpy.core.identity(dimension)).T
+
+def FunctionH( X ):
+    return H * X
+
+xb = xt + Eb
+xb = xb.A1
+yo = FunctionH( xt ) + Eo
+yo = yo.A1
+
+Background = xb
+BackgroundError = B
+Observation = yo
+ObservationError = R
diff --git a/src/tests/daSalome/test017_3DVAR_par_fonction.py.in b/src/tests/daSalome/test017_3DVAR_par_fonction.py.in
new file mode 100644 (file)
index 0000000..253af8a
--- /dev/null
@@ -0,0 +1,62 @@
+#-*-coding:iso-8859-1-*-
+study_config = {}
+study_config["Name"] = "test017_3DVAR"
+study_config["Algorithm"] = "3DVAR"
+
+Background_config = {}
+Background_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+Background_config["Type"] = "Vector"
+Background_config["From"] = "Script"
+study_config["Background"] = Background_config
+
+BackgroundError_config = {}
+BackgroundError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+BackgroundError_config["Type"] = "Matrix"
+BackgroundError_config["From"] = "Script"
+study_config["BackgroundError"] = BackgroundError_config
+
+Observation_config = {}
+Observation_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+Observation_config["Type"] = "Vector"
+Observation_config["From"] = "Script"
+study_config["Observation"] = Observation_config
+
+ObservationError_config = {}
+ObservationError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+ObservationError_config["Type"] = "Matrix"
+ObservationError_config["From"] = "Script"
+study_config["ObservationError"] = ObservationError_config
+
+FunctionDict = {}
+FunctionDict["Function"] = ["Direct", "Tangent", "Adjoint"]
+FunctionDict["Script"] = {}
+FunctionDict["Script"]["Direct"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+FunctionDict["Script"]["Tangent"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+FunctionDict["Script"]["Adjoint"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+ObservationOperator_config = {}
+ObservationOperator_config["Data"] = FunctionDict
+ObservationOperator_config["Type"] = "Function"
+ObservationOperator_config["From"] = "Dict"
+study_config["ObservationOperator"] = ObservationOperator_config
+
+Analysis_config = {}
+Analysis_config["Data"] = """
+import numpy
+
+dimension = 300
+precision = 1.e-10
+xt = numpy.matrix(numpy.arange(dimension)).T
+xb = Study.getBackground()
+
+xa = numpy.array(ADD.get("Analysis").valueserie(0))
+d  = numpy.array(ADD.get("Innovation").valueserie(0))
+#
+# Verification du resultat
+# ------------------------
+if max(abs(xa - (xb+xt.A1)/2)) > precision:
+    raise ValueError("Resultat du test errone (1)")
+else:
+    print "    Test correct, erreur maximale inferieure à %s"%precision
+"""
+Analysis_config["From"] = "String"
+study_config["Analysis"] = Analysis_config