SALOME platform Git repositories - modules/adao.git/commitdiff
Adding storage capability for some input variables
author    Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
          Fri, 22 Jun 2012 22:07:09 +0000 (00:07 +0200)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
          Sun, 24 Jun 2012 21:24:40 +0000 (23:24 +0200)
bin/AdaoCatalogGenerator.py
resources/ADAOSchemaCatalog.xml
src/daComposant/daAlgorithms/GradientTest.py
src/daEficas/generator_adao.py
src/daSalome/daYacsIntegration/daStudy.py
src/daSalome/daYacsSchemaCreator/infos_daComposant.py
src/daSalome/daYacsSchemaCreator/methods.py
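
Taken together, these changes thread a boolean "Stored" flag from the EFICAS user
interface down to the assimilation study object, so that Vector and Matrix inputs
can optionally be kept in the study's internal storage. As a hedged reconstruction
of the user-visible result (FACT and SIMP are the EFICAS catalogue keywords used
in the first hunk; the Background case is illustrative), each eligible input's
catalogue entry gains a Stored switch:

    def F_Background(statut) : return FACT(statut=statut,
        Stored     = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0),
        INPUT_TYPE = SIMP(statut="o", typ = "TXM", into=("Vector",), defaut="Vector"),
        # ... per-choice declarations follow, as generated below ...
        )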

index c3641c0e156c1be35d06b60e4e5f2b5acac83bed..c7764ed9d98709043daa5ae96bffb656ecf075af 100644 (file)
@@ -81,6 +81,7 @@ def F_Init(statut) : return FACT(statut = statut,
 
 assim_data_method = """
 def F_${assim_name}(statut) : return FACT(statut=statut,
+${storage}
                                           INPUT_TYPE = SIMP(statut="o", typ = "TXM", into=(${choices}), defaut=${default_choice}),
 ${decl_choices}
                                                 )
@@ -232,16 +233,22 @@ for data_input_name in infos.DataTypeDict.keys():
 for assim_data_input_name in infos.AssimDataDict.keys():
   logging.debug("An assimilation algorithm data input is found: " + assim_data_input_name)
   assim_name = assim_data_input_name
+  storage = ""
   choices = ""
   default_choice = ""
   decl_choices = ""
   decl_opts = ""
+  if infos.AssimDataDefaultDict[assim_data_input_name] in infos.StoredAssimData:
+    storage = "                                          Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0),"
   for choice in infos.AssimDataDict[assim_data_input_name]:
     choices += "\"" + choice + "\", "
     decl_choices += assim_data_choice.substitute(choice_name = choice)
+    if choice in infos.StoredAssimData:
+      storage = "                                          Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0),"
   default_choice = "\"" + infos.AssimDataDefaultDict[assim_data_input_name] + "\""
 
   mem_file.write(assim_data_method.substitute(assim_name = assim_name,
+                                              storage = storage,
                                               choices = choices,
                                               decl_choices = decl_choices,
                                               default_choice=default_choice))
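
For reference, the ${...} placeholders are filled with Python's string.Template;
the sketch below replays the substitution performed above in isolation (values
abridged to a Background-like case, and the Template wrapping of the template
string is assumed from context):

    from string import Template

    assim_data_method = Template("""
    def F_${assim_name}(statut) : return FACT(statut=statut,
    ${storage}
        INPUT_TYPE = SIMP(statut="o", typ = "TXM", into=(${choices}), defaut=${default_choice}),
    ${decl_choices}
        )
    """)

    text = assim_data_method.substitute(
        assim_name     = "Background",
        storage        = '    Stored = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0),',
        choices        = '"Vector", ',
        decl_choices   = "",
        default_choice = '"Vector"')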
index 10a9073db53222967e9c346a921f3dbafa8cc3b7..4d42970f61b123e0db900db0613a4c1561812b0a 100644 (file)
@@ -87,7 +87,9 @@ except NameError:
 else:
   logging.debug("CREATE Background is %s"%Background)
   logging.debug("CREATE BackgroundType is %s"%BackgroundType)
+  logging.debug("CREATE BackgroundStored is %s"%BackgroundStored)
   assim_study.setBackgroundType(BackgroundType)
+  assim_study.setBackgroundStored(BackgroundStored)
   assim_study.setBackground(Background)
 
 # CheckingPoint
@@ -98,7 +100,9 @@ except NameError:
 else:
   logging.debug("CREATE CheckingPoint is %s"%CheckingPoint)
   logging.debug("CREATE CheckingPointType is %s"%CheckingPointType)
+  logging.debug("CREATE CheckingPointStored is %s"%CheckingPointStored)
   assim_study.setCheckingPointType(CheckingPointType)
+  assim_study.setCheckingPointStored(CheckingPointStored)
   assim_study.setCheckingPoint(CheckingPoint)
 
 # BackgroundError
@@ -108,7 +112,8 @@ except NameError:
   pass
 else:
   logging.debug("CREATE BackgroundError is %s"%BackgroundError)
-  logging.debug("CREATE BackgroundErrorType is %s"%BackgroundErrorType)
+  logging.debug("CREATE BackgroundErrorStored is %s"%BackgroundErrorStored)
+  assim_study.setBackgroundErrorStored(BackgroundErrorStored)
   assim_study.setBackgroundError(BackgroundError)
 
 # Observation
@@ -119,7 +124,9 @@ except NameError:
 else:
   logging.debug("CREATE Observation is %s"%Observation)
   logging.debug("CREATE ObservationType is %s"%ObservationType)
+  logging.debug("CREATE ObservationStored is %s"%ObservationStored)
   assim_study.setObservationType(ObservationType)
+  assim_study.setObservationStored(ObservationStored)
   assim_study.setObservation(Observation)
 
 # ObservationError
@@ -129,7 +136,8 @@ except NameError:
   pass
 else:
   logging.debug("CREATE ObservationError is %s"%ObservationError)
-  logging.debug("CREATE ObservationErrorType is %s"%ObservationErrorType)
+  logging.debug("CREATE ObservationErrorStored is %s"%ObservationErrorStored)
+  assim_study.setObservationErrorStored(ObservationErrorStored)
   assim_study.setObservationError(ObservationError)
 
 # ObservationOperator
@@ -216,6 +224,7 @@ logging.debug("CREATE Matrix is %s"%matrix)
     <inport name="matrix_in_string" type="string"/>
     <outport name="matrix" type="pyobj"/>
     <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
   </inline>
 
   <inline name="CreateNumpyMatrixFromScript">
@@ -240,6 +249,7 @@ user_script_module = sys.modules[module_name]
 ]]></code></script>
     <inport name="script" type="string"/>
     <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
   </inline>
 
   <inline name="CreateNumpyVectorFromString">
@@ -253,6 +263,7 @@ logging.debug("Vector is %s"%vector)
     <inport name="vector_in_string" type="string"/>
     <outport name="vector" type="pyobj"/>
     <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
   </inline>
 
   <inline name="CreateNumpyVectorFromScript">
@@ -277,6 +288,7 @@ user_script_module = sys.modules[module_name]
 ]]></code></script>
     <inport name="script" type="string"/>
     <outport name="type" type="string"/>
+    <outport name="stored" type="bool"/>
   </inline>
 
   <inline name="SimpleExecuteDirectAlgorithm">
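
Each inline node whose catalogue entry gains a stored outport must also bind a
matching Python variable named stored in its script body; the actual binding is
appended at schema-creation time by daYacsSchemaCreator/methods.py (last file
below). A minimal sketch of a generated script body, assuming the
CreateNumpyVectorFromString node:

    # inline node body after generation (vector construction elided)
    vector = ...          # parsed from the vector_in_string inport
    type = "Vector"       # feeds <outport name="type">
    stored = True         # appended by the schema creator; feeds <outport name="stored">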
index 2fe5381bca3a066e3552bd0c44a8680453e1229d..03b0680175d789a8cea5ecfe907ae1f8a47ee3f5 100644 (file)
@@ -69,7 +69,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
         self.defineRequiredParameter(
             name     = "ResultFile",
-            default  = "",
+            default  = "gradient_result_file",
             typecast = str,
             message  = "Nom de base (hors extension) des fichiers de sauvegarde des résultats",
             )
index 2d5ae5de6fd099cadf40284b905ec6bff2faf218..94a8102f20c7ba56e413222571c6d0e7acc87432 100644 (file)
@@ -159,6 +159,8 @@ class AdaoGenerator(PythonGenerator):
       self.text_da += data_name + "_config['Type'] = '" + data_type + "'\n"
       self.text_da += data_name + "_config['From'] = '" + from_type + "'\n"
       self.text_da += data_name + "_config['Data'] = '" + data      + "'\n"
+      if search_text+"Stored" in self.dictMCVal.keys():
+        self.text_da += data_name + "_config['Stored'] = '" +  str(self.dictMCVal[search_text+"Stored"])  + "'\n"
       self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n"
 
     if from_type == "FunctionDict":
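
Concretely, when EFICAS recorded a Stored keyword for an input, the generated
case script now carries it alongside Type/From/Data. An illustrative (not
verbatim) excerpt of the emitted text for a Background input:

    Background_config = {}
    Background_config['Type'] = 'Vector'
    Background_config['From'] = 'String'
    Background_config['Data'] = '0 0 0'
    Background_config['Stored'] = '1'     # str() of the 0/1 keyword, as above
    study_config['Background'] = Background_config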
index 977520bee6b68b49387e220f4c6d77c2970fc251..474291981f91e7fe5684bf1c696f080f90325590 100644 (file)
@@ -93,14 +93,21 @@ class daStudy:
     else:
       raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + " Types are : Vector")
 
+  def setBackgroundStored(self, Stored):
+    if Stored:
+      self.BackgroundStored = True
+    else:
+      self.BackgroundStored = False
+
   def setBackground(self, Background):
     try:
       self.BackgroundType
+      self.BackgroundStored
     except AttributeError:
-      raise daError("[daStudy::setBackground] Type is not defined !")
+      raise daError("[daStudy::setBackground] Type or Storage is not defined !")
     self.Background = Background
     if self.BackgroundType == "Vector":
-      self.ADD.setBackground(asVector = Background)
+      self.ADD.setBackground(asVector = Background, toBeStored = self.BackgroundStored)
 
   def getBackground(self):
     return self.Background
@@ -113,19 +120,36 @@ class daStudy:
     else:
       raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + " Types are : Vector")
 
+  def setCheckingPointStored(self, Stored):
+    if Stored:
+      self.CheckingPointStored = True
+    else:
+      self.CheckingPointStored = False
+
   def setCheckingPoint(self, CheckingPoint):
     try:
       self.CheckingPointType
+      self.CheckingPointStored
     except AttributeError:
-      raise daError("[daStudy::setCheckingPoint] Type is not defined !")
+      raise daError("[daStudy::setCheckingPoint] Type or Storage is not defined !")
     self.CheckingPoint = CheckingPoint
     if self.CheckingPointType == "Vector":
-      self.ADD.setBackground(asVector = CheckingPoint)
+      self.ADD.setBackground(asVector = CheckingPoint, toBeStored = self.CheckingPointStored)
 
   #--------------------------------------
 
+  def setBackgroundErrorStored(self, Stored):
+    if Stored:
+      self.BackgroundErrorStored = True
+    else:
+      self.BackgroundErrorStored = False
+
   def setBackgroundError(self, BackgroundError):
-    self.ADD.setBackgroundError(asCovariance = BackgroundError)
+    try:
+      self.BackgroundErrorStored
+    except AttributeError:
+      raise daError("[daStudy::setBackgroundError] Storage is not defined !")
+    self.ADD.setBackgroundError(asCovariance = BackgroundError, toBeStored = self.BackgroundErrorStored)
 
   #--------------------------------------
 
@@ -135,18 +159,35 @@ class daStudy:
     else:
       raise daError("[daStudy::setObservationType] Type is unkown : " + Type + " Types are : Vector")
 
+  def setObservationStored(self, Stored):
+    if Stored:
+      self.ObservationStored = True
+    else:
+      self.ObservationStored = False
+
   def setObservation(self, Observation):
     try:
       self.ObservationType
+      self.ObservationStored
     except AttributeError:
-      raise daError("[daStudy::setObservation] Type is not defined !")
+      raise daError("[daStudy::setObservation] Type or Storage is not defined !")
     if self.ObservationType == "Vector":
-      self.ADD.setObservation(asVector = Observation)
+      self.ADD.setObservation(asVector = Observation, toBeStored = self.ObservationStored)
 
   #--------------------------------------
 
+  def setObservationErrorStored(self, Stored):
+    if Stored:
+      self.ObservationErrorStored = True
+    else:
+      self.ObservationErrorStored = False
+
   def setObservationError(self, ObservationError):
-    self.ADD.setObservationError(asCovariance = ObservationError)
+    try:
+      self.ObservationErrorStored
+    except AttributeError:
+      raise daError("[daStudy::setObservationError] Storage is not defined !")
+    self.ADD.setObservationError(asCovariance = ObservationError, toBeStored = self.ObservationErrorStored)
 
   #--------------------------------------
 
index 569f45f79b136fdf8b7590b57d72cece29b80cbf..95e36984bde52721863a96156075a170a8b9980b 100644 (file)
@@ -161,6 +161,8 @@ AssimDataDefaultDict["AlgorithmParameters"] = "Dict"
 AssimDataDefaultDict["UserDataInit"]        = "Dict"
 AssimDataDefaultDict["CheckingPoint"]       = "Vector"
 
+StoredAssimData = ["Vector", "Matrix"]
+
 # Assimilation optional nodes
 OptDict = {}
 OptDict["UserPostAnalysis"]   = ["String", "Script"]
index 698194ecc37fb522ee3dfd80c9bd182d93ba37c7..27d1910d3ffdaa86941c16abfe225452571a5bea 100644 (file)
@@ -53,6 +53,7 @@ def create_yacs_proc(study_config):
   proc.setTypeCode("SALOME_TYPES/ParametricOutput", catalogAd._typeMap["SALOME_TYPES/ParametricOutput"])
   t_pyobj  = proc.getTypeCode("pyobj")
   t_string = proc.getTypeCode("string")
+  t_bool = proc.getTypeCode("bool")
   t_param_input  = proc.getTypeCode("SALOME_TYPES/ParametricInput")
   t_param_output = proc.getTypeCode("SALOME_TYPES/ParametricOutput")
   repertory = False
@@ -136,6 +137,7 @@ def create_yacs_proc(study_config):
       data_config = study_config[key]
 
       key_type = key + "Type"
+      key_stored = key + "Stored"
 
       if data_config["Type"] == "Dict" and data_config["From"] == "Script":
         # Create node
@@ -170,15 +172,18 @@ def create_yacs_proc(study_config):
         # Connect node with CreateAssimilationStudy
         CAS_node.edAddInputPort(key, t_pyobj)
         CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
         ADAO_Case.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
         ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+        back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
         # Connect node with InitUserData
         if key in init_config["Target"]:
-          back_node_script = back_node.getScript()
-          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
-          back_node.setScript(back_node_script)
+          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
           back_node.edAddInputPort("init_data", t_pyobj)
           ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+        back_node.setScript(back_node_script)
 
       if data_config["Type"] == "Vector" and data_config["From"] == "Script":
         # Create node
@@ -196,15 +201,18 @@ def create_yacs_proc(study_config):
         # Connect node with CreateAssimilationStudy
         CAS_node.edAddInputPort(key, t_pyobj)
         CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
         ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
         ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+        back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
         # Connect node with InitUserData
         if key in init_config["Target"]:
-          back_node_script = back_node.getScript()
-          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
-          back_node.setScript(back_node_script)
+          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
           back_node.edAddInputPort("init_data", t_pyobj)
           ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+        back_node.setScript(back_node_script)
 
       if data_config["Type"] == "Matrix" and data_config["From"] == "String":
         # Create node
@@ -215,15 +223,18 @@ def create_yacs_proc(study_config):
         # Connect node with CreateAssimilationStudy
         CAS_node.edAddInputPort(key, t_pyobj)
         CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
         ADAO_Case.edAddDFLink(back_node.getOutputPort("matrix"), CAS_node.getInputPort(key))
         ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+        back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
         # Connect node with InitUserData
         if key in init_config["Target"]:
-          back_node_script = back_node.getScript()
-          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
-          back_node.setScript(back_node_script)
+          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
           back_node.edAddInputPort("init_data", t_pyobj)
           ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+        back_node.setScript(back_node_script)
 
       if data_config["Type"] == "Matrix" and data_config["From"] == "Script":
         # Create node
@@ -235,18 +246,21 @@ def create_yacs_proc(study_config):
           back_node.getInputPort("script").edInitPy(data_config["Data"])
         back_node.edAddOutputPort(key, t_pyobj)
         back_node_script = back_node.getScript()
+        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
         back_node_script += key + " = user_script_module." + key + "\n"
         back_node.setScript(back_node_script)
         ADAO_Case.edAddChild(back_node)
         # Connect node with CreateAssimilationStudy
         CAS_node.edAddInputPort(key, t_pyobj)
         CAS_node.edAddInputPort(key_type, t_string)
+        CAS_node.edAddInputPort(key_stored, t_bool)
         ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
         ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+        ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
         # Connect node with InitUserData
         if key in init_config["Target"]:
           back_node_script = back_node.getScript()
-          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
+          back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script
           back_node.setScript(back_node_script)
           back_node.edAddInputPort("init_data", t_pyobj)
           ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
@@ -261,6 +275,16 @@ def create_yacs_proc(study_config):
            else:
              CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName])
 
+      if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict" and key == "EvolutionModel":
+         FunctionDict = data_config["Data"]
+         for FunctionName in FunctionDict["Function"]:
+           port_name = "EvolutionModel" + FunctionName
+           CAS_node.edAddInputPort(port_name, t_string)
+           if repertory:
+             CAS_node.getInputPort(port_name).edInitPy(os.path.join(base_repertory, os.path.basename(FunctionDict["Script"][FunctionName])))
+           else:
+             CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName])
+
   # Step 3: create compute bloc
   compute_bloc = runtime.createBloc("compute_bloc")
   ADAO_Case.edAddChild(compute_bloc)
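
The per-type wiring repeated above follows a single pattern; condensed here as a
hedged sketch built only from calls already used in this file (key stands for an
input name such as "Background"):

    t_bool = proc.getTypeCode("bool")             # new bool typecode
    key_stored = key + "Stored"                   # e.g. "BackgroundStored"
    CAS_node.edAddInputPort(key_stored, t_bool)   # receiving port on the study node
    ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"),
                          CAS_node.getInputPort(key_stored))
    # the producing node's script gains the matching variable:
    back_node_script = back_node.getScript()
    back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
    back_node.setScript(back_node_script)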