From fa8d65693210b63990b2b35c05ef070587949b8b Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Sat, 23 Jun 2012 00:07:09 +0200 Subject: [PATCH] Adding storage capacity of some input variables --- bin/AdaoCatalogGenerator.py | 7 +++ resources/ADAOSchemaCatalog.xml | 16 +++++- src/daComposant/daAlgorithms/GradientTest.py | 2 +- src/daEficas/generator_adao.py | 2 + src/daSalome/daYacsIntegration/daStudy.py | 57 ++++++++++++++++--- .../daYacsSchemaCreator/infos_daComposant.py | 2 + src/daSalome/daYacsSchemaCreator/methods.py | 44 ++++++++++---- 7 files changed, 109 insertions(+), 21 deletions(-) diff --git a/bin/AdaoCatalogGenerator.py b/bin/AdaoCatalogGenerator.py index c3641c0..c7764ed 100644 --- a/bin/AdaoCatalogGenerator.py +++ b/bin/AdaoCatalogGenerator.py @@ -81,6 +81,7 @@ def F_Init(statut) : return FACT(statut = statut, assim_data_method = """ def F_${assim_name}(statut) : return FACT(statut=statut, +${storage} INPUT_TYPE = SIMP(statut="o", typ = "TXM", into=(${choices}), defaut=${default_choice}), ${decl_choices} ) @@ -232,16 +233,22 @@ for data_input_name in infos.DataTypeDict.keys(): for assim_data_input_name in infos.AssimDataDict.keys(): logging.debug("An assimilation algorithm data input is found: " + assim_data_input_name) assim_name = assim_data_input_name + storage = "" choices = "" default_choice = "" decl_choices = "" decl_opts = "" + if infos.AssimDataDefaultDict[assim_data_input_name] in infos.StoredAssimData: + storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0)," for choice in infos.AssimDataDict[assim_data_input_name]: choices += "\"" + choice + "\", " decl_choices += assim_data_choice.substitute(choice_name = choice) + if choice in infos.StoredAssimData: + storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0)," default_choice = "\"" + infos.AssimDataDefaultDict[assim_data_input_name] + "\"" mem_file.write(assim_data_method.substitute(assim_name = assim_name, + storage = storage, choices = choices, decl_choices = decl_choices, default_choice=default_choice)) diff --git a/resources/ADAOSchemaCatalog.xml b/resources/ADAOSchemaCatalog.xml index 10a9073..4d42970 100644 --- a/resources/ADAOSchemaCatalog.xml +++ b/resources/ADAOSchemaCatalog.xml @@ -87,7 +87,9 @@ except NameError: else: logging.debug("CREATE Background is %s"%Background) logging.debug("CREATE BackgroundType is %s"%BackgroundType) + logging.debug("CREATE BackgroundStored is %s"%BackgroundStored) assim_study.setBackgroundType(BackgroundType) + assim_study.setBackgroundStored(BackgroundStored) assim_study.setBackground(Background) # CheckingPoint @@ -98,7 +100,9 @@ except NameError: else: logging.debug("CREATE CheckingPoint is %s"%CheckingPoint) logging.debug("CREATE CheckingPointType is %s"%CheckingPointType) + logging.debug("CREATE CheckingPointStored is %s"%CheckingPointStored) assim_study.setCheckingPointType(CheckingPointType) + assim_study.setCheckingPointStored(CheckingPointStored) assim_study.setCheckingPoint(CheckingPoint) # BackgroundError @@ -108,7 +112,8 @@ except NameError: pass else: logging.debug("CREATE BackgroundError is %s"%BackgroundError) - logging.debug("CREATE BackgroundErrorType is %s"%BackgroundErrorType) + logging.debug("CREATE BackgroundErrorStored is %s"%BackgroundErrorStored) + assim_study.setBackgroundErrorStored(BackgroundErrorStored) assim_study.setBackgroundError(BackgroundError) # Observation @@ -119,7 +124,9 @@ except NameError: else: logging.debug("CREATE Observation is %s"%Observation) logging.debug("CREATE ObservationType 
is %s"%ObservationType) + logging.debug("CREATE ObservationStored is %s"%ObservationStored) assim_study.setObservationType(ObservationType) + assim_study.setObservationStored(ObservationStored) assim_study.setObservation(Observation) # ObservationError @@ -129,7 +136,8 @@ except NameError: pass else: logging.debug("CREATE ObservationError is %s"%ObservationError) - logging.debug("CREATE ObservationErrorType is %s"%ObservationErrorType) + logging.debug("CREATE ObservationErrorStored is %s"%ObservationErrorStored) + assim_study.setObservationErrorStored(ObservationErrorStored) assim_study.setObservationError(ObservationError) # ObservationOperator @@ -216,6 +224,7 @@ logging.debug("CREATE Matrix is %s"%matrix) + @@ -240,6 +249,7 @@ user_script_module = sys.modules[module_name] ]]> + @@ -253,6 +263,7 @@ logging.debug("Vector is %s"%vector) + @@ -277,6 +288,7 @@ user_script_module = sys.modules[module_name] ]]> + diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py index 2fe5381..03b0680 100644 --- a/src/daComposant/daAlgorithms/GradientTest.py +++ b/src/daComposant/daAlgorithms/GradientTest.py @@ -69,7 +69,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) self.defineRequiredParameter( name = "ResultFile", - default = "", + default = "gradient_result_file", typecast = str, message = "Nom de base (hors extension) des fichiers de sauvegarde des résultats", ) diff --git a/src/daEficas/generator_adao.py b/src/daEficas/generator_adao.py index 2d5ae5d..94a8102 100644 --- a/src/daEficas/generator_adao.py +++ b/src/daEficas/generator_adao.py @@ -159,6 +159,8 @@ class AdaoGenerator(PythonGenerator): self.text_da += data_name + "_config['Type'] = '" + data_type + "'\n" self.text_da += data_name + "_config['From'] = '" + from_type + "'\n" self.text_da += data_name + "_config['Data'] = '" + data + "'\n" + if search_text+"Stored" in self.dictMCVal.keys(): + self.text_da += data_name + "_config['Stored'] = '" + str(self.dictMCVal[search_text+"Stored"]) + "'\n" self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n" if from_type == "FunctionDict": diff --git a/src/daSalome/daYacsIntegration/daStudy.py b/src/daSalome/daYacsIntegration/daStudy.py index 977520b..4742919 100644 --- a/src/daSalome/daYacsIntegration/daStudy.py +++ b/src/daSalome/daYacsIntegration/daStudy.py @@ -93,14 +93,21 @@ class daStudy: else: raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + " Types are : Vector") + def setBackgroundStored(self, Stored): + if Stored: + self.BackgroundStored = True + else: + self.BackgroundStored = False + def setBackground(self, Background): try: self.BackgroundType + self.BackgroundStored except AttributeError: - raise daError("[daStudy::setBackground] Type is not defined !") + raise daError("[daStudy::setBackground] Type or Storage is not defined !") self.Background = Background if self.BackgroundType == "Vector": - self.ADD.setBackground(asVector = Background) + self.ADD.setBackground(asVector = Background, toBeStored = self.BackgroundStored) def getBackground(self): return self.Background @@ -113,19 +120,36 @@ class daStudy: else: raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + " Types are : Vector") + def setCheckingPointStored(self, Stored): + if Stored: + self.CheckingPointStored = True + else: + self.CheckingPointStored = False + def setCheckingPoint(self, CheckingPoint): try: self.CheckingPointType + self.CheckingPointStored except AttributeError: - raise 
daError("[daStudy::setCheckingPoint] Type is not defined !") + raise daError("[daStudy::setCheckingPoint] Type or Storage is not defined !") self.CheckingPoint = CheckingPoint if self.CheckingPointType == "Vector": - self.ADD.setBackground(asVector = CheckingPoint) + self.ADD.setBackground(asVector = CheckingPoint, toBeStored = self.CheckingPointStored) #-------------------------------------- + def setBackgroundErrorStored(self, Stored): + if Stored: + self.BackgroundErrorStored = True + else: + self.BackgroundErrorStored = False + def setBackgroundError(self, BackgroundError): - self.ADD.setBackgroundError(asCovariance = BackgroundError) + try: + self.BackgroundErrorStored + except AttributeError: + raise daError("[daStudy::setBackgroundError] Storage is not defined !") + self.ADD.setBackgroundError(asCovariance = BackgroundError, toBeStored = self.BackgroundErrorStored) #-------------------------------------- @@ -135,18 +159,35 @@ class daStudy: else: raise daError("[daStudy::setObservationType] Type is unkown : " + Type + " Types are : Vector") + def setObservationStored(self, Stored): + if Stored: + self.ObservationStored = True + else: + self.ObservationStored = False + def setObservation(self, Observation): try: self.ObservationType + self.ObservationStored except AttributeError: - raise daError("[daStudy::setObservation] Type is not defined !") + raise daError("[daStudy::setObservation] Type or Storage is not defined !") if self.ObservationType == "Vector": - self.ADD.setObservation(asVector = Observation) + self.ADD.setObservation(asVector = Observation, toBeStored = self.ObservationStored) #-------------------------------------- + def setObservationErrorStored(self, Stored): + if Stored: + self.ObservationErrorStored = True + else: + self.ObservationErrorStored = False + def setObservationError(self, ObservationError): - self.ADD.setObservationError(asCovariance = ObservationError) + try: + self.ObservationErrorStored + except AttributeError: + raise daError("[daStudy::setObservationError] Storage is not defined !") + self.ADD.setObservationError(asCovariance = ObservationError, toBeStored = self.ObservationErrorStored) #-------------------------------------- diff --git a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py index 569f45f..95e3698 100644 --- a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py +++ b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py @@ -161,6 +161,8 @@ AssimDataDefaultDict["AlgorithmParameters"] = "Dict" AssimDataDefaultDict["UserDataInit"] = "Dict" AssimDataDefaultDict["CheckingPoint"] = "Vector" +StoredAssimData = ["Vector", "Matrix"] + # Assimilation optional nodes OptDict = {} OptDict["UserPostAnalysis"] = ["String", "Script"] diff --git a/src/daSalome/daYacsSchemaCreator/methods.py b/src/daSalome/daYacsSchemaCreator/methods.py index 698194e..27d1910 100644 --- a/src/daSalome/daYacsSchemaCreator/methods.py +++ b/src/daSalome/daYacsSchemaCreator/methods.py @@ -53,6 +53,7 @@ def create_yacs_proc(study_config): proc.setTypeCode("SALOME_TYPES/ParametricOutput", catalogAd._typeMap["SALOME_TYPES/ParametricOutput"]) t_pyobj = proc.getTypeCode("pyobj") t_string = proc.getTypeCode("string") + t_bool = proc.getTypeCode("bool") t_param_input = proc.getTypeCode("SALOME_TYPES/ParametricInput") t_param_output = proc.getTypeCode("SALOME_TYPES/ParametricOutput") repertory = False @@ -136,6 +137,7 @@ def create_yacs_proc(study_config): data_config = study_config[key] key_type = key + "Type" + 
key_stored = key + "Stored" if data_config["Type"] == "Dict" and data_config["From"] == "Script": # Create node @@ -170,15 +172,18 @@ def create_yacs_proc(study_config): # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) CAS_node.edAddInputPort(key_type, t_string) + CAS_node.edAddInputPort(key_stored, t_bool) ADAO_Case.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key)) ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored)) + back_node_script = back_node.getScript() + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" # Connect node with InitUserData if key in init_config["Target"]: - back_node_script = back_node.getScript() - back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script - back_node.setScript(back_node_script) + back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script back_node.edAddInputPort("init_data", t_pyobj) ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data")) + back_node.setScript(back_node_script) if data_config["Type"] == "Vector" and data_config["From"] == "Script": # Create node @@ -196,15 +201,18 @@ def create_yacs_proc(study_config): # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) CAS_node.edAddInputPort(key_type, t_string) + CAS_node.edAddInputPort(key_stored, t_bool) ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key)) ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored)) + back_node_script = back_node.getScript() + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" # Connect node with InitUserData if key in init_config["Target"]: - back_node_script = back_node.getScript() - back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script - back_node.setScript(back_node_script) + back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script back_node.edAddInputPort("init_data", t_pyobj) ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data")) + back_node.setScript(back_node_script) if data_config["Type"] == "Matrix" and data_config["From"] == "String": # Create node @@ -215,15 +223,18 @@ def create_yacs_proc(study_config): # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) CAS_node.edAddInputPort(key_type, t_string) + CAS_node.edAddInputPort(key_stored, t_bool) ADAO_Case.edAddDFLink(back_node.getOutputPort("matrix"), CAS_node.getInputPort(key)) ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored)) + back_node_script = back_node.getScript() + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" # Connect node with InitUserData if key in init_config["Target"]: - back_node_script = back_node.getScript() - back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script - back_node.setScript(back_node_script) + back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script back_node.edAddInputPort("init_data", t_pyobj) ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), 
back_node.getInputPort("init_data")) + back_node.setScript(back_node_script) if data_config["Type"] == "Matrix" and data_config["From"] == "Script": # Create node @@ -235,18 +246,21 @@ def create_yacs_proc(study_config): back_node.getInputPort("script").edInitPy(data_config["Data"]) back_node.edAddOutputPort(key, t_pyobj) back_node_script = back_node.getScript() + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" back_node_script += key + " = user_script_module." + key + "\n" back_node.setScript(back_node_script) ADAO_Case.edAddChild(back_node) # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) CAS_node.edAddInputPort(key_type, t_string) + CAS_node.edAddInputPort(key_stored, t_bool) ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key)) ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored)) # Connect node with InitUserData if key in init_config["Target"]: back_node_script = back_node.getScript() - back_node_script = "__builtins__[\"init_data\"] = init_data\n" + back_node_script + back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script back_node.setScript(back_node_script) back_node.edAddInputPort("init_data", t_pyobj) ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data")) @@ -261,6 +275,16 @@ def create_yacs_proc(study_config): else: CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName]) + if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict" and key == "EvolutionModel": + FunctionDict = data_config["Data"] + for FunctionName in FunctionDict["Function"]: + port_name = "EvolutionModel" + FunctionName + CAS_node.edAddInputPort(port_name, t_string) + if repertory: + CAS_node.getInputPort(port_name).edInitPy(os.path.join(base_repertory, os.path.basename(FunctionDict["Script"][FunctionName]))) + else: + CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName]) + # Step 3: create compute bloc compute_bloc = runtime.createBloc("compute_bloc") ADAO_Case.edAddChild(compute_bloc) -- 2.39.2
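
Note on the data flow introduced by this patch (an illustration, not part of the patch itself). For every assimilation data input that can be a Vector or a Matrix (the types listed in StoredAssimData), the generated EFICAS catalog now exposes a Stored keyword (integer 0/1, default 0). The generator in generator_adao.py copies that value into the per-variable configuration dictionary of the generated case. A minimal sketch of such a generated fragment, with purely illustrative values, would look like:

    # Sketch of the Python fragment written by the EFICAS generator
    # (generator_adao.py) for one variable; names such as 'Background' and the
    # data values are illustrative, and study_config is initialized here only
    # to keep the sketch self-contained (the real generator creates it earlier).
    study_config = {}
    Background_config = {}
    Background_config['Type'] = 'Vector'
    Background_config['From'] = 'String'
    Background_config['Data'] = '0 1 2'
    Background_config['Stored'] = '1'   # new key added by this patch
    study_config['Background'] = Background_config

From there, daYacsSchemaCreator/methods.py appends a line such as "stored = 1" to the script of the YACS node that builds the data, wires that node's "stored" output port to a new boolean input port <key>Stored on the CreateAssimilationStudy node, and the CreateAssimilationStudy script forwards the value through assim_study.set<Key>Stored(...). Finally, daStudy normalizes it to a plain boolean and passes it as toBeStored to the corresponding kernel call, e.g. ADD.setBackground(asVector = Background, toBeStored = self.BackgroundStored), which is what actually enables storage of the requested input.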