From 1ee15c2ef4b8f11ce80ba762819be8e0fff109a6 Mon Sep 17 00:00:00 2001 From: =?utf8?q?Andr=C3=A9=20Ribes?= Date: Thu, 24 Jun 2010 16:10:34 +0200 Subject: [PATCH] - Adding a new test: test017 with OptimizerLoop --- configure.ac | 1 + resources/ADAOSchemaCatalog.xml | 79 ++++++++++-- src/daComposant/daAlgorithms/3DVAR.py | 6 +- src/daComposant/daCore/AssimilationStudy.py | 5 + src/daSalome/daYacsIntegration/__init__.py | 1 + .../daYacsIntegration/daOptimizerLoop.py | 116 +++++++++--------- src/daSalome/daYacsIntegration/daStudy.py | 36 ++++-- .../daYacsSchemaCreator/infos_daComposant.py | 8 +- src/daSalome/daYacsSchemaCreator/methods.py | 92 +++++++++++++- .../Plateforme/test017_3DVAR_par_fonction.py | 6 + src/tests/daSalome/Makefile.am | 6 +- .../daSalome/test017_3DVAR_function_script.py | 25 ++++ src/tests/daSalome/test017_3DVAR_init_data.py | 24 ++++ .../daSalome/test017_3DVAR_par_fonction.py.in | 62 ++++++++++ 14 files changed, 383 insertions(+), 84 deletions(-) create mode 100644 src/tests/daSalome/test017_3DVAR_function_script.py create mode 100644 src/tests/daSalome/test017_3DVAR_init_data.py create mode 100644 src/tests/daSalome/test017_3DVAR_par_fonction.py.in diff --git a/configure.ac b/configure.ac index d533c42..1362593 100644 --- a/configure.ac +++ b/configure.ac @@ -115,6 +115,7 @@ AC_CONFIG_FILES([ src/tests/Makefile src/tests/daSalome/Makefile src/tests/daSalome/test000_Blue_AnalysisFile.py + src/tests/daSalome/test017_3DVAR_par_fonction.py src/tests/daComposant/Makefile src/tests/daComposant/Plateforme/Makefile src/tests/daComposant/Plateforme/context.py diff --git a/resources/ADAOSchemaCatalog.xml b/resources/ADAOSchemaCatalog.xml index 44f5fc5..e3ac589 100644 --- a/resources/ADAOSchemaCatalog.xml +++ b/resources/ADAOSchemaCatalog.xml @@ -26,8 +26,8 @@ try: except NameError: pass else: - print "Background is", Background - print "BackgroundType is", BackgroundType + #print "Background is", Background + #print "BackgroundType is", BackgroundType assim_study.setBackgroundType(BackgroundType) assim_study.setBackground(Background) @@ -37,8 +37,8 @@ try: except NameError: pass else: - print "BackgroundError is", BackgroundError - print "BackgroundErrorType is", BackgroundErrorType + #print "BackgroundError is", BackgroundError + #print "BackgroundErrorType is", BackgroundErrorType assim_study.setBackgroundError(BackgroundError) # Observation @@ -47,8 +47,8 @@ try: except NameError: pass else: - print "Observation is", Observation - print "ObservationType is", ObservationType + #print "Observation is", Observation + #print "ObservationType is", ObservationType assim_study.setObservationType(ObservationType) assim_study.setObservation(Observation) @@ -58,20 +58,49 @@ try: except NameError: pass else: - print "ObservationError is", ObservationError - print "ObservationErrorType is", ObservationErrorType + #print "ObservationError is", ObservationError + #print "ObservationErrorType is", ObservationErrorType assim_study.setObservationError(ObservationError) # ObservationOperator +ObservationOperatorOk = 0 try: ObservationOperator except NameError: pass else: - print "ObservationOperator is", ObservationOperator - print "ObservationOperatorType is", ObservationOperatorType + #print "ObservationOperator is", ObservationOperator + #print "ObservationOperatorType is", ObservationOperatorType assim_study.setObservationOperatorType(ObservationOperatorType) assim_study.setObservationOperator(ObservationOperator) + ObservationOperatorOk = 1 + +if ObservationOperatorOk == 0: + try: + 
ObservationOperatorDirect + except NameError: + pass + else: + #print "ObservationOperatorDirect is", ObservationOperatorDirect + assim_study.setObservationOperatorType("Direct", "Function") + assim_study.setObservationOperator("Direct", ObservationOperatorDirect) + try: + ObservationOperatorTangent + except NameError: + pass + else: + #print "ObservationOperatorTangent is", ObservationOperatorTangent + assim_study.setObservationOperatorType("Tangent", "Function") + assim_study.setObservationOperator("Tangent", ObservationOperatorTangent) + try: + ObservationOperatorAdjoint + except NameError: + pass + else: + #print "ObservationOperatorAdjoint is", ObservationOperatorAdjoint + assim_study.setObservationOperatorType("Adjoint", "Function") + assim_study.setObservationOperator("Adjoint", ObservationOperatorAdjoint) + Study = assim_study ]]> @@ -95,6 +124,16 @@ print "Matrix is", matrix + + + + + + + + + + + + + + + + + diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index d1ad427..82eb058 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -25,9 +25,9 @@ __author__ = "Jean-Philippe ARGAUD - Mars 2009" import sys ; sys.path.insert(0, "../daCore") import logging -import Persistence -from BasicObjects import Algorithm -import PlatformInfo ; m = PlatformInfo.SystemUsage() +from daCore import Persistence +from daCore.BasicObjects import Algorithm +from daCore import PlatformInfo ; m = PlatformInfo.SystemUsage() import numpy import scipy.optimize diff --git a/src/daComposant/daCore/AssimilationStudy.py b/src/daComposant/daCore/AssimilationStudy.py index 83b4813..c830fbc 100644 --- a/src/daComposant/daCore/AssimilationStudy.py +++ b/src/daComposant/daCore/AssimilationStudy.py @@ -562,6 +562,11 @@ class AssimilationStudy: sys.path = list(set(sys.path)) # Conserve en unique exemplaire chaque chemin return 1 + def prepare_to_pickle(self): + self.__algorithmFile = None + self.__diagnosticFile = None + self.__H = {} + # ============================================================================== if __name__ == "__main__": print '\n AUTODIAGNOSTIC \n' diff --git a/src/daSalome/daYacsIntegration/__init__.py b/src/daSalome/daYacsIntegration/__init__.py index e69de29..a1bd4c1 100644 --- a/src/daSalome/daYacsIntegration/__init__.py +++ b/src/daSalome/daYacsIntegration/__init__.py @@ -0,0 +1 @@ +from daOptimizerLoop import * diff --git a/src/daSalome/daYacsIntegration/daOptimizerLoop.py b/src/daSalome/daYacsIntegration/daOptimizerLoop.py index 1642171..65ce9ee 100644 --- a/src/daSalome/daYacsIntegration/daOptimizerLoop.py +++ b/src/daSalome/daYacsIntegration/daOptimizerLoop.py @@ -6,7 +6,7 @@ import numpy import threading from daCore.AssimilationStudy import AssimilationStudy -import daStudy +from daYacsIntegration import daStudy class OptimizerHooks: @@ -26,18 +26,23 @@ class OptimizerHooks: local_counter = self.sample_counter # 2: Put sample in the job pool - matrix_to_pool = pickle.dumps(X) - self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool) + computation = {} + computation["method"] = "Direct" + computation["data"] = X + computation = pickle.dumps(computation) + self.optim_algo.pool.pushInSample(local_counter, computation) # 3: Wait while 1: + print "waiting" self.optim_algo.signalMasterAndWait() + print "signal" if self.optim_algo.isTerminationRequested(): self.optim_algo.pool.destroyAll() return else: # Get current Id - sample_id = self.pool.getCurrentId() + sample_id = 
self.optim_algo.pool.getCurrentId() if sample_id == local_counter: # 4: Data is ready matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue() @@ -52,7 +57,7 @@ class OptimizerHooks: return Y else: print "sync false is not yet implemented" - raise ValueError("sync == false not yet implemented") + raise daStudy.daError("sync == false not yet implemented") def Tangent(self, X, sync = 1): print "Call Tangent OptimizerHooks" @@ -63,8 +68,11 @@ class OptimizerHooks: local_counter = self.sample_counter # 2: Put sample in the job pool - matrix_to_pool = pickle.dumps(X) - self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool) + computation = {} + computation["method"] = "Tangent" + computation["data"] = X + computation = pickle.dumps(computation) + self.optim_algo.pool.pushInSample(local_counter, computation) # 3: Wait while 1: @@ -74,7 +82,7 @@ class OptimizerHooks: return else: # Get current Id - sample_id = self.pool.getCurrentId() + sample_id = self.optim_algo.pool.getCurrentId() if sample_id == local_counter: # 4: Data is ready matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue() @@ -89,7 +97,7 @@ class OptimizerHooks: return Y else: print "sync false is not yet implemented" - raise ValueError("sync == false not yet implemented") + raise daStudy.daError("sync == false not yet implemented") def Adjoint(self, (X, Y), sync = 1): print "Call Adjoint OptimizerHooks" @@ -100,18 +108,23 @@ class OptimizerHooks: local_counter = self.sample_counter # 2: Put sample in the job pool - matrix_to_pool = pickle.dumps(Y) - self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool) + computation = {} + computation["method"] = "Adjoint" + computation["data"] = (X, Y) + computation = pickle.dumps(computation) + self.optim_algo.pool.pushInSample(local_counter, computation) # 3: Wait while 1: + print "waiting" self.optim_algo.signalMasterAndWait() + print "signal" if self.optim_algo.isTerminationRequested(): self.optim_algo.pool.destroyAll() return else: # Get current Id - sample_id = self.pool.getCurrentId() + sample_id = self.optim_algo.pool.getCurrentId() if sample_id == local_counter: # 4: Data is ready matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue() @@ -126,9 +139,9 @@ class OptimizerHooks: return Z else: print "sync false is not yet implemented" - raise ValueError("sync == false not yet implemented") + raise daStudy.daError("sync == false not yet implemented") -class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync): +class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): def __init__(self): SALOMERuntime.RuntimeSALOME_setRuntime() @@ -146,55 +159,44 @@ class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync): print "Algorithme initialize" # get the daStudy - print "Input is ", input + #print "[Debug] Input is ", input str_da_study = input.getStringValue() - da_study = pickle.loads(str_da_study) - print "da_study is ", da_study - da_study.initAlgorithm() - self.ADD = da_study.getAssimilationStudy() + self.da_study = pickle.loads(str_da_study) + #print "[Debug] da_study is ", self.da_study + self.da_study.initAlgorithm() + self.ADD = self.da_study.getAssimilationStudy() def startToTakeDecision(self): print "Algorithme startToTakeDecision" - #TODO !! 
- - precision = 1.e-13 - dimension = 3 - - xt = numpy.matrix(numpy.arange(dimension)).T - Eo = numpy.matrix(numpy.zeros((dimension,))).T - Eb = numpy.matrix(numpy.zeros((dimension,))).T - H = numpy.matrix(numpy.core.identity(dimension)) - xb = xt + Eb - yo = FunctionH( xt ) + Eo - xb = xb.A1 - yo = yo.A1 - R = numpy.matrix(numpy.core.identity(dimension)).T - B = numpy.matrix(numpy.core.identity(dimension)).T - - ADD = AssimilationStudy() - ADD.setBackground (asVector = xb ) - ADD.setBackgroundError (asCovariance = B ) - ADD.setObservation (asVector = yo ) - ADD.setObservationError (asCovariance = R ) - ADD.setObservationOperator(asFunction = {"Tangent":FunctionH, - "Adjoint":AdjointH} ) - ADD.setControls() - ADD.setAlgorithm(choice="3DVAR") - ADD.analyze() - - xa = numpy.array(ADD.get("Analysis").valueserie(0)) - d = numpy.array(ADD.get("Innovation").valueserie(0)) - if max(abs(xa - xb)) > precision: - raise ValueError("Résultat du test erroné (1)") - elif max(abs(d)) > precision: - raise ValueError("Résultat du test erroné (2)") - else: - print " Test correct, erreur maximale inférieure à %s"%precision - print - # On a fini ! + # Check if ObservationOperator is already set + if self.da_study.getObservationOperatorType("Direct") == "Function" or self.da_study.getObservationOperatorType("Tangent") == "Function" or self.da_study.getObservationOperatorType("Adjoint") == "Function" : + # Use proxy function for YACS + self.hooks = OptimizerHooks(self) + direct = tangent = adjoint = None + if self.da_study.getObservationOperatorType("Direct") == "Function": + direct = self.hooks.Direct + if self.da_study.getObservationOperatorType("Tangent") == "Function" : + tangent = self.hooks.Tangent + if self.da_study.getObservationOperatorType("Adjoint") == "Function" : + adjoint = self.hooks.Adjoint + + # Set ObservationOperator + self.ADD.setObservationOperator(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint}) + + + # Start Assimilation Study + self.ADD.analyze() + + # Assimilation Study is finished self.pool.destroyAll() + def getAlgoResult(self): + print "getAlgoResult" + self.ADD.prepare_to_pickle() + result = pickle.dumps(self.da_study) + return result + # Obligatoire ??? 
def finish(self): print "Algorithme finish" @@ -210,4 +212,6 @@ class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync): return self.tout def getTCForAlgoInit(self): return self.tin + def getTCForAlgoResult(self): + return self.tout diff --git a/src/daSalome/daYacsIntegration/daStudy.py b/src/daSalome/daYacsIntegration/daStudy.py index a54a928..68db0b9 100644 --- a/src/daSalome/daYacsIntegration/daStudy.py +++ b/src/daSalome/daYacsIntegration/daStudy.py @@ -15,8 +15,13 @@ class daStudy: self.ADD = AssimilationStudy(name) self.ADD.setControls() self.algorithm = algorithm + self.Background = None - def initAlgorithm(): + # Observation Management + self.ObservationOperatorType = {} + self.FunctionObservationOperator = {} + + def initAlgorithm(self): self.ADD.setAlgorithm(choice=self.algorithm) def getAssimilationStudy(self): @@ -39,9 +44,14 @@ class daStudy: except AttributeError: raise daError("[daStudy::setBackground] Type is not defined !") + self.Background = Background + if self.BackgroundType == "Vector": self.ADD.setBackground(asVector = Background) + def getBackground(self): + return self.Background + def setBackgroundError(self, BackgroundError): self.ADD.setBackgroundError(asCovariance = BackgroundError) @@ -64,23 +74,33 @@ class daStudy: self.ADD.setObservation(asVector = Observation) def setObservationError(self, ObservationError): - self.ADD.setObservationError(asCovariance = ObservationError) - def setObservationOperatorType(self, Type): + def getObservationOperatorType(self, Name): + rtn = None + try: + rtn = self.ObservationOperatorType[Name] + except: + pass + return rtn + + def setObservationOperatorType(self, Name, Type): if Type == "Matrix": - self.ObservationOperatorType = Type + self.ObservationOperatorType[Name] = Type + elif Type == "Function": + self.ObservationOperatorType[Name] = Type else: raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + " Types are : Matrix") - def setObservationOperator(self, ObservationOperator): - + def setObservationOperator(self, Name, ObservationOperator): try: - self.ObservationOperatorType + self.ObservationOperatorType[Name] except AttributeError: raise daError("[daStudy::setObservationOperator] Type is not defined !") - if self.ObservationOperatorType == "Matrix": + if self.ObservationOperatorType[Name] == "Matrix": self.ADD.setObservationOperator(asMatrix = ObservationOperator) + elif self.ObservationOperatorType[Name] == "Function": + self.FunctionObservationOperator[Name] = ObservationOperator diff --git a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py index 5fead37..beaf528 100644 --- a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py +++ b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py @@ -39,8 +39,8 @@ AssimType["ObservationOperator"] = ["Matrix", "Function"] AssimType["ObservationOperatorAppliedToX"] = ["List"] FromNumpyList = {} -FromNumpyList["Vector"] = ["String"] -FromNumpyList["Matrix"] = ["String"] +FromNumpyList["Vector"] = ["String", "Script"] +FromNumpyList["Matrix"] = ["String", "Script"] FromNumpyList["Function"] = ["Dict"] FromNumpyList["List"] = ["List"] @@ -50,5 +50,9 @@ AssimAlgos = ["Blue", "EnsembleBlue", "Kalman", "LinearLeastSquares", "3DVAR"] AlgoDataRequirements = {} AlgoDataRequirements["Blue"] = ["Background", "BackgroundError", "Observation", "ObservationOperator", "ObservationError"] + +AlgoDataRequirements["3DVAR"] = ["Background", "BackgroundError", + "Observation", 
"ObservationOperator", "ObservationError"] AlgoType = {} AlgoType["Blue"] = "Direct" +AlgoType["3DVAR"] = "Optim" diff --git a/src/daSalome/daYacsSchemaCreator/methods.py b/src/daSalome/daYacsSchemaCreator/methods.py index cacbb07..17e4ea1 100644 --- a/src/daSalome/daYacsSchemaCreator/methods.py +++ b/src/daSalome/daYacsSchemaCreator/methods.py @@ -78,6 +78,19 @@ def create_yacs_proc(study_config): proc.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key)) proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + if data_config["Type"] == "Vector" and data_config["From"] == "Script": + # Create node + factory_back_node = catalogAd._nodeMap["CreateNumpyVectorFromScript"] + back_node = factory_back_node.cloneNode("Get" + key) + back_node.getInputPort("script").edInitPy(data_config["Data"]) + back_node.edAddOutputPort(key, t_pyobj) + proc.edAddChild(back_node) + # Connect node with CreateAssimilationStudy + CAS_node.edAddInputPort(key, t_pyobj) + CAS_node.edAddInputPort(key_type, t_string) + proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key)) + proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + if data_config["Type"] == "Matrix" and data_config["From"] == "String": # Create node factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromString"] @@ -90,6 +103,25 @@ def create_yacs_proc(study_config): proc.edAddDFLink(back_node.getOutputPort("matrix"), CAS_node.getInputPort(key)) proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + if data_config["Type"] == "Matrix" and data_config["From"] == "Script": + # Create node + factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromScript"] + back_node = factory_back_node.cloneNode("Get" + key) + back_node.getInputPort("script").edInitPy(data_config["Data"]) + back_node.edAddOutputPort(key, t_pyobj) + proc.edAddChild(back_node) + # Connect node with CreateAssimilationStudy + CAS_node.edAddInputPort(key, t_pyobj) + CAS_node.edAddInputPort(key_type, t_string) + proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key)) + proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type)) + + if data_config["Type"] == "Function" and data_config["From"] == "Dict" and key == "ObservationOperator": + FunctionDict = data_config["Data"] + for FunctionName in FunctionDict["Function"]: + port_name = "ObservationOperator" + FunctionName + CAS_node.edAddInputPort(port_name, t_string) + CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName]) # Step 3: create compute bloc compute_bloc = runtime.createBloc("compute_bloc") @@ -103,6 +135,54 @@ def create_yacs_proc(study_config): compute_bloc.edAddChild(execute_node) proc.edAddDFLink(CAS_node.getOutputPort("Study"), execute_node.getInputPort("Study")) + if AlgoType[study_config["Algorithm"]] == "Optim": + # We use an optimizer loop + name = "Execute" + study_config["Algorithm"] + algLib = "daYacsIntegration.py" + factoryName = "AssimilationAlgorithm_asynch" + optimizer_node = runtime.createOptimizerLoop(name, algLib, factoryName, "") + compute_bloc.edAddChild(optimizer_node) + proc.edAddDFLink(CAS_node.getOutputPort("Study"), optimizer_node.edGetAlgoInitPort()) + + # Check if we have a python script for OptimizerLoopNode + data_config = study_config["ObservationOperator"] + if data_config["Type"] == "Function" and data_config["From"] == "Dict": + # Get script + FunctionDict = data_config["Data"] + script_filename = "" + 
for FunctionName in FunctionDict["Function"]: + # We currently support only one file + script_filename = FunctionDict["Script"][FunctionName] + break + + # We create a new pyscript node + opt_script_node = runtime.createScriptNode("", "FunctionNode") + if not os.path.exists(script_filename): + logging.fatal("Function script source file does not exists ! :" + script_filename) + sys.exit(1) + try: + script_str= open(script_filename, 'r') + except: + logging.fatal("Exception in opening function script file : " + script_filename) + traceback.print_exc() + sys.exit(1) + opt_script_node.setScript(script_str.read()) + opt_script_node.edAddInputPort("computation", t_pyobj) + opt_script_node.edAddOutputPort("result", t_pyobj) + + # Add it + computation_bloc = runtime.createBloc("computation_bloc") + optimizer_node.edSetNode(computation_bloc) + computation_bloc.edAddChild(opt_script_node) + + # We connect Optimizer with the script + proc.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_node.getInputPort("computation")) + proc.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool()) + + else: + logging.fatal("Fake optim script node currently not implemented") + sys.exit(1) + # Step 4: create post-processing from user configuration if "Analysis" in study_config.keys(): analysis_config = study_config["Analysis"] @@ -114,7 +194,10 @@ def create_yacs_proc(study_config): analysis_node.setScript(final_script) proc.edAddChild(analysis_node) proc.edAddCFLink(compute_bloc, analysis_node) - proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study")) + if AlgoType[study_config["Algorithm"]] == "Optim": + proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study")) + else: + proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study")) elif analysis_config["From"] == "File": factory_analysis_node = catalogAd._nodeMap["SimpleUserAnalysis"] @@ -126,7 +209,7 @@ def create_yacs_proc(study_config): try: analysis_file = open(analysis_config["Data"], 'r') except: - logging.fatal("Exception in openng analysis file : " + str(analysis_config["Data"])) + logging.fatal("Exception in opening analysis file : " + str(analysis_config["Data"])) traceback.print_exc() sys.exit(1) file_text = analysis_file.read() @@ -134,7 +217,10 @@ def create_yacs_proc(study_config): analysis_node.setScript(final_script) proc.edAddChild(analysis_node) proc.edAddCFLink(compute_bloc, analysis_node) - proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study")) + if AlgoType[study_config["Algorithm"]] == "Optim": + proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study")) + else: + proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study")) pass diff --git a/src/tests/daComposant/Plateforme/test017_3DVAR_par_fonction.py b/src/tests/daComposant/Plateforme/test017_3DVAR_par_fonction.py index 64677d8..2ca1d09 100644 --- a/src/tests/daComposant/Plateforme/test017_3DVAR_par_fonction.py +++ b/src/tests/daComposant/Plateforme/test017_3DVAR_par_fonction.py @@ -51,6 +51,12 @@ def test(precision = 1.e-10, dimension = 3): # ------------------------------------------------ R = numpy.matrix(numpy.core.identity(dimension)).T B = numpy.matrix(numpy.core.identity(dimension)).T + + print "xb", xb + print "B", B + print "yo", yo + print "R", R + # # Analyse # ------- diff --git a/src/tests/daSalome/Makefile.am b/src/tests/daSalome/Makefile.am 
index c1f4d30..d76a9db 100644 --- a/src/tests/daSalome/Makefile.am +++ b/src/tests/daSalome/Makefile.am @@ -1,9 +1,11 @@ include $(top_srcdir)/adm_local/make_common_starter.am DATA_INST = \ - test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py + test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py \ + test017_3DVAR_par_fonction.py test017_3DVAR_function_script.py test017_3DVAR_init_data.py testsdasalome_DATA = ${DATA_INST} -EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py +EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py \ + test017_3DVAR_par_fonction.py.in test017_3DVAR_function_script.py test017_3DVAR_init_data.py diff --git a/src/tests/daSalome/test017_3DVAR_function_script.py b/src/tests/daSalome/test017_3DVAR_function_script.py new file mode 100644 index 0000000..6272b5d --- /dev/null +++ b/src/tests/daSalome/test017_3DVAR_function_script.py @@ -0,0 +1,25 @@ +import numpy +import pickle + +print computation["method"] + +dimension = 300 + +H = numpy.matrix(numpy.core.identity(dimension)) + +def FunctionH( X ): + return H * X + +def AdjointH( (X, Y) ): + return H.T * Y + +if computation["method"] == "Direct": + result = FunctionH(computation["data"]) + +if computation["method"] == "Tangent": + result = FunctionH(computation["data"]) + +if computation["method"] == "Adjoint": + result = AdjointH(computation["data"]) + +print "Computation end" diff --git a/src/tests/daSalome/test017_3DVAR_init_data.py b/src/tests/daSalome/test017_3DVAR_init_data.py new file mode 100644 index 0000000..c8ccc24 --- /dev/null +++ b/src/tests/daSalome/test017_3DVAR_init_data.py @@ -0,0 +1,24 @@ +import numpy + +numpy.random.seed(1000) +dimension = 300 + +xt = numpy.matrix(numpy.arange(dimension)).T +Eo = numpy.matrix(numpy.zeros((dimension,))).T +Eb = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,))).T +H = numpy.matrix(numpy.core.identity(dimension)) +B = numpy.matrix(numpy.core.identity(dimension)).T +R = numpy.matrix(numpy.core.identity(dimension)).T + +def FunctionH( X ): + return H * X + +xb = xt + Eb +xb = xb.A1 +yo = FunctionH( xt ) + Eo +yo = yo.A1 + +Background = xb +BackgroundError = B +Observation = yo +ObservationError = R diff --git a/src/tests/daSalome/test017_3DVAR_par_fonction.py.in b/src/tests/daSalome/test017_3DVAR_par_fonction.py.in new file mode 100644 index 0000000..253af8a --- /dev/null +++ b/src/tests/daSalome/test017_3DVAR_par_fonction.py.in @@ -0,0 +1,62 @@ +#-*-coding:iso-8859-1-*- +study_config = {} +study_config["Name"] = "test017_3DVAR" +study_config["Algorithm"] = "3DVAR" + +Background_config = {} +Background_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py" +Background_config["Type"] = "Vector" +Background_config["From"] = "Script" +study_config["Background"] = Background_config + +BackgroundError_config = {} +BackgroundError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py" +BackgroundError_config["Type"] = "Matrix" +BackgroundError_config["From"] = "Script" +study_config["BackgroundError"] = BackgroundError_config + +Observation_config = {} +Observation_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py" +Observation_config["Type"] = "Vector" +Observation_config["From"] = "Script" +study_config["Observation"] = Observation_config + +ObservationError_config = {} +ObservationError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py" +ObservationError_config["Type"] = "Matrix" 
+ObservationError_config["From"] = "Script" +study_config["ObservationError"] = ObservationError_config + +FunctionDict = {} +FunctionDict["Function"] = ["Direct", "Tangent", "Adjoint"] +FunctionDict["Script"] = {} +FunctionDict["Script"]["Direct"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py" +FunctionDict["Script"]["Tangent"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py" +FunctionDict["Script"]["Adjoint"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py" +ObservationOperator_config = {} +ObservationOperator_config["Data"] = FunctionDict +ObservationOperator_config["Type"] = "Function" +ObservationOperator_config["From"] = "Dict" +study_config["ObservationOperator"] = ObservationOperator_config + +Analysis_config = {} +Analysis_config["Data"] = """ +import numpy + +dimension = 300 +precision = 1.e-10 +xt = numpy.matrix(numpy.arange(dimension)).T +xb = Study.getBackground() + +xa = numpy.array(ADD.get("Analysis").valueserie(0)) +d = numpy.array(ADD.get("Innovation").valueserie(0)) +# +# Verification du resultat +# ------------------------ +if max(abs(xa - (xb+xt.A1)/2)) > precision: + raise ValueError("Resultat du test errone (1)") +else: + print " Test correct, erreur maximale inferieure à %s"%precision +""" +Analysis_config["From"] = "String" +study_config["Analysis"] = Analysis_config -- 2.39.2
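For reference, the round trip this patch sets up between `OptimizerHooks` and the YACS `FunctionNode` reduces to a small protocol: the hook pickles a dict with a `"method"` key (`"Direct"`, `"Tangent"` or `"Adjoint"`) and a `"data"` payload, pushes it into the OptimizerLoop pool, and the script node (here `test017_3DVAR_function_script.py`) dispatches on `"method"` and hands back `result` through the out-pool port. Below is a minimal standalone sketch of that dispatch under stated assumptions: it runs outside the SALOME/YACS runtime, the helper `run_computation` and the toy operator `H` are illustrative only, and the explicit `pickle.dumps` on the result stands in for the serialization YACS performs on a `pyobj` port.

```python
# Sketch (assumption): standalone imitation of the computation-dict protocol
# used between OptimizerHooks and the FunctionNode script in this patch.
# run_computation, FunctionH and AdjointH mirror test017 but run without YACS.
import pickle
import numpy

dimension = 3
H = numpy.matrix(numpy.identity(dimension))

def FunctionH(X):
    # Direct (and, for this linear toy model, also Tangent) operator: H * X
    return H * X

def AdjointH(pair):
    # Adjoint operator: receives the (X, Y) pair and applies H^T to Y
    X, Y = pair
    return H.T * Y

def run_computation(pickled_request):
    # What the FunctionNode script does with one in-pool sample:
    # unpickle the request, dispatch on "method", return the serialized result.
    computation = pickle.loads(pickled_request)
    data = computation["data"]
    if computation["method"] in ("Direct", "Tangent"):
        result = FunctionH(data)
    elif computation["method"] == "Adjoint":
        result = AdjointH(data)
    else:
        raise ValueError("unknown method: %s" % computation["method"])
    return pickle.dumps(result)

# What OptimizerHooks.Direct does on its side, minus the pool/wait machinery:
X = numpy.matrix(numpy.arange(dimension)).T
request = pickle.dumps({"method": "Direct", "data": X})
Y = pickle.loads(run_computation(request))
print(Y)
```

In the patch itself the same dispatch is split across the three hooks (`Direct`, `Tangent`, `Adjoint` in `daOptimizerLoop.py`) and the single function script, which is why one script file can serve all three entries of `FunctionDict["Script"]` in `test017_3DVAR_par_fonction.py.in`.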