src/tests/Makefile
src/tests/daSalome/Makefile
src/tests/daSalome/test000_Blue_AnalysisFile.py
+ src/tests/daSalome/test017_3DVAR_par_fonction.py
src/tests/daComposant/Makefile
src/tests/daComposant/Plateforme/Makefile
src/tests/daComposant/Plateforme/context.py
except NameError:
pass
else:
- print "Background is", Background
- print "BackgroundType is", BackgroundType
+ #print "Background is", Background
+ #print "BackgroundType is", BackgroundType
assim_study.setBackgroundType(BackgroundType)
assim_study.setBackground(Background)
except NameError:
pass
else:
- print "BackgroundError is", BackgroundError
- print "BackgroundErrorType is", BackgroundErrorType
+ #print "BackgroundError is", BackgroundError
+ #print "BackgroundErrorType is", BackgroundErrorType
assim_study.setBackgroundError(BackgroundError)
# Observation
except NameError:
pass
else:
- print "Observation is", Observation
- print "ObservationType is", ObservationType
+ #print "Observation is", Observation
+ #print "ObservationType is", ObservationType
assim_study.setObservationType(ObservationType)
assim_study.setObservation(Observation)
except NameError:
pass
else:
- print "ObservationError is", ObservationError
- print "ObservationErrorType is", ObservationErrorType
+ #print "ObservationError is", ObservationError
+ #print "ObservationErrorType is", ObservationErrorType
assim_study.setObservationError(ObservationError)
# ObservationOperator
+ObservationOperatorOk = 0
try:
ObservationOperator
except NameError:
pass
else:
- print "ObservationOperator is", ObservationOperator
- print "ObservationOperatorType is", ObservationOperatorType
+ #print "ObservationOperator is", ObservationOperator
+ #print "ObservationOperatorType is", ObservationOperatorType
- assim_study.setObservationOperatorType(ObservationOperatorType)
- assim_study.setObservationOperator(ObservationOperator)
+ assim_study.setObservationOperatorType("Matrix", ObservationOperatorType)
+ assim_study.setObservationOperator("Matrix", ObservationOperator)
+ ObservationOperatorOk = 1
+
+if ObservationOperatorOk == 0:
+ try:
+ ObservationOperatorDirect
+ except NameError:
+ pass
+ else:
+ #print "ObservationOperatorDirect is", ObservationOperatorDirect
+ assim_study.setObservationOperatorType("Direct", "Function")
+ assim_study.setObservationOperator("Direct", ObservationOperatorDirect)
+ try:
+ ObservationOperatorTangent
+ except NameError:
+ pass
+ else:
+ #print "ObservationOperatorTangent is", ObservationOperatorTangent
+ assim_study.setObservationOperatorType("Tangent", "Function")
+ assim_study.setObservationOperator("Tangent", ObservationOperatorTangent)
+ try:
+ ObservationOperatorAdjoint
+ except NameError:
+ pass
+ else:
+ #print "ObservationOperatorAdjoint is", ObservationOperatorAdjoint
+ assim_study.setObservationOperatorType("Adjoint", "Function")
+ assim_study.setObservationOperator("Adjoint", ObservationOperatorAdjoint)
+
Study = assim_study
]]>
<outport name="type" type="string"/>
</inline>
+ <inline name="CreateNumpyMatrixFromScript">
+ <script><code><![CDATA[
+print "Entering in CreateNumpyMatrixFromScript"
+execfile(script)
+type = "Matrix"
+]]></code></script>
+ <inport name="script" type="string"/>
+ <outport name="type" type="string"/>
+ </inline>
+
<inline name="CreateNumpyVectorFromString">
<script><code><![CDATA[
print "Entering in CreateNumpyVectorFromString"
<outport name="type" type="string"/>
</inline>
+ <inline name="CreateNumpyVectorFromScript">
+ <script><code><![CDATA[
+print "Entering in CreateNumpyVectorFromScript"
+execfile(script)
+type = "Vector"
+]]></code></script>
+ <inport name="script" type="string"/>
+ <outport name="type" type="string"/>
+ </inline>
+
<inline name="SimpleExecuteDirectAlgorithm">
<script><code><![CDATA[
print "Entering in SimpleExecuteDirectAlgorithm"
]]></code></script>
<inport name="Study" type="pyobj"/>
</inline>
+
+ <inline name="FakeOptimizerLoopNode">
+ <script><code><![CDATA[
+print "Entering in FakeOptimizerLoopNode"
+result = None
+]]></code></script>
+ <inport name="computation" type="pyobj"/>
+ <outport name="result" type="pyobj"/>
+ </inline>
+
</proc>
import sys ; sys.path.insert(0, "../daCore")
import logging
-import Persistence
-from BasicObjects import Algorithm
-import PlatformInfo ; m = PlatformInfo.SystemUsage()
+from daCore import Persistence
+from daCore.BasicObjects import Algorithm
+from daCore import PlatformInfo ; m = PlatformInfo.SystemUsage()
import numpy
import scipy.optimize
sys.path = list(set(sys.path)) # Conserve en unique exemplaire chaque chemin
return 1
+ def prepare_to_pickle(self):
+ self.__algorithmFile = None
+ self.__diagnosticFile = None
+ self.__H = {}
+
# ==============================================================================
if __name__ == "__main__":
print '\n AUTODIAGNOSTIC \n'
+from daOptimizerLoop import *
import threading
from daCore.AssimilationStudy import AssimilationStudy
-import daStudy
+from daYacsIntegration import daStudy
class OptimizerHooks:
local_counter = self.sample_counter
# 2: Put sample in the job pool
- matrix_to_pool = pickle.dumps(X)
- self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+ computation = {}
+ computation["method"] = "Direct"
+ computation["data"] = X
+ computation = pickle.dumps(computation)
+ self.optim_algo.pool.pushInSample(local_counter, computation)
# 3: Wait
while 1:
+ print "waiting"
self.optim_algo.signalMasterAndWait()
+ print "signal"
if self.optim_algo.isTerminationRequested():
self.optim_algo.pool.destroyAll()
return
else:
# Get current Id
- sample_id = self.pool.getCurrentId()
+ sample_id = self.optim_algo.pool.getCurrentId()
if sample_id == local_counter:
# 4: Data is ready
matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
return Y
else:
print "sync false is not yet implemented"
- raise ValueError("sync == false not yet implemented")
+ raise daStudy.daError("sync == false not yet implemented")
def Tangent(self, X, sync = 1):
print "Call Tangent OptimizerHooks"
local_counter = self.sample_counter
# 2: Put sample in the job pool
- matrix_to_pool = pickle.dumps(X)
- self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+ computation = {}
+ computation["method"] = "Tangent"
+ computation["data"] = X
+ computation = pickle.dumps(computation)
+ self.optim_algo.pool.pushInSample(local_counter, computation)
# 3: Wait
while 1:
return
else:
# Get current Id
- sample_id = self.pool.getCurrentId()
+ sample_id = self.optim_algo.pool.getCurrentId()
if sample_id == local_counter:
# 4: Data is ready
matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
return Y
else:
print "sync false is not yet implemented"
- raise ValueError("sync == false not yet implemented")
+ raise daStudy.daError("sync == false not yet implemented")
def Adjoint(self, (X, Y), sync = 1):
print "Call Adjoint OptimizerHooks"
local_counter = self.sample_counter
# 2: Put sample in the job pool
- matrix_to_pool = pickle.dumps(Y)
- self.optim_algo.pool.pushInSample(local_counter, matrix_to_pool)
+ computation = {}
+ computation["method"] = "Adjoint"
+ computation["data"] = (X, Y)
+ computation = pickle.dumps(computation)
+ self.optim_algo.pool.pushInSample(local_counter, computation)
# 3: Wait
while 1:
+ print "waiting"
self.optim_algo.signalMasterAndWait()
+ print "signal"
if self.optim_algo.isTerminationRequested():
self.optim_algo.pool.destroyAll()
return
else:
# Get current Id
- sample_id = self.pool.getCurrentId()
+ sample_id = self.optim_algo.pool.getCurrentId()
if sample_id == local_counter:
# 4: Data is ready
matrix_from_pool = self.optim_algo.pool.getOutSample(local_counter).getStringValue()
return Z
else:
print "sync false is not yet implemented"
- raise ValueError("sync == false not yet implemented")
+ raise daStudy.daError("sync == false not yet implemented")
-class AssimilationAlgorithm_asynch_3DVAR(SALOMERuntime.OptimizerAlgASync):
+class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync):
def __init__(self):
SALOMERuntime.RuntimeSALOME_setRuntime()
print "Algorithme initialize"
# get the daStudy
- print "Input is ", input
+ #print "[Debug] Input is ", input
str_da_study = input.getStringValue()
- da_study = pickle.loads(str_da_study)
- print "da_study is ", da_study
- da_study.initAlgorithm()
- self.ADD = da_study.getAssimilationStudy()
+ self.da_study = pickle.loads(str_da_study)
+ #print "[Debug] da_study is ", self.da_study
+ self.da_study.initAlgorithm()
+ self.ADD = self.da_study.getAssimilationStudy()
def startToTakeDecision(self):
print "Algorithme startToTakeDecision"
- #TODO !!
-
- precision = 1.e-13
- dimension = 3
-
- xt = numpy.matrix(numpy.arange(dimension)).T
- Eo = numpy.matrix(numpy.zeros((dimension,))).T
- Eb = numpy.matrix(numpy.zeros((dimension,))).T
- H = numpy.matrix(numpy.core.identity(dimension))
- xb = xt + Eb
- yo = FunctionH( xt ) + Eo
- xb = xb.A1
- yo = yo.A1
- R = numpy.matrix(numpy.core.identity(dimension)).T
- B = numpy.matrix(numpy.core.identity(dimension)).T
-
- ADD = AssimilationStudy()
- ADD.setBackground (asVector = xb )
- ADD.setBackgroundError (asCovariance = B )
- ADD.setObservation (asVector = yo )
- ADD.setObservationError (asCovariance = R )
- ADD.setObservationOperator(asFunction = {"Tangent":FunctionH,
- "Adjoint":AdjointH} )
- ADD.setControls()
- ADD.setAlgorithm(choice="3DVAR")
- ADD.analyze()
-
- xa = numpy.array(ADD.get("Analysis").valueserie(0))
- d = numpy.array(ADD.get("Innovation").valueserie(0))
- if max(abs(xa - xb)) > precision:
- raise ValueError("Résultat du test erroné (1)")
- elif max(abs(d)) > precision:
- raise ValueError("Résultat du test erroné (2)")
- else:
- print " Test correct, erreur maximale inférieure à %s"%precision
- print
- # On a fini !
+ # Check if ObservationOperator is already set
+ if self.da_study.getObservationOperatorType("Direct") == "Function" or self.da_study.getObservationOperatorType("Tangent") == "Function" or self.da_study.getObservationOperatorType("Adjoint") == "Function" :
+ # Use proxy function for YACS
+ self.hooks = OptimizerHooks(self)
+ direct = tangent = adjoint = None
+ if self.da_study.getObservationOperatorType("Direct") == "Function":
+ direct = self.hooks.Direct
+ if self.da_study.getObservationOperatorType("Tangent") == "Function" :
+ tangent = self.hooks.Tangent
+ if self.da_study.getObservationOperatorType("Adjoint") == "Function" :
+ adjoint = self.hooks.Adjoint
+
+ # Set ObservationOperator
+ self.ADD.setObservationOperator(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
+
+
+ # Start Assimilation Study
+ self.ADD.analyze()
+
+ # Assimilation Study is finished
self.pool.destroyAll()
+ def getAlgoResult(self):
+ print "getAlgoResult"
+ self.ADD.prepare_to_pickle()
+ result = pickle.dumps(self.da_study)
+ return result
+
# Obligatoire ???
def finish(self):
print "Algorithme finish"
return self.tout
def getTCForAlgoInit(self):
return self.tin
+ def getTCForAlgoResult(self):
+ return self.tout
self.ADD = AssimilationStudy(name)
self.ADD.setControls()
self.algorithm = algorithm
+ self.Background = None
- def initAlgorithm():
+ # Observation Management
+ self.ObservationOperatorType = {}
+ self.FunctionObservationOperator = {}
+
+ def initAlgorithm(self):
self.ADD.setAlgorithm(choice=self.algorithm)
def getAssimilationStudy(self):
except AttributeError:
raise daError("[daStudy::setBackground] Type is not defined !")
+ self.Background = Background
+
if self.BackgroundType == "Vector":
self.ADD.setBackground(asVector = Background)
+ def getBackground(self):
+ return self.Background
+
def setBackgroundError(self, BackgroundError):
self.ADD.setBackgroundError(asCovariance = BackgroundError)
self.ADD.setObservation(asVector = Observation)
def setObservationError(self, ObservationError):
-
self.ADD.setObservationError(asCovariance = ObservationError)
- def setObservationOperatorType(self, Type):
+ def getObservationOperatorType(self, Name):
+ rtn = None
+ try:
+ rtn = self.ObservationOperatorType[Name]
+ except KeyError:
+ pass
+ return rtn
+
+ def setObservationOperatorType(self, Name, Type):
if Type == "Matrix":
- self.ObservationOperatorType = Type
+ self.ObservationOperatorType[Name] = Type
+ elif Type == "Function":
+ self.ObservationOperatorType[Name] = Type
else:
- raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + " Types are : Matrix")
+ raise daError("[daStudy::setObservationOperatorType] Type is unknown : " + Type + " Types are : Matrix, Function")
- def setObservationOperator(self, ObservationOperator):
-
+ def setObservationOperator(self, Name, ObservationOperator):
try:
- self.ObservationOperatorType
+ self.ObservationOperatorType[Name]
- except AttributeError:
+ except (AttributeError, KeyError):
raise daError("[daStudy::setObservationOperator] Type is not defined !")
- if self.ObservationOperatorType == "Matrix":
+ if self.ObservationOperatorType[Name] == "Matrix":
self.ADD.setObservationOperator(asMatrix = ObservationOperator)
+ elif self.ObservationOperatorType[Name] == "Function":
+ self.FunctionObservationOperator[Name] = ObservationOperator
AssimType["ObservationOperatorAppliedToX"] = ["List"]
FromNumpyList = {}
-FromNumpyList["Vector"] = ["String"]
-FromNumpyList["Matrix"] = ["String"]
+FromNumpyList["Vector"] = ["String", "Script"]
+FromNumpyList["Matrix"] = ["String", "Script"]
FromNumpyList["Function"] = ["Dict"]
FromNumpyList["List"] = ["List"]
AlgoDataRequirements = {}
AlgoDataRequirements["Blue"] = ["Background", "BackgroundError",
"Observation", "ObservationOperator", "ObservationError"]
+
+AlgoDataRequirements["3DVAR"] = ["Background", "BackgroundError",
+ "Observation", "ObservationOperator", "ObservationError"]
AlgoType = {}
AlgoType["Blue"] = "Direct"
+AlgoType["3DVAR"] = "Optim"
proc.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+ if data_config["Type"] == "Vector" and data_config["From"] == "Script":
+ # Create node
+ factory_back_node = catalogAd._nodeMap["CreateNumpyVectorFromScript"]
+ back_node = factory_back_node.cloneNode("Get" + key)
+ back_node.getInputPort("script").edInitPy(data_config["Data"])
+ back_node.edAddOutputPort(key, t_pyobj)
+ proc.edAddChild(back_node)
+ # Connect node with CreateAssimilationStudy
+ CAS_node.edAddInputPort(key, t_pyobj)
+ CAS_node.edAddInputPort(key_type, t_string)
+ proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+ proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+
if data_config["Type"] == "Matrix" and data_config["From"] == "String":
# Create node
factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromString"]
proc.edAddDFLink(back_node.getOutputPort("matrix"), CAS_node.getInputPort(key))
proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+ if data_config["Type"] == "Matrix" and data_config["From"] == "Script":
+ # Create node
+ factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromScript"]
+ back_node = factory_back_node.cloneNode("Get" + key)
+ back_node.getInputPort("script").edInitPy(data_config["Data"])
+ back_node.edAddOutputPort(key, t_pyobj)
+ proc.edAddChild(back_node)
+ # Connect node with CreateAssimilationStudy
+ CAS_node.edAddInputPort(key, t_pyobj)
+ CAS_node.edAddInputPort(key_type, t_string)
+ proc.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+ proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+
+ if data_config["Type"] == "Function" and data_config["From"] == "Dict" and key == "ObservationOperator":
+ FunctionDict = data_config["Data"]
+ for FunctionName in FunctionDict["Function"]:
+ port_name = "ObservationOperator" + FunctionName
+ CAS_node.edAddInputPort(port_name, t_string)
+ CAS_node.getInputPort(port_name).edInitPy(FunctionDict["Script"][FunctionName])
# Step 3: create compute bloc
compute_bloc = runtime.createBloc("compute_bloc")
compute_bloc.edAddChild(execute_node)
proc.edAddDFLink(CAS_node.getOutputPort("Study"), execute_node.getInputPort("Study"))
+ if AlgoType[study_config["Algorithm"]] == "Optim":
+ # We use an optimizer loop
+ name = "Execute" + study_config["Algorithm"]
+ algLib = "daYacsIntegration.py"
+ factoryName = "AssimilationAlgorithm_asynch"
+ optimizer_node = runtime.createOptimizerLoop(name, algLib, factoryName, "")
+ compute_bloc.edAddChild(optimizer_node)
+ proc.edAddDFLink(CAS_node.getOutputPort("Study"), optimizer_node.edGetAlgoInitPort())
+
+ # Check if we have a python script for OptimizerLoopNode
+ data_config = study_config["ObservationOperator"]
+ if data_config["Type"] == "Function" and data_config["From"] == "Dict":
+ # Get script
+ FunctionDict = data_config["Data"]
+ script_filename = ""
+ for FunctionName in FunctionDict["Function"]:
+ # We currently support only one file
+ script_filename = FunctionDict["Script"][FunctionName]
+ break
+
+ # We create a new pyscript node
+ opt_script_node = runtime.createScriptNode("", "FunctionNode")
+ if not os.path.exists(script_filename):
+ logging.fatal("Function script source file does not exist ! : " + script_filename)
+ sys.exit(1)
+ try:
+ script_str= open(script_filename, 'r')
+ except:
+ logging.fatal("Exception in opening function script file : " + script_filename)
+ traceback.print_exc()
+ sys.exit(1)
+ opt_script_node.setScript(script_str.read())
+ opt_script_node.edAddInputPort("computation", t_pyobj)
+ opt_script_node.edAddOutputPort("result", t_pyobj)
+
+ # Add it
+ computation_bloc = runtime.createBloc("computation_bloc")
+ optimizer_node.edSetNode(computation_bloc)
+ computation_bloc.edAddChild(opt_script_node)
+
+ # We connect Optimizer with the script
+ proc.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_node.getInputPort("computation"))
+ proc.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
+ else:
+ logging.fatal("Fake optim script node currently not implemented")
+ sys.exit(1)
+
# Step 4: create post-processing from user configuration
if "Analysis" in study_config.keys():
analysis_config = study_config["Analysis"]
analysis_node.setScript(final_script)
proc.edAddChild(analysis_node)
proc.edAddCFLink(compute_bloc, analysis_node)
- proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
+ if AlgoType[study_config["Algorithm"]] == "Optim":
+ proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study"))
+ else:
+ proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
elif analysis_config["From"] == "File":
factory_analysis_node = catalogAd._nodeMap["SimpleUserAnalysis"]
try:
analysis_file = open(analysis_config["Data"], 'r')
except:
- logging.fatal("Exception in openng analysis file : " + str(analysis_config["Data"]))
+ logging.fatal("Exception in opening analysis file : " + str(analysis_config["Data"]))
traceback.print_exc()
sys.exit(1)
file_text = analysis_file.read()
analysis_node.setScript(final_script)
proc.edAddChild(analysis_node)
proc.edAddCFLink(compute_bloc, analysis_node)
- proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
+ if AlgoType[study_config["Algorithm"]] == "Optim":
+ proc.edAddDFLink(optimizer_node.edGetAlgoResultPort(), analysis_node.getInputPort("Study"))
+ else:
+ proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
pass
# ------------------------------------------------
R = numpy.matrix(numpy.core.identity(dimension)).T
B = numpy.matrix(numpy.core.identity(dimension)).T
+
+ print "xb", xb
+ print "B", B
+ print "yo", yo
+ print "R", R
+
#
# Analyse
# -------
include $(top_srcdir)/adm_local/make_common_starter.am
DATA_INST = \
- test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py
+ test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py test000_Blue.py \
+ test017_3DVAR_par_fonction.py test017_3DVAR_function_script.py test017_3DVAR_init_data.py
testsdasalome_DATA = ${DATA_INST}
-EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py
+EXTRA_DIST = test000_Blue_AnalysisCode.py test000_Blue_AnalysisFile.py.in test000_Blue.py \
+ test017_3DVAR_par_fonction.py.in test017_3DVAR_function_script.py test017_3DVAR_init_data.py
--- /dev/null
+import numpy
+import pickle
+
+print computation["method"]
+
+dimension = 300
+
+H = numpy.matrix(numpy.core.identity(dimension))
+
+def FunctionH( X ):
+ return H * X
+
+def AdjointH( (X, Y) ):
+ return H.T * Y
+
+if computation["method"] == "Direct":
+ result = FunctionH(computation["data"])
+
+if computation["method"] == "Tangent":
+ result = FunctionH(computation["data"])
+
+if computation["method"] == "Adjoint":
+ result = AdjointH(computation["data"])
+
+print "Computation end"
--- /dev/null
+import numpy
+
+numpy.random.seed(1000)
+dimension = 300
+
+xt = numpy.matrix(numpy.arange(dimension)).T
+Eo = numpy.matrix(numpy.zeros((dimension,))).T
+Eb = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,))).T
+H = numpy.matrix(numpy.core.identity(dimension))
+B = numpy.matrix(numpy.core.identity(dimension)).T
+R = numpy.matrix(numpy.core.identity(dimension)).T
+
+def FunctionH( X ):
+ return H * X
+
+xb = xt + Eb
+xb = xb.A1
+yo = FunctionH( xt ) + Eo
+yo = yo.A1
+
+Background = xb
+BackgroundError = B
+Observation = yo
+ObservationError = R
--- /dev/null
+#-*-coding:iso-8859-1-*-
+study_config = {}
+study_config["Name"] = "test017_3DVAR"
+study_config["Algorithm"] = "3DVAR"
+
+Background_config = {}
+Background_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+Background_config["Type"] = "Vector"
+Background_config["From"] = "Script"
+study_config["Background"] = Background_config
+
+BackgroundError_config = {}
+BackgroundError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+BackgroundError_config["Type"] = "Matrix"
+BackgroundError_config["From"] = "Script"
+study_config["BackgroundError"] = BackgroundError_config
+
+Observation_config = {}
+Observation_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+Observation_config["Type"] = "Vector"
+Observation_config["From"] = "Script"
+study_config["Observation"] = Observation_config
+
+ObservationError_config = {}
+ObservationError_config["Data"] = "@prefix@/tests/daSalome/test017_3DVAR_init_data.py"
+ObservationError_config["Type"] = "Matrix"
+ObservationError_config["From"] = "Script"
+study_config["ObservationError"] = ObservationError_config
+
+FunctionDict = {}
+FunctionDict["Function"] = ["Direct", "Tangent", "Adjoint"]
+FunctionDict["Script"] = {}
+FunctionDict["Script"]["Direct"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+FunctionDict["Script"]["Tangent"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+FunctionDict["Script"]["Adjoint"] = "@prefix@/tests/daSalome/test017_3DVAR_function_script.py"
+ObservationOperator_config = {}
+ObservationOperator_config["Data"] = FunctionDict
+ObservationOperator_config["Type"] = "Function"
+ObservationOperator_config["From"] = "Dict"
+study_config["ObservationOperator"] = ObservationOperator_config
+
+Analysis_config = {}
+Analysis_config["Data"] = """
+import numpy
+
+dimension = 300
+precision = 1.e-10
+xt = numpy.matrix(numpy.arange(dimension)).T
+xb = Study.getBackground()
+
+xa = numpy.array(ADD.get("Analysis").valueserie(0))
+d = numpy.array(ADD.get("Innovation").valueserie(0))
+#
+# Verification du resultat
+# ------------------------
+if max(abs(xa - (xb+xt.A1)/2)) > precision:
+ raise ValueError("Resultat du test errone (1)")
+else:
+ print " Test correct, erreur maximale inferieure à %s"%precision
+"""
+Analysis_config["From"] = "String"
+study_config["Analysis"] = Analysis_config