assim_study.setObservationErrorStored(ObservationErrorStored)
assim_study.setObservationError(ObservationError)
+# EvolutionError
+try:
+ EvolutionError
+except NameError:
+ pass
+else:
+ logging.debug("CREATE EvolutionError is %s"%EvolutionError)
+ logging.debug("CREATE EvolutionErrorStored is %s"%EvolutionErrorStored)
+ assim_study.setEvolutionErrorStored(EvolutionErrorStored)
+ assim_study.setEvolutionError(EvolutionError)
+
# ObservationOperator
ObservationOperatorOk = 0
try:
assim_study.setObservationOperatorType("Adjoint", "Function")
assim_study.setObservationOperator("Adjoint", ObservationOperatorAdjoint)
+# EvolutionModel
+EvolutionModelOk = 0
+try:
+ EvolutionModel
+except NameError:
+ pass
+else:
+ logging.debug("CREATE EvolutionModel is %s"%EvolutionModel)
+ logging.debug("CREATE EvolutionModelType is %s"%EvolutionModelType)
+ assim_study.setEvolutionModelType("Matrix", EvolutionModelType)
+ assim_study.setEvolutionModel("Matrix", EvolutionModel)
+ EvolutionModelOk = 1
+
+if EvolutionModelOk == 0:
+ try:
+ EvolutionModelDirect
+ except NameError:
+ pass
+ else:
+ logging.debug("CREATE EvolutionModelDirect is %s"%EvolutionModelDirect)
+ assim_study.setEvolutionModelType("Direct", "Function")
+ assim_study.setEvolutionModel("Direct", EvolutionModelDirect)
+ try:
+ EvolutionModelTangent
+ except NameError:
+ pass
+ else:
+ logging.debug("CREATE EvolutionModelTangent is %s"%EvolutionModelTangent)
+ assim_study.setEvolutionModelType("Tangent", "Function")
+ assim_study.setEvolutionModel("Tangent", EvolutionModelTangent)
+ try:
+ EvolutionModelAdjoint
+ except NameError:
+ pass
+ else:
+ logging.debug("CREATE EvolutionModelAdjoint is %s"%EvolutionModelAdjoint)
+ assim_study.setEvolutionModelType("Adjoint", "Function")
+ assim_study.setEvolutionModel("Adjoint", EvolutionModelAdjoint)
+
# Variables
for name, size in zip(InputVariablesNames, InputVariablesSizes):
assim_study.setInputVariable(name, size)
filepath = os.path.dirname(script)
filename = os.path.basename(script)
module_name = os.path.splitext(filename)[0]
-sys.path.insert(0,filepath)
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+ sys.path.insert(0,filepath)
# Import script
__import__(module_name)
__import__(module_name)
user_script_module = sys.modules[module_name]
+# Get Data from script
+]]></code></script>
+ <inport name="script" type="string"/>
+ <outport name="type" type="string"/>
+ <outport name="stored" type="bool"/>
+ </inline>
+
+ <inline name="CreateNumpyVectorSerieFromString">
+ <script><code><![CDATA[
+import numpy, logging
+logging.debug("CREATE Entering in CreateNumpyVectorSerieFromString")
+vector = numpy.matrix(vector_in_string)
+type = "VectorSerie"
+logging.debug("VectorSerie is %s"%vector)
+]]></code></script>
+ <inport name="vector_in_string" type="string"/>
+ <outport name="vector" type="pyobj"/>
+ <outport name="type" type="string"/>
+ <outport name="stored" type="bool"/>
+ </inline>
+
+ <inline name="CreateNumpyVectorSerieFromScript">
+ <script><code><![CDATA[
+import logging
+logging.debug("CREATE Entering in CreateNumpyVectorSerieFromScript")
+type = "VectorSerie"
+
+# Get file path and filename
+import sys
+import os
+filepath = os.path.dirname(script)
+filename = os.path.basename(script)
+module_name = os.path.splitext(filename)[0]
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+ sys.path.insert(0,filepath)
+
+# Import script
+__import__(module_name)
+user_script_module = sys.modules[module_name]
+
# Get Data from script
]]></code></script>
<inport name="script" type="string"/>
filepath = os.path.dirname(script)
filename = os.path.basename(script)
module_name = os.path.splitext(filename)[0]
-sys.path.insert(0,filepath)
+if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath,0,1)>0):
+ sys.path.insert(0,filepath)
# Import script
__import__(module_name)
<script><code><![CDATA[
import logging
logging.debug("CREATE Entering in ReadForSwitchNode")
+logging.debug(" with input data : "+str(data["specificParameters"]))
switch_value = -1
for param in data["specificParameters"]:
if param["name"] == "switch_value":
switch_value = int(param["value"])
+logging.debug(" switching to value : "+str(switch_value))
]]></code></script>
<inport name="data" type="SALOME_TYPES/ParametricInput"/>
<outport name="data" type="SALOME_TYPES/ParametricInput"/>
#
# Opérateur d'observation
# -----------------------
- Hm = H["Direct"].asMatrix()
- Ha = H["Adjoint"].asMatrix()
+ Hm = H["Tangent"].asMatrix(None)
+ Ha = H["Adjoint"].asMatrix(None)
#
# Utilisation éventuelle d'un vecteur H(Xb) précalculé
# ----------------------------------------------------
#
# Initialisation des opérateurs d'observation et de la matrice gain
# -----------------------------------------------------------------
- Hm = H["Direct"].asMatrix()
- Ha = H["Adjoint"].asMatrix()
+ Hm = H["Tangent"].asMatrix(None)
+ Ha = H["Adjoint"].asMatrix(None)
#
# Calcul de la matrice de gain dans l'espace le plus petit et de l'analyse
# ------------------------------------------------------------------------
--- /dev/null
+#-*-coding:iso-8859-1-*-
+#
+# Copyright (C) 2008-2012 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import logging
+from daCore import BasicObjects, PlatformInfo
+m = PlatformInfo.SystemUsage()
+
+# ==============================================================================
+class ElementaryAlgorithm(BasicObjects.Algorithm):
+    # Linear Kalman filter algorithm: sequential predict/correct estimation
+    # driven by a serie of observations Y and the evolution model M.
+    def __init__(self):
+        BasicObjects.Algorithm.__init__(self, "KALMANFILTER")
+        self.defineRequiredParameter(
+            name     = "CalculateAPosterioriCovariance",
+            default  = False,
+            typecast = bool,
+            message  = "Calcul de la covariance a posteriori",
+            )
+
+    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
+        """
+        Computes the Kalman filter estimator.
+
+        Note: observations are exploited starting from time step 1, and are
+        indexed accordingly in Y. Step 0 is therefore not used, since the
+        first Kalman stage goes from step 0 to step 1 using the observation
+        of step 1.
+        """
+        logging.debug("%s Lancement"%self._name)
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        #
+        # Algorithm control parameters
+        # ----------------------------
+        self.setParameters(Parameters)
+        #
+        # Observation operator (tangent and adjoint matrix forms)
+        # -------------------------------------------------------
+        Hm = H["Tangent"].asMatrix(None)
+        Ha = H["Adjoint"].asMatrix(None)
+        #
+        if B is None:
+            raise ValueError("Background error covariance matrix has to be properly defined!")
+        if R is None:
+            raise ValueError("Observation error covariance matrix has to be properly defined!")
+        #
+        # Evolution operator (tangent and adjoint matrix forms)
+        # -----------------------------------------------------
+        Mm = M["Tangent"].asMatrix(None)
+        Mt = M["Adjoint"].asMatrix(None)
+        #
+        # The number of Kalman steps equals the number of observation steps
+        # -----------------------------------------------------------------
+        duration = Y.stepnumber()
+        #
+        # Initialization: start from the background state and covariance
+        # --------------------------------------------------------------
+        Xn = Xb
+        Pn = B
+        self.StoredVariables["Analysis"].store( Xn.A1 )
+        if self._parameters["CalculateAPosterioriCovariance"]:
+            self.StoredVariables["APosterioriCovariance"].store( Pn )
+        #
+        for step in range(duration-1):
+            logging.debug("%s Etape de Kalman %i (i.e. %i->%i) sur un total de %i"%(self._name, step+1, step,step+1, duration-1))
+            #
+            # Prediction (forecast) step
+            # --------------------------
+            Xn_predicted = Mm * Xn
+            Pn_predicted = Mm * Pn * Mt + Q
+            #
+            # Correction (analysis) step
+            # --------------------------
+            d  = Y.valueserie(step+1) - Hm * Xn_predicted                # innovation
+            K  = Pn_predicted * Ha * (Hm * Pn_predicted * Ha + R).I     # Kalman gain
+            Xn = Xn_predicted + K * d
+            Pn = Pn_predicted - K * Hm * Pn_predicted                    # simple (non-Joseph) covariance update
+            #
+            self.StoredVariables["Analysis"].store( Xn.A1 )
+            self.StoredVariables["Innovation"].store( d.A1 )
+            if self._parameters["CalculateAPosterioriCovariance"]:
+                self.StoredVariables["APosterioriCovariance"].store( Pn )
+        #
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        logging.debug("%s Terminé"%self._name)
+        #
+        return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+ print '\n AUTODIAGNOSTIC \n'
#
# Opérateur d'observation
# -----------------------
- Hm = H["Direct"].asMatrix()
- Ha = H["Adjoint"].asMatrix()
+ Hm = H["Tangent"].asMatrix(None)
+ Ha = H["Adjoint"].asMatrix(None)
#
if R is not None:
RI = R.I
else:
self.__Y = numpy.matrix( asVector, numpy.float ).T
elif asPersistentVector is not None:
- self.__Y = asPersistentVector
+ if type( asPersistentVector ) is list or type( asPersistentVector ) is tuple:
+ from Persistence import OneVector
+ self.__Y = OneVector("Observation", basetype=numpy.array)
+ for y in asPersistentVector:
+ self.__Y.store( y )
+ else:
+ self.__Y = asPersistentVector
else:
raise ValueError("Error: improperly defined observations")
if toBeStored:
centeredDF = asFunction["withCenteredDF"],
increment = asFunction["withIncrement"],
dX = asFunction["withdX"] )
- self.__H["Direct"] = Operator( fromMethod = FDA.FunctionH )
- self.__H["Tangent"] = Operator( fromMethod = FDA.TangentH )
- self.__H["Adjoint"] = Operator( fromMethod = FDA.AdjointH )
+ self.__M["Direct"] = Operator( fromMethod = FDA.FunctionH )
+ self.__M["Tangent"] = Operator( fromMethod = FDA.TangentH )
+ self.__M["Adjoint"] = Operator( fromMethod = FDA.AdjointH )
elif (type(asFunction) is type({})) and \
asFunction.has_key("Tangent") and asFunction.has_key("Adjoint") and \
(asFunction["Tangent"] is not None) and (asFunction["Adjoint"] is not None):
else:
return self.__Method( (xNominal, xValue) )
- def asMatrix(self, ValueForMethodForm = None):
+ def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
"""
Permet de renvoyer l'opérateur sous la forme d'une matrice
"""
if self.__Matrix is not None:
return self.__Matrix
- elif ValueForMethodForm is not None:
+ elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
return self.__Method( (ValueForMethodForm, None) )
else:
raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
#
# Calcul de la valeur linéarisée de H en X appliqué à dX
# ------------------------------------------------------
- dX = numpy.asmatrix(dX).flatten().T
- HtX = numpy.dot(Jacobienne, dX)
+ _dX = numpy.asmatrix(dX).flatten().T
+ HtX = numpy.dot(Jacobienne, _dX)
return HtX.A1
# ---------------------------------------------------------
#
# Calcul de la valeur de l'adjoint en X appliqué à Y
# --------------------------------------------------
- Y = numpy.asmatrix(Y).flatten().T
- HaY = numpy.dot(JacobienneT, Y)
+ _Y = numpy.asmatrix(Y).flatten().T
+ HaY = numpy.dot(JacobienneT, _Y)
return HaY.A1
# ==============================================================================
self.add_data("ObservationError")
if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in self.dictMCVal.keys():
self.add_data("CheckingPoint")
- self.add_data("ObservationOperator")
+ if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in self.dictMCVal.keys():
+ self.add_data("ObservationOperator")
+ if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in self.dictMCVal.keys():
+ self.add_data("EvolutionModel")
+ if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in self.dictMCVal.keys():
+ self.add_data("EvolutionError")
self.add_variables()
# Parametres optionnels
self.add_observer_in_dict(observer, observers)
# Write observers in the python command file
- number = 1
+ number = 2
self.text_da += "observers = {}\n"
for observer in observers.keys():
number += 1
class OptimizerHooks:
- def __init__(self, optim_algo):
+ def __init__(self, optim_algo, switch_value=-1):
self.optim_algo = optim_algo
+ self.switch_value = str(int(switch_value))
def create_sample(self, data, method):
sample = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/ParametricInput'))
obs_switch.setEltAtRank("name", "switch_value")
obs_switch.setEltAtRank("value", "1")
specificParameters.pushBack(obs_switch)
+ if self.optim_algo.has_evolution_model:
+ obs_switch = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+ obs_switch.setEltAtRank("name", "switch_value")
+ obs_switch.setEltAtRank("value", self.switch_value)
+ specificParameters.pushBack(obs_switch)
sample.setEltAtRank("specificParameters", specificParameters)
# Les données
else:
val_end = self.optim_algo.da_study.OutputVariables[self.optim_algo.da_study.OutputVariablesOrder[elt_list]] # nbr de l'argument courant (-1 == tout)
- it = data.flat
+ if data is None:
+ it = []
+ else:
+ it = data.flat
for val in it:
param.pushBack(val)
val_number += 1
if sample_id == local_counter:
# 4: Data is ready
any_data = self.optim_algo.pool.getOutSample(local_counter)
- Y = self.get_data_from_any(any_data)
+ Z = self.get_data_from_any(any_data)
# 5: Release lock
# Have to be done before but need a new implementation
# of the optimizer loop
self.optim_algo.counter_lock.release()
- return Y
+ return Z
else:
#print "sync false is not yet implemented"
self.optim_algo.setError("sync == false not yet implemented")
- def Tangent(self, X, sync = 1):
+ def Tangent(self, (X, dX), sync = 1):
#print "Call Tangent OptimizerHooks"
if sync == 1:
# 1: Get a unique sample number
local_counter = self.optim_algo.sample_counter
# 2: Put sample in the job pool
- sample = self.create_sample(X, "Tangent")
+ sample = self.create_sample((X,dX) , "Tangent")
self.optim_algo.pool.pushInSample(local_counter, sample)
# 3: Wait
if sample_id == local_counter:
# 4: Data is ready
any_data = self.optim_algo.pool.getOutSample(local_counter)
- Y = self.get_data_from_any(any_data)
+ Z = self.get_data_from_any(any_data)
# 5: Release lock
# Have to be done before but need a new implementation
# of the optimizer loop
self.optim_algo.counter_lock.release()
- return Y
+ return Z
else:
#print "sync false is not yet implemented"
self.optim_algo.setError("sync == false not yet implemented")
SALOMERuntime.OptimizerAlgASync.__init__(self, None)
self.runtime = SALOMERuntime.getSALOMERuntime()
+ self.has_evolution_model = False
self.has_observer = False
# Gestion du compteur
self.tout = self.runtime.getTypeCode("SALOME_TYPES/ParametricOutput")
self.pyobject = self.runtime.getTypeCode("pyobj")
+ # Absolument indispensable de définir ainsi "self.optim_hooks"
+ # (sinon on a une "Unknown Exception" sur l'attribut "finish")
self.optim_hooks = OptimizerHooks(self)
# input vient du port algoinit, input est un Any YACS !
if self.da_study.getObservationOperatorType("Direct") == "Function" or self.da_study.getObservationOperatorType("Tangent") == "Function" or self.da_study.getObservationOperatorType("Adjoint") == "Function" :
#print "Set Hooks"
# Use proxy function for YACS
- self.hooks = OptimizerHooks(self)
+ self.hooksOO = OptimizerHooks(self, switch_value=1)
direct = tangent = adjoint = None
if self.da_study.getObservationOperatorType("Direct") == "Function":
- direct = self.hooks.Direct
+ direct = self.hooksOO.Direct
if self.da_study.getObservationOperatorType("Tangent") == "Function" :
- tangent = self.hooks.Tangent
+ tangent = self.hooksOO.Tangent
if self.da_study.getObservationOperatorType("Adjoint") == "Function" :
- adjoint = self.hooks.Adjoint
+ adjoint = self.hooksOO.Adjoint
# Set ObservationOperator
self.ADD.setObservationOperator(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
+ # Check if EvolutionModel is already set
+ if self.da_study.getEvolutionModelType("Direct") == "Function" or self.da_study.getEvolutionModelType("Tangent") == "Function" or self.da_study.getEvolutionModelType("Adjoint") == "Function" :
+ self.has_evolution_model = True
+ #print "Set Hooks"
+ # Use proxy function for YACS
+ self.hooksEM = OptimizerHooks(self, switch_value=2)
+ direct = tangent = adjoint = None
+ if self.da_study.getEvolutionModelType("Direct") == "Function":
+ direct = self.hooksEM.Direct
+ if self.da_study.getEvolutionModelType("Tangent") == "Function" :
+ tangent = self.hooksEM.Tangent
+ if self.da_study.getEvolutionModelType("Adjoint") == "Function" :
+ adjoint = self.hooksEM.Adjoint
+
+ # Set EvolutionModel
+ self.ADD.setEvolutionModel(asFunction = {"Direct":direct, "Tangent":tangent, "Adjoint":adjoint})
+
# Set Observers
for observer_name in self.da_study.observers_dict.keys():
# print "observers %s found" % observer_name
# Observation Management
self.ObservationOperatorType = {}
- self.EvolutionModelType = {}
self.FunctionObservationOperator = {}
+ # Evolution Management
+ self.EvolutionModelType = {}
+ self.FunctionEvolutionModel = {}
+
#--------------------------------------
def setInputVariable(self, name, size):
if Type == "Vector":
self.BackgroundType = Type
else:
- raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + " Types are : Vector")
+ raise daError("[daStudy::setBackgroundType] Type is unkown : " + Type + ". Types are : Vector")
def setBackgroundStored(self, Stored):
if Stored:
if Type == "Vector":
self.CheckingPointType = Type
else:
- raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + " Types are : Vector")
+ raise daError("[daStudy::setCheckingPointType] Type is unkown : " + Type + ". Types are : Vector")
def setCheckingPointStored(self, Stored):
if Stored:
#--------------------------------------
def setObservationType(self, Type):
- if Type == "Vector":
+ if Type == "Vector" or Type == "VectorSerie":
self.ObservationType = Type
else:
- raise daError("[daStudy::setObservationType] Type is unkown : " + Type + " Types are : Vector")
+ raise daError("[daStudy::setObservationType] Type is unkown : " + Type + ". Types are : Vector, VectorSerie")
def setObservationStored(self, Stored):
if Stored:
raise daError("[daStudy::setObservation] Type or Storage is not defined !")
if self.ObservationType == "Vector":
self.ADD.setObservation(asVector = Observation, toBeStored = self.ObservationStored)
+ if self.ObservationType == "VectorSerie":
+ self.ADD.setObservation(asPersistentVector = Observation, toBeStored = self.ObservationStored)
#--------------------------------------
elif Type == "Function":
self.ObservationOperatorType[Name] = Type
else:
- raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + " Types are : Matrix, Function")
+ raise daError("[daStudy::setObservationOperatorType] Type is unkown : " + Type + ". Types are : Matrix, Function")
def setObservationOperator(self, Name, ObservationOperator):
try:
#--------------------------------------
+ def setEvolutionErrorStored(self, Stored):
+ if Stored:
+ self.EvolutionErrorStored = True
+ else:
+ self.EvolutionErrorStored = False
+
+ def setEvolutionError(self, EvolutionError):
+ try:
+ self.EvolutionErrorStored
+ except AttributeError:
+ raise daError("[daStudy::setEvolutionError] Storage is not defined !")
+ self.ADD.setEvolutionError(asCovariance = EvolutionError, toBeStored = self.EvolutionErrorStored)
+
+ #--------------------------------------
+
def getEvolutionModelType(self, Name):
rtn = None
try:
elif Type == "Function":
self.EvolutionModelType[Name] = Type
else:
- raise daError("[daStudy::setEvolutionModelType] Type is unkown : " + Type + " Types are : Matrix, Function")
+ raise daError("[daStudy::setEvolutionModelType] Type is unkown : " + Type + ". Types are : Matrix, Function")
def setEvolutionModel(self, Name, EvolutionModel):
try:
sys.exit(1)
else:
if data_config[data_name_type] not in AssimType[data_name]:
- logging.fatal(data_name_type + " defined in the study configuration does not have a correct type : " + str(data_config[data_name_type])
+ logging.fatal(data_name_type + " of " + data_name + " defined in the study configuration does not have a correct type : " + str(data_config[data_name_type])
+ "\n You can have : " + str(AssimType[data_name]))
sys.exit(1)
if data_name_from not in data_config:
sys.exit(1)
else:
if data_config[data_name_from] not in FromNumpyList[data_config[data_name_type]]:
- logging.fatal(data_name_from + " defined in the study configuration does not have a correct value : " + str(data_config[data_name_from])
+ logging.fatal(data_name_from + " of " + data_name + " defined in the study configuration does not have a correct value : " + str(data_config[data_name_from])
+ "\n You can have : " + str(FromNumpyList[data_config[data_name_type]]))
sys.exit(1)
]
AssimType = {}
-AssimType["Background"] = ["Vector"]
-AssimType["BackgroundError"] = ["Matrix"]
-AssimType["Observation"] = ["Vector"]
-AssimType["ObservationError"] = ["Matrix"]
+AssimType["Background"] = ["Vector"]
+AssimType["BackgroundError"] = ["Matrix"]
+AssimType["Observation"] = ["Vector", "VectorSerie"]
+AssimType["ObservationError"] = ["Matrix"]
AssimType["ObservationOperator"] = ["Matrix", "Function"]
-AssimType["EvolutionModel"] = ["Matrix", "Function"]
-AssimType["EvolutionError"] = ["Matrix"]
+AssimType["EvolutionModel"] = ["Matrix", "Function"]
+AssimType["EvolutionError"] = ["Matrix"]
AssimType["AlgorithmParameters"] = ["Dict"]
-AssimType["UserDataInit"] = ["Dict"]
-AssimType["CheckingPoint"] = ["Vector"]
+AssimType["UserDataInit"] = ["Dict"]
+AssimType["CheckingPoint"] = ["Vector"]
FromNumpyList = {}
-FromNumpyList["Vector"] = ["String", "Script"]
-FromNumpyList["Matrix"] = ["String", "Script"]
-FromNumpyList["Function"] = ["FunctionDict"]
-FromNumpyList["Dict"] = ["Script"]
+FromNumpyList["Vector"] = ["String", "Script"]
+FromNumpyList["VectorSerie"] = ["String", "Script"]
+FromNumpyList["Matrix"] = ["String", "Script"]
+FromNumpyList["Function"] = ["FunctionDict"]
+FromNumpyList["Dict"] = ["Script"]
# -- Infos from daAlgorithms --
AssimAlgos = [
"3DVAR",
"Blue",
"EnsembleBlue",
-# "KalmanFilter", # Removed because EvolutionModel must be available in OptLoop
+ "KalmanFilter",
"LinearLeastSquares",
"NonLinearLeastSquares",
"QuantileRegression",
"Observation", "ObservationError",
"ObservationOperator",
]
-# AlgoDataRequirements["KalmanFilter"] = [
-# "Background", "BackgroundError",
-# "Observation", "ObservationError",
-# "EvolutionModel", "EvolutionError",
-# "ObservationOperator",
-# ]
+AlgoDataRequirements["KalmanFilter"] = [
+ "Background", "BackgroundError",
+ "Observation", "ObservationError",
+ "EvolutionModel", "EvolutionError",
+ "ObservationOperator",
+ ]
AlgoDataRequirements["LinearLeastSquares"] = [
"Observation", "ObservationError",
"ObservationOperator",
AlgoType["3DVAR"] = "Optim"
AlgoType["Blue"] = "Optim"
AlgoType["EnsembleBlue"] = "Optim"
-# AlgoType["KalmanFilter"] = "Optim"
+AlgoType["KalmanFilter"] = "Optim"
AlgoType["LinearLeastSquares"] = "Optim"
AlgoType["NonLinearLeastSquares"] = "Optim"
AlgoType["QuantileRegression"] = "Optim"
# Data input dict
DataTypeDict = {}
-DataTypeDict["Vector"] = ["String", "Script"]
-DataTypeDict["Matrix"] = ["String", "Script"]
-DataTypeDict["Function"] = ["FunctionDict"]
-DataTypeDict["Dict"] = ["Script"]
+DataTypeDict["Vector"] = ["String", "Script"]
+DataTypeDict["VectorSerie"] = ["String", "Script"]
+DataTypeDict["Matrix"] = ["String", "Script"]
+DataTypeDict["Function"] = ["FunctionDict"]
+DataTypeDict["Dict"] = ["Script"]
DataTypeDefaultDict = {}
-DataTypeDefaultDict["Vector"] = "Script"
-DataTypeDefaultDict["Matrix"] = "Script"
-DataTypeDefaultDict["Function"] = "FunctionDict"
-DataTypeDefaultDict["Dict"] = "Script"
+DataTypeDefaultDict["Vector"] = "Script"
+DataTypeDefaultDict["VectorSerie"] = "Script"
+DataTypeDefaultDict["Matrix"] = "Script"
+DataTypeDefaultDict["Function"] = "FunctionDict"
+DataTypeDefaultDict["Dict"] = "Script"
# Assimilation data input
AssimDataDict = {}
-AssimDataDict["Background"] = ["Vector"]
-AssimDataDict["BackgroundError"] = ["Matrix"]
-AssimDataDict["Observation"] = ["Vector"]
-AssimDataDict["ObservationError"] = ["Matrix"]
+AssimDataDict["Background"] = ["Vector"]
+AssimDataDict["BackgroundError"] = ["Matrix"]
+AssimDataDict["Observation"] = ["Vector", "VectorSerie"]
+AssimDataDict["ObservationError"] = ["Matrix"]
AssimDataDict["ObservationOperator"] = ["Matrix", "Function"]
-AssimDataDict["EvolutionModel"] = ["Matrix", "Function"]
-AssimDataDict["EvolutionError"] = ["Matrix"]
+AssimDataDict["EvolutionModel"] = ["Matrix", "Function"]
+AssimDataDict["EvolutionError"] = ["Matrix"]
AssimDataDict["AlgorithmParameters"] = ["Dict"]
-AssimDataDict["UserDataInit"] = ["Dict"]
-AssimDataDict["CheckingPoint"] = ["Vector"]
+AssimDataDict["UserDataInit"] = ["Dict"]
+AssimDataDict["CheckingPoint"] = ["Vector"]
AssimDataDefaultDict = {}
AssimDataDefaultDict["Background"] = "Vector"
AssimDataDefaultDict["UserDataInit"] = "Dict"
AssimDataDefaultDict["CheckingPoint"] = "Vector"
-StoredAssimData = ["Vector", "Matrix"]
+StoredAssimData = ["Vector", "VectorSerie", "Matrix"]
# Assimilation optional nodes
OptDict = {}
ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
back_node.setScript(back_node_script)
+ if data_config["Type"] == "VectorSerie" and data_config["From"] == "String":
+ # Create node
+ factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyVectorSerieFromString")
+ back_node = factory_back_node.cloneNode("Get" + key)
+ back_node.getInputPort("vector_in_string").edInitPy(data_config["Data"])
+ ADAO_Case.edAddChild(back_node)
+ # Connect node with CreateAssimilationStudy
+ CAS_node.edAddInputPort(key, t_pyobj)
+ CAS_node.edAddInputPort(key_type, t_string)
+ CAS_node.edAddInputPort(key_stored, t_bool)
+ ADAO_Case.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
+ ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+ ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+ back_node_script = back_node.getScript()
+ back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+ # Connect node with InitUserData
+ if key in init_config["Target"]:
+ back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script
+ back_node.edAddInputPort("init_data", t_pyobj)
+ ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+ back_node.setScript(back_node_script)
+
+ if data_config["Type"] == "VectorSerie" and data_config["From"] == "Script":
+ # Create node
+ factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyVectorSerieFromScript")
+ back_node = factory_back_node.cloneNode("Get" + key)
+ if repertory:
+ back_node.getInputPort("script").edInitPy(os.path.join(base_repertory, os.path.basename(data_config["Data"])))
+ else:
+ back_node.getInputPort("script").edInitPy(data_config["Data"])
+ back_node.edAddOutputPort(key, t_pyobj)
+ back_node_script = back_node.getScript()
+ back_node_script += key + " = user_script_module." + key + "\n"
+ back_node.setScript(back_node_script)
+ ADAO_Case.edAddChild(back_node)
+ # Connect node with CreateAssimilationStudy
+ CAS_node.edAddInputPort(key, t_pyobj)
+ CAS_node.edAddInputPort(key_type, t_string)
+ CAS_node.edAddInputPort(key_stored, t_bool)
+ ADAO_Case.edAddDFLink(back_node.getOutputPort(key), CAS_node.getInputPort(key))
+ ADAO_Case.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
+ ADAO_Case.edAddDFLink(back_node.getOutputPort("stored"), CAS_node.getInputPort(key_stored))
+ back_node_script = back_node.getScript()
+ back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+ # Connect node with InitUserData
+ if key in init_config["Target"]:
+ back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script
+ back_node.edAddInputPort("init_data", t_pyobj)
+ ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
+ back_node.setScript(back_node_script)
+
if data_config["Type"] == "Matrix" and data_config["From"] == "String":
# Create node
factory_back_node = catalogAd.getNodeFromNodeMap("CreateNumpyMatrixFromString")
optimizer_node = runtime.createOptimizerLoop(name, algLib, factoryName, "")
compute_bloc.edAddChild(optimizer_node)
ADAO_Case.edAddDFLink(CAS_node.getOutputPort("Study"), optimizer_node.edGetAlgoInitPort())
+
# Check if we have a python script for OptimizerLoopNode
data_config = study_config["ObservationOperator"]
- opt_script_node = None
+ opt_script_nodeOO = None
if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict":
# Get script
FunctionDict = data_config["Data"]
break
# We create a new pyscript node
- opt_script_node = runtime.createScriptNode("", "FunctionNode")
+ opt_script_nodeOO = runtime.createScriptNode("", "FunctionNodeOO")
if repertory:
script_filename = os.path.join(base_repertory, os.path.basename(script_filename))
try:
node_script += "filepath = \"" + base_repertory + "\"\n"
else:
node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
- node_script += "if sys.path.count(os.path.dirname(filepath))==0 or (sys.path.count(os.path.dirname(filepath))>0 and sys.path.index(os.path.dirname(filepath),0,1)>0):\n"
- node_script += " sys.path.insert(0,os.path.dirname(filepath))\n"
+ node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+ node_script += " sys.path.insert(0,filepath)\n"
node_script += script_str.read()
- opt_script_node.setScript(node_script)
- opt_script_node.edAddInputPort("computation", t_param_input)
- opt_script_node.edAddOutputPort("result", t_param_output)
+ opt_script_nodeOO.setScript(node_script)
+ opt_script_nodeOO.edAddInputPort("computation", t_param_input)
+ opt_script_nodeOO.edAddOutputPort("result", t_param_output)
else:
factory_opt_script_node = catalogAd.getNodeFromNodeMap("FakeOptimizerLoopNode")
- opt_script_node = factory_opt_script_node.cloneNode("FakeFunctionNode")
+ opt_script_nodeOO = factory_opt_script_node.cloneNode("FakeFunctionNode")
+
+ # Check if we have a python script for OptimizerLoopNode
+ if "EvolutionModel" in study_config.keys():
+ data_config = study_config["EvolutionModel"]
+ opt_script_nodeEM = None
+ if data_config["Type"] == "Function" and data_config["From"] == "FunctionDict":
+ # Get script
+ FunctionDict = data_config["Data"]
+ script_filename = ""
+ for FunctionName in FunctionDict["Function"]:
+ # We currently support only one file
+ script_filename = FunctionDict["Script"][FunctionName]
+ break
+
+ # We create a new pyscript node
+ opt_script_nodeEM = runtime.createScriptNode("", "FunctionNodeEM")
+ if repertory:
+ script_filename = os.path.join(base_repertory, os.path.basename(script_filename))
+ try:
+ script_str= open(script_filename, 'r')
+ except:
+ logging.fatal("Exception in opening function script file : " + script_filename)
+ traceback.print_exc()
+ sys.exit(1)
+ node_script = "#-*-coding:iso-8859-1-*-\n"
+ node_script += "import sys, os \n"
+ if base_repertory != "":
+ node_script += "filepath = \"" + base_repertory + "\"\n"
+ else:
+ node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
+ node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+ node_script += " sys.path.insert(0,filepath)\n"
+ node_script += script_str.read()
+ opt_script_nodeEM.setScript(node_script)
+ opt_script_nodeEM.edAddInputPort("computation", t_param_input)
+ opt_script_nodeEM.edAddOutputPort("result", t_param_output)
+ else:
+ factory_opt_script_node = catalogAd.getNodeFromNodeMap("FakeOptimizerLoopNode")
+ opt_script_nodeEM = factory_opt_script_node.cloneNode("FakeFunctionNode")
# Add computation bloc
if "Observers" in study_config.keys():
# Connect switch
ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("switch_value"), switch_node.edGetConditionPort())
- # First case: always computation bloc
- computation_bloc = runtime.createBloc("computation_bloc")
- computation_bloc.edAddChild(opt_script_node)
- switch_node.edSetNode(1, computation_bloc)
+ # First case: computation bloc
+ computation_blocOO = runtime.createBloc("computation_blocOO")
+ computation_blocOO.edAddChild(opt_script_nodeOO)
+ switch_node.edSetNode(1, computation_blocOO)
- # We connect Optimizer with the script
- ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_node.getInputPort("computation"))
- ADAO_Case.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+ # We connect with the script
+ ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeOO.getInputPort("computation"))
+ ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+ # Second case: evolution bloc
+ if "EvolutionModel" in study_config.keys():
+ computation_blocEM = runtime.createBloc("computation_blocEM")
+ computation_blocEM.edAddChild(opt_script_nodeEM)
+ switch_node.edSetNode(2, computation_blocEM)
+
+ # We connect with the script
+ ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeEM.getInputPort("computation"))
+ ADAO_Case.edAddDFLink(opt_script_nodeEM.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
# For each observer add a new bloc in the switch
observer_config = study_config["Observers"]
observer_bloc.edAddChild(end_observation_node)
ADAO_Case.edAddCFLink(observation_node, end_observation_node)
ADAO_Case.edAddDFLink(end_observation_node.getOutputPort("output"), optimizer_node.edGetPortForOutPool())
+
+ elif "EvolutionModel" in study_config.keys():
+ execution_bloc = runtime.createBloc("Execution Bloc")
+ optimizer_node.edSetNode(execution_bloc)
+
+ # Add a node that permits to configure the switch
+ factory_read_for_switch_node = catalogAd.getNodeFromNodeMap("ReadForSwitchNode")
+ read_for_switch_node = factory_read_for_switch_node.cloneNode("ReadForSwitch")
+ execution_bloc.edAddChild(read_for_switch_node)
+ ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), read_for_switch_node.getInputPort("data"))
+
+ # Add a switch
+ switch_node = runtime.createSwitch("Execution Switch")
+ execution_bloc.edAddChild(switch_node)
+ # Connect switch
+ ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("switch_value"), switch_node.edGetConditionPort())
+
+ # First case: computation bloc
+ computation_blocOO = runtime.createBloc("computation_blocOO")
+ computation_blocOO.edAddChild(opt_script_nodeOO)
+ switch_node.edSetNode(1, computation_blocOO)
+
+ # We connect with the script
+ ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeOO.getInputPort("computation"))
+ ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
+ # Second case: evolution bloc
+ computation_blocEM = runtime.createBloc("computation_blocEM")
+ computation_blocEM.edAddChild(opt_script_nodeEM)
+ switch_node.edSetNode(2, computation_blocEM)
+
+ # We connect with the script
+ ADAO_Case.edAddDFLink(read_for_switch_node.getOutputPort("data"), opt_script_nodeEM.getInputPort("computation"))
+ ADAO_Case.edAddDFLink(opt_script_nodeEM.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+
else:
- computation_bloc = runtime.createBloc("computation_bloc")
- optimizer_node.edSetNode(computation_bloc)
- computation_bloc.edAddChild(opt_script_node)
+ computation_blocOO = runtime.createBloc("computation_blocOO")
+ optimizer_node.edSetNode(computation_blocOO)
+ computation_blocOO.edAddChild(opt_script_nodeOO)
# We connect Optimizer with the script
- ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_node.getInputPort("computation"))
- ADAO_Case.edAddDFLink(opt_script_node.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
+ ADAO_Case.edAddDFLink(optimizer_node.edGetSamplePort(), opt_script_nodeOO.getInputPort("computation"))
+ ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
# Connect node with InitUserData
if "ObservationOperator" in init_config["Target"]:
- opt_node_script = opt_script_node.getScript()
+ opt_node_script = opt_script_nodeOO.getScript()
opt_node_script = "__builtins__[\"init_data\"] = init_data\n" + opt_node_script
- opt_script_node.setScript(opt_node_script)
- opt_script_node.edAddInputPort("init_data", t_pyobj)
- ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_node.getInputPort("init_data"))
+ opt_script_nodeOO.setScript(opt_node_script)
+ opt_script_nodeOO.edAddInputPort("init_data", t_pyobj)
+ ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_nodeOO.getInputPort("init_data"))
# Step 4: create post-processing from user configuration
if "UserPostAnalysis" in study_config.keys():
node_script += "filepath = \"" + base_repertory + "\"\n"
else:
node_script += "filepath = \"" + os.path.dirname(script_filename) + "\"\n"
- node_script += "if sys.path.count(os.path.dirname(filepath))==0 or (sys.path.count(os.path.dirname(filepath))>0 and sys.path.index(os.path.dirname(filepath),0,1)>0):\n"
- node_script += " sys.path.insert(0,os.path.dirname(filepath))\n"
+ node_script += "if sys.path.count(filepath)==0 or (sys.path.count(filepath)>0 and sys.path.index(filepath)>0):\n"
+ node_script += " sys.path.insert(0,filepath)\n"
node_script += default_script
node_script += analysis_file.read()
analysis_node.setScript(node_script)