1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelée par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
41 class CacheManager(object):
# Cache of previously evaluated operator points: avoids recomputing H(x)
# for an x already seen, using a norm-scaled tolerance test.
# NOTE(review): this chunk is a sampled dump — several original source
# lines are missing inside each method; code below is kept verbatim.
43 Classe générale de gestion d'un cache de calculs
# __init__ parameters (the "def __init__" line itself is missing here):
#   toleranceInRedundancy : relative tolerance used to declare two points equal
#   lenghtOfRedundancy    : max number of cached entries (-1 = auto-sized on
#                           first store). NOTE(review): "lenght" is a
#                           long-standing typo for "length", kept everywhere
#                           in this file for API compatibility.
46 toleranceInRedundancy = 1.e-18,
47 lenghtOfRedundancy = -1,
50 Les caractéristiques de tolérance peuvent être modifées à la création.
52 self.__tolerBP = float(toleranceInRedundancy)
53 self.__lenghtOR = int(lenghtOfRedundancy)
54 self.__initlnOR = self.__lenghtOR
# Each cache entry is a tuple: (ravelled point X, result H(X), norm of X).
59 self.__listOPCV = [] # Operator Previous Calculated Points, Results, Point Norms
60 # logging.debug("CM Tolerance de determination des doublons : %.2e", self.__tolerBP)
# Look up xValue in the cache; scans newest-to-oldest over at most
# __lenghtOR entries. A hit requires identical size and a Euclidean
# distance below tolerance * (stored norm of the cached point).
62 def wasCalculatedIn(self, xValue ): #, info="" ):
63 "Vérifie l'existence d'un calcul correspondant à la valeur"
66 for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
67 if xValue.size != self.__listOPCV[i][0].size:
68 # logging.debug("CM Différence de la taille %s de X et de celle %s du point %i déjà calculé", xValue.shape,i,self.__listOPCP[i].shape)
# NOTE(review): the size-mismatch branch body and the final return of the
# (found-flag, cached-value) pair are among the lines missing from this dump.
70 if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
72 __HxV = self.__listOPCV[i][1]
73 # logging.debug("CM Cas%s déja calculé, portant le numéro %i", info, i)
# Store the pair (xValue, HxValue) in the cache, evicting oldest entries
# (FIFO via pop(0)) once the capacity __lenghtOR is exceeded. A negative
# capacity is auto-sized to 2*size(x)+2 on first use.
77 def storeValueInX(self, xValue, HxValue ):
78 "Stocke un calcul correspondant à la valeur"
79 if self.__lenghtOR < 0:
80 self.__lenghtOR = 2 * xValue.size + 2
81 self.__initlnOR = self.__lenghtOR
82 while len(self.__listOPCV) > self.__lenghtOR:
83 # logging.debug("CM Réduction de la liste des cas à %i éléments par suppression du premier", self.__lenghtOR)
84 self.__listOPCV.pop(0)
85 self.__listOPCV.append( (
86 copy.copy(numpy.ravel(xValue)),
88 numpy.linalg.norm(xValue),
# Enable/disable helpers (their "def" lines are missing from this dump):
# they toggle the effective capacity between 0 (disabled) and the saved
# initial capacity __initlnOR.
93 self.__initlnOR = self.__lenghtOR
98 self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
101 class Operator(object):
# Simple operator interface: wraps either a Python callable ("Method") or
# a matrix ("Matrix"), counts evaluations, and shares a class-level cache.
# NOTE(review): the class-level attributes (call counters and the shared
# CacheManager instance "Operator.CM") are defined on lines missing from
# this sampled dump; code below is kept verbatim.
103 Classe générale d'interface de type opérateur simple
110 def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
112 On construit un objet de ce type en fournissant à l'aide de l'un des
113 deux mots-clé, soit une fonction python, soit une matrice.
115 - fromMethod : argument de type fonction Python
116 - fromMatrix : argument adapté au constructeur numpy.matrix
117 - avoidingRedundancy : évite ou pas les calculs redondants
119 self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
120 self.__AvoidRC = bool( avoidingRedundancy )
121 if fromMethod is not None:
122 self.__Method = fromMethod # logtimer(fromMethod)
124 self.__Type = "Method"
125 elif fromMatrix is not None:
# NOTE(review): numpy.float is a deprecated alias of the builtin float
# (deprecated in NumPy 1.20, removed in 1.24); numpy.matrix is itself
# discouraged — kept verbatim here, cannot be fixed safely in this dump.
127 self.__Matrix = numpy.matrix( fromMatrix, numpy.float )
128 self.__Type = "Matrix"
# Globally disable redundancy avoidance (shared cache for all Operators).
134 def disableAvoidingRedundancy(self):
136 Operator.CM.disable()
# Globally re-enable redundancy avoidance. NOTE(review): only the
# else-branch disable() call is visible here — the enable() path
# (presumably guarded by self.__AvoidRC) is among the missing lines.
138 def enableAvoidingRedundancy(self):
143 Operator.CM.disable()
# Apply the operator to xValue, consulting the cache first; if HValue is
# given it is taken as the precomputed result and stored directly.
149 def appliedTo(self, xValue, HValue = None):
151 Permet de restituer le résultat de l'application de l'opérateur à un
152 argument xValue. Cette méthode se contente d'appliquer, son argument
153 devant a priori être du bon type.
155 - xValue : argument adapté pour appliquer l'opérateur
157 if HValue is not None:
158 HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
160 Operator.CM.storeValueInX(xValue,HxValue)
163 __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
165 __alreadyCalculated = False
167 if __alreadyCalculated:
168 self.__addOneCacheCall()
# Cache miss: evaluate via the matrix product or the wrapped callable,
# then store the new pair in the shared cache.
171 if self.__Matrix is not None:
172 self.__addOneMatrixCall()
173 HxValue = self.__Matrix * xValue
175 self.__addOneMethodCall()
176 HxValue = self.__Method( xValue )
178 Operator.CM.storeValueInX(xValue,HxValue)
# Apply the operator to a (state, control) pair; the control uValue may
# be None, in which case only xValue is used. No caching here.
182 def appliedControledFormTo(self, paire ):
184 Permet de restituer le résultat de l'application de l'opérateur à une
185 paire (xValue, uValue). Cette méthode se contente d'appliquer, son
186 argument devant a priori être du bon type. Si la uValue est None,
187 on suppose que l'opérateur ne s'applique qu'à xValue.
189 - xValue : argument X adapté pour appliquer l'opérateur
190 - uValue : argument U adapté pour appliquer l'opérateur
192 assert len(paire) == 2, "Incorrect number of arguments"
193 xValue, uValue = paire
194 if self.__Matrix is not None:
195 self.__addOneMatrixCall()
196 return self.__Matrix * xValue
197 elif uValue is not None:
198 self.__addOneMethodCall()
199 return self.__Method( (xValue, uValue) )
201 self.__addOneMethodCall()
202 return self.__Method( xValue )
# Apply the operator linearized at xNominal to xValue; a matrix operator
# is linear, so the nominal point is irrelevant in that case.
204 def appliedInXTo(self, paire ):
206 Permet de restituer le résultat de l'application de l'opérateur à un
207 argument xValue, sachant que l'opérateur est valable en xNominal.
208 Cette méthode se contente d'appliquer, son argument devant a priori
209 être du bon type. Si l'opérateur est linéaire car c'est une matrice,
210 alors il est valable en tout point nominal et il n'est pas nécessaire
212 Arguments : une liste contenant
213 - xNominal : argument permettant de donner le point où l'opérateur
214 est construit pour etre ensuite appliqué
215 - xValue : argument adapté pour appliquer l'opérateur
217 assert len(paire) == 2, "Incorrect number of arguments"
218 xNominal, xValue = paire
219 if self.__Matrix is not None:
220 self.__addOneMatrixCall()
221 return self.__Matrix * xValue
223 self.__addOneMethodCall()
224 return self.__Method( (xNominal, xValue) )
# Return the operator as a matrix, evaluating the wrapped callable at
# ValueForMethodForm when no explicit matrix is stored.
226 def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
228 Permet de renvoyer l'opérateur sous la forme d'une matrice
230 if self.__Matrix is not None:
231 self.__addOneMatrixCall()
# NOTE(review): 'is not' compares identity, not equality; comparing with
# a str literal is fragile and raises SyntaxWarning on CPython >= 3.8.
# Should be '!=' — not fixable here because adjacent lines are missing.
233 elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
234 self.__addOneMethodCall()
235 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
237 raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
# Shape accessor (its "def" line is missing from this dump): only valid
# when the operator is stored in matrix form.
241 Renvoie la taille sous forme numpy si l'opérateur est disponible sous
242 la forme d'une matrice
244 if self.__Matrix is not None:
245 return self.__Matrix.shape
247 raise ValueError("Matrix form of the operator is not available, nor the shape")
# Evaluation counters: returns the 8-tuple (local total, local matrix,
# local method, local cached, global total, global matrix, global method,
# global cached), or one element when an index "which" is given.
249 def nbcalls(self, which=None):
251 Renvoie les nombres d'évaluations de l'opérateur
254 self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
255 self.__NbCallsAsMatrix,
256 self.__NbCallsAsMethod,
257 self.__NbCallsOfCached,
258 Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
259 Operator.NbCallsAsMatrix,
260 Operator.NbCallsAsMethod,
261 Operator.NbCallsOfCached,
263 if which is None: return __nbcalls
264 else: return __nbcalls[which]
# Increment the per-instance and class-wide matrix-evaluation counters.
266 def __addOneMatrixCall(self):
267 "Comptabilise un appel"
268 self.__NbCallsAsMatrix += 1 # Decompte local
269 Operator.NbCallsAsMatrix += 1 # Decompte global
# Increment the per-instance and class-wide method-evaluation counters.
271 def __addOneMethodCall(self):
272 "Comptabilise un appel"
273 self.__NbCallsAsMethod += 1 # Decompte local
274 Operator.NbCallsAsMethod += 1 # Decompte global
# Increment the per-instance and class-wide cache-hit counters.
276 def __addOneCacheCall(self):
277 "Comptabilise un appel"
278 self.__NbCallsOfCached += 1 # Decompte local
279 Operator.NbCallsOfCached += 1 # Decompte global
281 # ==============================================================================
282 class FullOperator(object):
# Full operator interface bundling the Direct, Tangent-linear and Adjoint
# forms into self.__FO, built from one of several input styles (script,
# single function with approximated derivatives, three functions, matrix).
# NOTE(review): sampled dump — the full __init__ signature and several
# statements are missing; code below is kept verbatim.
284 Classe générale d'interface de type opérateur complet
285 (Direct, Linéaire Tangent, Adjoint)
288 name = "GenericFullOperator",
290 asOneFunction = None, # Fonction
291 asThreeFunctions = None, # Dictionnaire de fonctions
293 asDict = None, # Parameters
300 self.__name = str(name)
301 self.__check = bool(toBeChecked)
# Normalize user-facing parameter names (asDict) into the internal
# "with..." keys consumed by the finite-difference machinery below.
306 if (asDict is not None) and isinstance(asDict, dict):
307 __Parameters.update( asDict )
308 if "DifferentialIncrement" in asDict:
309 __Parameters["withIncrement"] = asDict["DifferentialIncrement"]
310 if "CenteredFiniteDifference" in asDict:
311 __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
312 if "EnableMultiProcessing" in asDict:
313 __Parameters["withmpEnabled"] = asDict["EnableMultiProcessing"]
314 if "NumberOfProcesses" in asDict:
315 __Parameters["withmpWorkers"] = asDict["NumberOfProcesses"]
# Case 1: operator supplied by an external script (matrix, one function,
# or three functions named DirectOperator/TangentOperator/AdjointOperator).
317 if asScript is not None:
318 __Matrix, __Function = None, None
320 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
322 __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
323 __Function.update({"useApproximatedDerivatives":True})
324 __Function.update(__Parameters)
325 elif asThreeFunctions:
327 "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
328 "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
329 "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
331 __Function.update(__Parameters)
# Case 2: operator supplied directly as Python function(s).
334 if asOneFunction is not None:
335 if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
336 if asOneFunction["Direct"] is not None:
337 __Function = asOneFunction
339 raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
341 __Function = { "Direct":asOneFunction }
342 __Function.update({"useApproximatedDerivatives":True})
343 __Function.update(__Parameters)
344 elif asThreeFunctions is not None:
# Full Tangent+Adjoint pair given and approximated derivatives not
# requested: use the three functions as-is.
345 if isinstance(asThreeFunctions, dict) and \
346 ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
347 ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
348 (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
349 __Function = asThreeFunctions
350 elif isinstance(asThreeFunctions, dict) and \
351 ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
352 __Function = asThreeFunctions
353 __Function.update({"useApproximatedDerivatives":True})
355 raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
356 if "Direct" not in asThreeFunctions:
357 __Function["Direct"] = asThreeFunctions["Tangent"]
358 __Function.update(__Parameters)
362 # if sys.version_info[0] < 3 and isinstance(__Function, dict):
363 # for k in ("Direct", "Tangent", "Adjoint"):
364 # if k in __Function and hasattr(__Function[k],"__class__"):
365 # if type(__Function[k]) is type(self.__init__):
366 # raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
# Normalize appliedInX: a bare value is wrapped under the "HXb" key.
368 if appliedInX is not None and isinstance(appliedInX, dict):
369 __appliedInX = appliedInX
370 elif appliedInX is not None:
371 __appliedInX = {"HXb":appliedInX}
375 if scheduledBy is not None:
376 self.__T = scheduledBy
# Build the three Operator objects. When approximated derivatives are
# requested, Tangent/Adjoint come from finite differences (FDApproximation).
378 if isinstance(__Function, dict) and \
379 ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
380 ("Direct" in __Function) and (__Function["Direct"] is not None):
381 if "withCenteredDF" not in __Function: __Function["withCenteredDF"] = False
382 if "withIncrement" not in __Function: __Function["withIncrement"] = 0.01
383 if "withdX" not in __Function: __Function["withdX"] = None
384 if "withAvoidingRedundancy" not in __Function: __Function["withAvoidingRedundancy"] = True
385 if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
386 if "withLenghtOfRedundancy" not in __Function: __Function["withLenghtOfRedundancy"] = -1
387 if "withmpEnabled" not in __Function: __Function["withmpEnabled"] = False
388 if "withmpWorkers" not in __Function: __Function["withmpWorkers"] = None
389 from daNumerics.ApproximatedDerivatives import FDApproximation
390 FDA = FDApproximation(
391 Function = __Function["Direct"],
392 centeredDF = __Function["withCenteredDF"],
393 increment = __Function["withIncrement"],
394 dX = __Function["withdX"],
395 avoidingRedundancy = __Function["withAvoidingRedundancy"],
396 toleranceInRedundancy = __Function["withToleranceInRedundancy"],
397 lenghtOfRedundancy = __Function["withLenghtOfRedundancy"],
398 mpEnabled = __Function["withmpEnabled"],
399 mpWorkers = __Function["withmpWorkers"],
401 self.__FO["Direct"] = Operator( fromMethod = FDA.DirectOperator, avoidingRedundancy = avoidRC )
402 self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
403 self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
404 elif isinstance(__Function, dict) and \
405 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
406 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
407 self.__FO["Direct"] = Operator( fromMethod = __Function["Direct"], avoidingRedundancy = avoidRC )
408 self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
409 self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
# Matrix case: Tangent shares the matrix, Adjoint is its transpose.
# NOTE(review): numpy.float is a deprecated alias removed in NumPy 1.24.
410 elif asMatrix is not None:
411 __matrice = numpy.matrix( __Matrix, numpy.float )
412 self.__FO["Direct"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
413 self.__FO["Tangent"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
414 self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
417 raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
# Coerce every AppliedInX entry into a column numpy.matrix.
419 if __appliedInX is not None:
420 self.__FO["AppliedInX"] = {}
421 if not isinstance(__appliedInX, dict):
422 raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
423 for key in list(__appliedInX.keys()):
424 if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
425 # Pour le cas où l'on a une vraie matrice
426 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
427 elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
428 # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
429 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
431 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
433 self.__FO["AppliedInX"] = None
# NOTE(review): __repr__ below returns repr(self.__V); no attribute __V is
# assigned in the visible lines of this class — presumably defined on a
# missing line, or a copy-paste slip from a vector class. Verify upstream.
439 "x.__repr__() <==> repr(x)"
440 return repr(self.__V)
443 "x.__str__() <==> str(x)"
446 # ==============================================================================
447 class Algorithm(object):
# Base class for assimilation algorithms: provides a dictionary of
# initialized Persistence storage variables and accessor methods.
# Concrete algorithms subclass this and implement "run".
# NOTE(review): sampled dump — some lines are missing; kept verbatim.
449 Classe générale d'interface de type algorithme
451 Elle donne un cadre pour l'écriture d'une classe élémentaire d'algorithme
452 d'assimilation, en fournissant un container (dictionnaire) de variables
453 persistantes initialisées, et des méthodes d'accès à ces variables stockées.
455 Une classe élémentaire d'algorithme doit implémenter la méthode "run".
457 def __init__(self, name):
459 L'initialisation présente permet de fabriquer des variables de stockage
460 disponibles de manière générique dans les algorithmes élémentaires. Ces
461 variables de stockage sont ensuite conservées dans un dictionnaire
462 interne à l'objet, mais auquel on accède par la méthode "get".
464 Les variables prévues sont :
465 - CostFunctionJ : fonction-cout globale, somme des deux parties suivantes
466 - CostFunctionJb : partie ébauche ou background de la fonction-cout
467 - CostFunctionJo : partie observations de la fonction-cout
468 - GradientOfCostFunctionJ : gradient de la fonction-cout globale
469 - GradientOfCostFunctionJb : gradient de la partie ébauche de la fonction-cout
470 - GradientOfCostFunctionJo : gradient de la partie observations de la fonction-cout
471 - CurrentState : état courant lors d'itérations
472 - Analysis : l'analyse Xa
473 - SimulatedObservationAtBackground : l'état observé H(Xb) à l'ébauche
474 - SimulatedObservationAtCurrentState : l'état observé H(X) à l'état courant
475 - SimulatedObservationAtOptimum : l'état observé H(Xa) à l'optimum
476 - Innovation : l'innovation : d = Y - H(X)
477 - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
478 - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
479 - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
480 - MahalanobisConsistency : indicateur de consistance des covariances
481 - OMA : Observation moins Analysis : Y - Xa
482 - OMB : Observation moins Background : Y - Xb
483 - AMB : Analysis moins Background : Xa - Xb
484 - APosterioriCovariance : matrice A
485 - APosterioriVariances : variances de la matrice A
486 - APosterioriStandardDeviations : écart-types de la matrice A
487 - APosterioriCorrelations : correlations de la matrice A
488 - Residu : dans le cas des algorithmes de vérification
489 On peut rajouter des variables à stocker dans l'initialisation de
490 l'algorithme élémentaire qui va hériter de cette classe
492 logging.debug("%s Initialisation", str(name))
# _m: memory-usage probe (PlatformInfo); used in _pre_run/_post_run logs.
493 self._m = PlatformInfo.SystemUsage()
495 self._name = str( name )
# _parameters: runtime parameters; __required_parameters: declared specs;
# __required_inputs: names of mandatory/optional input vectors/matrices.
496 self._parameters = {"StoreSupplementaryCalculations":[]}
497 self.__required_parameters = {}
498 self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
# Pre-declare every generic storage variable as a Persistence object.
500 self.StoredVariables = {}
501 self.StoredVariables["CostFunctionJ"] = Persistence.OneScalar(name = "CostFunctionJ")
502 self.StoredVariables["CostFunctionJb"] = Persistence.OneScalar(name = "CostFunctionJb")
503 self.StoredVariables["CostFunctionJo"] = Persistence.OneScalar(name = "CostFunctionJo")
504 self.StoredVariables["CostFunctionJAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
505 self.StoredVariables["CostFunctionJbAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
506 self.StoredVariables["CostFunctionJoAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
507 self.StoredVariables["GradientOfCostFunctionJ"] = Persistence.OneVector(name = "GradientOfCostFunctionJ")
508 self.StoredVariables["GradientOfCostFunctionJb"] = Persistence.OneVector(name = "GradientOfCostFunctionJb")
509 self.StoredVariables["GradientOfCostFunctionJo"] = Persistence.OneVector(name = "GradientOfCostFunctionJo")
510 self.StoredVariables["CurrentState"] = Persistence.OneVector(name = "CurrentState")
511 self.StoredVariables["Analysis"] = Persistence.OneVector(name = "Analysis")
512 self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
513 self.StoredVariables["CurrentOptimum"] = Persistence.OneVector(name = "CurrentOptimum")
514 self.StoredVariables["SimulatedObservationAtBackground"] = Persistence.OneVector(name = "SimulatedObservationAtBackground")
515 self.StoredVariables["SimulatedObservationAtCurrentState"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
516 self.StoredVariables["SimulatedObservationAtOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
517 self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
518 self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
519 self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
520 self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
521 self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
522 self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
523 self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
524 self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
525 self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
526 self.StoredVariables["APosterioriCovariance"] = Persistence.OneMatrix(name = "APosterioriCovariance")
527 self.StoredVariables["APosterioriVariances"] = Persistence.OneVector(name = "APosterioriVariances")
528 self.StoredVariables["APosterioriStandardDeviations"] = Persistence.OneVector(name = "APosterioriStandardDeviations")
529 self.StoredVariables["APosterioriCorrelations"] = Persistence.OneMatrix(name = "APosterioriCorrelations")
530 self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
531 self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")
# Common preamble run before every algorithm: merges Parameters, validates
# required input vectors (Xb, Y) and covariance matrices (R, B, Q),
# normalizes Bounds, and sets optimizer verbosity flags from logging level.
533 def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
535 logging.debug("%s Lancement", self._name)
536 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
538 # Mise a jour de self._parameters avec Parameters
539 self.__setParameters(Parameters)
541 # Corrections et complements
# Inner helper: raise if a mandatory vector is absent, otherwise just log.
542 def __test_vvalue( argument, variable, argname):
544 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
545 raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
546 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
547 logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
549 logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
551 logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
552 __test_vvalue( Xb, "Xb", "Background or initial state" )
553 __test_vvalue( Y, "Y", "Observation" )
# Inner helper: same validation for covariance matrices.
554 def __test_cvalue( argument, variable, argname):
556 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
557 raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
558 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
559 logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
561 logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
563 logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
564 __test_cvalue( R, "R", "Observation" )
565 __test_cvalue( B, "B", "Background" )
566 __test_cvalue( Q, "Q", "Evolution" )
# Bounds must be a non-empty list/tuple to be honoured; anything else
# is normalized to None.
568 if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
569 logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
571 self._parameters["Bounds"] = None
# Map logging verbosity onto scipy.optimize message flags (verbose when
# logger level is below WARNING, silent otherwise).
573 if logging.getLogger().level < logging.WARNING:
574 self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
575 if PlatformInfo.has_scipy:
576 import scipy.optimize
577 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
579 self._parameters["optmessages"] = 15
581 self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
582 if PlatformInfo.has_scipy:
583 import scipy.optimize
584 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
586 self._parameters["optmessages"] = 15
# Common epilogue: derives variances / standard deviations / correlations
# from each stored a-posteriori covariance when requested, then logs the
# observation-operator call counters and memory usage.
590 def _post_run(self,_oH=None):
592 if ("StoreSupplementaryCalculations" in self._parameters) and \
593 "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
594 for _A in self.StoredVariables["APosterioriCovariance"]:
595 if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
596 self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
597 if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
598 self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
599 if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
# Correlation matrix C = D^-1/2 * A * D^-1/2 with D = diag(A).
600 _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
601 _C = numpy.dot(_EI, numpy.dot(_A, _EI))
602 self.StoredVariables["APosterioriCorrelations"].store( _C )
# nbcalls(0) = total evaluations, nbcalls(3) = cache hits (see Operator).
604 logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
605 logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
606 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
607 logging.debug("%s Terminé", self._name)
# Return the Persistence object stored under "key", or the whole
# StoredVariables dict when no key is given.
610 def get(self, key=None):
612 Renvoie l'une des variables stockées identifiée par la clé, ou le
613 dictionnaire de l'ensemble des variables disponibles en l'absence de
614 clé. Ce sont directement les variables sous forme objet qui sont
615 renvoyées, donc les méthodes d'accès à l'objet individuel sont celles
616 des classes de persistance.
619 return self.StoredVariables[key]
621 return self.StoredVariables
# Dict-like protocol over StoredVariables: membership, keys, pop.
# NOTE(review): the "def keys" and "def pop" lines are among the lines
# missing from this sampled dump; bodies below are kept verbatim.
623 def __contains__(self, key=None):
624 "D.__contains__(k) -> True if D has a key k, else False"
625 return key in self.StoredVariables
628 "D.keys() -> list of D's keys"
629 if hasattr(self, "StoredVariables"):
630 return self.StoredVariables.keys()
635 "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636 if hasattr(self, "StoredVariables"):
637 return self.StoredVariables.pop(k, d)
642 raise TypeError("pop expected at least 1 arguments, got 0")
643 "If key is not found, d is returned if given, otherwise KeyError is raised"
# Abstract entry point: concrete algorithms must override this with the
# actual assimilation computation.
649 def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
651 Doit implémenter l'opération élémentaire de calcul d'assimilation sous
652 sa forme mathématique la plus naturelle possible.
654 raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
# Declare a required algorithm parameter with its default, typecast
# callable, bounds and allowed values; the spec is stored in
# self.__required_parameters and later enforced by setParameterValue.
656 def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
658 Permet de définir dans l'algorithme des paramètres requis et leurs
659 caractéristiques par défaut.
662 raise ValueError("A name is mandatory to define a required parameter.")
664 self.__required_parameters[name] = {
666 "typecast" : typecast,
672 logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
# Return the sorted names of the declared required parameters, or the
# full spec dictionary when noDetails is False.
674 def getRequiredParameters(self, noDetails=True):
676 Renvoie la liste des noms de paramètres requis ou directement le
677 dictionnaire des paramètres requis.
680 return sorted(self.__required_parameters.keys())
682 return self.__required_parameters
# Resolve and validate the value of a declared parameter: apply the
# default when no value is given, run the typecast, then enforce
# minval/maxval bounds and listval membership. Raises ValueError on any
# violation. NOTE(review): the final "return __val" is among the lines
# missing from this sampled dump.
684 def setParameterValue(self, name=None, value=None):
686 Renvoie la valeur d'un paramètre requis de manière contrôlée
688 default = self.__required_parameters[name]["default"]
689 typecast = self.__required_parameters[name]["typecast"]
690 minval = self.__required_parameters[name]["minval"]
691 maxval = self.__required_parameters[name]["maxval"]
692 listval = self.__required_parameters[name]["listval"]
694 if value is None and default is None:
696 elif value is None and default is not None:
697 if typecast is None: __val = default
698 else: __val = typecast( default )
700 if typecast is None: __val = value
701 else: __val = typecast( value )
# Bounds are checked element-wise so list-valued parameters work too.
703 if minval is not None and (numpy.array(__val, float) < minval).any():
704 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
705 if maxval is not None and (numpy.array(__val, float) > maxval).any():
706 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
707 if listval is not None:
708 if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
711 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
712 elif __val not in listval:
713 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
# Register which input names (Xb, Y, R, B, Q, ...) are mandatory or
# optional; checked later by _pre_run.
716 def requireInputArguments(self, mandatory=(), optional=()):
718 Permet d'imposer des arguments requises en entrée
720 self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
721 self.__required_inputs["RequiredInputValues"]["optional"] = tuple( optional )
# Merge the user-supplied dictionary into self._parameters, then resolve
# every declared required parameter through setParameterValue (validated
# value from fromDico when present, otherwise the declared default).
723 def __setParameters(self, fromDico={}):
725 Permet de stocker les paramètres reçus dans le dictionnaire interne.
727 self._parameters.update( fromDico )
728 for k in self.__required_parameters.keys():
729 if k in fromDico.keys():
730 self._parameters[k] = self.setParameterValue(k,fromDico[k])
732 self._parameters[k] = self.setParameterValue(k)
733 logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
736 class Diagnostic(object):
# Abstract base for diagnostics: subclasses implement "_formula" (the
# mathematical computation) and "calculate" (validation + storage driver).
738 Classe générale d'interface de type diagnostic
740 Ce template s'utilise de la manière suivante : il sert de classe "patron" en
741 même temps que l'une des classes de persistance, comme "OneScalar" par
744 Une classe élémentaire de diagnostic doit implémenter ses deux méthodes, la
745 méthode "_formula" pour écrire explicitement et proprement la formule pour
746 l'écriture mathématique du calcul du diagnostic (méthode interne non
747 publique), et "calculate" pour activer la précédente tout en ayant vérifié
748 et préparé les données, et pour stocker les résultats à chaque pas (méthode
749 externe d'activation).
751 def __init__(self, name = "", parameters = {}):
# NOTE(review): mutable default argument "parameters={}" is shared across
# calls; harmless here because it is copied via dict(), but fragile style.
753 self.name = str(name)
754 self.parameters = dict( parameters )
# Abstract: the diagnostic's mathematical formula.
756 def _formula(self, *args):
758 Doit implémenter l'opération élémentaire de diagnostic sous sa forme
759 mathématique la plus naturelle possible.
761 raise NotImplementedError("Diagnostic mathematical formula has not been implemented!")
# Abstract: validate inputs, invoke _formula, store the result.
763 def calculate(self, *args):
765 Active la formule de calcul avec les arguments correctement rangés
767 raise NotImplementedError("Diagnostic activation method has not been implemented!")
769 # ==============================================================================
770 class DiagnosticAndParameters(object):
# Factory wrapper: resolves a diagnostic (from a script or direct
# arguments), locates its module under "daDiagnostics" on sys.path,
# imports it and instantiates its ElementaryDiagnostic class.
# NOTE(review): sampled dump — the full __init__ signature and several
# statements are missing; code below is kept verbatim.
772 Classe générale d'interface d'interface de type diagnostic
775 name = "GenericDiagnostic",
782 asExistingDiags = None,
786 self.__name = str(name)
792 self.__E = tuple(asExistingDiags)
793 self.__TheDiag = None
# Either read every piece (choice, identifier, parameters, unit, base
# type) from an external script, or take them from the direct arguments.
795 if asScript is not None:
796 __Diag = ImportFromScript(asScript).getvalue( "Diagnostic" )
797 __Iden = ImportFromScript(asScript).getvalue( "Identifier" )
798 __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
799 __Unit = ImportFromScript(asScript).getvalue( "Unit" )
800 __Base = ImportFromScript(asScript).getvalue( "BaseType" )
802 __Diag = asDiagnostic
803 __Iden = asIdentifier
# Normalize the collected values; the identifier falls back to the
# diagnostic name when absent.
808 if __Diag is not None:
809 self.__D = str(__Diag)
810 if __Iden is not None:
811 self.__I = str(__Iden)
813 self.__I = str(__Diag)
814 if __Dict is not None:
815 self.__P.update( dict(__Dict) )
816 if __Unit is None or __Unit == "None":
818 if __Base is None or __Base == "None":
821 self.__setDiagnostic( self.__D, self.__I, self.__U, self.__B, self.__P, self.__E )
# Accessor for the instantiated diagnostic object (its "def" line is
# among the lines missing from this dump).
825 return self.__TheDiag
# Locate, import and instantiate the chosen diagnostic module.
827 def __setDiagnostic(self, __choice = None, __name = "", __unit = "", __basetype = None, __parameters = {}, __existings = () ):
829 Permet de sélectionner un diagnostic a effectuer
832 raise ValueError("Error: diagnostic choice has to be given")
833 __daDirectory = "daDiagnostics"
835 # Recherche explicitement le fichier complet
836 # ------------------------------------------
838 for directory in sys.path:
839 if os.path.isfile(os.path.join(directory, __daDirectory, str(__choice)+'.py')):
840 __module_path = os.path.abspath(os.path.join(directory, __daDirectory))
841 if __module_path is None:
842 raise ImportError("No diagnostic module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(__choice, __daDirectory, sys.path))
844 # Importe le fichier complet comme un module
845 # ------------------------------------------
# sys.path is temporarily prepended with the module directory and
# restored right after the import.
847 __sys_path_tmp = sys.path ; sys.path.insert(0,__module_path)
848 self.__diagnosticFile = __import__(str(__choice), globals(), locals(), [])
849 sys.path = __sys_path_tmp ; del __sys_path_tmp
850 except ImportError as e:
851 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(__choice,e))
853 # Instancie un objet du type élémentaire du fichier
854 # -------------------------------------------------
855 if __name in __existings:
856 raise ValueError("A default input with the same name \"%s\" already exists."%str(__name))
858 self.__TheDiag = self.__diagnosticFile.ElementaryDiagnostic(
861 basetype = __basetype,
862 parameters = __parameters )
865 # ==============================================================================
class AlgorithmAndParameters(object):
    """
    General action interface class for the algorithm and its parameters.
    """
    def __init__(self,
                 name        = "GenericAlgorithm",
                 asAlgorithm = None,
                 asDict      = None,
                 asScript    = None,
                ):
        """
        Store the algorithm choice and its parameters, coming either from
        explicit arguments or from a script, then load the algorithm.

        NOTE(review): the argument lines were reconstructed from a garbled
        listing -- confirm keyword names against upstream.
        """
        self.__name = str(name)
        self.__A    = None   # algorithm name
        self.__P    = {}     # parameters
        #
        self.__algorithm     = {}
        self.__algorithmFile = None
        self.__algorithmName = None
        #
        self.updateParameters( asDict, asScript )
        #
        if asScript is not None:
            __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
        else:
            __Algo = asAlgorithm
        #
        if __Algo is not None:
            self.__A = str(__Algo)
            self.__P.update( {"Algorithm":self.__A} )
        #
        self.__setAlgorithm( self.__A )

    def updateParameters(self,
                 asDict   = None,
                 asScript = None,
                ):
        "Mise a jour des parametres"
        if asScript is not None:
            __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
        else:
            __Dict = asDict
        #
        if __Dict is not None:
            self.__P.update( dict(__Dict) )

    def executePythonScheme(self, asDictAO = None):
        "Permet de lancer le calcul d'assimilation"
        Operator.CM.clearCache()
        #
        if not isinstance(asDictAO, dict):
            raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
        # Unwrap each input through getO() when available, otherwise use it as-is
        if   hasattr(asDictAO["Background"],"getO"):        self.__Xb = asDictAO["Background"].getO()
        elif hasattr(asDictAO["CheckingPoint"],"getO"):     self.__Xb = asDictAO["CheckingPoint"].getO()
        else:                                               self.__Xb = None
        if hasattr(asDictAO["Observation"],"getO"):         self.__Y  = asDictAO["Observation"].getO()
        else:                                               self.__Y  = asDictAO["Observation"]
        if hasattr(asDictAO["ControlInput"],"getO"):        self.__U  = asDictAO["ControlInput"].getO()
        else:                                               self.__U  = asDictAO["ControlInput"]
        if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
        else:                                               self.__HO = asDictAO["ObservationOperator"]
        if hasattr(asDictAO["EvolutionModel"],"getO"):      self.__EM = asDictAO["EvolutionModel"].getO()
        else:                                               self.__EM = asDictAO["EvolutionModel"]
        if hasattr(asDictAO["ControlModel"],"getO"):        self.__CM = asDictAO["ControlModel"].getO()
        else:                                               self.__CM = asDictAO["ControlModel"]
        self.__B = asDictAO["BackgroundError"]
        self.__R = asDictAO["ObservationError"]
        self.__Q = asDictAO["EvolutionError"]
        #
        self.__shape_validate()
        #
        self.__algorithm.run(
            Xb         = self.__Xb,
            Y          = self.__Y,
            U          = self.__U,
            HO         = self.__HO,
            EM         = self.__EM,
            CM         = self.__CM,
            R          = self.__R,
            B          = self.__B,
            Q          = self.__Q,
            Parameters = self.__P,
            )
        return 0

    def executeYACSScheme(self, FileName=None):
        "Permet de lancer le calcul d'assimilation"
        if FileName is None or not os.path.exists(FileName):
            raise ValueError("a YACS file name has to be given for YACS execution.\n")
        if not PlatformInfo.has_salome or not PlatformInfo.has_yacs or not PlatformInfo.has_adao:
            raise ImportError("Unable to get SALOME, YACS or ADAO environnement variables. Please launch SALOME before executing.\n")
        #
        # SALOME/YACS modules are only available inside a SALOME session
        import pilot
        import SALOMERuntime
        import loader
        SALOMERuntime.RuntimeSALOME_setRuntime()
        #
        r = pilot.getRuntime()
        xmlLoader = loader.YACSLoader()
        xmlLoader.registerProcCataLoader()
        try:
            catalogAd = r.loadCatalog("proc", os.path.abspath(FileName))
            r.addCatalog(catalogAd)
        except Exception:
            # Best effort: a missing catalog is not fatal for execution
            pass
        #
        try:
            p = xmlLoader.load(os.path.abspath(FileName))
        except IOError as ex:
            print("The YACS XML schema file can not be loaded: %s"%(ex,))
        #
        logger = p.getLogger("parser")
        if not logger.isEmpty():
            print("The imported YACS XML schema has errors on parsing:")
            print(logger.getStr())
        #
        if not p.isValid():
            print("The YACS XML schema is not valid and will not be executed:")
            print(p.getErrorReport())
        #
        info=pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
        p.checkConsistency(info)
        if info.areWarningsOrErrors():
            print("The YACS XML schema is not coherent and will not be executed:")
            print(info.getGlobalRepr())
        #
        e = pilot.ExecutorSwig()
        e.RunW(p)
        if p.getEffectiveState() != pilot.DONE:
            print(p.getErrorReport())
        #
        return 0

    def get(self, key = None):
        "Vérifie l'existence d'une clé de variable ou de paramètres"
        if key in self.__algorithm:
            return self.__algorithm.get( key )
        elif key in self.__P:
            return self.__P[key]
        else:
            # NOTE(review): fallback branch reconstructed from a garbled
            # listing -- upstream returns the whole parameter dictionary
            # when the key is unknown; confirm against upstream.
            return self.__P

    def pop(self, k, d):
        "Necessaire pour le pickling"
        return self.__algorithm.pop(k, d)

    def getAlgorithmRequiredParameters(self, noDetails=True):
        "Renvoie la liste des paramètres requis selon l'algorithme"
        return self.__algorithm.getRequiredParameters(noDetails)

    def setObserver(self, __V, __O, __I, __S):
        "Attach an observer function __O (with info __I, scheduler __S) to variable __V"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be build before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
        else:
            self.__algorithm.StoredVariables[ __V ].setDataObserver(
                Scheduler      = __S,
                HookFunction   = __O,
                HookParameters = __I,
                )

    def removeObserver(self, __V, __O, __A = False):
        "Remove the observer __O from variable __V (__A removes all observers)"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be removed before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
        else:
            return self.__algorithm.StoredVariables[ __V ].removeDataObserver(
                HookFunction = __O,
                AllObservers = __A,
                )

    def hasObserver(self, __V):
        "Return True if variable __V currently has a data observer"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            return False
        if __V not in self.__algorithm:
            return False
        return self.__algorithm.StoredVariables[ __V ].hasDataObserver()

    def keys(self):
        "D.keys() -> list of D's keys"
        return list(self.__algorithm.keys()) + list(self.__P.keys())

    def __contains__(self, key=None):
        "D.__contains__(k) -> True if D has a key k, else False"
        return key in self.__algorithm or key in self.__P

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__A)+", "+repr(self.__P)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__A)+", "+str(self.__P)

    def __setAlgorithm(self, choice = None ):
        """
        Select the algorithm to use for the data assimilation study. The
        argument is a character string naming an algorithm module located
        in a "daAlgorithms" subdirectory of sys.path.
        """
        if choice is None:
            raise ValueError("Error: algorithm choice has to be given")
        if self.__algorithmName is not None:
            raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
        daDirectory = "daAlgorithms"
        #
        # Explicitly look for the complete file
        # -------------------------------------
        module_path = None
        for directory in sys.path:
            if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
                module_path = os.path.abspath(os.path.join(directory, daDirectory))
        if module_path is None:
            raise ImportError("No algorithm module named \"%s\" has been found in the search path.\n The search path is %s"%(choice, sys.path))
        #
        # Import the complete file as a module
        # ------------------------------------
        try:
            sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
            self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
            if not hasattr(self.__algorithmFile, "ElementaryAlgorithm"):
                raise ImportError("this module does not define a valid elementary algorithm.")
            self.__algorithmName = str(choice)
            sys.path = sys_path_tmp ; del sys_path_tmp
        except ImportError as e:
            raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
        #
        # Instanciate an object of the elementary type of the file
        # --------------------------------------------------------
        self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()
        #
        return 0

    def __shape_validate(self):
        """
        Validate the proper correspondence of the sizes of the variables
        and of the matrices, if there are any.
        """
        if self.__Xb is None:                      __Xb_shape = (0,)
        elif hasattr(self.__Xb,"size"):            __Xb_shape = (self.__Xb.size,)
        elif hasattr(self.__Xb,"shape"):
            if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
            else:                                  __Xb_shape = self.__Xb.shape()
        else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
        #
        if self.__Y is None:                      __Y_shape = (0,)
        elif hasattr(self.__Y,"size"):            __Y_shape = (self.__Y.size,)
        elif hasattr(self.__Y,"shape"):
            if isinstance(self.__Y.shape, tuple): __Y_shape = self.__Y.shape
            else:                                 __Y_shape = self.__Y.shape()
        else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
        #
        if self.__U is None:                      __U_shape = (0,)
        elif hasattr(self.__U,"size"):            __U_shape = (self.__U.size,)
        elif hasattr(self.__U,"shape"):
            if isinstance(self.__U.shape, tuple): __U_shape = self.__U.shape
            else:                                 __U_shape = self.__U.shape()
        else: raise TypeError("The control (U) has no attribute of shape: problem !")
        #
        if self.__B is None:                      __B_shape = (0,0)
        elif hasattr(self.__B,"shape"):
            if isinstance(self.__B.shape, tuple): __B_shape = self.__B.shape
            else:                                 __B_shape = self.__B.shape()
        else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
        #
        if self.__R is None:                      __R_shape = (0,0)
        elif hasattr(self.__R,"shape"):
            if isinstance(self.__R.shape, tuple): __R_shape = self.__R.shape
            else:                                 __R_shape = self.__R.shape()
        else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
        #
        if self.__Q is None:                      __Q_shape = (0,0)
        elif hasattr(self.__Q,"shape"):
            if isinstance(self.__Q.shape, tuple): __Q_shape = self.__Q.shape
            else:                                 __Q_shape = self.__Q.shape()
        else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
        #
        if len(self.__HO) == 0:                              __HO_shape = (0,0)
        elif isinstance(self.__HO, dict):                    __HO_shape = (0,0)
        elif hasattr(self.__HO["Direct"],"shape"):
            if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
            else:                                            __HO_shape = self.__HO["Direct"].shape()
        else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
        #
        if len(self.__EM) == 0:                              __EM_shape = (0,0)
        elif isinstance(self.__EM, dict):                    __EM_shape = (0,0)
        elif hasattr(self.__EM["Direct"],"shape"):
            if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
            else:                                            __EM_shape = self.__EM["Direct"].shape()
        else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
        #
        if len(self.__CM) == 0:                              __CM_shape = (0,0)
        elif isinstance(self.__CM, dict):                    __CM_shape = (0,0)
        elif hasattr(self.__CM["Direct"],"shape"):
            if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
            else:                                            __CM_shape = self.__CM["Direct"].shape()
        else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
        #
        # Condition checks
        # ----------------
        if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
            raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
        if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
            raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
        #
        if not( min(__B_shape) == max(__B_shape) ):
            raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
        if not( min(__R_shape) == max(__R_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
        if not( min(__Q_shape) == max(__Q_shape) ):
            raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
        if not( min(__EM_shape) == max(__EM_shape) ):
            raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
        #
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
        #
        if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
            if self.__algorithmName in ["EnsembleBlue",]:
                # Special case: reshape the background into a series of members
                asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
                self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
                for member in asPersistentVector:
                    # numpy.float was removed from modern NumPy; the builtin
                    # float is strictly equivalent here (bug fix).
                    self.__Xb.store( numpy.matrix( numpy.ravel(member), float ).T )
                # NOTE(review): upstream assigns a scalar here, not a tuple;
                # kept as-is to preserve behavior.
                __Xb_shape = min(__B_shape)
            else:
                raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
        #
        if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
        #
        if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
        #
        if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
            raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
        #
        if ("Bounds" in self.__P) \
            and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
            and (len(self.__P["Bounds"]) != max(__Xb_shape)):
            raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
                %(len(self.__P["Bounds"]),max(__Xb_shape)))
        #
        return 1
1222 # ==============================================================================
class DataObserver(object):
    """
    General interface class of observer type: builds an observer function
    and attaches it to one or several algorithm variables.
    """
    def __init__(self,
                 name        = "GenericObserver",
                 onVariable  = None,
                 asTemplate  = None,
                 asString    = None,
                 asScript    = None,
                 asObsObject = None,
                 withInfo    = None,
                 scheduledBy = None,
                 withAlgo    = None,
                ):
        """
        Build the observer from a template, a string, a script or a
        ready-made object, and register it on the given variable(s).

        NOTE(review): the keyword argument lines were reconstructed from a
        garbled listing -- confirm names and order against upstream.
        """
        self.__name = str(name)
        self.__V    = None   # variable name(s)
        self.__O    = None   # observer callable
        self.__I    = None   # info string(s)
        #
        if onVariable is None:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
        elif type(onVariable) in (tuple, list):
            self.__V = tuple(map( str, onVariable ))
            if withInfo is None:
                self.__I = self.__V
            else:
                self.__I = (str(withInfo),)*len(self.__V)
        elif isinstance(onVariable, str):
            self.__V = (onVariable,)
            if withInfo is None:
                self.__I = (onVariable,)
            else:
                self.__I = (str(withInfo),)
        else:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
        #
        if asString is not None:
            __FunctionText = asString
        elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
            __FunctionText = Templates.ObserverTemplates[asTemplate]
        elif asScript is not None:
            __FunctionText = ImportFromScript(asScript).getstring()
        else:
            __FunctionText = ""
        __Function = ObserverF(__FunctionText)
        #
        if asObsObject is not None:
            self.__O = asObsObject
        else:
            self.__O = __Function.getfunc()
        #
        for k in range(len(self.__V)):
            ename = self.__V[k]
            einfo = self.__I[k]
            if ename not in withAlgo:
                raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
            else:
                withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)+"\n"+repr(self.__O)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__V)+"\n"+str(self.__O)
1293 # ==============================================================================
class State(object):
    """
    General interface class of state/vector type.
    """
    def __init__(self,
                 name               = "GenericVector",
                 asVector           = None,
                 asPersistentVector = None,
                 asScript           = None,
                 scheduledBy        = None,
                 toBeChecked        = False,
                ):
        """
        Define a vector:
        - asVector: data input, as anything compatible with the
          numpy.matrix constructor, or "True" for script input.
        - asPersistentVector: data input, as a series of vectors compatible
          with the numpy.matrix constructor, as a Persistence-type object,
          or "True" for script input.
        - asScript: if a valid script is given containing a variable named
          "name", the variable is of type "asVector" (default) or
          "asPersistentVector" depending on which of these flags is set.
        """
        self.__name  = str(name)
        self.__check = bool(toBeChecked)
        #
        self.__V = None
        self.__T = None
        self.__is_vector = False
        self.__is_series = False
        #
        if asScript is not None:
            __Vector, __Series = None, None
            if asPersistentVector:
                __Series = ImportFromScript(asScript).getvalue( self.__name )
            else:
                __Vector = ImportFromScript(asScript).getvalue( self.__name )
        else:
            __Vector, __Series = asVector, asPersistentVector
        #
        if __Vector is not None:
            self.__is_vector = True
            # numpy.float was removed from modern NumPy (>=1.24); the
            # builtin float is strictly equivalent here (bug fix).
            self.__V = numpy.matrix( numpy.asmatrix(__Vector).A1, float ).T
            self.shape = self.__V.shape
            self.size  = self.__V.size
        elif __Series is not None:
            self.__is_series = True
            if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix)):
                self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
                for member in __Series:
                    self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, float ).T )
                import sys ; sys.stdout.flush()
            else:
                self.__V = __Series
            if isinstance(self.__V.shape, (tuple, list)):
                self.shape = self.__V.shape
            else:
                self.shape = self.__V.shape()
            if len(self.shape) == 1:
                self.shape = (self.shape[0],1)
            self.size = self.shape[0] * self.shape[1]
        else:
            raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
        #
        if scheduledBy is not None:
            self.__T = scheduledBy

    def getO(self, withScheduler=False):
        "Return the stored object, together with its scheduler if required"
        if withScheduler:
            return self.__V, self.__T
        elif self.__T is None:
            return self.__V
        else:
            return self.__V

    def isvector(self):
        "Internal type check"
        return self.__is_vector

    def isseries(self):
        "Internal type check"
        return self.__is_series

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__V)
1386 # ==============================================================================
1387 class Covariance(object):
1389 Classe générale d'interface de type covariance
1392 name = "GenericCovariance",
1393 asCovariance = None,
1394 asEyeByScalar = None,
1395 asEyeByVector = None,
1398 toBeChecked = False,
1401 Permet de définir une covariance :
1402 - asCovariance : entrée des données, comme une matrice compatible avec
1403 le constructeur de numpy.matrix
1404 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1405 multiplicatif d'une matrice de corrélation identité, aucune matrice
1406 n'étant donc explicitement à donner
1407 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1408 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1409 n'étant donc explicitement à donner
1410 - asCovObject : entrée des données comme un objet python, qui a les
1411 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1412 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1413 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1414 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1415 pleine doit être vérifié
1417 self.__name = str(name)
1418 self.__check = bool(toBeChecked)
1421 self.__is_scalar = False
1422 self.__is_vector = False
1423 self.__is_matrix = False
1424 self.__is_object = False
1426 if asScript is not None:
1427 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1429 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1431 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1433 __Object = ImportFromScript(asScript).getvalue( self.__name )
1435 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1437 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
1439 if __Scalar is not None:
1440 if numpy.matrix(__Scalar).size != 1:
1441 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1442 self.__is_scalar = True
1443 self.__C = numpy.abs( float(__Scalar) )
1446 elif __Vector is not None:
1447 self.__is_vector = True
1448 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1449 self.shape = (self.__C.size,self.__C.size)
1450 self.size = self.__C.size**2
1451 elif __Matrix is not None:
1452 self.__is_matrix = True
1453 self.__C = numpy.matrix( __Matrix, float )
1454 self.shape = self.__C.shape
1455 self.size = self.__C.size
1456 elif __Object is not None:
1457 self.__is_object = True
1459 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1460 if not hasattr(self.__C,at):
1461 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1462 if hasattr(self.__C,"shape"):
1463 self.shape = self.__C.shape
1466 if hasattr(self.__C,"size"):
1467 self.size = self.__C.size
1472 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
1476 def __validate(self):
1478 if self.__C is None:
1479 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1480 if self.ismatrix() and min(self.shape) != max(self.shape):
1481 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1482 if self.isobject() and min(self.shape) != max(self.shape):
1483 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1484 if self.isscalar() and self.__C <= 0:
1485 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1486 if self.isvector() and (self.__C <= 0).any():
1487 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1488 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1490 L = numpy.linalg.cholesky( self.__C )
1492 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1493 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1495 L = self.__C.cholesky()
1497 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1500 "Vérification du type interne"
1501 return self.__is_scalar
1504 "Vérification du type interne"
1505 return self.__is_vector
1508 "Vérification du type interne"
1509 return self.__is_matrix
1512 "Vérification du type interne"
1513 return self.__is_object
1518 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1519 elif self.isvector():
1520 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1521 elif self.isscalar():
1522 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1523 elif self.isobject():
1524 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1526 return None # Indispensable
1531 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1532 elif self.isvector():
1533 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1534 elif self.isscalar():
1535 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1536 elif self.isobject():
1537 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
1540 "Décomposition de Cholesky"
1542 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1543 elif self.isvector():
1544 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1545 elif self.isscalar():
1546 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1547 elif self.isobject() and hasattr(self.__C,"cholesky"):
1548 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1550 def choleskyI(self):
1551 "Inversion de la décomposition de Cholesky"
1553 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1554 elif self.isvector():
1555 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1556 elif self.isscalar():
1557 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1558 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1559 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
1561 def diag(self, msize=None):
1562 "Diagonale de la matrice"
1564 return numpy.diag(self.__C)
1565 elif self.isvector():
1567 elif self.isscalar():
1569 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1571 return self.__C * numpy.ones(int(msize))
1572 elif self.isobject():
1573 return self.__C.diag()
1575 def asfullmatrix(self, msize=None):
1579 elif self.isvector():
1580 return numpy.matrix( numpy.diag(self.__C), float )
1581 elif self.isscalar():
1583 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1585 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1586 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1587 return self.__C.asfullmatrix()
1589 def trace(self, msize=None):
1590 "Trace de la matrice"
1592 return numpy.trace(self.__C)
1593 elif self.isvector():
1594 return float(numpy.sum(self.__C))
1595 elif self.isscalar():
1597 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1599 return self.__C * int(msize)
1600 elif self.isobject():
1601 return self.__C.trace()
1607 "x.__repr__() <==> repr(x)"
1608 return repr(self.__C)
1611 "x.__str__() <==> str(x)"
1612 return str(self.__C)
1614 def __add__(self, other):
1615 "x.__add__(y) <==> x+y"
1616 if self.ismatrix() or self.isobject():
1617 return self.__C + numpy.asmatrix(other)
1618 elif self.isvector() or self.isscalar():
1619 _A = numpy.asarray(other)
1620 _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
1621 return numpy.asmatrix(_A)
1623 def __radd__(self, other):
1624 "x.__radd__(y) <==> y+x"
1625 raise NotImplementedError("%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other)))
1627 def __sub__(self, other):
1628 "x.__sub__(y) <==> x-y"
1629 if self.ismatrix() or self.isobject():
1630 return self.__C - numpy.asmatrix(other)
1631 elif self.isvector() or self.isscalar():
1632 _A = numpy.asarray(other)
1633 _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
1634 return numpy.asmatrix(_A)
1636 def __rsub__(self, other):
1637 "x.__rsub__(y) <==> y-x"
1638 raise NotImplementedError("%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other)))
1641 "x.__neg__() <==> -x"
1644 def __mul__(self, other):
1645 "x.__mul__(y) <==> x*y"
1646 if self.ismatrix() and isinstance(other,numpy.matrix):
1647 return self.__C * other
1648 elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
1649 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1650 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1651 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1652 return self.__C * numpy.asmatrix(other)
1654 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
1655 elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
1656 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1657 return numpy.asmatrix(self.__C * numpy.ravel(other)).T
1658 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1659 return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
1661 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1662 elif self.isscalar() and isinstance(other,numpy.matrix):
1663 return self.__C * other
1664 elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
1665 if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
1666 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1668 return self.__C * numpy.asmatrix(other)
1669 elif self.isobject():
1670 return self.__C.__mul__(other)
1672 raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
1674 def __rmul__(self, other):
1675 "x.__rmul__(y) <==> y*x"
1676 if self.ismatrix() and isinstance(other,numpy.matrix):
1677 return other * self.__C
1678 elif self.isvector() and isinstance(other,numpy.matrix):
1679 if numpy.ravel(other).size == self.shape[0]: # Vecteur
1680 return numpy.asmatrix(numpy.ravel(other) * self.__C)
1681 elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrice
1682 return numpy.asmatrix(numpy.array(other) * self.__C)
1684 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1685 elif self.isscalar() and isinstance(other,numpy.matrix):
1686 return other * self.__C
1687 elif self.isobject():
1688 return self.__C.__rmul__(other)
1690 raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
1693 "x.__len__() <==> len(x)"
1694 return self.shape[0]
1696 # ==============================================================================
class ObserverF(object):
    """
    Creation of an observer function from its source text.

    The given text is executed each time the observer is triggered, with
    the observed variable and an information string in scope.
    """
    def __init__(self, corps=""):
        # corps: Python source text executed by the observer
        self.__corps = corps
    def func(self, var, info):
        "Observation function: execute the stored source text"
        exec(self.__corps)
    def getfunc(self):
        "Return the function pointer stored in the object"
        return self.func
1710 # ==============================================================================
class CaseLogger(object):
    """
    Record the commands used to create a case, and replay them through a
    registered viewer (dump) or loader (load) for a given format.
    """
    def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
        self.__name      = str(__name)
        self.__objname   = str(__objname)
        self.__logSerie  = []
        self.__switchoff = False
        # BUG FIX: the original aliased viewers and loaders to the SAME
        # dict (`self.__viewers = self.__loaders = {...}`), so updating one
        # through __addViewers/__addLoaders silently mutated the other.
        # Each registry now gets its own copy.
        __default = {
            "TUI" : _TUIViewer,
            "DCT" : _DCTViewer,
            "SCD" : _SCDViewer,
            "XML" : _XMLViewer,
            "YACS": _YACSViewer,
            }
        self.__viewers = dict(__default)
        self.__loaders = dict(__default)
        if __addViewers is not None:
            self.__viewers.update(dict(__addViewers))
        if __addLoaders is not None:
            self.__loaders.update(dict(__addLoaders))
    def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Record one individual command"
        if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
            if "self" in __keys: __keys.remove("self")
            self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
            # A command registered with switchoff pauses further recording
            # until a command is registered without it.
            if __switchoff:
                self.__switchoff = True
        if not __switchoff:
            self.__switchoff = False
    def dump(self, __filename=None, __format="TUI"):
        "Normalized dump of the commands (through the *GenericCaseViewer)"
        if __format in self.__viewers:
            __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
        else:
            raise ValueError("Dumping as \"%s\" is not available"%__format)
        return __formater.dump(__filename)
    def load(self, __filename=None, __format="TUI"):
        "Normalized load of the commands"
        if __format in self.__loaders:
            __formater = self.__loaders[__format]()
        else:
            raise ValueError("Loading as \"%s\" is not available"%__format)
        return __formater.load(__filename)
1757 # ==============================================================================
class GenericCaseViewer(object):
    """
    Management of the commands for building a view of a case.

    Base class for the concrete viewers (_TUIViewer, _DCTViewer, ...):
    each subclass renders the recorded command serie in one output format.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the header"
        self._name         = str(__name)
        self._objname      = str(__objname)
        self._lineSerie    = []
        self._switchoff    = False
        self._numobservers = 2
        self._content      = __content
        # Template inserted when a value is a callable that can not be
        # serialized: it forces the user to define the variable by hand.
        self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
    def _append(self, *__args):
        "Transform one individual command into one record (subclass hook)"
        raise NotImplementedError()
    def _extract(self, *__args):
        "Transform one record into one individual command (subclass hook)"
        raise NotImplementedError()
    def _finalize(self):
        "Record the footer (default: nothing to add)"
        pass
    def _addLine(self, line=""):
        "Append one individual record"
        self._lineSerie.append(line)
    def dump(self, __filename=None):
        "Normalized dump: return the full text, optionally written to a file"
        self._finalize()
        __text  = "\n".join(self._lineSerie)
        __text += "\n"
        if __filename is not None:
            __file = os.path.abspath(__filename)
            # Context manager guarantees the handle is closed even on error.
            with open(__file, "w") as __fid:
                __fid.write(__text)
        return __text
    def load(self, __filename=None):
        "Normalized load of the commands"
        if os.path.exists(__filename):
            self._content = open(__filename, 'r').read()
        __commands = self._extract(self._content)
        return __commands
class _TUIViewer(GenericCaseViewer):
    """
    Establish the commands of a TUI case (Python script using adaoBuilder).
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the script header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO TUI\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("import adaoBuilder")
        self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform one individual command into one record"
        if __command is not None and __keys is not None and __local is not None:
            __text = ""
            if __pre is not None:
                __text += "%s = "%__pre
            __text += "%s.%s( "%(self._objname, str(__command))
            if "self" in __keys: __keys.remove("self")
            if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
            for k in __keys:
                __v = __local[k]
                # Skip unset or default-valued bookkeeping arguments.
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v, Persistence.Persistence): __v = __v.values()
                # Callables can not be serialized: prepend a guard asking the
                # user to define the variable before running the script.
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v, dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Temporarily widen numpy printing so repr() is lossless.
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "%s=%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # BUG FIX: str.rstrip returns a new string; the original call
            # discarded its result, leaving a trailing ", " in the output.
            __text = __text.rstrip(", ")
            __text += " )"
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transform one record back into one individual command"
        __is_case  = False
        __commands = []
        __content  = __content.replace("\r\n","\n")
        for line in __content.split("\n"):
            if "adaoBuilder.New" in line and "=" in line:
                # The case object name is whatever the script binds it to.
                self._objname = line.split("=")[0].strip()
                __is_case = True
            if not __is_case:
                continue
            else:
                if self._objname+".set" in line:
                    __commands.append( line.replace(self._objname+".","",1) )
        return __commands
class _DCTViewer(GenericCaseViewer):
    """
    Establish the commands of a DCT case (dictionary-based case script).
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the script header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._observerIndex = 0
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO DCT\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("#")
        self._addLine("%s = {}"%__objname)
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform one individual command into one record"
        if __command is not None and __keys is not None and __local is not None:
            __text = ""
            if "execute" in __command: return
            if "self" in __keys: __keys.remove("self")
            if __command in ("set","get") and "Concept" in __keys:
                __key = __local["Concept"]
                __keys.remove("Concept")
            else:
                __key = __command.replace("set","").replace("get","")
            if "Observer" in __key and 'Variable' in __keys:
                # Observers are stored under distinct, numbered keys.
                self._observerIndex += 1
                __key += "_%i"%self._observerIndex
            __text += "%s['%s'] = {"%(self._objname,str(__key))
            for k in __keys:
                __v = __local[k]
                # Skip unset or default-valued bookkeeping arguments.
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v, Persistence.Persistence): __v = __v.values()
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v, dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Temporarily widen numpy printing so repr() is lossless.
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "'%s':%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # BUG FIX: str.rstrip returns a new string; the original call
            # discarded its result, leaving a trailing ", " in the output.
            __text  = __text.rstrip(", ").rstrip()
            __text += "}"
            if __text[-2:] == "{}": return # Drop *Debug commands and empty variables
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transform one record back into one individual command"
        __commands = []
        __content  = __content.replace("\r\n","\n")
        exec(__content)
        self._objdata = None
        __getlocals = locals()
        for k in __getlocals:
            try:
                # The case dictionary is the one holding 'AlgorithmParameters'.
                if 'AlgorithmParameters' in __getlocals[k] and type(__getlocals[k]) is dict:
                    self._objname = k
                    self._objdata = __getlocals[k]
            except Exception:
                # Non-container locals can not answer the membership test.
                continue
        if self._objdata is None:
            raise ValueError("Impossible to load given content as a ADAO DCT one (no 'AlgorithmParameters' key found).")
        for k in self._objdata:
            if 'Observer_' in k:
                __command = k.split('_',1)[0]
            else:
                __command = k
            __arguments = ["%s = %s"%(__ck,repr(__cv)) for __ck,__cv in self._objdata[k].items()]
            __commands.append( "set( Concept='%s', %s )"%(__command, ", ".join(__arguments)))
        __commands.sort() # To start with 'AlgorithmParameters'
        return __commands
class _SCDViewer(GenericCaseViewer):
    """
    Establish the commands of an SCD case (Study Config Dictionary).

    NOTE(review): several structural lines (initializers, else branches)
    were dropped by the pasted listing; they are restored here from the
    visible control flow — confirm against the reference implementation.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the header of the study configuration script"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Input for ADAO converter to YACS\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("#")
        self._addLine("study_config = {}")
        self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
        self._addLine("study_config['Name'] = '%s'"%self._name)
        self._addLine("observers = {}")
        self._addLine("study_config['Observers'] = observers")
        self._addLine("#")
        self._addLine("inputvariables_config = {}")
        self._addLine("inputvariables_config['Order'] =['adao_default']")
        self._addLine("inputvariables_config['adao_default'] = -1")
        self._addLine("study_config['InputVariables'] = inputvariables_config")
        self._addLine("#")
        self._addLine("outputvariables_config = {}")
        self._addLine("outputvariables_config['Order'] = ['adao_default']")
        self._addLine("outputvariables_config['adao_default'] = -1")
        self._addLine("study_config['OutputVariables'] = outputvariables_config")
        if __content is not None:
            for command in __content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform one individual command into one record of the SCD script"
        # Normalize the command name: 'set' carries the concept in __local,
        # otherwise strip the leading 'set' prefix (None is left untouched
        # and rejected by the guard just below).
        if __command == "set":
            __command = __local["Concept"]
        elif __command is not None:
            __command = __command.replace("set", "", 1)
        #
        __text = None
        if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get'):
            return
        elif __command in ['Debug', 'setDebug']:
            __text  = "#\nstudy_config['Debug'] = '1'"
        elif __command in ['NoDebug', 'setNoDebug']:
            __text  = "#\nstudy_config['Debug'] = '0'"
        elif __command in ['Observer', 'setObserver']:
            __obs   = __local['Variable']
            self._numobservers += 1
            __text  = "#\n"
            __text += "observers['%s'] = {}\n"%__obs
            if __local['String'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
            if __local['Script'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
                __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
            if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
            if __local['Info'] is not None:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
            else:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
            __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
        elif __local is not None: # __keys is not None and
            # Temporarily widen numpy printing so repr() is lossless.
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            __text  = "#\n"
            __text += "%s_config = {}\n"%__command
            if 'self' in __local: __local.pop('self')
            __to_be_removed = []
            for __k,__v in __local.items():
                if __v is None: __to_be_removed.append(__k)
            for __k in __to_be_removed:
                __local.pop(__k)
            for __k,__v in __local.items():
                if __k == "Concept": continue
                if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
                if __k == 'Algorithm':
                    __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
                elif __k == 'Script':
                    __k = 'Script'
                    __f = 'Script'
                    __v = "'"+repr(__v)+"'"
                    for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
                        if __lk in __local and __local[__lk]: __k = __lk
                    if __command == "AlgorithmParameters": __k = "Dict"
                    if 'OneFunction' in __local and __local['OneFunction']:
                        __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
                        __k = 'Function'
                        __f = 'ScriptWithOneFunction'
                        __v = '%s_ScriptWithOneFunction'%(__command,)
                    if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
                        __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __k = 'Function'
                        __f = 'ScriptWithFunctions'
                        __v = '%s_ScriptWithFunctions'%(__command,)
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,__f)
                    __text += "%s_config['Data'] = %s\n"%(__command,__v)
                    __text = __text.replace("''","'")
                elif __k in ('Stored', 'Checked'):
                    # Only recorded when explicitly requested.
                    if bool(__v):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                elif __k in ('AvoidRC', 'noDetails'):
                    # Only recorded when explicitly disabled.
                    if not bool(__v):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                else:
                    if __k == 'Parameters': __k = "Dict"
                    if isinstance(__v, Persistence.Persistence): __v = __v.values()
                    if callable(__v): __text = self._missing%__v.__name__+__text
                    if isinstance(__v, dict):
                        for val in __v.values():
                            if callable(val): __text = self._missing%val.__name__+__text
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,'String')
                    __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
            __text += "study_config['%s'] = %s_config"%(__command,__command)
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            if __switchoff:
                self._switchoff = True
        if __text is not None: self._addLine(__text)
        if not __switchoff:
            self._switchoff = False
    def _finalize(self):
        "Record the final user post-analysis block"
        self.__loadVariablesByScript()
        self._addLine("#")
        self._addLine("Analysis_config = {}")
        self._addLine("Analysis_config['From'] = 'String'")
        self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
        self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
        self._addLine("print 'Analysis:',xa\"\"\"")
        self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
    def __loadVariablesByScript(self):
        "Resolve script-provided algorithm information by executing the serie"
        __ExecVariables = {} # Needed to retrieve the resulting variable
        exec("\n".join(self._lineSerie), __ExecVariables)
        study_config = __ExecVariables['study_config']
        if 'Algorithm' in study_config:
            self.__hasAlgorithm = True
        else:
            self.__hasAlgorithm = False
        if not self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" in study_config['AlgorithmParameters'] and \
                "Data" in study_config['AlgorithmParameters'] and \
                study_config['AlgorithmParameters']['From'] == 'Script':
            # The algorithm name has to be read from the user script.
            __asScript = study_config['AlgorithmParameters']['Data']
            __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
            __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
            self._addLine(__text)
        if self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" not in study_config['AlgorithmParameters'] and \
                "Data" not in study_config['AlgorithmParameters']:
            # An algorithm without parameters still needs an empty Dict entry.
            __text  = "#\n"
            __text += "AlgorithmParameters_config = {}\n"
            __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
            __text += "AlgorithmParameters_config['From'] = 'String'\n"
            __text += "AlgorithmParameters_config['Data'] = '{}'\n"
            self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Establish the commands for building an XML case.

    Placeholder: the XML output format is not implemented yet, so any
    instantiation fails explicitly.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        raise NotImplementedError()
class _YACSViewer(GenericCaseViewer):
    """
    Establish the commands for building a YACS case.

    The YACS XML schema is produced from an internal SCD view, either
    in memory or through the external AdaoYacsSchemaCreator converter.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialize and register the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self.__internalSCD = _SCDViewer(__name, __objname, __content)
        self._append = self.__internalSCD._append
    def dump(self, __filename=None, __convertSCDinMemory=True):
        "Normalized dump of the commands as a YACS XML schema file"
        self.__internalSCD._finalize()
        # -----
        if __filename is None:
            raise ValueError("A file name has to be given for YACS XML output.")
        # -----
        if not PlatformInfo.has_salome or \
            not PlatformInfo.has_adao:
            raise ImportError("\n\n"+\
                "Unable to get SALOME or ADAO environnement variables.\n"+\
                "Please load the right environnement before trying to use it.\n")
        elif __convertSCDinMemory:
            # In-memory conversion: feed the SCD text straight to the creator.
            __file    = os.path.abspath(__filename)
            __SCDdump = self.__internalSCD.dump()
            if os.path.isfile(__file) or os.path.islink(__file):
                os.remove(__file)
            from daYacsSchemaCreator.run import create_schema_from_content
            create_schema_from_content(__SCDdump, __file)
        else:
            # File-based conversion through the external converter script.
            __file    = os.path.abspath(__filename)
            __SCDfile = __file[:__file.rfind(".")] + '_SCD.py'
            __SCDdump = self.__internalSCD.dump(__SCDfile)
            if os.path.isfile(__file) or os.path.islink(__file):
                os.remove(__file)
            __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
            __args = ["python", __converterExe, __SCDfile, __file]
            import subprocess
            __p = subprocess.Popen(__args)
            (__stdoutdata, __stderrdata) = __p.communicate()
            os.remove(__SCDfile)
        # -----
        if not os.path.exists(__file):
            __msg  = "An error occured during the ADAO YACS Schema build for\n"
            __msg += "the target output file:\n"
            __msg += "  %s\n"%__file
            __msg += "See errors details in your launching terminal log.\n"
            raise ValueError(__msg)
        # -----
        # Context manager guarantees the handle is closed even on error.
        with open(__file, "r") as __fid:
            __text = __fid.read()
        return __text
2190 # ==============================================================================
class ImportFromScript(object):
    """
    Obtain a named variable from an imported script file.
    """
    def __init__(self, __filename=None):
        "Check the existence of the script and import it"
        # BUG FIX: the None check must come before any string operation on
        # the name (the original called .rstrip on it first, which raised
        # AttributeError instead of the intended ValueError).
        if __filename is None:
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        # BUG FIX: str.rstrip(".py") strips *characters*, so a module named
        # e.g. "apy.py" was mangled to "a"; only the real extension is
        # removed now.
        if __filename.endswith(".py"):
            self.__filename = __filename[:-3]
        else:
            self.__filename = __filename
        if not os.path.isfile(str(self.__filename)+".py"):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n  \"%s\""%self.__filename)
        self.__scriptfile = __import__(self.__filename, globals(), locals(), [])
        with open(self.__filename+".py", 'r') as __fid:
            self.__scriptstring = __fid.read()
    def getvalue(self, __varname=None, __synonym=None ):
        "Return the requested variable, or its synonym if the name is absent"
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__synonym))
            else:
                return getattr(self.__scriptfile, __synonym)
        else:
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Return the complete script text"
        return self.__scriptstring
2221 # ==============================================================================
def CostFunction3D(_x,
                   _Hm  = None,   # To simulate Hm(x) : HO["Direct"].appliedTo
                   _HmX = None,   # Already computed simulation of Hm(x)
                   _arg = None,   # Extra arguments for Hm, as a tuple
                   _BI  = None,   # Background error covariance inverse
                   _RI  = None,   # Observation error covariance inverse
                   _Xb  = None,   # Background state
                   _Y   = None,   # Observations
                   _SIV = False,  # To be removed for 8.0
                   _SSC = None,   # self._parameters["StoreSupplementaryCalculations"]
                   _nPS = 0,      # nbPreviousSteps
                   _QM  = "DA",   # QualityMeasure
                   _SSV = None,   # Input and/or output: self.StoredVariables
                   _fRt = False,  # Whether to return the extended output
                   _sSc = True,   # Whether to store the supplementary calculations
                  ):
    """
    General cost function useful for the static/3D algorithms: 3DVAR, BLUE
    and derivatives, Kalman and derivatives, LeastSquares, SamplingTest,
    PSO, SA, Tabu, DFO, QuantileRegression.

    Returns the cost J (or the simulation _HX for "QR", or the whole
    storage _SSV when _fRt is True).
    """
    # BUG FIX: the original used mutable default arguments (_SSC=[],
    # _SSV={}) which are shared across calls; normalized here, with the
    # same observable behavior for every explicit caller.
    if _SSC is None: _SSC = []
    if _SSV is None: _SSV = {}
    #
    if not _sSc:
        _SIV = False
        _SSC = {}
    else:
        for k in ["CostFunctionJ",
                  "CostFunctionJb",
                  "CostFunctionJo",
                  "CurrentOptimum",
                  "CurrentState",
                  "IndexOfOptimum",
                  "SimulatedObservationAtCurrentOptimum",
                  "SimulatedObservationAtCurrentState",
                 ]:
            if k not in _SSV:
                _SSV[k] = []
            if hasattr(_SSV[k],"store"):
                _SSV[k].append = _SSV[k].store # To use "append" instead of "store"
    #
    _X  = numpy.asmatrix(numpy.ravel( _x )).T
    if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
        _SSV["CurrentState"].append( _X )
    #
    # Use the already-computed simulation when given, otherwise apply the
    # operator (with its extra arguments when provided).
    if _HmX is not None:
        _HX = _HmX
    else:
        if _Hm is None:
            raise ValueError("COSTFUNCTION3D Operator has to be defined.")
        if _arg is None:
            _HX = _Hm( _X )
        else:
            _HX = _Hm( _X, *_arg )
    _HX = numpy.asmatrix(numpy.ravel( _HX )).T
    #
    if "SimulatedObservationAtCurrentState" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentState"].append( _HX )
    #
    if numpy.any(numpy.isnan(_HX)):
        Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
    else:
        _Y = numpy.asmatrix(numpy.ravel( _Y )).T
        if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
            if _BI is None or _RI is None:
                raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
            _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
            Jb  = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
            if _RI is None:
                raise ValueError("Observation error covariance matrix has to be properly defined!")
            Jb  = 0.
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["LeastSquares", "LS", "L2"]:
            Jb  = 0.
            Jo  = 0.5 * (_Y - _HX).T * (_Y - _HX)
        elif _QM in ["AbsoluteValue", "L1"]:
            Jb  = 0.
            Jo  = numpy.sum( numpy.abs(_Y - _HX) )
        elif _QM in ["MaximumError", "ME"]:
            Jb  = 0.
            Jo  = numpy.max( numpy.abs(_Y - _HX) )
        elif _QM in ["QR", "Null"]:
            Jb  = 0.
            Jo  = 0.
        else:
            raise ValueError("Unknown asked quality measure!")
        #
        J   = float( Jb ) + float( Jo )
    #
    if _sSc:
        _SSV["CostFunctionJb"].append( Jb )
        _SSV["CostFunctionJo"].append( Jo )
        _SSV["CostFunctionJ" ].append( J )
    #
    if "IndexOfOptimum" in _SSC or \
       "CurrentOptimum" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        # The optimum is searched only over the current algorithm steps.
        IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
        if "IndexOfOptimum" in _SSC:
            _SSV["IndexOfOptimum"].append( IndexMin )
        if "CurrentOptimum" in _SSC:
            _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
        if "SimulatedObservationAtCurrentOptimum" in _SSC:
            _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
    #
    if _fRt:
        return _SSV
    else:
        if _QM in ["QR"]: # For the QuantileRegression
            return _HX
        else:
            return J
2336 # ==============================================================================
# Self-diagnostic entry point when the module is run as a script.
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')