1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelé par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
41 class CacheManager(object):
# NOTE(review): this is an elided listing — the leading numbers are original
# file line numbers and several intermediate lines are missing; a
# behavior-preserving rewrite is therefore unsafe, so only comments are added.
# Purpose: bounded cache of operator evaluations. Each entry stores the
# flattened input point, its computed result, and the input point's norm, so
# that wasCalculatedIn() can detect near-duplicate evaluation requests.
43 Classe générale de gestion d'un cache de calculs
# "lenght" below is a historical misspelling of "length"; it is part of the
# public keyword interface used by callers, so it must not be renamed here.
46 toleranceInRedundancy = 1.e-18,
47 lenghtOfRedundancy = -1,
50 Les caractéristiques de tolérance peuvent être modifées à la création.
52 self.__tolerBP = float(toleranceInRedundancy)
# -1 means "auto-size later": storeValueInX() replaces it with 2*size + 2.
53 self.__lenghtOR = int(lenghtOfRedundancy)
54 self.__initlnOR = self.__lenghtOR
59 self.__listOPCV = [] # Operator Previous Calculated Points, Results, Point Norms
60 # logging.debug("CM Tolerance de determination des doublons : %.2e", self.__tolerBP)
62 def wasCalculatedIn(self, xValue ): #, info="" ):
63 "Vérifie l'existence d'un calcul correspondant à la valeur"
# Scan the cache backwards (most recent entry first) over at most __lenghtOR
# entries; a hit requires an identical size and a distance below __tolerBP
# scaled by the stored point's norm (tuple index [2]).
66 for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
67 if xValue.size != self.__listOPCV[i][0].size:
68 # logging.debug("CM Différence de la taille %s de X et de celle %s du point %i déjà calculé", xValue.shape,i,self.__listOPCP[i].shape)
70 if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
72 __HxV = self.__listOPCV[i][1]
73 # logging.debug("CM Cas%s déja calculé, portant le numéro %i", info, i)
# (Return statements are elided in this listing — presumably a
# (found_flag, cached_value) pair; confirm against the full source.)
77 def storeValueInX(self, xValue, HxValue ):
78 "Stocke un calcul correspondant à la valeur"
# Auto-size the cache on first use, then evict oldest entries (FIFO)
# until the configured length is respected, before appending the new one.
79 if self.__lenghtOR < 0:
80 self.__lenghtOR = 2 * xValue.size + 2
81 self.__initlnOR = self.__lenghtOR
82 while len(self.__listOPCV) > self.__lenghtOR:
83 # logging.debug("CM Réduction de la liste des cas à %i éléments par suppression du premier", self.__lenghtOR)
84 self.__listOPCV.pop(0)
85 self.__listOPCV.append( (
86 copy.copy(numpy.ravel(xValue)),
88 numpy.linalg.norm(xValue),
# The two lines below belong to elided enable/disable methods that save or
# restore the cache length; their surrounding "def" lines are missing here.
93 self.__initlnOR = self.__lenghtOR
98 self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
101 class Operator(object):
# NOTE(review): elided listing — intermediate original lines are missing, so
# only comments are added; code is left byte-identical.
# Purpose: wraps either a Python callable ("Method") or a matrix ("Matrix")
# behind a uniform apply interface, with per-instance and class-wide call
# counters and an optional shared redundancy cache (Operator.CM).
103 Classe générale d'interface de type opérateur simple
110 def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
112 On construit un objet de ce type en fournissant à l'aide de l'un des
113 deux mots-clé, soit une fonction python, soit une matrice.
115 - fromMethod : argument de type fonction Python
116 - fromMatrix : argument adapté au constructeur numpy.matrix
117 - avoidingRedundancy : évite ou pas les calculs redondants
119 self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
120 self.__AvoidRC = bool( avoidingRedundancy )
121 if fromMethod is not None:
122 self.__Method = fromMethod # logtimer(fromMethod)
124 self.__Type = "Method"
125 elif fromMatrix is not None:
# NOTE(review): numpy.float is a deprecated alias removed in NumPy >= 1.24,
# and numpy.matrix itself is deprecated in favor of ndarray — a migration is
# needed, but it cannot be done safely from this elided view.
127 self.__Matrix = numpy.matrix( fromMatrix, numpy.float )
128 self.__Type = "Matrix"
134 def disableAvoidingRedundancy(self):
136 Operator.CM.disable()
138 def enableAvoidingRedundancy(self):
# NOTE(review): the visible branch of the *enable* method calls disable();
# presumably an elided conditional (on self.__AvoidRC) enables the cache in
# the missing lines — confirm against the full source.
143 Operator.CM.disable()
149 def appliedTo(self, xValue, HValue = None):
151 Permet de restituer le résultat de l'application de l'opérateur à un
152 argument xValue. Cette méthode se contente d'appliquer, son argument
153 devant a priori être du bon type.
155 - xValue : argument adapté pour appliquer l'opérateur
# If a precomputed H(x) is supplied, store it in the cache instead of
# evaluating; otherwise consult the cache, then fall back to matrix product
# or method call, and record the fresh result.
157 if HValue is not None:
158 HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
160 Operator.CM.storeValueInX(xValue,HxValue)
163 __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
165 __alreadyCalculated = False
167 if __alreadyCalculated:
168 self.__addOneCacheCall()
171 if self.__Matrix is not None:
172 self.__addOneMatrixCall()
173 HxValue = self.__Matrix * xValue
175 self.__addOneMethodCall()
176 HxValue = self.__Method( xValue )
178 Operator.CM.storeValueInX(xValue,HxValue)
182 def appliedControledFormTo(self, paire ):
184 Permet de restituer le résultat de l'application de l'opérateur à une
185 paire (xValue, uValue). Cette méthode se contente d'appliquer, son
186 argument devant a priori être du bon type. Si la uValue est None,
187 on suppose que l'opérateur ne s'applique qu'à xValue.
189 - xValue : argument X adapté pour appliquer l'opérateur
190 - uValue : argument U adapté pour appliquer l'opérateur
192 assert len(paire) == 2, "Incorrect number of arguments"
193 xValue, uValue = paire
194 if self.__Matrix is not None:
195 self.__addOneMatrixCall()
196 return self.__Matrix * xValue
197 elif uValue is not None:
198 self.__addOneMethodCall()
199 return self.__Method( (xValue, uValue) )
201 self.__addOneMethodCall()
202 return self.__Method( xValue )
204 def appliedInXTo(self, paire ):
206 Permet de restituer le résultat de l'application de l'opérateur à un
207 argument xValue, sachant que l'opérateur est valable en xNominal.
208 Cette méthode se contente d'appliquer, son argument devant a priori
209 être du bon type. Si l'opérateur est linéaire car c'est une matrice,
210 alors il est valable en tout point nominal et il n'est pas nécessaire
212 Arguments : une liste contenant
213 - xNominal : argument permettant de donner le point où l'opérateur
214 est construit pour etre ensuite appliqué
215 - xValue : argument adapté pour appliquer l'opérateur
217 assert len(paire) == 2, "Incorrect number of arguments"
218 xNominal, xValue = paire
219 if self.__Matrix is not None:
220 self.__addOneMatrixCall()
221 return self.__Matrix * xValue
223 self.__addOneMethodCall()
224 return self.__Method( (xNominal, xValue) )
226 def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
228 Permet de renvoyer l'opérateur sous la forme d'une matrice
230 if self.__Matrix is not None:
231 self.__addOneMatrixCall()
# NOTE(review): "is not" compares object identity, not equality, and with a
# string literal it is implementation-dependent (SyntaxWarning on Python
# 3.8+); it should be `!=`. Left untouched because neighbouring lines of
# this method are elided. A sentinel `object()` would be the robust fix.
233 elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
234 self.__addOneMethodCall()
235 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
237 raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
241 Renvoie la taille sous forme numpy si l'opérateur est disponible sous
242 la forme d'une matrice
244 if self.__Matrix is not None:
245 return self.__Matrix.shape
247 raise ValueError("Matrix form of the operator is not available, nor the shape")
249 def nbcalls(self, which=None):
251 Renvoie les nombres d'évaluations de l'opérateur
# __nbcalls is a local tuple built in an elided line; name mangling inside
# the class turns it into _Operator__nbcalls consistently, so this works.
254 self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
255 self.__NbCallsAsMatrix,
256 self.__NbCallsAsMethod,
257 self.__NbCallsOfCached,
258 Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
259 Operator.NbCallsAsMatrix,
260 Operator.NbCallsAsMethod,
261 Operator.NbCallsOfCached,
263 if which is None: return __nbcalls
264 else: return __nbcalls[which]
266 def __addOneMatrixCall(self):
267 "Comptabilise un appel"
268 self.__NbCallsAsMatrix += 1 # Decompte local
269 Operator.NbCallsAsMatrix += 1 # Decompte global
271 def __addOneMethodCall(self):
272 "Comptabilise un appel"
273 self.__NbCallsAsMethod += 1 # Decompte local
274 Operator.NbCallsAsMethod += 1 # Decompte global
276 def __addOneCacheCall(self):
277 "Comptabilise un appel"
278 self.__NbCallsOfCached += 1 # Decompte local
279 Operator.NbCallsOfCached += 1 # Decompte global
281 # ==============================================================================
282 class FullOperator(object):
# NOTE(review): heavily elided listing — only comments are added here.
# Purpose: builds the triple of Operator objects (Direct / Tangent / Adjoint)
# from one of several input forms: a matrix, one function with approximated
# derivatives (finite differences via FDApproximation), three explicit
# functions, or a user script imported at runtime.
284 Classe générale d'interface de type opérateur complet
285 (Direct, Linéaire Tangent, Adjoint)
288 name = "GenericFullOperator",
290 asOneFunction = None, # Fonction
291 asThreeFunctions = None, # Dictionnaire de fonctions
293 asDict = None, # Parameters
300 self.__name = str(name)
301 self.__check = bool(toBeChecked)
# Map user-facing parameter names to the internal "with*" keys consumed by
# the finite-difference machinery below.
306 if (asDict is not None) and isinstance(asDict, dict):
307 __Parameters.update( asDict )
308 if "DifferentialIncrement" in asDict:
309 __Parameters["withIncrement"] = asDict["DifferentialIncrement"]
310 if "CenteredFiniteDifference" in asDict:
311 __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
312 if "EnableMultiProcessing" in asDict:
313 __Parameters["withmpEnabled"] = asDict["EnableMultiProcessing"]
314 if "NumberOfProcesses" in asDict:
315 __Parameters["withmpWorkers"] = asDict["NumberOfProcesses"]
# Script form: the operator definitions are pulled out of a user script by
# name ("DirectOperator", "TangentOperator", "AdjointOperator").
317 if asScript is not None:
318 __Matrix, __Function = None, None
320 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
322 __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
323 __Function.update({"useApproximatedDerivatives":True})
324 __Function.update(__Parameters)
325 elif asThreeFunctions:
327 "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
328 "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
329 "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
331 __Function.update(__Parameters)
334 if asOneFunction is not None:
335 if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
336 if asOneFunction["Direct"] is not None:
337 __Function = asOneFunction
339 raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
341 __Function = { "Direct":asOneFunction }
342 __Function.update({"useApproximatedDerivatives":True})
343 __Function.update(__Parameters)
# Three-function form: a full Tangent/Adjoint pair takes precedence over a
# Direct-only dict (which falls back to approximated derivatives).
344 elif asThreeFunctions is not None:
345 if isinstance(asThreeFunctions, dict) and \
346 ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
347 ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
348 (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
349 __Function = asThreeFunctions
350 elif isinstance(asThreeFunctions, dict) and \
351 ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
352 __Function = asThreeFunctions
353 __Function.update({"useApproximatedDerivatives":True})
355 raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
356 if "Direct" not in asThreeFunctions:
357 __Function["Direct"] = asThreeFunctions["Tangent"]
358 __Function.update(__Parameters)
362 # if sys.version_info[0] < 3 and isinstance(__Function, dict):
363 # for k in ("Direct", "Tangent", "Adjoint"):
364 # if k in __Function and hasattr(__Function[k],"__class__"):
365 # if type(__Function[k]) is type(self.__init__):
366 # raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
368 if appliedInX is not None and isinstance(appliedInX, dict):
369 __appliedInX = appliedInX
370 elif appliedInX is not None:
371 __appliedInX = {"HXb":appliedInX}
375 if scheduledBy is not None:
376 self.__T = scheduledBy
# Finite-difference path: fill in every missing "with*" option with its
# default before instantiating FDApproximation.
378 if isinstance(__Function, dict) and \
379 ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
380 ("Direct" in __Function) and (__Function["Direct"] is not None):
381 if "withCenteredDF" not in __Function: __Function["withCenteredDF"] = False
382 if "withIncrement" not in __Function: __Function["withIncrement"] = 0.01
383 if "withdX" not in __Function: __Function["withdX"] = None
384 if "withAvoidingRedundancy" not in __Function: __Function["withAvoidingRedundancy"] = True
385 if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
386 if "withLenghtOfRedundancy" not in __Function: __Function["withLenghtOfRedundancy"] = -1
387 if "withmpEnabled" not in __Function: __Function["withmpEnabled"] = False
388 if "withmpWorkers" not in __Function: __Function["withmpWorkers"] = None
389 from daNumerics.ApproximatedDerivatives import FDApproximation
390 FDA = FDApproximation(
391 Function = __Function["Direct"],
392 centeredDF = __Function["withCenteredDF"],
393 increment = __Function["withIncrement"],
394 dX = __Function["withdX"],
395 avoidingRedundancy = __Function["withAvoidingRedundancy"],
396 toleranceInRedundancy = __Function["withToleranceInRedundancy"],
397 lenghtOfRedundancy = __Function["withLenghtOfRedundancy"],
398 mpEnabled = __Function["withmpEnabled"],
399 mpWorkers = __Function["withmpWorkers"],
401 self.__FO["Direct"] = Operator( fromMethod = FDA.DirectOperator, avoidingRedundancy = avoidRC )
402 self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
403 self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
404 elif isinstance(__Function, dict) and \
405 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
406 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
407 self.__FO["Direct"] = Operator( fromMethod = __Function["Direct"], avoidingRedundancy = avoidRC )
408 self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
409 self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
# Matrix path: the adjoint of a linear operator is its transpose.
# NOTE(review): numpy.float is removed in NumPy >= 1.24 (see Operator above).
410 elif asMatrix is not None:
411 __matrice = numpy.matrix( __Matrix, numpy.float )
412 self.__FO["Direct"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
413 self.__FO["Tangent"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
414 self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
417 raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
# Normalize each AppliedInX entry into a column numpy.matrix, whatever the
# user supplied (matrix, 2D array, or plain sequence).
419 if __appliedInX is not None:
420 self.__FO["AppliedInX"] = {}
421 if not isinstance(__appliedInX, dict):
422 raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
423 for key in list(__appliedInX.keys()):
424 if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
425 # Pour le cas où l'on a une vraie matrice
426 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
427 elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
428 # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
429 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
431 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
433 self.__FO["AppliedInX"] = None
# NOTE(review): __repr__ below returns repr(self.__V), but no __V attribute
# is visible in this class — possibly a copy-paste from a vector container
# class; confirm against the full source.
439 "x.__repr__() <==> repr(x)"
440 return repr(self.__V)
443 "x.__str__() <==> str(x)"
446 # ==============================================================================
447 class Algorithm(object):
# NOTE(review): elided listing — only comments are added; code untouched.
# Purpose: base class for elementary assimilation algorithms. It provides a
# dictionary of pre-declared Persistence storage variables, parameter
# declaration/validation, and pre/post-run bookkeeping; subclasses implement
# the "run" method.
449 Classe générale d'interface de type algorithme
451 Elle donne un cadre pour l'écriture d'une classe élémentaire d'algorithme
452 d'assimilation, en fournissant un container (dictionnaire) de variables
453 persistantes initialisées, et des méthodes d'accès à ces variables stockées.
455 Une classe élémentaire d'algorithme doit implémenter la méthode "run".
457 def __init__(self, name):
459 L'initialisation présente permet de fabriquer des variables de stockage
460 disponibles de manière générique dans les algorithmes élémentaires. Ces
461 variables de stockage sont ensuite conservées dans un dictionnaire
462 interne à l'objet, mais auquel on accède par la méthode "get".
464 Les variables prévues sont :
465 - CostFunctionJ : fonction-cout globale, somme des deux parties suivantes
466 - CostFunctionJb : partie ébauche ou background de la fonction-cout
467 - CostFunctionJo : partie observations de la fonction-cout
468 - GradientOfCostFunctionJ : gradient de la fonction-cout globale
469 - GradientOfCostFunctionJb : gradient de la partie ébauche de la fonction-cout
470 - GradientOfCostFunctionJo : gradient de la partie observations de la fonction-cout
471 - CurrentState : état courant lors d'itérations
472 - Analysis : l'analyse Xa
473 - SimulatedObservationAtBackground : l'état observé H(Xb) à l'ébauche
474 - SimulatedObservationAtCurrentState : l'état observé H(X) à l'état courant
475 - SimulatedObservationAtOptimum : l'état observé H(Xa) à l'optimum
476 - Innovation : l'innovation : d = Y - H(X)
477 - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
478 - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
479 - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
480 - MahalanobisConsistency : indicateur de consistance des covariances
481 - OMA : Observation moins Analysis : Y - Xa
482 - OMB : Observation moins Background : Y - Xb
# NOTE(review): the docstring lists "AMB" but the code below registers the
# variable under the key "BMA" — the documentation and code disagree.
483 - AMB : Analysis moins Background : Xa - Xb
484 - APosterioriCovariance : matrice A
485 - APosterioriVariances : variances de la matrice A
486 - APosterioriStandardDeviations : écart-types de la matrice A
487 - APosterioriCorrelations : correlations de la matrice A
488 - Residu : dans le cas des algorithmes de vérification
489 On peut rajouter des variables à stocker dans l'initialisation de
490 l'algorithme élémentaire qui va hériter de cette classe
492 logging.debug("%s Initialisation", str(name))
493 self._m = PlatformInfo.SystemUsage()
495 self._name = str( name )
496 self._parameters = {"StoreSupplementaryCalculations":[]}
497 self.__required_parameters = {}
498 self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
# One Persistence container per storable result; subclasses store into
# these via self.StoredVariables[...].store(...).
500 self.StoredVariables = {}
501 self.StoredVariables["CostFunctionJ"] = Persistence.OneScalar(name = "CostFunctionJ")
502 self.StoredVariables["CostFunctionJb"] = Persistence.OneScalar(name = "CostFunctionJb")
503 self.StoredVariables["CostFunctionJo"] = Persistence.OneScalar(name = "CostFunctionJo")
504 self.StoredVariables["CostFunctionJAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
505 self.StoredVariables["CostFunctionJbAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
506 self.StoredVariables["CostFunctionJoAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
507 self.StoredVariables["GradientOfCostFunctionJ"] = Persistence.OneVector(name = "GradientOfCostFunctionJ")
508 self.StoredVariables["GradientOfCostFunctionJb"] = Persistence.OneVector(name = "GradientOfCostFunctionJb")
509 self.StoredVariables["GradientOfCostFunctionJo"] = Persistence.OneVector(name = "GradientOfCostFunctionJo")
510 self.StoredVariables["CurrentState"] = Persistence.OneVector(name = "CurrentState")
511 self.StoredVariables["Analysis"] = Persistence.OneVector(name = "Analysis")
512 self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
513 self.StoredVariables["CurrentOptimum"] = Persistence.OneVector(name = "CurrentOptimum")
514 self.StoredVariables["SimulatedObservationAtBackground"] = Persistence.OneVector(name = "SimulatedObservationAtBackground")
515 self.StoredVariables["SimulatedObservationAtCurrentState"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
516 self.StoredVariables["SimulatedObservationAtOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
517 self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
518 self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
519 self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
520 self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
521 self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
522 self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
523 self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
524 self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
525 self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
526 self.StoredVariables["APosterioriCovariance"] = Persistence.OneMatrix(name = "APosterioriCovariance")
527 self.StoredVariables["APosterioriVariances"] = Persistence.OneVector(name = "APosterioriVariances")
528 self.StoredVariables["APosterioriStandardDeviations"] = Persistence.OneVector(name = "APosterioriStandardDeviations")
529 self.StoredVariables["APosterioriCorrelations"] = Persistence.OneMatrix(name = "APosterioriCorrelations")
530 self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
531 self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")
533 def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
# Validates declared parameters and the presence of required inputs before
# a subclass "run"; also normalizes Bounds and optimizer verbosity options.
535 logging.debug("%s Lancement", self._name)
536 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
538 # Mise a jour de self._parameters avec Parameters
539 self.__setParameters(Parameters)
541 # Corrections et complements
# Local helper: check one vector argument against the declared
# mandatory/optional input lists (raise if a mandatory one is missing).
542 def __test_vvalue( argument, variable, argname):
544 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
545 raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
546 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
547 logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
549 logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
551 logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
552 __test_vvalue( Xb, "Xb", "Background or initial state" )
553 __test_vvalue( Y, "Y", "Observation" )
# Same check for covariance matrix arguments.
554 def __test_cvalue( argument, variable, argname):
556 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
557 raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
558 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
559 logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
561 logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
563 logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
564 __test_cvalue( R, "R", "Observation" )
565 __test_cvalue( B, "B", "Background" )
566 __test_cvalue( Q, "Q", "Evolution" )
568 if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
569 logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
571 self._parameters["Bounds"] = None
# Verbose logging level drives the scipy.optimize message flags; the
# fallback value 15 presumably mirrors scipy's MSG_ALL bitmask — confirm.
573 if logging.getLogger().level < logging.WARNING:
574 self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
575 if PlatformInfo.has_scipy:
576 import scipy.optimize
577 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
579 self._parameters["optmessages"] = 15
581 self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
582 if PlatformInfo.has_scipy:
583 import scipy.optimize
584 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
586 self._parameters["optmessages"] = 15
590 def _post_run(self,_oH=None):
# Derives variances / standard deviations / correlations from each stored
# a-posteriori covariance matrix, then logs operator call counts.
592 if ("StoreSupplementaryCalculations" in self._parameters) and \
593 "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
594 for _A in self.StoredVariables["APosterioriCovariance"]:
595 if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
596 self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
597 if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
598 self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
599 if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
600 _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
601 _C = numpy.dot(_EI, numpy.dot(_A, _EI))
602 self.StoredVariables["APosterioriCorrelations"].store( _C )
604 logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
605 logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
606 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
607 logging.debug("%s Terminé", self._name)
610 def get(self, key=None):
612 Renvoie l'une des variables stockées identifiée par la clé, ou le
613 dictionnaire de l'ensemble des variables disponibles en l'absence de
614 clé. Ce sont directement les variables sous forme objet qui sont
615 renvoyées, donc les méthodes d'accès à l'objet individuel sont celles
616 des classes de persistance.
619 return self.StoredVariables[key]
621 return self.StoredVariables
623 def __contains__(self, key=None):
624 "D.__contains__(k) -> True if D has a key k, else False"
625 return key in self.StoredVariables
628 "D.keys() -> list of D's keys"
629 if hasattr(self, "StoredVariables"):
630 return self.StoredVariables.keys()
635 "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636 if hasattr(self, "StoredVariables"):
637 return self.StoredVariables.pop(k, d)
642 raise TypeError("pop expected at least 1 arguments, got 0")
643 "If key is not found, d is returned if given, otherwise KeyError is raised"
649 def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
651 Doit implémenter l'opération élémentaire de calcul d'assimilation sous
652 sa forme mathématique la plus naturelle possible.
# Abstract hook: every concrete algorithm subclass must override this.
654 raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
656 def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
658 Permet de définir dans l'algorithme des paramètres requis et leurs
659 caractéristiques par défaut.
662 raise ValueError("A name is mandatory to define a required parameter.")
664 self.__required_parameters[name] = {
666 "typecast" : typecast,
672 logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
674 def getRequiredParameters(self, noDetails=True):
676 Renvoie la liste des noms de paramètres requis ou directement le
677 dictionnaire des paramètres requis.
680 return sorted(self.__required_parameters.keys())
682 return self.__required_parameters
684 def setParameterValue(self, name=None, value=None):
686 Renvoie la valeur d'un paramètre requis de manière contrôlée
# Applies, in order: default substitution, typecast, min/max range check,
# then allowed-values (listval) check — element-wise for sequence types.
688 default = self.__required_parameters[name]["default"]
689 typecast = self.__required_parameters[name]["typecast"]
690 minval = self.__required_parameters[name]["minval"]
691 maxval = self.__required_parameters[name]["maxval"]
692 listval = self.__required_parameters[name]["listval"]
694 if value is None and default is None:
696 elif value is None and default is not None:
697 if typecast is None: __val = default
698 else: __val = typecast( default )
700 if typecast is None: __val = value
701 else: __val = typecast( value )
703 if minval is not None and (numpy.array(__val, float) < minval).any():
704 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
705 if maxval is not None and (numpy.array(__val, float) > maxval).any():
706 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
707 if listval is not None:
708 if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
711 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
712 elif __val not in listval:
713 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
716 def requireInputArguments(self, mandatory=(), optional=()):
718 Permet d'imposer des arguments requises en entrée
720 self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
721 self.__required_inputs["RequiredInputValues"]["optional"] = tuple( optional )
723 def __setParameters(self, fromDico={}):
725 Permet de stocker les paramètres reçus dans le dictionnaire interne.
# fromDico is only read, so the mutable default dict is harmless here.
727 self._parameters.update( fromDico )
728 for k in self.__required_parameters.keys():
729 if k in fromDico.keys():
730 self._parameters[k] = self.setParameterValue(k,fromDico[k])
732 self._parameters[k] = self.setParameterValue(k)
733 logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
736 class Diagnostic(object):
# Abstract base for diagnostics: subclasses implement the private _formula
# (the mathematics) and the public calculate (validation + storage) methods.
738 Classe générale d'interface de type diagnostic
740 Ce template s'utilise de la manière suivante : il sert de classe "patron" en
741 même temps que l'une des classes de persistance, comme "OneScalar" par
744 Une classe élémentaire de diagnostic doit implémenter ses deux méthodes, la
745 méthode "_formula" pour écrire explicitement et proprement la formule pour
746 l'écriture mathématique du calcul du diagnostic (méthode interne non
747 publique), et "calculate" pour activer la précédente tout en ayant vérifié
748 et préparé les données, et pour stocker les résultats à chaque pas (méthode
749 externe d'activation).
# NOTE(review): mutable default `parameters = {}` is a known Python pitfall,
# but it is defensively copied by dict() below, so instances do not share it.
751 def __init__(self, name = "", parameters = {}):
753 self.name = str(name)
754 self.parameters = dict( parameters )
756 def _formula(self, *args):
758 Doit implémenter l'opération élémentaire de diagnostic sous sa forme
759 mathématique la plus naturelle possible.
761 raise NotImplementedError("Diagnostic mathematical formula has not been implemented!")
763 def calculate(self, *args):
765 Active la formule de calcul avec les arguments correctement rangés
767 raise NotImplementedError("Diagnostic activation method has not been implemented!")
769 # ==============================================================================
770 class DiagnosticAndParameters(object):
772 Classe générale d'interface d'interface de type diagnostic
775 name = "GenericDiagnostic",
782 asExistingDiags = None,
786 self.__name = str(name)
792 self.__E = tuple(asExistingDiags)
793 self.__TheDiag = None
795 if asScript is not None:
796 __Diag = ImportFromScript(asScript).getvalue( "Diagnostic" )
797 __Iden = ImportFromScript(asScript).getvalue( "Identifier" )
798 __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
799 __Unit = ImportFromScript(asScript).getvalue( "Unit" )
800 __Base = ImportFromScript(asScript).getvalue( "BaseType" )
802 __Diag = asDiagnostic
803 __Iden = asIdentifier
808 if __Diag is not None:
809 self.__D = str(__Diag)
810 if __Iden is not None:
811 self.__I = str(__Iden)
813 self.__I = str(__Diag)
814 if __Dict is not None:
815 self.__P.update( dict(__Dict) )
816 if __Unit is None or __Unit == "None":
818 if __Base is None or __Base == "None":
821 self.__setDiagnostic( self.__D, self.__I, self.__U, self.__B, self.__P, self.__E )
825 return self.__TheDiag
827 def __setDiagnostic(self, __choice = None, __name = "", __unit = "", __basetype = None, __parameters = {}, __existings = () ):
829 Permet de sélectionner un diagnostic a effectuer
832 raise ValueError("Error: diagnostic choice has to be given")
833 __daDirectory = "daDiagnostics"
835 # Recherche explicitement le fichier complet
836 # ------------------------------------------
838 for directory in sys.path:
839 if os.path.isfile(os.path.join(directory, __daDirectory, str(__choice)+'.py')):
840 __module_path = os.path.abspath(os.path.join(directory, __daDirectory))
841 if __module_path is None:
842 raise ImportError("No diagnostic module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(__choice, __daDirectory, sys.path))
844 # Importe le fichier complet comme un module
845 # ------------------------------------------
847 __sys_path_tmp = sys.path ; sys.path.insert(0,__module_path)
848 self.__diagnosticFile = __import__(str(__choice), globals(), locals(), [])
849 sys.path = __sys_path_tmp ; del __sys_path_tmp
850 except ImportError as e:
851 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(__choice,e))
853 # Instancie un objet du type élémentaire du fichier
854 # -------------------------------------------------
855 if __name in __existings:
856 raise ValueError("A default input with the same name \"%s\" already exists."%str(__name))
858 self.__TheDiag = self.__diagnosticFile.ElementaryDiagnostic(
861 basetype = __basetype,
862 parameters = __parameters )
865 # ==============================================================================
class AlgorithmAndParameters(object):
    """
    General action interface class for the algorithm and its parameters.

    NOTE(review): this block was reconstructed from a garbled extract
    (line numbers baked into the text, indentation and some structural
    lines lost). Reconstructed lines are marked below — confirm against
    the reference sources.
    """
    def __init__(self,
                 name        = "GenericAlgorithm",
                 asAlgorithm = None,   # NOTE(review): parameter list partially reconstructed
                 asDict      = None,
                 asScript    = None,
                ):
        """
        Store the algorithm choice and its parameters, then load the
        corresponding algorithm module.
        """
        self.__name = str(name)
        self.__A    = None   # NOTE(review): default initialisations reconstructed
        self.__P    = {}
        #
        self.__algorithm     = {}
        self.__algorithmFile = None
        self.__algorithmName = None
        #
        self.updateParameters( asDict, asScript )
        #
        if asScript is not None:
            __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
        else:
            __Algo = asAlgorithm   # NOTE(review): else-branch reconstructed
        #
        if __Algo is not None:
            self.__A = str(__Algo)
            self.__P.update( {"Algorithm":self.__A} )
        #
        self.__setAlgorithm( self.__A )

    def updateParameters(self,
                 asDict   = None,
                 asScript = None,
                ):
        "Update the parameters"
        if asScript is not None:
            __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
        else:
            __Dict = asDict   # NOTE(review): else-branch reconstructed
        #
        if __Dict is not None:
            self.__P.update( dict(__Dict) )

    def executePythonScheme(self, asDictAO = None):
        "Run the data assimilation calculation in pure Python"
        Operator.CM.clearCache()
        #
        if not isinstance(asDictAO, dict):
            raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
        # Unwrap each input through getO() when it is a wrapper object
        if hasattr(asDictAO["Background"],"getO"):          self.__Xb = asDictAO["Background"].getO()
        elif hasattr(asDictAO["CheckingPoint"],"getO"):     self.__Xb = asDictAO["CheckingPoint"].getO()
        else:                                               self.__Xb = None
        if hasattr(asDictAO["Observation"],"getO"):         self.__Y  = asDictAO["Observation"].getO()
        else:                                               self.__Y  = asDictAO["Observation"]
        if hasattr(asDictAO["ControlInput"],"getO"):        self.__U  = asDictAO["ControlInput"].getO()
        else:                                               self.__U  = asDictAO["ControlInput"]
        if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
        else:                                               self.__HO = asDictAO["ObservationOperator"]
        if hasattr(asDictAO["EvolutionModel"],"getO"):      self.__EM = asDictAO["EvolutionModel"].getO()
        else:                                               self.__EM = asDictAO["EvolutionModel"]
        if hasattr(asDictAO["ControlModel"],"getO"):        self.__CM = asDictAO["ControlModel"].getO()
        else:                                               self.__CM = asDictAO["ControlModel"]
        self.__B = asDictAO["BackgroundError"]
        self.__R = asDictAO["ObservationError"]
        self.__Q = asDictAO["EvolutionError"]
        #
        self.__shape_validate()
        #
        self.__algorithm.run(
            Xb         = self.__Xb,   # NOTE(review): keyword list reconstructed except Parameters
            Y          = self.__Y,
            U          = self.__U,
            HO         = self.__HO,
            EM         = self.__EM,
            CM         = self.__CM,
            R          = self.__R,
            B          = self.__B,
            Q          = self.__Q,
            Parameters = self.__P,
            )

    def executeYACSScheme(self, FileName=None):
        "Run the data assimilation calculation through a YACS scheme"
        if FileName is None or not os.path.exists(FileName):
            raise ValueError("an existing DIC Python file name has to be given for YACS execution.\n")
        if not PlatformInfo.has_salome or not PlatformInfo.has_yacs or not PlatformInfo.has_adao:
            raise ImportError("Unable to get SALOME, YACS or ADAO environnement variables. Please launch SALOME before executing.\n")
        #
        # Convert the DIC study file into an executable YACS XML scheme
        __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
        __inputFile  = os.path.abspath(FileName)
        __outputFile = __inputFile[:__inputFile.rfind(".")] + '.xml'
        #
        __args = ["python", __converterExe, __inputFile, __outputFile]
        import subprocess   # NOTE(review): local import reconstructed
        __p = subprocess.Popen(__args)
        (__stdoutdata, __stderrdata) = __p.communicate()
        if not os.path.exists(__outputFile):
            __msg  = "An error occured during the execution of the ADAO YACS Schema\n"
            __msg += "Creator applied on the input file:\n"
            __msg += " %s\n"%__inputFile
            __msg += "If SALOME GUI is launched by command line, see errors\n"
            __msg += "details in your terminal.\n"
            raise ValueError(__msg)
        #
        # Load and execute the generated scheme in the SALOME/YACS runtime
        import SALOMERuntime, pilot, loader   # NOTE(review): local imports reconstructed
        SALOMERuntime.RuntimeSALOME_setRuntime()
        r = pilot.getRuntime()
        xmlLoader = loader.YACSLoader()
        xmlLoader.registerProcCataLoader()
        try:
            catalogAd = r.loadCatalog("proc", __outputFile)
            r.addCatalog(catalogAd)
        except Exception:
            pass   # NOTE(review): best-effort catalog loading reconstructed
        #
        try:
            p = xmlLoader.load(__outputFile)
        except IOError as ex:
            print("IO exception: %s"%(ex,))
        #
        logger = p.getLogger("parser")
        if not logger.isEmpty():
            print("The imported file has errors :")
            print(logger.getStr())
        if not p.isValid():   # NOTE(review): validity guard reconstructed
            print("Le schéma n'est pas valide et ne peut pas être exécuté")
            print(p.getErrorReport())
        #
        info = pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
        p.checkConsistency(info)
        if info.areWarningsOrErrors():
            print("Le schéma n'est pas cohérent et ne peut pas être exécuté")
            print(info.getGlobalRepr())
        #
        e = pilot.ExecutorSwig()
        e.RunW(p)   # NOTE(review): execution call reconstructed
        if p.getEffectiveState() != pilot.DONE:
            print(p.getErrorReport())
            raise ValueError("execution error of YACS scheme")

    def get(self, key = None):
        "Check the existence of a variable or parameter key and return its value"
        if key in self.__algorithm:
            return self.__algorithm.get( key )
        elif key in self.__P:
            return self.__P[key]
        else:
            return self.__P   # NOTE(review): fallback branch reconstructed

    def pop(self, k, d):
        "Required for pickling"
        return self.__algorithm.pop(k, d)

    def getAlgorithmRequiredParameters(self, noDetails=True):
        "Return the list of parameters required by the algorithm"
        return self.__algorithm.getRequiredParameters(noDetails)

    def setObserver(self, __V, __O, __I, __S):
        "Set an observer __O with info __I and scheduler __S on variable __V"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be build before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
        else:
            self.__algorithm.StoredVariables[ __V ].setDataObserver(
                Scheduler      = __S,   # NOTE(review): leading keywords reconstructed
                HookFunction   = __O,
                HookParameters = __I,
                )

    def removeObserver(self, __V, __O, __A = False):
        "Remove the observer __O from variable __V (all of them if __A)"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be removed before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
        else:
            return self.__algorithm.StoredVariables[ __V ].removeDataObserver(
                HookFunction = __O,   # NOTE(review): keyword list reconstructed
                AllObservers = __A,
                )

    def hasObserver(self, __V):
        "Check whether an observer is set on the variable named __V"
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            return False   # NOTE(review): return reconstructed
        if __V not in self.__algorithm:
            return False   # NOTE(review): return reconstructed
        return self.__algorithm.StoredVariables[ __V ].hasDataObserver()

    def keys(self):
        "D.keys() -> list of D's keys"
        return list(self.__algorithm.keys()) + list(self.__P.keys())

    def __contains__(self, key=None):
        "D.__contains__(k) -> True if D has a key k, else False"
        return key in self.__algorithm or key in self.__P

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__A)+", "+repr(self.__P)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__A)+", "+str(self.__P)

    def __setAlgorithm(self, choice = None ):
        """
        Select the algorithm to use to carry out the assimilation study.
        The argument is a character string referring to the name of a file
        contained in "../daAlgorithms" and performing the assimilation
        operation on the fixed arguments.
        """
        if choice is None:
            raise ValueError("Error: algorithm choice has to be given")
        if self.__algorithmName is not None:
            raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
        daDirectory = "daAlgorithms"
        #
        # Explicitly search for the full module file
        # ------------------------------------------
        module_path = None
        for directory in sys.path:
            if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
                module_path = os.path.abspath(os.path.join(directory, daDirectory))
        if module_path is None:
            raise ImportError("No algorithm module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(choice, daDirectory, sys.path))
        #
        # Import the full file as a module
        # --------------------------------
        try:
            sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
            self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
            self.__algorithmName = str(choice)
            sys.path = sys_path_tmp ; del sys_path_tmp
        except ImportError as e:
            raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
        #
        # Instantiate an object of the file's elementary type
        # ---------------------------------------------------
        self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()
        return 0   # NOTE(review): trailing return reconstructed

    def __shape_validate(self):
        """
        Validate the mutual consistency of the sizes of the variables and
        of the matrices, when they are present.
        """
        if self.__Xb is None:                      __Xb_shape = (0,)
        elif hasattr(self.__Xb,"size"):            __Xb_shape = (self.__Xb.size,)
        elif hasattr(self.__Xb,"shape"):
            if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
            else:                                  __Xb_shape = self.__Xb.shape()
        else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
        #
        if self.__Y is None:                       __Y_shape = (0,)
        elif hasattr(self.__Y,"size"):             __Y_shape = (self.__Y.size,)
        elif hasattr(self.__Y,"shape"):
            if isinstance(self.__Y.shape, tuple):  __Y_shape = self.__Y.shape
            else:                                  __Y_shape = self.__Y.shape()
        else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
        #
        if self.__U is None:                       __U_shape = (0,)
        elif hasattr(self.__U,"size"):             __U_shape = (self.__U.size,)
        elif hasattr(self.__U,"shape"):
            if isinstance(self.__U.shape, tuple):  __U_shape = self.__U.shape
            else:                                  __U_shape = self.__U.shape()
        else: raise TypeError("The control (U) has no attribute of shape: problem !")
        #
        if self.__B is None:                       __B_shape = (0,0)
        elif hasattr(self.__B,"shape"):
            if isinstance(self.__B.shape, tuple):  __B_shape = self.__B.shape
            else:                                  __B_shape = self.__B.shape()
        else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
        #
        if self.__R is None:                       __R_shape = (0,0)
        elif hasattr(self.__R,"shape"):
            if isinstance(self.__R.shape, tuple):  __R_shape = self.__R.shape
            else:                                  __R_shape = self.__R.shape()
        else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
        #
        if self.__Q is None:                       __Q_shape = (0,0)
        elif hasattr(self.__Q,"shape"):
            if isinstance(self.__Q.shape, tuple):  __Q_shape = self.__Q.shape
            else:                                  __Q_shape = self.__Q.shape()
        else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
        #
        if len(self.__HO) == 0:                              __HO_shape = (0,0)
        elif isinstance(self.__HO, dict):                    __HO_shape = (0,0)
        elif hasattr(self.__HO["Direct"],"shape"):
            if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
            else:                                            __HO_shape = self.__HO["Direct"].shape()
        else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
        #
        if len(self.__EM) == 0:                              __EM_shape = (0,0)
        elif isinstance(self.__EM, dict):                    __EM_shape = (0,0)
        elif hasattr(self.__EM["Direct"],"shape"):
            if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
            else:                                            __EM_shape = self.__EM["Direct"].shape()
        else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
        #
        if len(self.__CM) == 0:                              __CM_shape = (0,0)
        elif isinstance(self.__CM, dict):                    __CM_shape = (0,0)
        elif hasattr(self.__CM["Direct"],"shape"):
            if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
            else:                                            __CM_shape = self.__CM["Direct"].shape()
        else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
        #
        # Check the consistency conditions
        # --------------------------------
        if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
            raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
        if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
            raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
        #
        if not( min(__B_shape) == max(__B_shape) ):
            raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
        if not( min(__R_shape) == max(__R_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
        if not( min(__Q_shape) == max(__Q_shape) ):
            raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
        if not( min(__EM_shape) == max(__EM_shape) ):
            raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
        #
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
        #
        if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
            if self.__algorithmName in ["EnsembleBlue",]:
                # Special case: the background is reinterpreted as an ensemble
                asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
                self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
                for member in asPersistentVector:
                    # numpy.float was a removed alias of the builtin float
                    self.__Xb.store( numpy.matrix( numpy.ravel(member), float ).T )
                __Xb_shape = min(__B_shape)
            else:   # NOTE(review): else-branch reconstructed
                raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
        #
        if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
        #
        if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
        #
        if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
            raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
        #
        if ("Bounds" in self.__P) \
            and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
            and (len(self.__P["Bounds"]) != max(__Xb_shape)):
            raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
                %(len(self.__P["Bounds"]),max(__Xb_shape)))
1240 # ==============================================================================
class DataObserver(object):
    """
    General interface class of observer type.

    NOTE(review): this block was reconstructed from a garbled extract
    (line numbers baked into the text, indentation and some structural
    lines lost). Reconstructed lines are marked below — confirm against
    the reference sources.
    """
    def __init__(self,
                 name        = "GenericObserver",
                 onVariable  = None,   # NOTE(review): parameter list partially reconstructed
                 asTemplate  = None,
                 asString    = None,
                 asScript    = None,
                 asObsObject = None,
                 withInfo    = None,
                 scheduledBy = None,
                 withAlgo    = None,
                ):
        """
        Build the observer function and register it on the requested
        variable(s) of the given algorithm object.
        """
        self.__name = str(name)
        self.__V    = None   # NOTE(review): default initialisations reconstructed
        self.__O    = None
        self.__I    = None
        #
        if onVariable is None:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
        elif type(onVariable) in (tuple, list):
            self.__V = tuple(map( str, onVariable ))
            if withInfo is None:
                self.__I = self.__V   # NOTE(review): branch reconstructed
            else:
                self.__I = (str(withInfo),)*len(self.__V)
        elif isinstance(onVariable, str):
            self.__V = (onVariable,)
            if withInfo is None:
                self.__I = (onVariable,)
            else:
                self.__I = (str(withInfo),)
        else:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
        #
        if asString is not None:
            __FunctionText = asString
        elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
            __FunctionText = Templates.ObserverTemplates[asTemplate]
        elif asScript is not None:
            __FunctionText = ImportFromScript(asScript).getstring()
        else:
            __FunctionText = ""   # NOTE(review): fallback reconstructed
        __Function = ObserverF(__FunctionText)
        #
        if asObsObject is not None:
            self.__O = asObsObject
        else:
            self.__O = __Function.getfunc()
        #
        for k in range(len(self.__V)):
            ename = self.__V[k]   # NOTE(review): pairing of names/infos reconstructed
            einfo = self.__I[k]
            if ename not in withAlgo:
                raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
            else:
                withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)+"\n"+repr(self.__O)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__V)+"\n"+str(self.__O)
1311 # ==============================================================================
class State(object):
    """
    General interface class of state type (a single vector, or a series of
    vectors held in a Persistence object).
    """
    def __init__(self,
                 name               = "GenericVector",
                 asVector           = None,   # NOTE(review): parameter partially reconstructed
                 asPersistentVector = None,
                 asScript           = None,
                 scheduledBy        = None,
                 toBeChecked        = False,
                ):
        """
        Define a vector:
        - asVector: data input, as anything compatible with the numpy.matrix
          constructor, or "True" for input through a script.
        - asPersistentVector: data input, as a series of vectors compatible
          with the numpy.matrix constructor, or as a Persistence object, or
          "True" for input through a script.
        - asScript: if a valid script is given containing a variable named
          "name", the variable is of kind "asVector" (default) or
          "asPersistentVector" depending on which one is set.
        """
        self.__name  = str(name)
        self.__check = bool(toBeChecked)
        #
        self.__V         = None   # NOTE(review): default initialisations reconstructed
        self.__T         = None
        self.__is_vector = False
        self.__is_series = False
        #
        if asScript is not None:
            __Vector, __Series = None, None
            if asPersistentVector:
                __Series = ImportFromScript(asScript).getvalue( self.__name )
            else:
                __Vector = ImportFromScript(asScript).getvalue( self.__name )
        else:
            __Vector, __Series = asVector, asPersistentVector
        #
        if __Vector is not None:
            self.__is_vector = True
            # numpy.float was a deprecated alias of the builtin float (removed
            # in NumPy >= 1.24); the builtin gives an identical result.
            self.__V         = numpy.matrix( numpy.asmatrix(__Vector).A1, float ).T
            self.shape       = self.__V.shape
            self.size        = self.__V.size
        elif __Series is not None:
            self.__is_series = True
            if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix)):
                self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
                for member in __Series:
                    self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, float ).T )
                import sys ; sys.stdout.flush()
            else:
                self.__V = __Series   # NOTE(review): else-branch reconstructed
            if isinstance(self.__V.shape, (tuple, list)):
                self.shape = self.__V.shape
            else:
                self.shape = self.__V.shape()
            if len(self.shape) == 1:
                self.shape = (self.shape[0],1)
            self.size = self.shape[0] * self.shape[1]
        else:
            raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
        #
        if scheduledBy is not None:
            self.__T = scheduledBy

    def getO(self, withScheduler=False):
        "Return the stored object, optionally together with its scheduler"
        if withScheduler:
            return self.__V, self.__T
        elif self.__T is None:
            return self.__V
        else:
            return self.__V   # NOTE(review): else-branch reconstructed

    def isvector(self):
        "Internal kind check"
        return self.__is_vector

    def isseries(self):
        "Internal kind check"
        return self.__is_series

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.__V)
1404 # ==============================================================================
1405 class Covariance(object):
1407 Classe générale d'interface de type covariance
1410 name = "GenericCovariance",
1411 asCovariance = None,
1412 asEyeByScalar = None,
1413 asEyeByVector = None,
1416 toBeChecked = False,
1419 Permet de définir une covariance :
1420 - asCovariance : entrée des données, comme une matrice compatible avec
1421 le constructeur de numpy.matrix
1422 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1423 multiplicatif d'une matrice de corrélation identité, aucune matrice
1424 n'étant donc explicitement à donner
1425 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1426 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1427 n'étant donc explicitement à donner
1428 - asCovObject : entrée des données comme un objet python, qui a les
1429 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1430 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1431 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1432 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1433 pleine doit être vérifié
1435 self.__name = str(name)
1436 self.__check = bool(toBeChecked)
1439 self.__is_scalar = False
1440 self.__is_vector = False
1441 self.__is_matrix = False
1442 self.__is_object = False
1444 if asScript is not None:
1445 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1447 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1449 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1451 __Object = ImportFromScript(asScript).getvalue( self.__name )
1453 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1455 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
1457 if __Scalar is not None:
1458 if numpy.matrix(__Scalar).size != 1:
1459 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1460 self.__is_scalar = True
1461 self.__C = numpy.abs( float(__Scalar) )
1464 elif __Vector is not None:
1465 self.__is_vector = True
1466 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1467 self.shape = (self.__C.size,self.__C.size)
1468 self.size = self.__C.size**2
1469 elif __Matrix is not None:
1470 self.__is_matrix = True
1471 self.__C = numpy.matrix( __Matrix, float )
1472 self.shape = self.__C.shape
1473 self.size = self.__C.size
1474 elif __Object is not None:
1475 self.__is_object = True
1477 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1478 if not hasattr(self.__C,at):
1479 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1480 if hasattr(self.__C,"shape"):
1481 self.shape = self.__C.shape
1484 if hasattr(self.__C,"size"):
1485 self.size = self.__C.size
1490 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
1494 def __validate(self):
1496 if self.__C is None:
1497 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1498 if self.ismatrix() and min(self.shape) != max(self.shape):
1499 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1500 if self.isobject() and min(self.shape) != max(self.shape):
1501 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1502 if self.isscalar() and self.__C <= 0:
1503 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1504 if self.isvector() and (self.__C <= 0).any():
1505 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1506 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1508 L = numpy.linalg.cholesky( self.__C )
1510 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1511 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1513 L = self.__C.cholesky()
1515 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1518 "Vérification du type interne"
1519 return self.__is_scalar
1522 "Vérification du type interne"
1523 return self.__is_vector
1526 "Vérification du type interne"
1527 return self.__is_matrix
1530 "Vérification du type interne"
1531 return self.__is_object
1536 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1537 elif self.isvector():
1538 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1539 elif self.isscalar():
1540 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1541 elif self.isobject():
1542 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1544 return None # Indispensable
1549 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1550 elif self.isvector():
1551 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1552 elif self.isscalar():
1553 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1554 elif self.isobject():
1555 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
1558 "Décomposition de Cholesky"
1560 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1561 elif self.isvector():
1562 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1563 elif self.isscalar():
1564 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1565 elif self.isobject() and hasattr(self.__C,"cholesky"):
1566 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1568 def choleskyI(self):
1569 "Inversion de la décomposition de Cholesky"
1571 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1572 elif self.isvector():
1573 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1574 elif self.isscalar():
1575 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1576 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1577 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
1579 def diag(self, msize=None):
1580 "Diagonale de la matrice"
1582 return numpy.diag(self.__C)
1583 elif self.isvector():
1585 elif self.isscalar():
1587 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1589 return self.__C * numpy.ones(int(msize))
1590 elif self.isobject():
1591 return self.__C.diag()
1593 def asfullmatrix(self, msize=None):
1597 elif self.isvector():
1598 return numpy.matrix( numpy.diag(self.__C), float )
1599 elif self.isscalar():
1601 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1603 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1604 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1605 return self.__C.asfullmatrix()
1607 def trace(self, msize=None):
1608 "Trace de la matrice"
1610 return numpy.trace(self.__C)
1611 elif self.isvector():
1612 return float(numpy.sum(self.__C))
1613 elif self.isscalar():
1615 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1617 return self.__C * int(msize)
1618 elif self.isobject():
1619 return self.__C.trace()
1625 "x.__repr__() <==> repr(x)"
1626 return repr(self.__C)
1629 "x.__str__() <==> str(x)"
1630 return str(self.__C)
1632 def __add__(self, other):
1633 "x.__add__(y) <==> x+y"
1634 if self.ismatrix() or self.isobject():
1635 return self.__C + numpy.asmatrix(other)
1636 elif self.isvector() or self.isscalar():
1637 _A = numpy.asarray(other)
1638 _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
1639 return numpy.asmatrix(_A)
1641 def __radd__(self, other):
1642 "x.__radd__(y) <==> y+x"
1643 raise NotImplementedError("%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other)))
1645 def __sub__(self, other):
1646 "x.__sub__(y) <==> x-y"
1647 if self.ismatrix() or self.isobject():
1648 return self.__C - numpy.asmatrix(other)
1649 elif self.isvector() or self.isscalar():
1650 _A = numpy.asarray(other)
1651 _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
1652 return numpy.asmatrix(_A)
1654 def __rsub__(self, other):
1655 "x.__rsub__(y) <==> y-x"
1656 raise NotImplementedError("%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other)))
1659 "x.__neg__() <==> -x"
    def __mul__(self, other):
        "x.__mul__(y) <==> x*y"
        # Left multiplication of the covariance by a vector or a matrix.
        # Dispatch depends on the internal storage: full matrix,
        # diagonal-as-vector, scalar multiple of identity, or opaque object.
        if self.ismatrix() and isinstance(other,numpy.matrix):
            return self.__C * other
        elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
            if numpy.ravel(other).size == self.shape[1]: # Vector
                return self.__C * numpy.asmatrix(numpy.ravel(other)).T
            elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrix
                return self.__C * numpy.asmatrix(other)
            # Fall-through: incompatible dimensions.
            raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
        elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
            # Diagonal storage: the element-wise product emulates diag(C) @ other.
            if numpy.ravel(other).size == self.shape[1]: # Vector
                return numpy.asmatrix(self.__C * numpy.ravel(other)).T
            elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrix
                return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
            raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
        elif self.isscalar() and isinstance(other,numpy.matrix):
            return self.__C * other
        elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
            # Column/row vectors are normalized to a column before scaling.
            if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
                return self.__C * numpy.asmatrix(numpy.ravel(other)).T
            return self.__C * numpy.asmatrix(other)
        elif self.isobject():
            # Opaque covariance object: delegate entirely.
            return self.__C.__mul__(other)
        raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
    def __rmul__(self, other):
        "x.__rmul__(y) <==> y*x"
        # Right multiplication (y * C), mirroring __mul__ above.
        if self.ismatrix() and isinstance(other,numpy.matrix):
            return other * self.__C
        elif self.isvector() and isinstance(other,numpy.matrix):
            if numpy.ravel(other).size == self.shape[0]: # Vector
                return numpy.asmatrix(numpy.ravel(other) * self.__C)
            elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrix
                return numpy.asmatrix(numpy.array(other) * self.__C)
            # Fall-through: incompatible dimensions.
            raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
        elif self.isscalar() and isinstance(other,numpy.matrix):
            return other * self.__C
        elif self.isobject():
            # Opaque covariance object: delegate entirely.
            return self.__C.__rmul__(other)
        raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
1711 "x.__len__() <==> len(x)"
1712 return self.shape[0]
1714 # ==============================================================================
class ObserverF(object):
    """
    Creation of an observer function from its source text.

    NOTE(review): this block was recovered from a lossy listing — some lines
    of the original class appear to have been dropped; see the notes below.
    """
    def __init__(self, corps=""):
        # Source text of the observer body; presumably executed later when
        # the observer is triggered — TODO confirm against upstream.
        self.__corps = corps
    def func(self,var,info):
        "Observation function"
        # NOTE(review): the body (presumably an exec of self.__corps, which
        # would run arbitrary code) appears to have been lost in extraction —
        # restore and review before use.
        # NOTE(review): a method header (presumably `def getfunc(self):`
        # returning self.func) appears to have been lost before the
        # following docstring line.
        "Restitution of the function pointer from the object"
1728 # ==============================================================================
class CaseLogger(object):
    """
    Conservation of the commands used to build a case.

    Each registered command is stored as a (command, keys, locals, pre,
    switchoff) tuple, so the case can later be dumped to, or reloaded from,
    one of the registered viewer/loader formats.
    """
    def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
        self.__name      = str(__name)
        self.__objname   = str(__objname)
        self.__logSerie  = []
        self.__switchoff = False
        # Build two INDEPENDENT dictionaries: the previous code aliased
        # self.__viewers and self.__loaders to the SAME dict object, so
        # extending one through __addViewers silently extended the other.
        self.__viewers = {"TUI":_TUIViewer, "DIC":_DICViewer}
        self.__loaders = {"TUI":_TUIViewer, "DIC":_DICViewer}
        if __addViewers is not None:
            self.__viewers.update(dict(__addViewers))
        if __addLoaders is not None:
            self.__loaders.update(dict(__addLoaders))
    def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Record an individual command"
        if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
            if "self" in __keys: __keys.remove("self")
            self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
            if __switchoff:
                # Freeze the logger once a switch-off command has been stored
                # (guard lines reconstructed from a lossy listing — verify).
                self.__switchoff = True
        if not __switchoff:
            self.__switchoff = False
    def dump(self, __filename=None, __format="TUI"):
        "Normalized restitution of the commands (through a *GenericCaseViewer)"
        if __format not in self.__viewers:
            raise ValueError("Dumping as \"%s\" is not available"%__format)
        __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
        return __formater.dump(__filename)
    def load(self, __filename=None, __format="TUI"):
        "Normalized loading of the commands"
        if __format not in self.__loaders:
            raise ValueError("Loading as \"%s\" is not available"%__format)
        __formater = self.__loaders[__format]()
        return __formater.load(__filename)
1770 # ==============================================================================
class GenericCaseViewer(object):
    """
    Management of the commands for building a case view (base class).

    Concrete viewers (_TUIViewer, _DICViewer, ...) override _append,
    _extract and _interpret; this base class factors the line buffer and
    the dump/load/exec plumbing.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        self._name         = str(__name)
        self._objname      = str(__objname)
        self._lineSerie    = []
        self._switchoff    = False
        self._numobservers = 2
        self._content      = __content
        # Template inserted in generated cases when a required variable
        # (usually a callable) cannot be serialized.
        self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
    def _append(self, *__args, **__kwargs):
        "Transform an individual command into a record (to be overridden)"
        raise NotImplementedError()
    def _extract(self, __content=""):
        "Transform a record into an individual command (to be overridden)"
        raise NotImplementedError()
    def _interpret(self, __content=""):
        # Signature fixed: execCase calls self._interpret(self._content),
        # which the previous zero-argument base signature could not accept.
        "Interpret a command (to be overridden)"
        raise NotImplementedError()
    def _finalize(self):
        "Record the final part (default: nothing to add)"
        pass
    def _addLine(self, line=""):
        "Append an individual record to the line serie"
        self._lineSerie.append(line)
    def dump(self, __filename=None):
        "Normalized restitution of the commands"
        self._finalize()
        __text  = "\n".join(self._lineSerie)
        __text += "\n"
        if __filename is not None:
            __file = os.path.abspath(__filename)
            # Context manager guarantees the handle is closed even on error
            # (the previous code left the file object unclosed).
            with open(__file, "w") as fid:
                fid.write(__text)
        return __text
    def load(self, __filename=None):
        "Normalized loading of the commands"
        if os.path.exists(__filename):
            with open(__filename, 'r') as fid:
                self._content = fid.read()
        __commands = self._extract(self._content)
        return __commands
    def execCase(self, __filename=None):
        "Normalized execution of the commands"
        if os.path.exists(__filename):
            with open(__filename, 'r') as fid:
                self._content = fid.read()
        __retcode = self._interpret(self._content)
        return __retcode
class _TUIViewer(GenericCaseViewer):
    """
    Building of the commands for creating a TUI (textual Python API) case.

    NOTE(review): this block was recovered from a lossy listing; lines
    flagged "(reconstructed)" were re-inserted where the structure requires
    them — verify against the upstream source.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("#\n# Python script for ADAO TUI\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("import adaoBuilder")
        self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
        if self._content is not None:
            # Replay an already-recorded command serie through _append
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform an individual command into a recorded line"
        if __command is not None and __keys is not None and __local is not None:
            __text  = "" # (reconstructed — initialization line lost)
            if __pre is not None:
                __text += "%s = "%__pre
            __text += "%s.%s( "%(self._objname,str(__command))
            if "self" in __keys: __keys.remove("self")
            if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
            for k in __keys: # (reconstructed — loop header lost)
                __v = __local[k] # (reconstructed)
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v,Persistence.Persistence): __v = __v.values()
                # Callables cannot be serialized: prepend the "missing
                # variable" guard template instead.
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v,dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Widen numpy print options so repr(__v) emits full arrays
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "%s=%s, "%(k,repr(__v))
                # Restore default numpy print options
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # NOTE(review): lines closing the generated call (presumably
            # stripping the trailing ", " and appending " )") appear to
            # have been lost here.
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transform a recorded text back into individual commands"
        __commands = [] # (reconstructed — initialization line lost)
        __content = __content.replace("\r\n","\n")
        for line in __content.split("\n"):
            if "adaoBuilder" in line and "=" in line:
                # The case object name is recovered from its creation line
                self._objname = line.split("=")[0].strip()
            # NOTE(review): intermediate filtering lines appear to have
            # been lost here.
            if self._objname+".set" in line:
                __commands.append( line.replace(self._objname+".","",1) )
        return __commands # (reconstructed — return line lost)
    def _interpret(self, __content=""):
        "Interpret (execute) the recorded commands"
        __content = __content.replace("\r\n","\n")
        # NOTE(review): the execution lines (presumably an exec of
        # __content — arbitrary code execution, review carefully) appear
        # to have been lost in extraction.
class _DICViewer(GenericCaseViewer):
    """
    Building of the commands for creating a DIC case (the dictionary-based
    "study_config" input of the ADAO-to-YACS converter).

    NOTE(review): this block was recovered from a lossy listing; lines
    flagged "(reconstructed)" were re-inserted where the structure requires
    them — verify against the upstream source.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Input for ADAO converter to YACS\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("study_config = {}")
        self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
        self._addLine("study_config['Name'] = '%s'"%self._name)
        self._addLine("observers = {}")
        self._addLine("study_config['Observers'] = observers")
        self._addLine("inputvariables_config = {}")
        self._addLine("inputvariables_config['Order'] =['adao_default']")
        self._addLine("inputvariables_config['adao_default'] = -1")
        self._addLine("study_config['InputVariables'] = inputvariables_config")
        self._addLine("outputvariables_config = {}")
        self._addLine("outputvariables_config['Order'] = ['adao_default']")
        self._addLine("outputvariables_config['adao_default'] = -1")
        self._addLine("study_config['OutputVariables'] = outputvariables_config")
        if __content is not None:
            # Replay an already-recorded command serie through _append
            for command in __content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform an individual command into a record"
        # Normalize the command name: "set" stores under the Concept name,
        # "setXxx" stores under "Xxx".
        if __command == "set": __command = __local["Concept"]
        else: __command = __command.replace("set", "", 1)
        __text = None # (reconstructed — initialization line lost)
        if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get'):
            pass # (reconstructed — branch body lost; these commands add nothing)
        elif __command in ['Debug', 'setDebug']:
            __text  = "#\nstudy_config['Debug'] = '1'"
        elif __command in ['NoDebug', 'setNoDebug']:
            __text  = "#\nstudy_config['Debug'] = '0'"
        elif __command in ['Observer', 'setObserver']:
            __obs   = __local['Variable']
            self._numobservers += 1
            __text  = "#\n" # (reconstructed — initialization line lost)
            __text += "observers['%s'] = {}\n"%__obs
            if __local['String'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
            if __local['Script'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
                __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
            if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
            if __local['Info'] is not None:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
            else: # (reconstructed — 'else' line lost)
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
            __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
        elif __local is not None: # __keys is not None and
            # Widen numpy print options so repr() serializes full arrays
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            __text  = "#\n" # (reconstructed — initialization line lost)
            __text += "%s_config = {}\n"%__command
            if 'self' in __local: __local.pop('self')
            # Drop the None-valued entries before serialization
            __to_be_removed = []
            for __k,__v in __local.items():
                if __v is None: __to_be_removed.append(__k)
            for __k in __to_be_removed:
                __local.pop(__k) # (reconstructed — loop body lost)
            for __k,__v in __local.items():
                if __k == "Concept": continue
                if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
                if __k == 'Algorithm':
                    __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
                elif __k == 'Script':
                    # NOTE(review): one or two lines appear lost here
                    # (presumably adjusting __k before the loop below).
                    __f = 'Script' # (reconstructed — needed by the 'From' line below)
                    __v = "'"+repr(__v)+"'"
                    for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
                        if __lk in __local and __local[__lk]: __k = __lk
                    if __command == "AlgorithmParameters": __k = "Dict"
                    if 'OneFunction' in __local and __local['OneFunction']:
                        __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
                        __f = 'ScriptWithOneFunction'
                        __v = '%s_ScriptWithOneFunction'%(__command,)
                    if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
                        __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __f = 'ScriptWithFunctions'
                        __v = '%s_ScriptWithFunctions'%(__command,)
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,__f)
                    __text += "%s_config['Data'] = %s\n"%(__command,__v)
                    __text = __text.replace("''","'")
                elif __k in ('Stored', 'Checked'):
                    if bool(__v): # (reconstructed guard — line lost)
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                elif __k in ('AvoidRC', 'noDetails'):
                    if not bool(__v): # (reconstructed guard — line lost)
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                else: # (reconstructed — 'else' line lost)
                    if __k == 'Parameters': __k = "Dict"
                    if isinstance(__v,Persistence.Persistence): __v = __v.values()
                    # Callables cannot be serialized: prepend the "missing
                    # variable" guard template instead.
                    if callable(__v): __text = self._missing%__v.__name__+__text
                    if isinstance(__v,dict):
                        for val in __v.values():
                            if callable(val): __text = self._missing%val.__name__+__text
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,'String')
                    __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
            __text += "study_config['%s'] = %s_config"%(__command,__command)
            # Restore default numpy print options
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            if __switchoff: # (reconstructed guard — line lost)
                self._switchoff = True
        if __text is not None: self._addLine(__text)
        if not __switchoff: # (reconstructed guard — line lost)
            self._switchoff = False
    def _finalize(self):
        # Record the trailing post-analysis block of the generated case.
        self.__loadVariablesByScript()
        self._addLine("Analysis_config = {}")
        self._addLine("Analysis_config['From'] = 'String'")
        self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
        self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
        self._addLine("print 'Analysis:',xa\"\"\"")
        self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
    def __loadVariablesByScript(self):
        # Execute the recorded lines to rebuild study_config in memory and
        # resolve the Algorithm entry when it comes from a script file.
        __ExecVariables = {} # Necessary to retrieve the variable
        exec("\n".join(self._lineSerie), __ExecVariables)
        study_config = __ExecVariables['study_config']
        self.__hasAlgorithm = bool(study_config['Algorithm'])
        if not self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" in study_config['AlgorithmParameters'] and \
                "Data" in study_config['AlgorithmParameters'] and \
                study_config['AlgorithmParameters']['From'] == 'Script':
            # The algorithm name is read back from the referenced script
            __asScript = study_config['AlgorithmParameters']['Data']
            __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
            __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
            self._addLine(__text)
        if self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" not in study_config['AlgorithmParameters'] and \
                "Data" not in study_config['AlgorithmParameters']:
            # Provide an empty parameter dict when none was recorded
            __text  = "#\nAlgorithmParameters_config = {}\n" # (reconstructed — initialization line lost)
            __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
            __text += "AlgorithmParameters_config['From'] = 'String'\n"
            __text += "AlgorithmParameters_config['Data'] = '{}'\n"
            self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Building of the commands for creating an XML case view (placeholder).
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        # XML output is not supported yet: fail loudly at construction time.
        raise NotImplementedError()
2061 # ==============================================================================
class ImportFromScript(object):
    """
    Retrieval of a named variable from an imported Python script file.
    """
    def __init__(self, __filename=None):
        "Check the existence of, and import, the script"
        if __filename is None:
            # Checked BEFORE any use of __filename: the previous code called
            # __filename.rstrip first, turning None into an AttributeError.
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        if __filename.endswith(".py"):
            # Strip only a real ".py" extension: str.rstrip(".py") removes
            # ANY trailing '.', 'p' or 'y' characters (e.g. "happy.py" -> "ha").
            self.__filename = __filename[:-len(".py")]
        else:
            self.__filename = __filename
        if not os.path.isfile(str(self.__filename)+".py"):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n \"%s\""%self.__filename)
        self.__scriptfile = __import__(self.__filename, globals(), locals(), [])
        # Keep the raw text too, for getstring(); close the handle promptly.
        with open(self.__filename+".py",'r') as fid:
            self.__scriptstring = fid.read()
    def getvalue(self, __varname=None, __synonym=None ):
        "Return the requested variable (or its synonym as a fallback)"
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__synonym))
            else:
                return getattr(self.__scriptfile, __synonym)
        else:
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Return the full script as a string"
        return self.__scriptstring
2092 # ==============================================================================
def CostFunction3D(_x,
                   _Hm  = None,  # To simulate Hm(x): HO["Direct"].appliedTo
                   _HmX = None,  # Already available simulation of Hm(x)
                   _arg = None,  # Extra arguments for Hm, as a tuple
                   # NOTE(review): parameter lines appear to have been lost
                   # in this listing — the body also reads _Y, _Xb, _BI and
                   # _RI (observation, background, inverse covariances).
                   # NOTE(review): _SSC=[] and _SSV={} are mutable defaults
                   # shared across calls — safe only if callers always pass them.
                   _SIV = False, # To be phased out for 8.0
                   _SSC = [],    # self._parameters["StoreSupplementaryCalculations"]
                   _nPS = 0,     # nbPreviousSteps
                   _QM  = "DA",  # QualityMeasure
                   _SSV = {},    # Input and/or output: self.StoredVariables
                   _fRt = False, # Whether to return the extended output
                   _sSc = True,  # Whether to store the SSC
                   ): # (reconstructed — closing line lost)
    """
    General cost function, useful for the static/3D algorithms: 3DVAR, BLUE
    and derivatives, Kalman and derivatives, LeastSquares, SamplingTest,
    PSO, SA, Tabu, DFO, QuantileRegression.

    NOTE(review): recovered from a lossy listing; lines flagged
    "(reconstructed)" were re-inserted where the structure requires them —
    verify against the upstream source.
    """
    # Make each requested storage behave like a list via "append".
    # NOTE(review): a guard ensuring each key exists in _SSV appears to
    # have been lost before the hasattr test below.
    for k in ["CostFunctionJ",
              # NOTE(review): several list entries appear lost here
              # (CostFunctionJb/Jo, CurrentState/Optimum, IndexOfOptimum...).
              "SimulatedObservationAtCurrentOptimum",
              "SimulatedObservationAtCurrentState",
             ]: # (reconstructed — list closing lost)
        if hasattr(_SSV[k],"store"):
            _SSV[k].append = _SSV[k].store # To use "append" instead of "store"
    _X = numpy.asmatrix(numpy.ravel( _x )).T
    if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
        _SSV["CurrentState"].append( _X )
    # Obtain the simulation HX, either precomputed or via the operator.
    if _HmX is not None:
        _HX = _HmX # (reconstructed — line lost)
    else: # (reconstructed)
        if _Hm is None: # (reconstructed)
            raise ValueError("COSTFUNCTION3D Operator has to be defined.")
        if _arg is None: # (reconstructed)
            _HX = _Hm( _X ) # (reconstructed)
        else: # (reconstructed)
            _HX = _Hm( _X, *_arg )
    _HX = numpy.asmatrix(numpy.ravel( _HX )).T
    if "SimulatedObservationAtCurrentState" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentState"].append( _HX )
    # Compute the quality measure; NaN in the simulation poisons all terms.
    if numpy.any(numpy.isnan(_HX)):
        Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
    else: # (reconstructed)
        _Y = numpy.asmatrix(numpy.ravel( _Y )).T
        if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
            if _BI is None or _RI is None:
                raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
            _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
            Jb = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
            Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
            if _RI is None: # (reconstructed guard — line lost)
                raise ValueError("Observation error covariance matrix has to be properly defined!")
            Jb = 0. # (reconstructed — line lost)
            Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["LeastSquares", "LS", "L2"]:
            Jb = 0. # (reconstructed — line lost)
            Jo = 0.5 * (_Y - _HX).T * (_Y - _HX)
        elif _QM in ["AbsoluteValue", "L1"]:
            Jb = 0. # (reconstructed — line lost)
            Jo = numpy.sum( numpy.abs(_Y - _HX) )
        elif _QM in ["MaximumError", "ME"]:
            Jb = 0. # (reconstructed — line lost)
            Jo = numpy.max( numpy.abs(_Y - _HX) )
        elif _QM in ["QR", "Null"]:
            Jb = 0. # (reconstructed — line lost)
            Jo = 0. # (reconstructed — line lost)
        else: # (reconstructed)
            raise ValueError("Unknown asked quality measure!")
        J = float( Jb ) + float( Jo )
    if _sSc: # (reconstructed guard — line lost)
        _SSV["CostFunctionJb"].append( Jb )
        _SSV["CostFunctionJo"].append( Jo )
        _SSV["CostFunctionJ" ].append( J )
    # Track the optimum over the stored serie (offset by _nPS past steps).
    if "IndexOfOptimum" in _SSC or \
       "CurrentOptimum" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
    if "IndexOfOptimum" in _SSC:
        _SSV["IndexOfOptimum"].append( IndexMin )
    if "CurrentOptimum" in _SSC:
        _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
    if "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
    # NOTE(review): the final return logic appears to have been lost; it
    # presumably returned _SSV when _fRt, else _HX for "QR", else J.
    if _QM in ["QR"]: # For the QuantileRegression
        pass # NOTE(review): return lines lost in extraction
# ==============================================================================
if __name__ == "__main__":
    # Minimal self-test: running the module directly only prints a banner.
    print('\n AUTODIAGNOSTIC \n')