1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelé par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
class CacheManager(object):
    """
    General class managing a cache of operator evaluations.

    Each cache entry stores a triple (flattened input point, result, input
    norm) so that a new evaluation request can be matched, within a relative
    tolerance, against previously calculated points.
    """
    def __init__(self,
                 toleranceInRedundancy = 1.e-18,
                 lenghtOfRedundancy    = -1,
                ):
        """
        The tolerance characteristics can be modified at creation.

        - toleranceInRedundancy : relative tolerance used to declare that two
          input points are duplicates of each other
        - lenghtOfRedundancy : maximal number of stored points; a negative
          value means the length is sized automatically at the first storage
          (keyword spelling "lenght" kept for backward compatibility)
        """
        self.__tolerBP  = float(toleranceInRedundancy)
        self.__lenghtOR = int(lenghtOfRedundancy)
        self.__initlnOR = self.__lenghtOR
        self.clearCache()

    def clearCache(self):
        "Empty the cache"
        # Operator Previous Calculated Points, Results, Point Norms
        self.__listOPCV = []

    def wasCalculatedIn(self, xValue ): #, info="" ):
        "Check the existence of a previous calculation matching the value"
        __alc = False
        __HxV = None
        # Scan backward (most recent entries first), limited to cache length
        for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
            if xValue.size != self.__listOPCV[i][0].size:
                # Different sizes can not match: skip this stored point
                continue
            if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
                __alc = True
                __HxV = self.__listOPCV[i][1]
                break
        return __alc, __HxV

    def storeValueInX(self, xValue, HxValue ):
        "Store a calculation corresponding to the value"
        if self.__lenghtOR < 0:
            # Automatic sizing of the cache at the first storage
            self.__lenghtOR = 2 * xValue.size + 2
            self.__initlnOR = self.__lenghtOR
        while len(self.__listOPCV) > self.__lenghtOR:
            # Reduce the list of stored cases by dropping the oldest entries
            self.__listOPCV.pop(0)
        self.__listOPCV.append( (
            copy.copy(numpy.ravel(xValue)),
            copy.copy(HxValue),
            numpy.linalg.norm(xValue),
            ) )

    def disable(self):
        "Deactivate the cache, remembering its current length to restore it"
        self.__initlnOR = self.__lenghtOR
        self.__lenghtOR = 0

    def enable(self):
        "Reactivate the cache with its previously remembered length"
        self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
class Operator(object):
    """
    General interface class for a simple operator, defined either from a
    Python function/method or from a matrix. Evaluations are counted both
    per instance and globally, and results can go through a class-level
    CacheManager to avoid redundant calculations.
    """
    # Global counters and cache shared by every Operator instance
    NbCallsAsMatrix = 0
    NbCallsAsMethod = 0
    NbCallsOfCached = 0
    CM = CacheManager()

    def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
        """
        Build such an object by giving, through one of the two keywords,
        either a Python function or a matrix.

        - fromMethod : Python function argument
        - fromMatrix : argument suitable for the numpy.matrix constructor
        - avoidingRedundancy : whether to avoid redundant calculations
        """
        self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
        self.__AvoidRC = bool( avoidingRedundancy )
        if fromMethod is not None:
            self.__Method = fromMethod # logtimer(fromMethod)
            self.__Matrix = None
            self.__Type   = "Method"
        elif fromMatrix is not None:
            self.__Method = None
            # "numpy.float" was only an alias of the builtin "float" and was
            # removed in NumPy 1.24: use the builtin directly (same dtype).
            self.__Matrix = numpy.matrix( fromMatrix, float )
            self.__Type   = "Matrix"
        else:
            self.__Method = None
            self.__Matrix = None
            self.__Type   = None

    def disableAvoidingRedundancy(self):
        "Deactivate the calculation cache"
        Operator.CM.disable()

    def enableAvoidingRedundancy(self):
        "Reactivate the calculation cache, if this operator allows it"
        if self.__AvoidRC:
            Operator.CM.enable()
        else:
            Operator.CM.disable()

    def isType(self):
        "Return the definition type of the operator (Method, Matrix or None)"
        return self.__Type

    def appliedTo(self, xValue, HValue = None):
        """
        Return the result of applying the operator to the argument xValue.
        This method simply applies the operator, so the argument has to be of
        the proper type beforehand.

        - xValue : argument suitable for applying the operator
        - HValue : optional already-known result, stored in the cache instead
          of being recomputed
        """
        if HValue is not None:
            # The result is provided by the caller: store it, do not compute
            HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
            if self.__AvoidRC:
                Operator.CM.storeValueInX(xValue,HxValue)
        else:
            if self.__AvoidRC:
                __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
            else:
                __alreadyCalculated = False
            #
            if __alreadyCalculated:
                self.__addOneCacheCall()
                HxValue = __HxV
            else:
                if self.__Matrix is not None:
                    self.__addOneMatrixCall()
                    HxValue = self.__Matrix * xValue
                else:
                    self.__addOneMethodCall()
                    HxValue = self.__Method( xValue )
                if self.__AvoidRC:
                    Operator.CM.storeValueInX(xValue,HxValue)
        #
        return HxValue

    def appliedControledFormTo(self, paire ):
        """
        Return the result of applying the operator to a pair
        (xValue, uValue). This method simply applies the operator, so the
        argument has to be of the proper type. If uValue is None, the
        operator is assumed to apply to xValue only.

        Arguments: a list/tuple containing
        - xValue : X argument suitable for applying the operator
        - uValue : U argument suitable for applying the operator
        """
        assert len(paire) == 2, "Incorrect number of arguments"
        xValue, uValue = paire
        if self.__Matrix is not None:
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
        elif uValue is not None:
            self.__addOneMethodCall()
            return self.__Method( (xValue, uValue) )
        else:
            self.__addOneMethodCall()
            return self.__Method( xValue )

    def appliedInXTo(self, paire ):
        """
        Return the result of applying the operator to the argument xValue,
        the operator being valid at xNominal. This method simply applies the
        operator, so the argument has to be of the proper type. If the
        operator is linear (a matrix), it is valid at every nominal point and
        the nominal point is not needed.

        Arguments: a list/tuple containing
        - xNominal : the point at which the operator is built, to be applied
          afterward
        - xValue : argument suitable for applying the operator
        """
        assert len(paire) == 2, "Incorrect number of arguments"
        xNominal, xValue = paire
        if self.__Matrix is not None:
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
        else:
            self.__addOneMethodCall()
            return self.__Method( (xNominal, xValue) )

    def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
        """
        Return the operator in matrix form, evaluating the method form at the
        given operating point when the operator is not already a matrix.
        """
        if self.__Matrix is not None:
            self.__addOneMatrixCall()
            return self.__Matrix
        # Sentinel string compared by equality (the original used "is not",
        # which relies on CPython string interning); "None" is deliberately
        # not used as the sentinel so that None is a valid operating point.
        elif ValueForMethodForm != "UnknownVoidValue":
            self.__addOneMethodCall()
            return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
        else:
            raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")

    def shape(self):
        """
        Return the numpy shape if the operator is available in matrix form.
        """
        if self.__Matrix is not None:
            return self.__Matrix.shape
        else:
            raise ValueError("Matrix form of the operator is not available, nor the shape")

    def nbcalls(self, which=None):
        """
        Return the numbers of operator evaluations: the full tuple of local
        then global counters, or a single entry when "which" is given.
        """
        __nbcalls = (
            self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
            self.__NbCallsAsMatrix,
            self.__NbCallsAsMethod,
            self.__NbCallsOfCached,
            Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
            Operator.NbCallsAsMatrix,
            Operator.NbCallsAsMethod,
            Operator.NbCallsOfCached,
            )
        if which is None: return __nbcalls
        else:             return __nbcalls[which]

    def __addOneMatrixCall(self):
        "Count one evaluation done through the matrix form"
        self.__NbCallsAsMatrix += 1 # Local counter
        Operator.NbCallsAsMatrix += 1 # Global counter

    def __addOneMethodCall(self):
        "Count one evaluation done through the function/method form"
        self.__NbCallsAsMethod += 1 # Local counter
        Operator.NbCallsAsMethod += 1 # Global counter

    def __addOneCacheCall(self):
        "Count one evaluation answered from the cache"
        self.__NbCallsOfCached += 1 # Local counter
        Operator.NbCallsOfCached += 1 # Global counter
281 # ==============================================================================
# NOTE(review): fragmentary extract — original file line numbers are embedded
# at the start of each line and many intermediate lines are missing (the
# constructor signature, "else:" branches, "try:" lines, docstring quotes).
# The text is kept byte-identical; only review comments are added, since a
# behavior-preserving rewrite is unsafe without the missing lines.
282 class FullOperator(object):
284     Classe générale d'interface de type opérateur complet
285     (Direct, Linéaire Tangent, Adjoint)
# Constructor: builds the three Operator forms (Direct/Tangent/Adjoint) from
# one of: a matrix, one function (approximated derivatives), three functions,
# or a script. Most of the signature (orig. lines 286-299) is not visible here.
288                  name             = "GenericFullOperator",
290                  asOneFunction    = None,  # Fonction
291                  asThreeFunctions = None,  # Dictionnaire de fonctions
293                  asDict           = None,  # Parameters
300         self.__name       = str(name)
301         self.__check      = bool(toBeChecked)
# Map user-facing parameter names onto the internal "with..." keys used below.
306         if (asDict is not None) and isinstance(asDict, dict):
307             __Parameters.update( asDict )
308             if "DifferentialIncrement" in asDict:
309                 __Parameters["withIncrement"]  = asDict["DifferentialIncrement"]
310             if "CenteredFiniteDifference" in asDict:
311                 __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
312             if "EnableMultiProcessing" in asDict:
313                 __Parameters["withmpEnabled"]  = asDict["EnableMultiProcessing"]
314             if "NumberOfProcesses" in asDict:
315                 __Parameters["withmpWorkers"]  = asDict["NumberOfProcesses"]
# Script source: fetch the matrix or the function(s) from the user script.
317         if asScript is not None:
318             __Matrix, __Function = None, None
320                 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
322                 __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
323                 __Function.update({"useApproximatedDerivatives":True})
324                 __Function.update(__Parameters)
325             elif asThreeFunctions:
327                     "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
328                     "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
329                     "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
331                 __Function.update(__Parameters)
# Direct (non-script) source: one function, or a dict of three functions.
334             if asOneFunction is not None:
335                 if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
336                     if asOneFunction["Direct"] is not None:
337                         __Function = asOneFunction
339                         raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
341                     __Function = { "Direct":asOneFunction }
342                 __Function.update({"useApproximatedDerivatives":True})
343                 __Function.update(__Parameters)
344             elif asThreeFunctions is not None:
345                 if isinstance(asThreeFunctions, dict) and \
346                         ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
347                         ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
348                         (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
349                     __Function = asThreeFunctions
350                 elif isinstance(asThreeFunctions, dict) and \
351                         ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
352                     __Function = asThreeFunctions
353                     __Function.update({"useApproximatedDerivatives":True})
355                     raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
# When only Tangent/Adjoint are given, the Tangent doubles as the Direct form.
356                 if "Direct" not in asThreeFunctions:
357                     __Function["Direct"] = asThreeFunctions["Tangent"]
358                 __Function.update(__Parameters)
362         # if sys.version_info[0] < 3 and isinstance(__Function, dict):
363         #     for k in ("Direct", "Tangent", "Adjoint"):
364         #         if k in __Function and hasattr(__Function[k],"__class__"):
365         #             if type(__Function[k]) is type(self.__init__):
366         #                 raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
# Normalize the "appliedInX" argument into a dict keyed by result name.
368         if appliedInX is not None and isinstance(appliedInX, dict):
369             __appliedInX = appliedInX
370         elif appliedInX is not None:
371             __appliedInX = {"HXb":appliedInX}
375         if scheduledBy is not None:
376             self.__T = scheduledBy
# Case 1: Direct function with approximated derivatives (finite differences).
378         if isinstance(__Function, dict) and \
379                 ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
380                 ("Direct" in __Function) and (__Function["Direct"] is not None):
381             if "withCenteredDF"            not in __Function: __Function["withCenteredDF"]            = False
382             if "withIncrement"             not in __Function: __Function["withIncrement"]             = 0.01
383             if "withdX"                    not in __Function: __Function["withdX"]                    = None
384             if "withAvoidingRedundancy"    not in __Function: __Function["withAvoidingRedundancy"]    = True
385             if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
386             if "withLenghtOfRedundancy"    not in __Function: __Function["withLenghtOfRedundancy"]    = -1
387             if "withmpEnabled"             not in __Function: __Function["withmpEnabled"]             = False
388             if "withmpWorkers"             not in __Function: __Function["withmpWorkers"]             = None
389             from daNumerics.ApproximatedDerivatives import FDApproximation
390             FDA = FDApproximation(
391                 Function              = __Function["Direct"],
392                 centeredDF            = __Function["withCenteredDF"],
393                 increment             = __Function["withIncrement"],
394                 dX                    = __Function["withdX"],
395                 avoidingRedundancy    = __Function["withAvoidingRedundancy"],
396                 toleranceInRedundancy = __Function["withToleranceInRedundancy"],
397                 lenghtOfRedundancy    = __Function["withLenghtOfRedundancy"],
398                 mpEnabled             = __Function["withmpEnabled"],
399                 mpWorkers             = __Function["withmpWorkers"],
401             self.__FO["Direct"]  = Operator( fromMethod = FDA.DirectOperator,  avoidingRedundancy = avoidRC )
402             self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
403             self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
# Case 2: the three functions are explicitly given.
404         elif isinstance(__Function, dict) and \
405                 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
406                 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
407             self.__FO["Direct"]  = Operator( fromMethod = __Function["Direct"],  avoidingRedundancy = avoidRC )
408             self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
409             self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
# Case 3: a matrix (the Adjoint is its transpose).
# NOTE(review): numpy.float is a removed NumPy alias of builtin float.
410         elif asMatrix is not None:
411             __matrice = numpy.matrix( __Matrix, numpy.float )
412             self.__FO["Direct"]  = Operator( fromMatrix = __matrice,   avoidingRedundancy = avoidRC )
413             self.__FO["Tangent"] = Operator( fromMatrix = __matrice,   avoidingRedundancy = avoidRC )
414             self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
417             raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
# Normalize each "AppliedInX" entry into a column numpy.matrix.
419         if __appliedInX is not None:
420             self.__FO["AppliedInX"] = {}
421             if not isinstance(__appliedInX, dict):
422                 raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
423             for key in list(__appliedInX.keys()):
424                 if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
425                     # Pour le cas où l'on a une vraie matrice
426                     self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
427                 elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
428                     # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
429                     self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
431                     self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
433             self.__FO["AppliedInX"] = None
439         "x.__repr__() <==> repr(x)"
440         return repr(self.__V)
443         "x.__str__() <==> str(x)"
446 # ==============================================================================
# NOTE(review): fragmentary extract — original file line numbers are embedded
# at the start of each line and many intermediate lines are missing ("else:"
# branches, docstring quotes, some "def" headers). Kept byte-identical; only
# review comments are added, since a rewrite is unsafe without those lines.
447 class Algorithm(object):
449     Classe générale d'interface de type algorithme
451     Elle donne un cadre pour l'écriture d'une classe élémentaire d'algorithme
452     d'assimilation, en fournissant un container (dictionnaire) de variables
453     persistantes initialisées, et des méthodes d'accès à ces variables stockées.
455     Une classe élémentaire d'algorithme doit implémenter la méthode "run".
457     def __init__(self, name):
459         L'initialisation présente permet de fabriquer des variables de stockage
460         disponibles de manière générique dans les algorithmes élémentaires. Ces
461         variables de stockage sont ensuite conservées dans un dictionnaire
462         interne à l'objet, mais auquel on accède par la méthode "get".
464         Les variables prévues sont :
465             - CostFunctionJ  : fonction-cout globale, somme des deux parties suivantes
466             - CostFunctionJb : partie ébauche ou background de la fonction-cout
467             - CostFunctionJo : partie observations de la fonction-cout
468             - GradientOfCostFunctionJ  : gradient de la fonction-cout globale
469             - GradientOfCostFunctionJb : gradient de la partie ébauche de la fonction-cout
470             - GradientOfCostFunctionJo : gradient de la partie observations de la fonction-cout
471             - CurrentState : état courant lors d'itérations
472             - Analysis : l'analyse Xa
473             - SimulatedObservationAtBackground : l'état observé H(Xb) à l'ébauche
474             - SimulatedObservationAtCurrentState : l'état observé H(X) à l'état courant
475             - SimulatedObservationAtOptimum : l'état observé H(Xa) à l'optimum
476             - Innovation : l'innovation : d = Y - H(X)
477             - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
478             - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
479             - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
480             - MahalanobisConsistency : indicateur de consistance des covariances
481             - OMA : Observation moins Analysis : Y - Xa
482             - OMB : Observation moins Background : Y - Xb
483             - AMB : Analysis moins Background : Xa - Xb
484             - APosterioriCovariance : matrice A
485             - APosterioriVariances : variances de la matrice A
486             - APosterioriStandardDeviations : écart-types de la matrice A
487             - APosterioriCorrelations : correlations de la matrice A
488             - Residu : dans le cas des algorithmes de vérification
489         On peut rajouter des variables à stocker dans l'initialisation de
490         l'algorithme élémentaire qui va hériter de cette classe
492         logging.debug("%s Initialisation", str(name))
493         self._m = PlatformInfo.SystemUsage()
495         self._name = str( name )
496         self._parameters = {"StoreSupplementaryCalculations":[]}
497         self.__required_parameters = {}
498         self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
# One Persistence container per storable variable, keyed by its public name.
500         self.StoredVariables = {}
501         self.StoredVariables["CostFunctionJ"]                        = Persistence.OneScalar(name = "CostFunctionJ")
502         self.StoredVariables["CostFunctionJb"]                       = Persistence.OneScalar(name = "CostFunctionJb")
503         self.StoredVariables["CostFunctionJo"]                       = Persistence.OneScalar(name = "CostFunctionJo")
504         self.StoredVariables["CostFunctionJAtCurrentOptimum"]        = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
505         self.StoredVariables["CostFunctionJbAtCurrentOptimum"]       = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
506         self.StoredVariables["CostFunctionJoAtCurrentOptimum"]       = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
507         self.StoredVariables["GradientOfCostFunctionJ"]              = Persistence.OneVector(name = "GradientOfCostFunctionJ")
508         self.StoredVariables["GradientOfCostFunctionJb"]             = Persistence.OneVector(name = "GradientOfCostFunctionJb")
509         self.StoredVariables["GradientOfCostFunctionJo"]             = Persistence.OneVector(name = "GradientOfCostFunctionJo")
510         self.StoredVariables["CurrentState"]                         = Persistence.OneVector(name = "CurrentState")
511         self.StoredVariables["Analysis"]                             = Persistence.OneVector(name = "Analysis")
512         self.StoredVariables["IndexOfOptimum"]                       = Persistence.OneIndex(name = "IndexOfOptimum")
513         self.StoredVariables["CurrentOptimum"]                       = Persistence.OneVector(name = "CurrentOptimum")
514         self.StoredVariables["SimulatedObservationAtBackground"]     = Persistence.OneVector(name = "SimulatedObservationAtBackground")
515         self.StoredVariables["SimulatedObservationAtCurrentState"]   = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
516         self.StoredVariables["SimulatedObservationAtOptimum"]        = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
517         self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
518         self.StoredVariables["Innovation"]                           = Persistence.OneVector(name = "Innovation")
519         self.StoredVariables["InnovationAtCurrentState"]             = Persistence.OneVector(name = "InnovationAtCurrentState")
520         self.StoredVariables["SigmaObs2"]                            = Persistence.OneScalar(name = "SigmaObs2")
521         self.StoredVariables["SigmaBck2"]                            = Persistence.OneScalar(name = "SigmaBck2")
522         self.StoredVariables["MahalanobisConsistency"]               = Persistence.OneScalar(name = "MahalanobisConsistency")
523         self.StoredVariables["OMA"]                                  = Persistence.OneVector(name = "OMA")
524         self.StoredVariables["OMB"]                                  = Persistence.OneVector(name = "OMB")
525         self.StoredVariables["BMA"]                                  = Persistence.OneVector(name = "BMA")
526         self.StoredVariables["APosterioriCovariance"]                = Persistence.OneMatrix(name = "APosterioriCovariance")
527         self.StoredVariables["APosterioriVariances"]                 = Persistence.OneVector(name = "APosterioriVariances")
528         self.StoredVariables["APosterioriStandardDeviations"]        = Persistence.OneVector(name = "APosterioriStandardDeviations")
529         self.StoredVariables["APosterioriCorrelations"]              = Persistence.OneMatrix(name = "APosterioriCorrelations")
530         self.StoredVariables["SimulationQuantiles"]                  = Persistence.OneMatrix(name = "SimulationQuantiles")
531         self.StoredVariables["Residu"]                               = Persistence.OneScalar(name = "Residu")
# Common pre-processing run before each elementary algorithm: parameter
# update, input checking, bounds normalization and optimizer verbosity.
533     def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
535         logging.debug("%s Lancement", self._name)
536         logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
538         # Mise a jour de self._parameters avec Parameters
539         self.__setParameters(Parameters)
541         # Corrections et complements
# Local checker: a vector argument is an error only if declared mandatory.
542         def __test_vvalue( argument, variable, argname):
544                 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
545                     raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
546                 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
547                     logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
549                     logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
551                 logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
552         __test_vvalue( Xb, "Xb", "Background or initial state" )
553         __test_vvalue( Y,  "Y",  "Observation" )
# Local checker: same policy for the error covariance matrices.
554         def __test_cvalue( argument, variable, argname):
556                 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
557                     raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
558                 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
559                     logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
561                     logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
563                 logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
564         __test_cvalue( R, "R", "Observation" )
565         __test_cvalue( B, "B", "Background" )
566         __test_cvalue( Q, "Q", "Evolution" )
# Bounds are kept only when given as a non-empty list/tuple, else reset.
568         if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
569             logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
571             self._parameters["Bounds"] = None
# Optimizer verbosity follows the logging level (chatty below WARNING).
573         if logging.getLogger().level < logging.WARNING:
574             self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
575             if PlatformInfo.has_scipy:
576                 import scipy.optimize
577                 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
579                 self._parameters["optmessages"] = 15
581             self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
582             if PlatformInfo.has_scipy:
583                 import scipy.optimize
584                 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
586                 self._parameters["optmessages"] = 15
# Common post-processing: derive variance/correlation products from each
# stored a posteriori covariance, then log the operator call counters.
590     def _post_run(self,_oH=None):
592         if ("StoreSupplementaryCalculations" in self._parameters) and \
593             "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
594             for _A in self.StoredVariables["APosterioriCovariance"]:
595                 if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
596                     self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
597                 if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
598                     self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
599                 if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
600                     _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
601                     _C = numpy.dot(_EI, numpy.dot(_A, _EI))
602                     self.StoredVariables["APosterioriCorrelations"].store( _C )
604             logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
605             logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
606         logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
607         logging.debug("%s Terminé", self._name)
610     def get(self, key=None):
612         Renvoie l'une des variables stockées identifiée par la clé, ou le
613         dictionnaire de l'ensemble des variables disponibles en l'absence de
614         clé. Ce sont directement les variables sous forme objet qui sont
615         renvoyées, donc les méthodes d'accès à l'objet individuel sont celles
616         des classes de persistance.
619             return self.StoredVariables[key]
621             return self.StoredVariables
623     def __contains__(self, key=None):
624         "D.__contains__(k) -> True if D has a key k, else False"
625         return key in self.StoredVariables
628         "D.keys() -> list of D's keys"
# Guarded with hasattr so it is safe before __init__ completes.
629         if hasattr(self, "StoredVariables"):
630             return self.StoredVariables.keys()
635         "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636         if hasattr(self, "StoredVariables"):
637             return self.StoredVariables.pop(k, d)
642             raise TypeError("pop expected at least 1 arguments, got 0")
643             "If key is not found, d is returned if given, otherwise KeyError is raised"
# Abstract: every elementary algorithm must override run().
649     def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
651         Doit implémenter l'opération élémentaire de calcul d'assimilation sous
652         sa forme mathématique la plus naturelle possible.
654         raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
656     def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
658         Permet de définir dans l'algorithme des paramètres requis et leurs
659         caractéristiques par défaut.
662             raise ValueError("A name is mandatory to define a required parameter.")
664         self.__required_parameters[name] = {
666             "typecast" : typecast,
672         logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
674     def getRequiredParameters(self, noDetails=True):
676         Renvoie la liste des noms de paramètres requis ou directement le
677         dictionnaire des paramètres requis.
680             return sorted(self.__required_parameters.keys())
682             return self.__required_parameters
684     def setParameterValue(self, name=None, value=None):
686         Renvoie la valeur d'un paramètre requis de manière contrôlée
688         default  = self.__required_parameters[name]["default"]
689         typecast = self.__required_parameters[name]["typecast"]
690         minval   = self.__required_parameters[name]["minval"]
691         maxval   = self.__required_parameters[name]["maxval"]
692         listval  = self.__required_parameters[name]["listval"]
# Resolve the effective value: explicit value wins over declared default.
694         if value is None and default is None:
696         elif value is None and default is not None:
697             if typecast is None: __val = default
698             else:                __val = typecast( default )
700             if typecast is None: __val = value
701             else:                __val = typecast( value )
# Validate against the declared bounds and allowed-values list.
703         if minval is not None and (numpy.array(__val, float) < minval).any():
704             raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
705         if maxval is not None and (numpy.array(__val, float) > maxval).any():
706             raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
707         if listval is not None:
708             if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
711                         raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
712             elif __val not in listval:
713                 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
716     def requireInputArguments(self, mandatory=(), optional=()):
718         Permet d'imposer des arguments requises en entrée
720         self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
721         self.__required_inputs["RequiredInputValues"]["optional"]  = tuple( optional )
723     def __setParameters(self, fromDico={}):
725         Permet de stocker les paramètres reçus dans le dictionnaire interne.
727         self._parameters.update( fromDico )
728         for k in self.__required_parameters.keys():
729             if k in fromDico.keys():
730                 self._parameters[k] = self.setParameterValue(k,fromDico[k])
732                 self._parameters[k] = self.setParameterValue(k)
733             logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
class Diagnostic(object):
    """
    General interface class for diagnostics.

    This template is used as a "pattern" base class together with one of the
    persistence classes, such as "OneScalar" for example.

    An elementary diagnostic class has to implement its two methods: the
    "_formula" method, to write explicitly and properly the formula of the
    mathematical computation of the diagnostic (non-public internal method),
    and "calculate", to activate the former after having checked and
    prepared the data, and to store the results at each step (public
    activation method).
    """
    def __init__(self, name = "", parameters = {}):
        """
        Record the diagnostic name and a copy of its parameter dictionary.
        (The default {} is only read through dict(), never mutated, so the
        shared mutable default is harmless here.)
        """
        self.name       = str(name)
        self.parameters = dict( parameters )

    def _formula(self, *args):
        """
        Has to implement the elementary diagnostic operation in its most
        natural mathematical form.
        """
        raise NotImplementedError("Diagnostic mathematical formula has not been implemented!")

    def calculate(self, *args):
        """
        Activate the computation formula with properly ordered arguments.
        """
        raise NotImplementedError("Diagnostic activation method has not been implemented!")
769 # ==============================================================================
# NOTE(review): fragmentary extract — original file line numbers are embedded
# at the start of each line and most of the constructor signature (orig.
# lines 773-791) plus several branch lines are missing. Kept byte-identical;
# only review comments are added.
770 class DiagnosticAndParameters(object):
772     Classe générale d'interface d'interface de type diagnostic
775                  name               = "GenericDiagnostic",
782                  asExistingDiags    = None,
786         self.__name = str(name)
792         self.__E = tuple(asExistingDiags)
793         self.__TheDiag = None
# The diagnostic description comes either from a user script or from the
# direct keyword arguments of the constructor.
795         if asScript is not None:
796             __Diag = ImportFromScript(asScript).getvalue( "Diagnostic" )
797             __Iden = ImportFromScript(asScript).getvalue( "Identifier" )
798             __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
799             __Unit = ImportFromScript(asScript).getvalue( "Unit" )
800             __Base = ImportFromScript(asScript).getvalue( "BaseType" )
802             __Diag = asDiagnostic
803             __Iden = asIdentifier
# Normalize the collected pieces; the identifier defaults to the diagnostic
# name when not explicitly provided.
808         if __Diag is not None:
809             self.__D = str(__Diag)
810             if __Iden is not None:
811                 self.__I = str(__Iden)
813                 self.__I = str(__Diag)
814         if __Dict is not None:
815             self.__P.update( dict(__Dict) )
816         if __Unit is None or __Unit == "None":
818         if __Base is None or __Base == "None":
821         self.__setDiagnostic( self.__D, self.__I, self.__U, self.__B, self.__P, self.__E )
# Accessor returning the instantiated elementary diagnostic object.
825         return self.__TheDiag
827     def __setDiagnostic(self, __choice = None, __name = "", __unit = "", __basetype = None, __parameters = {}, __existings = () ):
829         Permet de sélectionner un diagnostic a effectuer
832             raise ValueError("Error: diagnostic choice has to be given")
833         __daDirectory = "daDiagnostics"
835         # Recherche explicitement le fichier complet
836         # ------------------------------------------
# Search sys.path for a "<choice>.py" file under a daDiagnostics directory.
838         for directory in sys.path:
839             if os.path.isfile(os.path.join(directory, __daDirectory, str(__choice)+'.py')):
840                 __module_path = os.path.abspath(os.path.join(directory, __daDirectory))
841         if __module_path is None:
842             raise ImportError("No diagnostic module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(__choice, __daDirectory, sys.path))
844         # Importe le fichier complet comme un module
845         # ------------------------------------------
# Temporarily prepend the module path to sys.path for the dynamic import,
# then restore sys.path whatever happens in the import.
847             __sys_path_tmp = sys.path ; sys.path.insert(0,__module_path)
848             self.__diagnosticFile = __import__(str(__choice), globals(), locals(), [])
849             sys.path = __sys_path_tmp ; del __sys_path_tmp
850         except ImportError as e:
851             raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(__choice,e))
853         # Instancie un objet du type élémentaire du fichier
854         # -------------------------------------------------
855         if __name in __existings:
856             raise ValueError("A default input with the same name \"%s\" already exists."%str(__name))
858             self.__TheDiag = self.__diagnosticFile.ElementaryDiagnostic(
861                 basetype   = __basetype,
862                 parameters = __parameters )
865 # ==============================================================================
class AlgorithmAndParameters(object):
    """
    General action interface class for the algorithm and its parameters.
    """
    # NOTE(review): the "def __init__(self, ..." header line is elided in this
    # excerpt; the line below is one of its keyword parameters.
            name = "GenericAlgorithm",
        # Constructor body: record the name and initialise the algorithm slots.
        self.__name = str(name)
        self.__algorithm = {}
        self.__algorithmFile = None
        self.__algorithmName = None
        #
        self.updateParameters( asDict, asScript )
        #
        if asScript is not None:
            # Script input: the algorithm choice is read from the user script.
            __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
        #
        if __Algo is not None:
            self.__A = str(__Algo)
            self.__P.update( {"Algorithm":self.__A} )
        #
        self.__setAlgorithm( self.__A )

    def updateParameters(self,
        # NOTE(review): the remaining keyword parameters of this signature
        # (presumably asDict and asScript — confirm upstream) are elided.
        "Update of the parameters"
        if asScript is not None:
            __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
        #
        if __Dict is not None:
            self.__P.update( dict(__Dict) )

    def executePythonScheme(self, asDictAO = None):
        "Run the data assimilation calculation (pure Python scheme)"
        Operator.CM.clearCache()
        #
        if not isinstance(asDictAO, dict):
            raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
        # Unwrap each input: objects carrying a "getO" accessor are unwrapped,
        # the others are used as given.
        if hasattr(asDictAO["Background"],"getO"): self.__Xb = asDictAO["Background"].getO()
        elif hasattr(asDictAO["CheckingPoint"],"getO"): self.__Xb = asDictAO["CheckingPoint"].getO()
        else: self.__Xb = None
        if hasattr(asDictAO["Observation"],"getO"): self.__Y = asDictAO["Observation"].getO()
        else: self.__Y = asDictAO["Observation"]
        if hasattr(asDictAO["ControlInput"],"getO"): self.__U = asDictAO["ControlInput"].getO()
        else: self.__U = asDictAO["ControlInput"]
        if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
        else: self.__HO = asDictAO["ObservationOperator"]
        if hasattr(asDictAO["EvolutionModel"],"getO"): self.__EM = asDictAO["EvolutionModel"].getO()
        else: self.__EM = asDictAO["EvolutionModel"]
        if hasattr(asDictAO["ControlModel"],"getO"): self.__CM = asDictAO["ControlModel"].getO()
        else: self.__CM = asDictAO["ControlModel"]
        self.__B = asDictAO["BackgroundError"]
        self.__R = asDictAO["ObservationError"]
        self.__Q = asDictAO["EvolutionError"]
        #
        # Consistency check of all sizes before running the algorithm.
        self.__shape_validate()
        #
        # (most keyword arguments of this call are elided in this excerpt)
        self.__algorithm.run(
            Parameters = self.__P,

    def executeYACSScheme(self, FileName=None):
        "Run the data assimilation calculation through a YACS scheme"
        if FileName is None or not os.path.exists(FileName):
            raise ValueError("a YACS file name has to be given for YACS execution.\n")
        if not PlatformInfo.has_salome or not PlatformInfo.has_yacs or not PlatformInfo.has_adao:
            raise ImportError("Unable to get SALOME, YACS or ADAO environnement variables. Please launch SALOME before executing.\n")
        #
        # (the SALOMERuntime / loader / pilot import lines are elided here)
        SALOMERuntime.RuntimeSALOME_setRuntime()
        #
        r = pilot.getRuntime()
        xmlLoader = loader.YACSLoader()
        xmlLoader.registerProcCataLoader()
        # (the surrounding try/except of the catalog loading is elided)
        catalogAd = r.loadCatalog("proc", os.path.abspath(FileName))
        r.addCatalog(catalogAd)
        #
        # (the "try:" line is elided)
        p = xmlLoader.load(os.path.abspath(FileName))
        except IOError as ex:
            print("The YACS XML schema file can not be loaded: %s"%(ex,))
        #
        logger = p.getLogger("parser")
        if not logger.isEmpty():
            print("The imported YACS XML schema has errors on parsing:")
            print(logger.getStr())
        # (the validity test line is elided) report an invalid schema:
            print("The YACS XML schema is not valid and will not be executed:")
            print(p.getErrorReport())
        #
        info=pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
        p.checkConsistency(info)
        if info.areWarningsOrErrors():
            print("The YACS XML schema is not coherent and will not be executed:")
            print(info.getGlobalRepr())
        #
        e = pilot.ExecutorSwig()
        # (the execution call is elided) report a failed final state:
        if p.getEffectiveState() != pilot.DONE:
            print(p.getErrorReport())

    def get(self, key = None):
        "Check the existence of, and return, a variable or parameter key"
        # Algorithm stored variables take precedence over plain parameters.
        if key in self.__algorithm:
            return self.__algorithm.get( key )
        elif key in self.__P:
            return self.__P[key]

    def pop(self, k, d):
        "Needed for pickling"
        return self.__algorithm.pop(k, d)

    def getAlgorithmRequiredParameters(self, noDetails=True):
        "Return the list of parameters required by the algorithm"
        return self.__algorithm.getRequiredParameters(noDetails)

    def setObserver(self, __V, __O, __I, __S):
        # An observer can only be attached once a real algorithm object
        # (one exposing StoredVariables) has been selected.
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be build before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
        # (most keyword arguments of this call are elided in this excerpt)
        self.__algorithm.StoredVariables[ __V ].setDataObserver(
            HookParameters = __I,

    def removeObserver(self, __V, __O, __A = False):
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
            raise ValueError("No observer can be removed before choosing an algorithm.")
        if __V not in self.__algorithm:
            raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
        # (the remaining arguments of this call are elided in this excerpt)
        return self.__algorithm.StoredVariables[ __V ].removeDataObserver(

    def hasObserver(self, __V):
        # (the early-return bodies of the two guards below are elided —
        # presumably "return False"; confirm upstream)
        if self.__algorithm is None \
            or isinstance(self.__algorithm, dict) \
            or not hasattr(self.__algorithm,"StoredVariables"):
        if __V not in self.__algorithm:
        return self.__algorithm.StoredVariables[ __V ].hasDataObserver()

    # NOTE(review): "def keys(self):" header elided; union of stored-variable
    # names and parameter names.
        return list(self.__algorithm.keys()) + list(self.__P.keys())

    def __contains__(self, key=None):
        "D.__contains__(k) -> True if D has a key k, else False"
        return key in self.__algorithm or key in self.__P

    # NOTE(review): "def __repr__(self):" header elided.
        "x.__repr__() <==> repr(x)"
        return repr(self.__A)+", "+repr(self.__P)

    # NOTE(review): "def __str__(self):" header elided.
        "x.__str__() <==> str(x)"
        return str(self.__A)+", "+str(self.__P)

    def __setAlgorithm(self, choice = None ):
        """
        Select the algorithm to be used to carry out the assimilation study.
        The argument is a character string referring to the name of a file
        contained in "../daAlgorithms" and performing the assimilation
        operation on the fixed arguments.
        """
        # (the guard test line is elided) a missing choice is an error:
            raise ValueError("Error: algorithm choice has to be given")
        if self.__algorithmName is not None:
            raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
        daDirectory = "daAlgorithms"
        #
        # Explicitly search for the complete module file in sys.path
        # ----------------------------------------------------------
        for directory in sys.path:
            if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
                module_path = os.path.abspath(os.path.join(directory, daDirectory))
        if module_path is None:
            raise ImportError("No algorithm module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(choice, daDirectory, sys.path))
        #
        # Import the complete file as a module
        # ------------------------------------
        # (the "try:" line is elided) temporarily prepend the module path:
        sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
        self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
        self.__algorithmName = str(choice)
        sys.path = sys_path_tmp ; del sys_path_tmp
        except ImportError as e:
            raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
        #
        # Instantiate an object of the elementary type defined by the file
        # ----------------------------------------------------------------
        self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()

    def __shape_validate(self):
        """
        Validate the mutual consistency of the sizes of the variables and of
        the matrices, when there are any.
        """
        # Background Xb: accept vector-like ("size") or matrix-like ("shape",
        # possibly callable) objects.
        if self.__Xb is None: __Xb_shape = (0,)
        elif hasattr(self.__Xb,"size"): __Xb_shape = (self.__Xb.size,)
        elif hasattr(self.__Xb,"shape"):
            if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
            else: __Xb_shape = self.__Xb.shape()
        else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
        # Observation Y
        if self.__Y is None: __Y_shape = (0,)
        elif hasattr(self.__Y,"size"): __Y_shape = (self.__Y.size,)
        elif hasattr(self.__Y,"shape"):
            if isinstance(self.__Y.shape, tuple): __Y_shape = self.__Y.shape
            else: __Y_shape = self.__Y.shape()
        else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
        # Control input U
        if self.__U is None: __U_shape = (0,)
        elif hasattr(self.__U,"size"): __U_shape = (self.__U.size,)
        elif hasattr(self.__U,"shape"):
            if isinstance(self.__U.shape, tuple): __U_shape = self.__U.shape
            else: __U_shape = self.__U.shape()
        else: raise TypeError("The control (U) has no attribute of shape: problem !")
        # Error covariance matrices B, R, Q: "shape" may be an attribute or a callable
        if self.__B is None: __B_shape = (0,0)
        elif hasattr(self.__B,"shape"):
            if isinstance(self.__B.shape, tuple): __B_shape = self.__B.shape
            else: __B_shape = self.__B.shape()
        else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
        if self.__R is None: __R_shape = (0,0)
        elif hasattr(self.__R,"shape"):
            if isinstance(self.__R.shape, tuple): __R_shape = self.__R.shape
            else: __R_shape = self.__R.shape()
        else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
        if self.__Q is None: __Q_shape = (0,0)
        elif hasattr(self.__Q,"shape"):
            if isinstance(self.__Q.shape, tuple): __Q_shape = self.__Q.shape
            else: __Q_shape = self.__Q.shape()
        else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
        # Operators HO, EM, CM: the size is taken from their "Direct" component
        if len(self.__HO) == 0: __HO_shape = (0,0)
        elif isinstance(self.__HO, dict): __HO_shape = (0,0)
        elif hasattr(self.__HO["Direct"],"shape"):
            if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
            else: __HO_shape = self.__HO["Direct"].shape()
        else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
        if len(self.__EM) == 0: __EM_shape = (0,0)
        elif isinstance(self.__EM, dict): __EM_shape = (0,0)
        elif hasattr(self.__EM["Direct"],"shape"):
            if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
            else: __EM_shape = self.__EM["Direct"].shape()
        else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
        if len(self.__CM) == 0: __CM_shape = (0,0)
        elif isinstance(self.__CM, dict): __CM_shape = (0,0)
        elif hasattr(self.__CM["Direct"],"shape"):
            if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
            else: __CM_shape = self.__CM["Direct"].shape()
        else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
        #
        # Check the consistency conditions
        # --------------------------------
        # States and observations must be genuine vectors
        if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
            raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
        if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
            raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
        # Covariance and evolution matrices must be square
        if not( min(__B_shape) == max(__B_shape) ):
            raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
        if not( min(__R_shape) == max(__R_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
        if not( min(__Q_shape) == max(__Q_shape) ):
            raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
        if not( min(__EM_shape) == max(__EM_shape) ):
            raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
        # The observation operator must map state space to observation space
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
        if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
            raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
        #
        if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
            if self.__algorithmName in ["EnsembleBlue",]:
                # Special case: for EnsembleBlue the background is an ensemble
                # of vectors, converted here into a Persistence series.
                asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
                self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
                for member in asPersistentVector:
                    # NOTE(review): numpy.float is removed in NumPy >= 1.24;
                    # this needs migration to the builtin float — confirm.
                    self.__Xb.store( numpy.matrix( numpy.ravel(member), numpy.float ).T )
                __Xb_shape = min(__B_shape)
            # (the "else:" line is elided) any other algorithm is an error:
                raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
        #
        if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
            raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
        #
        if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
            raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
        #
        if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
            raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
        #
        # One bound pair is required per state component
        if ("Bounds" in self.__P) \
            and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
            and (len(self.__P["Bounds"]) != max(__Xb_shape)):
            raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
                %(len(self.__P["Bounds"]),max(__Xb_shape)))
1221 # ==============================================================================
class DataObserver(object):
    """
    General interface class for observers.
    """
    # NOTE(review): the "def __init__(self, ..." header line is elided in this
    # excerpt; the line below is one of its keyword parameters.
            name = "GenericObserver",
        self.__name = str(name)
        #
        # Normalise the observed variable name(s) into the tuple self.__V and
        # the matching information strings into self.__I.
        if onVariable is None:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
        elif type(onVariable) in (tuple, list):
            self.__V = tuple(map( str, onVariable ))
            if withInfo is None:
            # (branch body and "else:" elided) same info repeated per variable:
                self.__I = (str(withInfo),)*len(self.__V)
        elif isinstance(onVariable, str):
            self.__V = (onVariable,)
            if withInfo is None:
                self.__I = (onVariable,)
            # (the "else:" line is elided)
                self.__I = (str(withInfo),)
        # (the "else:" line is elided) any other type is rejected:
            raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
        #
        # Build the observer function text from, in order of priority: a raw
        # string, a named template, or a script file.
        if asString is not None:
            __FunctionText = asString
        elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
            __FunctionText = Templates.ObserverTemplates[asTemplate]
        elif asScript is not None:
            __FunctionText = ImportFromScript(asScript).getstring()
        __Function = ObserverF(__FunctionText)
        #
        if asObsObject is not None:
            # An already-built observer object takes precedence over the text.
            self.__O = asObsObject
        # (the "else:" line is elided)
            self.__O = __Function.getfunc()
        #
        # Attach the observer to each requested variable of the algorithm.
        for k in range(len(self.__V)):
            # (the ename/einfo assignment lines are elided in this excerpt)
            if ename not in withAlgo:
                raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
            withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)

    # NOTE(review): "def __repr__(self):" header elided.
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)+"\n"+repr(self.__O)

    # NOTE(review): "def __str__(self):" header elided.
        "x.__str__() <==> str(x)"
        return str(self.__V)+"\n"+str(self.__O)
1292 # ==============================================================================
class State(object):
    """
    General interface class for a state (a vector or a series of vectors).
    """
    # NOTE(review): the "def __init__(self, ..." header line is elided in this
    # excerpt; the following lines are keyword parameters of that constructor.
            name = "GenericVector",
            asPersistentVector = None,
            toBeChecked = False,
        """
        Define a vector:
        - asVector: data input, as a vector compatible with the numpy.matrix
          constructor, or "True" if input through a script.
        - asPersistentVector: data input, as a series of vectors compatible
          with the numpy.matrix constructor, or as a Persistence object, or
          "True" if input through a script.
        - asScript: if a valid script is given containing a variable named
          "name", the variable is of kind "asVector" (by default) or
          "asPersistentVector" depending on which of these switches is set.
        """
        self.__name = str(name)
        self.__check = bool(toBeChecked)
        #
        self.__is_vector = False
        self.__is_series = False
        #
        if asScript is not None:
            # Script input: read either a series or a single vector.
            __Vector, __Series = None, None
            if asPersistentVector:
                __Series = ImportFromScript(asScript).getvalue( self.__name )
            # (the "else:" line is elided)
                __Vector = ImportFromScript(asScript).getvalue( self.__name )
        # (the "else:" line is elided) direct in-memory inputs:
            __Vector, __Series = asVector, asPersistentVector
        #
        if __Vector is not None:
            self.__is_vector = True
            # Stored internally as a numpy.matrix column vector.
            # NOTE(review): numpy.float is removed in NumPy >= 1.24; needs
            # migration to the builtin float — confirm.
            self.__V = numpy.matrix( numpy.asmatrix(__Vector).A1, numpy.float ).T
            self.shape = self.__V.shape
            self.size = self.__V.size
        elif __Series is not None:
            self.__is_series = True
            if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix)):
                # Wrap a plain sequence of vectors into a Persistence series.
                self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
                for member in __Series:
                    self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, numpy.float ).T )
                import sys ; sys.stdout.flush()
            # "shape" may be a plain attribute or a callable depending on type
            if isinstance(self.__V.shape, (tuple, list)):
                self.shape = self.__V.shape
            # (the "else:" line is elided)
                self.shape = self.__V.shape()
            if len(self.shape) == 1:
                self.shape = (self.shape[0],1)
            self.size = self.shape[0] * self.shape[1]
        # (the "else:" line is elided) neither input kind was usable:
            raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
        #
        if scheduledBy is not None:
            self.__T = scheduledBy

    def getO(self, withScheduler=False):
        # (the leading branch test of this accessor is elided in this excerpt)
            return self.__V, self.__T
        elif self.__T is None:

    # NOTE(review): "def isvector(self):" header elided.
        "Internal type check"
        return self.__is_vector

    # NOTE(review): "def isseries(self):" header elided.
        "Internal type check"
        return self.__is_series

    # NOTE(review): "def __repr__(self):" header elided.
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)

    # NOTE(review): "def __str__(self):" header elided.
        "x.__str__() <==> str(x)"
        return str(self.__V)
1385 # ==============================================================================
1386 class Covariance(object):
1388 Classe générale d'interface de type covariance
1391 name = "GenericCovariance",
1392 asCovariance = None,
1393 asEyeByScalar = None,
1394 asEyeByVector = None,
1397 toBeChecked = False,
1400 Permet de définir une covariance :
1401 - asCovariance : entrée des données, comme une matrice compatible avec
1402 le constructeur de numpy.matrix
1403 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1404 multiplicatif d'une matrice de corrélation identité, aucune matrice
1405 n'étant donc explicitement à donner
1406 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1407 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1408 n'étant donc explicitement à donner
1409 - asCovObject : entrée des données comme un objet python, qui a les
1410 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1411 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1412 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1413 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1414 pleine doit être vérifié
1416 self.__name = str(name)
1417 self.__check = bool(toBeChecked)
1420 self.__is_scalar = False
1421 self.__is_vector = False
1422 self.__is_matrix = False
1423 self.__is_object = False
1425 if asScript is not None:
1426 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1428 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1430 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1432 __Object = ImportFromScript(asScript).getvalue( self.__name )
1434 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1436 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
1438 if __Scalar is not None:
1439 if numpy.matrix(__Scalar).size != 1:
1440 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1441 self.__is_scalar = True
1442 self.__C = numpy.abs( float(__Scalar) )
1445 elif __Vector is not None:
1446 self.__is_vector = True
1447 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1448 self.shape = (self.__C.size,self.__C.size)
1449 self.size = self.__C.size**2
1450 elif __Matrix is not None:
1451 self.__is_matrix = True
1452 self.__C = numpy.matrix( __Matrix, float )
1453 self.shape = self.__C.shape
1454 self.size = self.__C.size
1455 elif __Object is not None:
1456 self.__is_object = True
1458 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1459 if not hasattr(self.__C,at):
1460 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1461 if hasattr(self.__C,"shape"):
1462 self.shape = self.__C.shape
1465 if hasattr(self.__C,"size"):
1466 self.size = self.__C.size
1471 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
1475 def __validate(self):
1477 if self.__C is None:
1478 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1479 if self.ismatrix() and min(self.shape) != max(self.shape):
1480 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1481 if self.isobject() and min(self.shape) != max(self.shape):
1482 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1483 if self.isscalar() and self.__C <= 0:
1484 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1485 if self.isvector() and (self.__C <= 0).any():
1486 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1487 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1489 L = numpy.linalg.cholesky( self.__C )
1491 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1492 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1494 L = self.__C.cholesky()
1496 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1499 "Vérification du type interne"
1500 return self.__is_scalar
1503 "Vérification du type interne"
1504 return self.__is_vector
1507 "Vérification du type interne"
1508 return self.__is_matrix
1511 "Vérification du type interne"
1512 return self.__is_object
1517 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1518 elif self.isvector():
1519 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1520 elif self.isscalar():
1521 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1522 elif self.isobject():
1523 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1525 return None # Indispensable
1530 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1531 elif self.isvector():
1532 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1533 elif self.isscalar():
1534 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1535 elif self.isobject():
1536 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
1539 "Décomposition de Cholesky"
1541 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1542 elif self.isvector():
1543 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1544 elif self.isscalar():
1545 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1546 elif self.isobject() and hasattr(self.__C,"cholesky"):
1547 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1549 def choleskyI(self):
1550 "Inversion de la décomposition de Cholesky"
1552 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1553 elif self.isvector():
1554 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1555 elif self.isscalar():
1556 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1557 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1558 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
1560 def diag(self, msize=None):
1561 "Diagonale de la matrice"
1563 return numpy.diag(self.__C)
1564 elif self.isvector():
1566 elif self.isscalar():
1568 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1570 return self.__C * numpy.ones(int(msize))
1571 elif self.isobject():
1572 return self.__C.diag()
1574 def asfullmatrix(self, msize=None):
1578 elif self.isvector():
1579 return numpy.matrix( numpy.diag(self.__C), float )
1580 elif self.isscalar():
1582 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1584 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1585 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1586 return self.__C.asfullmatrix()
1588 def trace(self, msize=None):
1589 "Trace de la matrice"
1591 return numpy.trace(self.__C)
1592 elif self.isvector():
1593 return float(numpy.sum(self.__C))
1594 elif self.isscalar():
1596 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1598 return self.__C * int(msize)
1599 elif self.isobject():
1600 return self.__C.trace()
1606 "x.__repr__() <==> repr(x)"
1607 return repr(self.__C)
1610 "x.__str__() <==> str(x)"
1611 return str(self.__C)
1613 def __add__(self, other):
1614 "x.__add__(y) <==> x+y"
1615 if self.ismatrix() or self.isobject():
1616 return self.__C + numpy.asmatrix(other)
1617 elif self.isvector() or self.isscalar():
1618 _A = numpy.asarray(other)
1619 _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
1620 return numpy.asmatrix(_A)
1622 def __radd__(self, other):
1623 "x.__radd__(y) <==> y+x"
1624 raise NotImplementedError("%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other)))
1626 def __sub__(self, other):
1627 "x.__sub__(y) <==> x-y"
1628 if self.ismatrix() or self.isobject():
1629 return self.__C - numpy.asmatrix(other)
1630 elif self.isvector() or self.isscalar():
1631 _A = numpy.asarray(other)
1632 _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
1633 return numpy.asmatrix(_A)
1635 def __rsub__(self, other):
1636 "x.__rsub__(y) <==> y-x"
1637 raise NotImplementedError("%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other)))
1640 "x.__neg__() <==> -x"
1643 def __mul__(self, other):
1644 "x.__mul__(y) <==> x*y"
1645 if self.ismatrix() and isinstance(other,numpy.matrix):
1646 return self.__C * other
1647 elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
1648 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1649 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1650 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1651 return self.__C * numpy.asmatrix(other)
1653 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
1654 elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
1655 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1656 return numpy.asmatrix(self.__C * numpy.ravel(other)).T
1657 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1658 return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
1660 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1661 elif self.isscalar() and isinstance(other,numpy.matrix):
1662 return self.__C * other
1663 elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
1664 if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
1665 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1667 return self.__C * numpy.asmatrix(other)
1668 elif self.isobject():
1669 return self.__C.__mul__(other)
1671 raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
1673 def __rmul__(self, other):
1674 "x.__rmul__(y) <==> y*x"
1675 if self.ismatrix() and isinstance(other,numpy.matrix):
1676 return other * self.__C
1677 elif self.isvector() and isinstance(other,numpy.matrix):
1678 if numpy.ravel(other).size == self.shape[0]: # Vecteur
1679 return numpy.asmatrix(numpy.ravel(other) * self.__C)
1680 elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrice
1681 return numpy.asmatrix(numpy.array(other) * self.__C)
1683 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1684 elif self.isscalar() and isinstance(other,numpy.matrix):
1685 return other * self.__C
1686 elif self.isobject():
1687 return self.__C.__rmul__(other)
1689 raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
        "x.__len__() <==> len(x)"
        # NOTE(review): the "def __len__(self):" header is not visible in this
        # listing span; these two lines are its body. The length of the
        # covariance object is its first dimension.
        return self.shape[0]
1695 # ==============================================================================
class ObserverF(object):
    """
    Build an observer function from its source text.
    """
    def __init__(self, corps=""):
        # corps: Python source text executed at each observation call
        self.__corps = corps
    def func(self,var,info):
        "Observation function: executes the stored source with 'var' and 'info' in scope"
        # SECURITY NOTE: exec() runs arbitrary user-supplied text; only use
        # with trusted observer definitions.
        exec(self.__corps)
    def getfunc(self):
        "Return the (bound) function pointer held by the object"
        return self.func
1709 # ==============================================================================
class CaseLogger(object):
    """
    Keep the series of commands used to build a case, and dump/load them
    through the registered viewers/loaders ("TUI", "DCT", "SCD", "YACS").
    """
    def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
        "Initialisation of the command log and of the output/input formats"
        self.__name     = str(__name)
        self.__objname  = str(__objname)
        self.__logSerie = []      # recorded (command, keys, local, pre, switchoff) tuples
        self.__switchoff = False  # when True, incoming commands are not recorded
        __default_formats = {
            "TUI" : _TUIViewer,
            "DCT" : _DCTViewer,
            "SCD" : _SCDViewer,
            "YACS": _YACSViewer,
            }
        # BUG FIX: viewers and loaders were previously two names bound to the
        # SAME dict ("self.__viewers = self.__loaders = {...}"), so extending
        # one through __addViewers silently extended the other. Use two
        # independent copies.
        self.__viewers = dict(__default_formats)
        self.__loaders = dict(__default_formats)
        if __addViewers is not None:
            self.__viewers.update(dict(__addViewers))
        if __addLoaders is not None:
            self.__loaders.update(dict(__addLoaders))
    def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Registration of an individual command"
        if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
            if "self" in __keys: __keys.remove("self")
            self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
            if __switchoff:
                self.__switchoff = True
        if not __switchoff:
            self.__switchoff = False
    def dump(self, __filename=None, __format="TUI"):
        "Normalized restitution of the commands (through the *GenericCaseViewer)"
        if __format in self.__viewers:
            __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
        else:
            raise ValueError("Dumping as \"%s\" is not available"%__format)
        return __formater.dump(__filename)
    def load(self, __filename=None, __format="TUI"):
        "Normalized loading of the commands"
        if __format in self.__loaders:
            __formater = self.__loaders[__format]()
        else:
            raise ValueError("Loading as \"%s\" is not available"%__format)
        return __formater.load(__filename)
1756 # ==============================================================================
class GenericCaseViewer(object):
    """
    Management of the commands building a case view (base class for viewers).

    Subclasses implement "_append" (command -> recorded line) and "_extract"
    (text -> list of commands); this base class handles line storage and
    normalized dump/load to file.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        self._name         = str(__name)    # case name
        self._objname      = str(__objname) # name of the generated case object
        self._lineSerie    = []             # accumulated output lines
        self._switchoff    = False          # temporary recording switch
        self._numobservers = 2              # observer counter (starts at 2 by convention)
        self._content      = __content      # optional initial command series
        self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
    def _append(self, *args):
        "Transform an individual command into a record (to be overridden)"
        raise NotImplementedError()
    def _extract(self, *args):
        "Transform a record into an individual command (to be overridden)"
        raise NotImplementedError()
    def _finalize(self):
        "Record the closing part (nothing to do by default)"
        pass
    def _addLine(self, line=""):
        "Add an individual record"
        self._lineSerie.append(line)
    def dump(self, __filename=None):
        "Normalized restitution of the commands"
        self._finalize()
        __text  = "\n".join(self._lineSerie)
        __text += "\n"
        if __filename is not None:
            __file = os.path.abspath(__filename)
            __fid = open(__file,"w")
            __fid.write(__text)
            __fid.close()
        return __text
    def load(self, __filename=None):
        "Normalized loading of the commands"
        if os.path.exists(__filename):
            self._content = open(__filename, 'r').read()
        __commands = self._extract(self._content)
        return __commands
class _TUIViewer(GenericCaseViewer):
    """
    Builds the commands of a case as an ADAO TUI Python script.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO TUI\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("import adaoBuilder")
        self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform an individual command into a record"
        if __command is not None and __keys is not None and __local is not None:
            __text = ""
            if __pre is not None:
                __text += "%s = "%__pre
            __text += "%s.%s( "%(self._objname,str(__command))
            if "self" in __keys: __keys.remove("self")
            if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
            for k in __keys:
                __v = __local[k]
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v,Persistence.Persistence): __v = __v.values()
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v,dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Widen numpy printing so repr() of arrays is complete and reloadable
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "%s=%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # BUG FIX: str.rstrip returns a new string; the previous code
            # discarded the result (no-op). Assign it back.
            __text = __text.rstrip(", ")
            __text += ")"
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transform a record into individual commands"
        __is_case = False
        __commands = []
        __content = __content.replace("\r\n","\n")
        for line in __content.split("\n"):
            if "adaoBuilder.New" in line and "=" in line:
                self._objname = line.split("=")[0].strip()
                __is_case = True
            if not __is_case:
                continue
            else:
                if self._objname+".set" in line:
                    __commands.append( line.replace(self._objname+".","",1) )
        return __commands
class _DCTViewer(GenericCaseViewer):
    """
    Builds the commands of a case as an ADAO DCT Python dictionary script.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._observerIndex = 0
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO DCT\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("#")
        self._addLine("%s = {}"%__objname)
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform an individual command into a record"
        if __command is not None and __keys is not None and __local is not None:
            __text = ""
            if "execute" in __command: return
            if "self" in __keys: __keys.remove("self")
            if __command in ("set","get") and "Concept" in __keys:
                __key = __local["Concept"]
                __keys.remove("Concept")
            else:
                __key = __command.replace("set","").replace("get","")
            if "Observer" in __key and 'Variable' in __keys:
                self._observerIndex += 1
                __key += "_%i"%self._observerIndex
            __text += "%s['%s'] = {"%(self._objname,str(__key))
            for k in __keys:
                __v = __local[k]
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v,Persistence.Persistence): __v = __v.values()
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v,dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Widen numpy printing so repr() of arrays is complete and reloadable
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "'%s':%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # BUG FIX: str.rstrip returns a new string; the previous code
            # discarded the result (no-op). Assign it back.
            __text = __text.rstrip(", ").rstrip()
            __text += "}"
            if __text[-2:] == "{}": return # Drops the *Debug commands and bare variables
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transform a record into individual commands"
        __commands = []
        __content = __content.replace("\r\n","\n")
        # SECURITY NOTE: exec() runs the given content as Python code; only
        # load trusted DCT files.
        exec(__content)
        self._objdata = None
        __getlocals = locals()
        for k in __getlocals:
            try:
                if 'AlgorithmParameters' in __getlocals[k] and type(__getlocals[k]) is dict:
                    self._objname = k
                    self._objdata = __getlocals[k]
            except Exception:
                continue
        if self._objdata is None:
            raise ValueError("Impossible to load given content as a ADAO DCT one (no 'AlgorithmParameters' key found).")
        for k in self._objdata:
            if 'Observer_' in k:
                __command = k.split('_',1)[0]
            else:
                __command = k
            __arguments = ["%s = %s"%(k,repr(v)) for k,v in self._objdata[k].items()]
            __commands.append( "set( Concept='%s', %s )"%(__command, ", ".join(__arguments)))
        __commands.sort() # To start with 'AlgorithmParameters'
        return __commands
class _SCDViewer(GenericCaseViewer):
    """
    Builds the commands of a case as an SCD (Study Config Dictionary), the
    intermediate format consumed by the ADAO to YACS converter.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Input for ADAO converter to YACS\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("#")
        self._addLine("study_config = {}")
        self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
        self._addLine("study_config['Name'] = '%s'"%self._name)
        self._addLine("observers = {}")
        self._addLine("study_config['Observers'] = observers")
        self._addLine("#")
        self._addLine("inputvariables_config = {}")
        self._addLine("inputvariables_config['Order'] =['adao_default']")
        self._addLine("inputvariables_config['adao_default'] = -1")
        self._addLine("study_config['InputVariables'] = inputvariables_config")
        self._addLine("#")
        self._addLine("outputvariables_config = {}")
        self._addLine("outputvariables_config['Order'] = ['adao_default']")
        self._addLine("outputvariables_config['adao_default'] = -1")
        self._addLine("study_config['OutputVariables'] = outputvariables_config")
        if __content is not None:
            for command in __content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transform an individual command into a record"
        if __command == "set": __command = __local["Concept"]
        else:                  __command = __command.replace("set", "", 1)
        #
        __text = None
        if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get'):
            return
        elif __command in ['Debug', 'setDebug']:
            __text  = "#\nstudy_config['Debug'] = '1'"
        elif __command in ['NoDebug', 'setNoDebug']:
            __text  = "#\nstudy_config['Debug'] = '0'"
        elif __command in ['Observer', 'setObserver']:
            __obs   = __local['Variable']
            self._numobservers += 1
            __text  = ""
            __text += "observers['%s'] = {}\n"%__obs
            if __local['String'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
            if __local['Script'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
                __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
            if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
            if __local['Info'] is not None:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
            else:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
            __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
        elif __local is not None: # __keys is not None and
            # Widen numpy printing so repr() of arrays is complete and reloadable
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            __text  = ""
            __text += "%s_config = {}\n"%__command
            if 'self' in __local: __local.pop('self')
            __to_be_removed = []
            for __k,__v in __local.items():
                if __v is None: __to_be_removed.append(__k)
            for __k in __to_be_removed:
                __local.pop(__k)
            for __k,__v in __local.items():
                if __k == "Concept": continue
                if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
                if __k == 'Algorithm':
                    __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
                elif __k == 'Script':
                    __k = 'Vector'
                    __f = 'Script'
                    __v = "'"+repr(__v)+"'"
                    for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
                        if __lk in __local and __local[__lk]: __k = __lk
                    if __command == "AlgorithmParameters": __k = "Dict"
                    if 'OneFunction' in __local and __local['OneFunction']:
                        __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
                        __k = 'Function'
                        __f = 'ScriptWithOneFunction'
                        __v = '%s_ScriptWithOneFunction'%(__command,)
                    if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
                        __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __k = 'Function'
                        __f = 'ScriptWithFunctions'
                        __v = '%s_ScriptWithFunctions'%(__command,)
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,__f)
                    __text += "%s_config['Data'] = %s\n"%(__command,__v)
                    __text = __text.replace("''","'")
                elif __k in ('Stored', 'Checked'):
                    if bool(__v):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                elif __k in ('AvoidRC', 'noDetails'):
                    if not bool(__v):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                else:
                    if __k == 'Parameters': __k = "Dict"
                    if isinstance(__v,Persistence.Persistence): __v = __v.values()
                    if callable(__v): __text = self._missing%__v.__name__+__text
                    if isinstance(__v,dict):
                        for val in __v.values():
                            if callable(val): __text = self._missing%val.__name__+__text
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,'String')
                    __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
            __text += "study_config['%s'] = %s_config"%(__command,__command)
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            if __switchoff:
                self._switchoff = True
        if __text is not None: self._addLine(__text)
        if not __switchoff:
            self._switchoff = False
    def _finalize(self):
        "Record the final UserPostAnalysis section"
        self.__loadVariablesByScript()
        self._addLine("#")
        self._addLine("Analysis_config = {}")
        self._addLine("Analysis_config['From'] = 'String'")
        self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
        self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
        self._addLine("print 'Analysis:',xa\"\"\"")
        self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
    def __loadVariablesByScript(self):
        "Re-execute the generated lines to recover the study_config variable"
        __ExecVariables = {} # Necessary to recover the variable
        # NOTE(review): exec() runs the lines generated by this very viewer,
        # so the content is self-produced; still, any user Script referenced
        # is imported below through ImportFromScript.
        exec("\n".join(self._lineSerie), __ExecVariables)
        study_config = __ExecVariables['study_config']
        self.__hasAlgorithm = bool(study_config['Algorithm'])
        if not self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" in study_config['AlgorithmParameters'] and \
                "Data" in study_config['AlgorithmParameters'] and \
                study_config['AlgorithmParameters']['From'] == 'Script':
            __asScript = study_config['AlgorithmParameters']['Data']
            __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
            __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
            self._addLine(__text)
        if self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" not in study_config['AlgorithmParameters'] and \
                "Data" not in study_config['AlgorithmParameters']:
            __text  = "#\nAlgorithmParameters_config = {}\n"
            __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
            __text += "AlgorithmParameters_config['From'] = 'String'\n"
            __text += "AlgorithmParameters_config['Data'] = '{}'\n"
            self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Builds the commands of a case as an XML description (not implemented yet).
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        # XML output is a placeholder: instantiation always fails for now.
        raise NotImplementedError()
class _YACSViewer(GenericCaseViewer):
    """
    Builds the commands of a case as a YACS XML scheme: delegates the
    intermediate SCD generation to an internal _SCDViewer, then runs the
    external AdaoYacsSchemaCreator converter on the SCD file.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation and registration of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self.__internalSCD = _SCDViewer(__name, __objname, __content)
        self._append = self.__internalSCD._append
    def dump(self, __filename=None):
        "Normalized restitution of the commands"
        self.__internalSCD._finalize()
        # Generate the intermediate SCD file next to the target XML file
        if __filename is not None:
            __file    = os.path.abspath(__filename)
            __SCDfile = __file[:__file.rfind(".")] + '_SCD.py'
            __SCDdump = self.__internalSCD.dump(__SCDfile)
        else:
            raise ValueError("A file name has to be given for YACS XML output.")
        # Run the external converter (requires a SALOME/ADAO environment)
        if not PlatformInfo.has_salome or \
            not PlatformInfo.has_adao:
            raise ImportError("\n\n"+\
                "Unable to get SALOME or ADAO environnement variables.\n"+\
                "Please load the right environnement before trying to use it.\n")
        else:
            if os.path.isfile(__file) or os.path.islink(__file):
                os.remove(__file)
            __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
            __args = ["python", __converterExe, __SCDfile, __file]
            import subprocess
            __p = subprocess.Popen(__args)
            (__stdoutdata, __stderrdata) = __p.communicate()
            if not os.path.exists(__file):
                __msg  = "An error occured during the ADAO YACS Schema build.\n"
                __msg += "Creator applied on the input file:\n"
                __msg += " %s\n"%__SCDfile
                __msg += "If SALOME GUI is launched by command line, see errors\n"
                __msg += "details in your terminal.\n"
                raise ValueError(__msg)
            os.remove(__SCDfile)
        # Return the generated XML content
        __fid = open(__file,"r")
        __text = __fid.read()
        __fid.close()
        return __text
2175 # ==============================================================================
class ImportFromScript(object):
    """
    Obtain a named variable from an imported Python script file.
    """
    def __init__(self, __filename=None):
        "Check the existence of the file and import the script"
        # BUG FIX: the None check previously ran AFTER dereferencing
        # __filename, so passing None raised AttributeError instead of the
        # intended ValueError. Check first.
        if __filename is None:
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        # BUG FIX: the previous code used __filename.rstrip(".py"), which
        # strips the trailing CHARACTERS '.', 'p', 'y' (e.g. "entropy.py"
        # became "entro"), not the ".py" suffix. Remove the suffix explicitly.
        if __filename.endswith(".py"):
            self.__filename = __filename[:-len(".py")]
        else:
            self.__filename = __filename
        if not os.path.isfile(str(self.__filename)+".py"):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n \"%s\""%self.__filename)
        self.__scriptfile = __import__(self.__filename, globals(), locals(), [])
        self.__scriptstring = open(self.__filename+".py",'r').read()
    def getvalue(self, __varname=None, __synonym=None ):
        "Return the requested variable (or its synonym if the name is absent)"
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__synonym))
            else:
                return getattr(self.__scriptfile, __synonym)
        else:
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Return the full script text"
        return self.__scriptstring
2206 # ==============================================================================
def CostFunction3D(_x,
                   _Hm  = None,  # To simulate Hm(x): HO["Direct"].appliedTo
                   _HmX = None,  # Simulation of Hm(x) already done
                   _arg = None,  # Extra arguments for Hm, as a tuple
                   _BI  = None,  # Inverse of the background error covariance
                   _RI  = None,  # Inverse of the observation error covariance
                   _Xb  = None,  # Background state
                   _Y   = None,  # Observations
                   _SIV = False, # To be removed for 8.0
                   _SSC = None,  # self._parameters["StoreSupplementaryCalculations"]
                   _nPS = 0,     # nbPreviousSteps
                   _QM  = "DA",  # QualityMeasure
                   _SSV = None,  # Input and/or output: self.StoredVariables
                   _fRt = False, # Return (or not) the extended output
                   _sSc = True,  # Store (or not) the supplementary calculations
                   ):
    """
    General cost function useful for the static/3D algorithms: 3DVAR, BLUE
    and derived, Kalman and derived, LeastSquares, SamplingTest, PSO, SA,
    Tabu, DFO, QuantileRegression.

    Returns the scalar cost J (or _HX for the "QR" measure, or the whole
    storage dict _SSV when _fRt is True). Raises ValueError on missing
    operator/covariances or unknown quality measure.
    """
    # BUG FIX: _SSC/_SSV previously used mutable default arguments ([] and
    # {}), silently shared between successive calls. Use None sentinels and
    # create fresh containers per call.
    if _SSC is None: _SSC = []
    if _SSV is None: _SSV = {}
    #
    if not _sSc:
        _SIV = False
        _SSC = {}
    else:
        for k in ["CostFunctionJ",
                  "CostFunctionJb",
                  "CostFunctionJo",
                  "CurrentOptimum",
                  "CurrentState",
                  "IndexOfOptimum",
                  "SimulatedObservationAtCurrentOptimum",
                  "SimulatedObservationAtCurrentState",
                 ]:
            if k not in _SSV:
                _SSV[k] = []
            if hasattr(_SSV[k],"store"):
                _SSV[k].append = _SSV[k].store # To use "append" instead of "store"
    #
    _X  = numpy.asmatrix(numpy.ravel( _x )).T
    if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
        _SSV["CurrentState"].append( _X )
    #
    if _HmX is not None:
        _HX = _HmX
    else:
        if _Hm is None:
            raise ValueError("COSTFUNCTION3D Operator has to be defined.")
        if _arg is None:
            _HX = _Hm( _X )
        else:
            _HX = _Hm( _X, *_arg )
    _HX = numpy.asmatrix(numpy.ravel( _HX )).T
    #
    if "SimulatedObservationAtCurrentState" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentState"].append( _HX )
    #
    if numpy.any(numpy.isnan(_HX)):
        Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
    else:
        _Y = numpy.asmatrix(numpy.ravel( _Y )).T
        if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
            if _BI is None or _RI is None:
                raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
            _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
            Jb  = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
            if _RI is None:
                raise ValueError("Observation error covariance matrix has to be properly defined!")
            Jb  = 0.
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["LeastSquares", "LS", "L2"]:
            Jb  = 0.
            Jo  = 0.5 * (_Y - _HX).T * (_Y - _HX)
        elif _QM in ["AbsoluteValue", "L1"]:
            Jb  = 0.
            Jo  = numpy.sum( numpy.abs(_Y - _HX) )
        elif _QM in ["MaximumError", "ME"]:
            Jb  = 0.
            Jo  = numpy.max( numpy.abs(_Y - _HX) )
        elif _QM in ["QR", "Null"]:
            Jb  = 0.
            Jo  = 0.
        else:
            raise ValueError("Unknown asked quality measure!")
        #
        J   = float( Jb ) + float( Jo )
    #
    if _sSc:
        _SSV["CostFunctionJb"].append( Jb )
        _SSV["CostFunctionJo"].append( Jo )
        _SSV["CostFunctionJ" ].append( J )
    #
    if "IndexOfOptimum" in _SSC or \
       "CurrentOptimum" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
    if "IndexOfOptimum" in _SSC:
        _SSV["IndexOfOptimum"].append( IndexMin )
    if "CurrentOptimum" in _SSC:
        _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
    if "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
    #
    if _fRt:
        return _SSV
    else:
        if _QM in ["QR"]: # For QuantileRegression
            return _HX
        else:
            return J
2321 # ==============================================================================
if __name__ == "__main__":
    # Minimal self-test entry point: running this module directly only prints
    # a banner; the real checks live in the package's test machinery.
    print('\n AUTODIAGNOSTIC \n')