1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelée par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
41 class CacheManager(object):
# Generic cache of operator evaluations: stores (point, result, norm) triples
# and detects near-duplicate input points so the operator is not recomputed.
# NOTE(review): this listing is a sampled extract — the __init__ header, some
# return statements and the disable()/enable() method headers are missing here.
43 Classe générale de gestion d'un cache de calculs
# Tolerance and maximum cache length are tunable at construction;
# lenghtOfRedundancy < 0 means "auto-size on first store" (see storeValueInX).
# "lenght" is a historical misspelling kept for interface compatibility.
46 toleranceInRedundancy = 1.e-18,
47 lenghtOfRedundancy = -1,
50 Les caractéristiques de tolérance peuvent être modifées à la création.
52 self.__tolerBP = float(toleranceInRedundancy)
53 self.__lenghtOR = int(lenghtOfRedundancy)
# Remember the configured length so it can be restored after enable/disable.
54 self.__initlnOR = self.__lenghtOR
59 self.__listOPCV = [] # Operator Previous Calculated Points, Results, Point Norms
60 # logging.debug("CM Tolerance de determination des doublons : %.2e", self.__tolerBP)
62 def wasCalculatedIn(self, xValue ): #, info="" ):
# Scan the cache backwards (most recent first) for a stored point close to
# xValue; on a hit, the cached result is retrieved into __HxV.
# NOTE(review): the return statement(s) of this method are not visible in
# this extract — presumably returns (found_flag, cached_result).
63 "Check the existence of a computation matching the value"
66 for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
# Sizes must match before a norm-based comparison makes sense.
67 if xValue.size != self.__listOPCV[i][0].size:
68 # logging.debug("CM Différence de la taille %s de X et de celle %s du point %i déjà calculé", xValue.shape,i,self.__listOPCP[i].shape)
# Relative closeness test: tolerance is scaled by the stored norm (index 2).
70 if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
72 __HxV = self.__listOPCV[i][1]
73 # logging.debug("CM Cas%s déja calculé, portant le numéro %i", info, i)
77 def storeValueInX(self, xValue, HxValue ):
# Append a (flattened point, result, point norm) triple, auto-sizing the
# cache on first use and evicting oldest entries (FIFO) when over capacity.
78 "Store a computation matching the value"
79 if self.__lenghtOR < 0:
# Heuristic capacity: enough entries for a finite-difference stencil
# (2 per component plus the point itself and one spare).
80 self.__lenghtOR = 2 * xValue.size + 2
81 self.__initlnOR = self.__lenghtOR
82 while len(self.__listOPCV) > self.__lenghtOR:
83 # logging.debug("CM Réduction de la liste des cas à %i éléments par suppression du premier", self.__lenghtOR)
84 self.__listOPCV.pop(0)
85 self.__listOPCV.append( (
# copy.copy is sufficient here because ravel's result is 1-D numeric data.
86 copy.copy(numpy.ravel(xValue)),
# The stored norm is the scale factor used by wasCalculatedIn.
88 numpy.linalg.norm(xValue),
# NOTE(review): the two lines below presumably belong to the missing
# disable()/enable() methods that toggle the effective cache length —
# confirm against the full source.
93 self.__initlnOR = self.__lenghtOR
98 self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
101 class Operator(object):
# Simple operator interface: wraps either a Python callable ("Method" form)
# or a matrix ("Matrix" form), counts evaluations, and optionally consults a
# shared class-level CacheManager (Operator.CM) to avoid redundant calls.
# NOTE(review): sampled extract — the class attributes NbCallsAsMatrix /
# NbCallsAsMethod / NbCallsOfCached / CM used below are declared on lines
# not visible here; several else-branches and returns are also missing.
103 Classe générale d'interface de type opérateur simple
110 def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
# Build from exactly one of the two keywords: a callable or matrix data.
# - fromMethod: Python function applied to a state vector
# - fromMatrix: anything accepted by the numpy.matrix constructor
# - avoidingRedundancy: enables use of the shared result cache
112 On construit un objet de ce type en fournissant à l'aide de l'un des
113 deux mots-clé, soit une fonction python, soit une matrice.
115 - fromMethod : argument de type fonction Python
116 - fromMatrix : argument adapté au constructeur numpy.matrix
117 - avoidingRedundancy : évite ou pas les calculs redondants
119 self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
120 self.__AvoidRC = bool( avoidingRedundancy )
121 if fromMethod is not None:
122 self.__Method = fromMethod # logtimer(fromMethod)
124 self.__Type = "Method"
125 elif fromMatrix is not None:
# NOTE(review): numpy.float is a deprecated alias (removed in NumPy 1.24+)
# and numpy.matrix itself is discouraged — flagged for a future migration.
127 self.__Matrix = numpy.matrix( fromMatrix, numpy.float )
128 self.__Type = "Matrix"
134 def disableAvoidingRedundancy(self):
# Globally turn the shared cache off (affects every Operator instance).
136 Operator.CM.disable()
138 def enableAvoidingRedundancy(self):
# NOTE(review): only the disable() call of this method is visible; the
# enable() branch (guarded by self.__AvoidRC, presumably) is on missing lines.
143 Operator.CM.disable()
149 def appliedTo(self, xValue, HValue = None):
# Return the operator applied to xValue. If HValue (a precomputed result)
# is given, it is stored directly; otherwise the cache is consulted first,
# and on a miss the matrix product or the wrapped method is evaluated.
151 Permet de restituer le résultat de l'application de l'opérateur à un
152 argument xValue. Cette méthode se contente d'appliquer, son argument
153 devant a priori être du bon type.
155 - xValue : argument adapté pour appliquer l'opérateur
157 if HValue is not None:
# Precomputed result: normalize to a column matrix and store it.
158 HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
160 Operator.CM.storeValueInX(xValue,HxValue)
163 __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
165 __alreadyCalculated = False
167 if __alreadyCalculated:
168 self.__addOneCacheCall()
171 if self.__Matrix is not None:
172 self.__addOneMatrixCall()
173 HxValue = self.__Matrix * xValue
175 self.__addOneMethodCall()
176 HxValue = self.__Method( xValue )
# Freshly computed values are fed back into the cache for later reuse.
178 Operator.CM.storeValueInX(xValue,HxValue)
182 def appliedControledFormTo(self, paire ):
# Apply the operator to a (state, control) pair; when the control uValue
# is None the operator is applied to the state alone.
184 Permet de restituer le résultat de l'application de l'opérateur à une
185 paire (xValue, uValue). Cette méthode se contente d'appliquer, son
186 argument devant a priori être du bon type. Si la uValue est None,
187 on suppose que l'opérateur ne s'applique qu'à xValue.
189 - xValue : argument X adapté pour appliquer l'opérateur
190 - uValue : argument U adapté pour appliquer l'opérateur
192 assert len(paire) == 2, "Incorrect number of arguments"
193 xValue, uValue = paire
194 if self.__Matrix is not None:
195 self.__addOneMatrixCall()
196 return self.__Matrix * xValue
197 elif uValue is not None:
198 self.__addOneMethodCall()
199 return self.__Method( (xValue, uValue) )
201 self.__addOneMethodCall()
202 return self.__Method( xValue )
204 def appliedInXTo(self, paire ):
# Apply the operator, linearized at xNominal, to xValue. A matrix operator
# is linear everywhere so the nominal point is simply ignored.
206 Permet de restituer le résultat de l'application de l'opérateur à un
207 argument xValue, sachant que l'opérateur est valable en xNominal.
208 Cette méthode se contente d'appliquer, son argument devant a priori
209 être du bon type. Si l'opérateur est linéaire car c'est une matrice,
210 alors il est valable en tout point nominal et il n'est pas nécessaire
212 Arguments : une liste contenant
213 - xNominal : argument permettant de donner le point où l'opérateur
214 est construit pour etre ensuite appliqué
215 - xValue : argument adapté pour appliquer l'opérateur
217 assert len(paire) == 2, "Incorrect number of arguments"
218 xNominal, xValue = paire
219 if self.__Matrix is not None:
220 self.__addOneMatrixCall()
221 return self.__Matrix * xValue
223 self.__addOneMethodCall()
224 return self.__Method( (xNominal, xValue) )
226 def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
# Return the operator as a matrix; a method-form operator requires an
# operating point (the sentinel string marks "no point supplied").
228 Permet de renvoyer l'opérateur sous la forme d'une matrice
230 if self.__Matrix is not None:
231 self.__addOneMatrixCall()
# NOTE(review): "is not" compares object identity against a str literal —
# works only by CPython interning accident and raises SyntaxWarning on
# modern Python; this should be the inequality operator "!=".
233 elif ValueForMethodForm is not "UnknownVoidValue": # Do not use "None"
234 self.__addOneMethodCall()
235 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
237 raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
# Shape of the matrix form; raises when only the method form is available.
241 Renvoie la taille sous forme numpy si l'opérateur est disponible sous
242 la forme d'une matrice
244 if self.__Matrix is not None:
245 return self.__Matrix.shape
247 raise ValueError("Matrix form of the operator is not available, nor the shape")
249 def nbcalls(self, which=None):
# Evaluation counters: instance-local totals first, then the class-wide
# (all instances) totals; `which` selects one entry of the tuple.
251 Renvoie les nombres d'évaluations de l'opérateur
254 self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
255 self.__NbCallsAsMatrix,
256 self.__NbCallsAsMethod,
257 self.__NbCallsOfCached,
258 Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
259 Operator.NbCallsAsMatrix,
260 Operator.NbCallsAsMethod,
261 Operator.NbCallsOfCached,
263 if which is None: return __nbcalls
264 else: return __nbcalls[which]
266 def __addOneMatrixCall(self):
267 "Count one call"
268 self.__NbCallsAsMatrix += 1 # Local tally
269 Operator.NbCallsAsMatrix += 1 # Global tally
271 def __addOneMethodCall(self):
272 "Count one call"
273 self.__NbCallsAsMethod += 1 # Local tally
274 Operator.NbCallsAsMethod += 1 # Global tally
276 def __addOneCacheCall(self):
277 "Count one call"
278 self.__NbCallsOfCached += 1 # Local tally
279 Operator.NbCallsOfCached += 1 # Global tally
281 # ==============================================================================
282 class FullOperator(object):
# Full operator interface bundling the Direct, Tangent-Linear and Adjoint
# forms (built as Operator instances in self.__FO) from one of several input
# styles: a matrix, one function with approximated derivatives, three
# explicit functions, or a script to import them from.
# NOTE(review): sampled extract — the __init__ signature (asMatrix, asScript,
# appliedInX, avoidRC, scheduledBy, toBeChecked, the __FO/__Parameters
# initializations) and several try/else lines are on missing lines.
284 Classe générale d'interface de type opérateur complet
285 (Direct, Linéaire Tangent, Adjoint)
288 name = "GenericFullOperator",
290 asOneFunction = None, # Function
291 asThreeFunctions = None, # Dictionary of functions
293 asDict = None, # Parameters
300 self.__name = str(name)
301 self.__check = bool(toBeChecked)
# Map the public parameter names onto the internal "with*" keys consumed
# by the finite-difference machinery further down.
306 if (asDict is not None) and isinstance(asDict, dict):
307 __Parameters.update( asDict )
308 if "DifferentialIncrement" in asDict:
309 __Parameters["withIncrement"] = asDict["DifferentialIncrement"]
310 if "CenteredFiniteDifference" in asDict:
311 __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
312 if "EnableMultiProcessing" in asDict:
313 __Parameters["withmpEnabled"] = asDict["EnableMultiProcessing"]
314 if "NumberOfProcesses" in asDict:
315 __Parameters["withmpWorkers"] = asDict["NumberOfProcesses"]
# Input style 1: everything is fetched from a user script by name.
317 if asScript is not None:
318 __Matrix, __Function = None, None
320 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
322 __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
323 __Function.update({"useApproximatedDerivatives":True})
324 __Function.update(__Parameters)
325 elif asThreeFunctions:
327 "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
328 "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
329 "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
331 __Function.update(__Parameters)
# Input style 2: direct function(s) given in memory.
334 if asOneFunction is not None:
335 if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
336 if asOneFunction["Direct"] is not None:
337 __Function = asOneFunction
339 raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
341 __Function = { "Direct":asOneFunction }
342 __Function.update({"useApproximatedDerivatives":True})
343 __Function.update(__Parameters)
344 elif asThreeFunctions is not None:
# Full triple given explicitly (and approximation not requested).
345 if isinstance(asThreeFunctions, dict) and \
346 ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
347 ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
348 (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
349 __Function = asThreeFunctions
350 elif isinstance(asThreeFunctions, dict) and \
351 ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
352 __Function = asThreeFunctions
353 __Function.update({"useApproximatedDerivatives":True})
355 raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
# Missing Direct defaults to the Tangent function (linear-operator case).
356 if "Direct" not in asThreeFunctions:
357 __Function["Direct"] = asThreeFunctions["Tangent"]
358 __Function.update(__Parameters)
362 # if sys.version_info[0] < 3 and isinstance(__Function, dict):
363 # for k in ("Direct", "Tangent", "Adjoint"):
364 # if k in __Function and hasattr(__Function[k],"__class__"):
365 # if type(__Function[k]) is type(self.__init__):
366 # raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
# A bare appliedInX value is wrapped under the conventional "HXb" key.
368 if appliedInX is not None and isinstance(appliedInX, dict):
369 __appliedInX = appliedInX
370 elif appliedInX is not None:
371 __appliedInX = {"HXb":appliedInX}
375 if scheduledBy is not None:
376 self.__T = scheduledBy
# Build path A: single Direct function + finite-difference approximation
# of the Tangent/Adjoint forms, with defaults filled in first.
378 if isinstance(__Function, dict) and \
379 ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
380 ("Direct" in __Function) and (__Function["Direct"] is not None):
381 if "withCenteredDF" not in __Function: __Function["withCenteredDF"] = False
382 if "withIncrement" not in __Function: __Function["withIncrement"] = 0.01
383 if "withdX" not in __Function: __Function["withdX"] = None
384 if "withAvoidingRedundancy" not in __Function: __Function["withAvoidingRedundancy"] = True
385 if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
386 if "withLenghtOfRedundancy" not in __Function: __Function["withLenghtOfRedundancy"] = -1
387 if "withmpEnabled" not in __Function: __Function["withmpEnabled"] = False
388 if "withmpWorkers" not in __Function: __Function["withmpWorkers"] = None
# Deferred import: the finite-difference helper is only needed here.
389 from daNumerics.ApproximatedDerivatives import FDApproximation
390 FDA = FDApproximation(
391 Function = __Function["Direct"],
392 centeredDF = __Function["withCenteredDF"],
393 increment = __Function["withIncrement"],
394 dX = __Function["withdX"],
395 avoidingRedundancy = __Function["withAvoidingRedundancy"],
396 toleranceInRedundancy = __Function["withToleranceInRedundancy"],
397 lenghtOfRedundancy = __Function["withLenghtOfRedundancy"],
398 mpEnabled = __Function["withmpEnabled"],
399 mpWorkers = __Function["withmpWorkers"],
401 self.__FO["Direct"] = Operator( fromMethod = FDA.DirectOperator, avoidingRedundancy = avoidRC )
402 self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
403 self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
# Build path B: explicit Direct/Tangent/Adjoint functions.
404 elif isinstance(__Function, dict) and \
405 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
406 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
407 self.__FO["Direct"] = Operator( fromMethod = __Function["Direct"], avoidingRedundancy = avoidRC )
408 self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
409 self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
# Build path C: linear operator given as a matrix; Adjoint is its transpose.
# NOTE(review): numpy.float is a removed alias in NumPy >= 1.24.
410 elif asMatrix is not None:
411 __matrice = numpy.matrix( __Matrix, numpy.float )
412 self.__FO["Direct"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
413 self.__FO["Tangent"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
414 self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
417 raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
# Normalize every AppliedInX entry to a float column matrix.
419 if __appliedInX is not None:
420 self.__FO["AppliedInX"] = {}
421 if not isinstance(__appliedInX, dict):
422 raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
423 for key in list(__appliedInX.keys()):
424 if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
425 # For the case of a true matrix
426 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
427 elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
428 # For the case of a vector represented as a 2-dimensional array
429 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
431 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
433 self.__FO["AppliedInX"] = None
439 "x.__repr__() <==> repr(x)"
# NOTE(review): self.__V is never assigned anywhere visible in this class —
# this looks like a copy-paste from a vector-holding class; repr() would
# raise AttributeError. Confirm against the full source.
440 return repr(self.__V)
443 "x.__str__() <==> str(x)"
446 # ==============================================================================
447 class Algorithm(object):
# Base class for elementary assimilation algorithms. It provides a
# dictionary of initialized Persistence storage variables (StoredVariables),
# parameter declaration/validation helpers, and pre-/post-run bookkeeping.
# Subclasses must implement run().
# NOTE(review): sampled extract — several else-branches, returns and method
# headers (keys, pop) are on lines missing from this listing.
449 Classe générale d'interface de type algorithme
451 Elle donne un cadre pour l'écriture d'une classe élémentaire d'algorithme
452 d'assimilation, en fournissant un container (dictionnaire) de variables
453 persistantes initialisées, et des méthodes d'accès à ces variables stockées.
455 Une classe élémentaire d'algorithme doit implémenter la méthode "run".
457 def __init__(self, name):
# Create all generically-available storage variables, keyed by name in
# self.StoredVariables and retrieved through get().
459 L'initialisation présente permet de fabriquer des variables de stockage
460 disponibles de manière générique dans les algorithmes élémentaires. Ces
461 variables de stockage sont ensuite conservées dans un dictionnaire
462 interne à l'objet, mais auquel on accède par la méthode "get".
464 Les variables prévues sont :
465 - CostFunctionJ : fonction-cout globale, somme des deux parties suivantes
466 - CostFunctionJb : partie ébauche ou background de la fonction-cout
467 - CostFunctionJo : partie observations de la fonction-cout
468 - GradientOfCostFunctionJ : gradient de la fonction-cout globale
469 - GradientOfCostFunctionJb : gradient de la partie ébauche de la fonction-cout
470 - GradientOfCostFunctionJo : gradient de la partie observations de la fonction-cout
471 - CurrentState : état courant lors d'itérations
472 - Analysis : l'analyse Xa
473 - SimulatedObservationAtBackground : l'état observé H(Xb) à l'ébauche
474 - SimulatedObservationAtCurrentState : l'état observé H(X) à l'état courant
475 - SimulatedObservationAtOptimum : l'état observé H(Xa) à l'optimum
476 - Innovation : l'innovation : d = Y - H(X)
477 - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
478 - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
479 - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
480 - MahalanobisConsistency : indicateur de consistance des covariances
481 - OMA : Observation moins Analysis : Y - Xa
482 - OMB : Observation moins Background : Y - Xb
483 - AMB : Analysis moins Background : Xa - Xb
484 - APosterioriCovariance : matrice A
485 - APosterioriVariances : variances de la matrice A
486 - APosterioriStandardDeviations : écart-types de la matrice A
487 - APosterioriCorrelations : correlations de la matrice A
488 - Residu : dans le cas des algorithmes de vérification
489 On peut rajouter des variables à stocker dans l'initialisation de
490 l'algorithme élémentaire qui va hériter de cette classe
492 logging.debug("%s Initialisation", str(name))
# Memory-usage probe, reported in _pre_run/_post_run debug logs.
493 self._m = PlatformInfo.SystemUsage()
495 self._name = str( name )
496 self._parameters = {"StoreSupplementaryCalculations":[]}
497 self.__required_parameters = {}
498 self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
500 self.StoredVariables = {}
501 self.StoredVariables["CostFunctionJ"] = Persistence.OneScalar(name = "CostFunctionJ")
502 self.StoredVariables["CostFunctionJb"] = Persistence.OneScalar(name = "CostFunctionJb")
503 self.StoredVariables["CostFunctionJo"] = Persistence.OneScalar(name = "CostFunctionJo")
504 self.StoredVariables["CostFunctionJAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
505 self.StoredVariables["CostFunctionJbAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
506 self.StoredVariables["CostFunctionJoAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
507 self.StoredVariables["GradientOfCostFunctionJ"] = Persistence.OneVector(name = "GradientOfCostFunctionJ")
508 self.StoredVariables["GradientOfCostFunctionJb"] = Persistence.OneVector(name = "GradientOfCostFunctionJb")
509 self.StoredVariables["GradientOfCostFunctionJo"] = Persistence.OneVector(name = "GradientOfCostFunctionJo")
510 self.StoredVariables["CurrentState"] = Persistence.OneVector(name = "CurrentState")
511 self.StoredVariables["Analysis"] = Persistence.OneVector(name = "Analysis")
512 self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
513 self.StoredVariables["CurrentOptimum"] = Persistence.OneVector(name = "CurrentOptimum")
514 self.StoredVariables["SimulatedObservationAtBackground"] = Persistence.OneVector(name = "SimulatedObservationAtBackground")
515 self.StoredVariables["SimulatedObservationAtCurrentState"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
516 self.StoredVariables["SimulatedObservationAtOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
517 self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
518 self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
519 self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
520 self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
521 self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
522 self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
523 self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
524 self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
525 self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
526 self.StoredVariables["APosterioriCovariance"] = Persistence.OneMatrix(name = "APosterioriCovariance")
527 self.StoredVariables["APosterioriVariances"] = Persistence.OneVector(name = "APosterioriVariances")
528 self.StoredVariables["APosterioriStandardDeviations"] = Persistence.OneVector(name = "APosterioriStandardDeviations")
529 self.StoredVariables["APosterioriCorrelations"] = Persistence.OneMatrix(name = "APosterioriCorrelations")
530 self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
531 self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")
533 def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
# Common pre-run phase: merge Parameters, validate the declared
# mandatory/optional inputs, normalize "Bounds", and configure optimizer
# verbosity from the logging level.
535 logging.debug("%s Lancement", self._name)
536 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
538 # Update self._parameters with Parameters
539 self.__setParameters(Parameters)
541 # Corrections and complements
# Local validator for vector inputs: raise if a mandatory one is absent,
# otherwise just log at debug level.
542 def __test_vvalue( argument, variable, argname):
544 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
545 raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
546 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
547 logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
549 logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
551 logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
552 __test_vvalue( Xb, "Xb", "Background or initial state" )
553 __test_vvalue( Y, "Y", "Observation" )
# Same validation scheme for covariance-matrix inputs.
554 def __test_cvalue( argument, variable, argname):
556 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
557 raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
558 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
559 logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
561 logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
563 logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
564 __test_cvalue( R, "R", "Observation" )
565 __test_cvalue( B, "B", "Background" )
566 __test_cvalue( Q, "Q", "Evolution" )
# Bounds are kept only when given as a non-empty list/tuple; anything
# else is normalized to None.
568 if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
569 logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
571 self._parameters["Bounds"] = None
# Below WARNING level, make the optimizers verbose; 15 is the fallback
# message mask used when scipy is unavailable.
573 if logging.getLogger().level < logging.WARNING:
574 self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
575 if PlatformInfo.has_scipy:
576 import scipy.optimize
577 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
579 self._parameters["optmessages"] = 15
581 self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
582 if PlatformInfo.has_scipy:
583 import scipy.optimize
584 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
586 self._parameters["optmessages"] = 15
590 def _post_run(self,_oH=None):
# Common post-run phase: derive requested a-posteriori quantities from
# each stored covariance matrix and log operator call counts.
592 if ("StoreSupplementaryCalculations" in self._parameters) and \
593 "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
594 for _A in self.StoredVariables["APosterioriCovariance"]:
595 if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
596 self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
597 if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
598 self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
599 if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
# Correlation matrix C = D^{-1/2} A D^{-1/2} with D = diag(A).
600 _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
601 _C = numpy.dot(_EI, numpy.dot(_A, _EI))
602 self.StoredVariables["APosterioriCorrelations"].store( _C )
604 logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
605 logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
606 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
607 logging.debug("%s Terminé", self._name)
610 def get(self, key=None):
# Return one stored variable by key, or the whole dictionary when no key
# is given; the Persistence objects themselves are returned, not copies.
612 Renvoie l'une des variables stockées identifiée par la clé, ou le
613 dictionnaire de l'ensemble des variables disponibles en l'absence de
614 clé. Ce sont directement les variables sous forme objet qui sont
615 renvoyées, donc les méthodes d'accès à l'objet individuel sont celles
616 des classes de persistance.
619 return self.StoredVariables[key]
621 return self.StoredVariables
623 def __contains__(self, key=None):
624 "D.__contains__(k) -> True if D has a key k, else False"
625 return key in self.StoredVariables
# Dict-protocol convenience wrappers over StoredVariables.
628 "D.keys() -> list of D's keys"
629 if hasattr(self, "StoredVariables"):
630 return self.StoredVariables.keys()
635 "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636 if hasattr(self, "StoredVariables"):
637 return self.StoredVariables.pop(k, d)
642 raise TypeError("pop expected at least 1 arguments, got 0")
643 "If key is not found, d is returned if given, otherwise KeyError is raised"
649 def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
# Abstract entry point: subclasses implement the actual assimilation.
651 Doit implémenter l'opération élémentaire de calcul d'assimilation sous
652 sa forme mathématique la plus naturelle possible.
654 raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
656 def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
# Declare a required parameter with its default, typecast, bounds and
# allowed values; the declaration drives setParameterValue() validation.
658 Permet de définir dans l'algorithme des paramètres requis et leurs
659 caractéristiques par défaut.
662 raise ValueError("A name is mandatory to define a required parameter.")
664 self.__required_parameters[name] = {
666 "typecast" : typecast,
672 logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
674 def getRequiredParameters(self, noDetails=True):
# Sorted parameter names by default; the full declaration dict otherwise.
676 Renvoie la liste des noms de paramètres requis ou directement le
677 dictionnaire des paramètres requis.
680 return sorted(self.__required_parameters.keys())
682 return self.__required_parameters
684 def setParameterValue(self, name=None, value=None):
# Validate and return a parameter value: apply the declared typecast,
# then check min/max bounds and allowed-value list.
686 Renvoie la valeur d'un paramètre requis de manière contrôlée
688 default = self.__required_parameters[name]["default"]
689 typecast = self.__required_parameters[name]["typecast"]
690 minval = self.__required_parameters[name]["minval"]
691 maxval = self.__required_parameters[name]["maxval"]
692 listval = self.__required_parameters[name]["listval"]
694 if value is None and default is None:
696 elif value is None and default is not None:
697 if typecast is None: __val = default
698 else: __val = typecast( default )
700 if typecast is None: __val = value
701 else: __val = typecast( value )
# Bounds are checked element-wise, so sequences are supported too.
703 if minval is not None and (numpy.array(__val, float) < minval).any():
704 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
705 if maxval is not None and (numpy.array(__val, float) > maxval).any():
706 raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
707 if listval is not None:
708 if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
711 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
712 elif __val not in listval:
713 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
716 def requireInputArguments(self, mandatory=(), optional=()):
# Record which run() inputs are mandatory/optional for _pre_run checks.
718 Permet d'imposer des arguments requises en entrée
720 self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
721 self.__required_inputs["RequiredInputValues"]["optional"] = tuple( optional )
# NOTE(review): mutable default {} is read-only here (update/iteration
# only), so the shared-default pitfall does not bite — but None would be
# the conventional safe default.
723 def __setParameters(self, fromDico={}):
# Merge the incoming dict, then (re)validate every declared parameter,
# preferring the incoming value over the declared default.
725 Permet de stocker les paramètres reçus dans le dictionnaire interne.
727 self._parameters.update( fromDico )
728 for k in self.__required_parameters.keys():
729 if k in fromDico.keys():
730 self._parameters[k] = self.setParameterValue(k,fromDico[k])
732 self._parameters[k] = self.setParameterValue(k)
733 logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
736 class Diagnostic(object):
# Template base class for diagnostics: subclasses pair it with a
# Persistence storage class and must implement _formula (the mathematical
# computation, non-public) and calculate (validation + storage wrapper).
738 Classe générale d'interface de type diagnostic
740 Ce template s'utilise de la manière suivante : il sert de classe "patron" en
741 même temps que l'une des classes de persistance, comme "OneScalar" par
744 Une classe élémentaire de diagnostic doit implémenter ses deux méthodes, la
745 méthode "_formula" pour écrire explicitement et proprement la formule pour
746 l'écriture mathématique du calcul du diagnostic (méthode interne non
747 publique), et "calculate" pour activer la précédente tout en ayant vérifié
748 et préparé les données, et pour stocker les résultats à chaque pas (méthode
749 externe d'activation).
# The mutable default {} is harmless here: dict(parameters) always copies,
# so no state is shared across instances.
751 def __init__(self, name = "", parameters = {}):
753 self.name = str(name)
754 self.parameters = dict( parameters )
756 def _formula(self, *args):
# Abstract: the elementary diagnostic computation itself.
758 Doit implémenter l'opération élémentaire de diagnostic sous sa forme
759 mathématique la plus naturelle possible.
761 raise NotImplementedError("Diagnostic mathematical formula has not been implemented!")
763 def calculate(self, *args):
# Abstract: validate/arrange arguments, call _formula, store the result.
765 Active la formule de calcul avec les arguments correctement rangés
767 raise NotImplementedError("Diagnostic activation method has not been implemented!")
769 # ==============================================================================
770 class DiagnosticAndParameters(object):
# Interface object that selects and instantiates an elementary diagnostic,
# either from in-memory arguments or by importing names from a user script,
# then locating the diagnostic module under a "daDiagnostics" directory.
# NOTE(review): sampled extract — the __init__ signature and the defaults
# for self.__D/__I/__U/__B/__P are on lines missing from this listing.
772 Classe générale d'interface d'interface de type diagnostic
775 name = "GenericDiagnostic",
782 asExistingDiags = None,
786 self.__name = str(name)
792 self.__E = tuple(asExistingDiags)
793 self.__TheDiag = None
# Script form: every characteristic is fetched by name from the script.
795 if asScript is not None:
796 __Diag = ImportFromScript(asScript).getvalue( "Diagnostic" )
797 __Iden = ImportFromScript(asScript).getvalue( "Identifier" )
798 __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
799 __Unit = ImportFromScript(asScript).getvalue( "Unit" )
800 __Base = ImportFromScript(asScript).getvalue( "BaseType" )
802 __Diag = asDiagnostic
803 __Iden = asIdentifier
808 if __Diag is not None:
809 self.__D = str(__Diag)
# The identifier defaults to the diagnostic name when not given.
810 if __Iden is not None:
811 self.__I = str(__Iden)
813 self.__I = str(__Diag)
814 if __Dict is not None:
815 self.__P.update( dict(__Dict) )
# The literal string "None" can come back from scripts; treat it as unset.
816 if __Unit is None or __Unit == "None":
818 if __Base is None or __Base == "None":
821 self.__setDiagnostic( self.__D, self.__I, self.__U, self.__B, self.__P, self.__E )
# Accessor for the instantiated elementary diagnostic object.
825 return self.__TheDiag
827 def __setDiagnostic(self, __choice = None, __name = "", __unit = "", __basetype = None, __parameters = {}, __existings = () ):
# Locate the "<choice>.py" module in a daDiagnostics subdirectory of any
# sys.path entry, import it, and instantiate its ElementaryDiagnostic.
829 Permet de sélectionner un diagnostic a effectuer
832 raise ValueError("Error: diagnostic choice has to be given")
833 __daDirectory = "daDiagnostics"
835 # Explicitly search for the full file
836 # ------------------------------------------
838 for directory in sys.path:
839 if os.path.isfile(os.path.join(directory, __daDirectory, str(__choice)+'.py')):
840 __module_path = os.path.abspath(os.path.join(directory, __daDirectory))
841 if __module_path is None:
842 raise ImportError("No diagnostic module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(__choice, __daDirectory, sys.path))
844 # Import the full file as a module
845 # ------------------------------------------
# NOTE(review): __sys_path_tmp aliases sys.path rather than copying it, so
# the insert(0, ...) mutates the saved list too and the restoration below
# does not actually remove the inserted entry; a snapshot via list(sys.path)
# (or importlib machinery) would be needed to truly restore it.
847 __sys_path_tmp = sys.path ; sys.path.insert(0,__module_path)
848 self.__diagnosticFile = __import__(str(__choice), globals(), locals(), [])
849 sys.path = __sys_path_tmp ; del __sys_path_tmp
850 except ImportError as e:
851 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(__choice,e))
853 # Instantiate an object of the file's elementary type
854 # -------------------------------------------------
# Refuse to shadow an already-registered diagnostic of the same name.
855 if __name in __existings:
856 raise ValueError("A default input with the same name \"%s\" already exists."%str(__name))
858 self.__TheDiag = self.__diagnosticFile.ElementaryDiagnostic(
861 basetype = __basetype,
862 parameters = __parameters )
865 # ==============================================================================
# Interface class holding the chosen assimilation algorithm and its
# parameters, with two execution paths (pure Python and YACS/SALOME).
# NOTE(review): numerous original lines are absent from this extract
# (try/else lines, run() argument list, SALOME imports); confirm against the
# upstream file before editing behavior.
866 class AlgorithmAndParameters(object):
868 Classe générale d'interface d'action pour l'algorithme et ses paramètres
871 name = "GenericAlgorithm",
878 self.__name = str(name)
882 self.__algorithm = {}
883 self.__algorithmFile = None
884 self.__algorithmName = None
886 self.updateParameters( asDict, asScript )
# The algorithm name may come from a script; it is recorded both as an
# attribute and inside the parameters dict under the "Algorithm" key.
888 if asScript is not None:
889 __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
893 if __Algo is not None:
894 self.__A = str(__Algo)
895 self.__P.update( {"Algorithm":self.__A} )
897 self.__setAlgorithm( self.__A )
# Merges new parameters, read either from a script or from a dict.
899 def updateParameters(self,
903 "Mise a jour des parametres"
904 if asScript is not None:
905 __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
909 if __Dict is not None:
910 self.__P.update( dict(__Dict) )
# Runs the assimilation entirely in Python: unwraps every input object
# (via getO() when available), validates shapes, then calls algorithm.run.
912 def executePythonScheme(self, asDictAO = None):
913 "Permet de lancer le calcul d'assimilation"
914 Operator.CM.clearCache()
916 if not isinstance(asDictAO, dict):
917 raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
918 if hasattr(asDictAO["Background"],"getO"): self.__Xb = asDictAO["Background"].getO()
919 elif hasattr(asDictAO["CheckingPoint"],"getO"): self.__Xb = asDictAO["CheckingPoint"].getO()
920 else: self.__Xb = None
921 if hasattr(asDictAO["Observation"],"getO"): self.__Y = asDictAO["Observation"].getO()
922 else: self.__Y = asDictAO["Observation"]
923 if hasattr(asDictAO["ControlInput"],"getO"): self.__U = asDictAO["ControlInput"].getO()
924 else: self.__U = asDictAO["ControlInput"]
925 if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
926 else: self.__HO = asDictAO["ObservationOperator"]
927 if hasattr(asDictAO["EvolutionModel"],"getO"): self.__EM = asDictAO["EvolutionModel"].getO()
928 else: self.__EM = asDictAO["EvolutionModel"]
929 if hasattr(asDictAO["ControlModel"],"getO"): self.__CM = asDictAO["ControlModel"].getO()
930 else: self.__CM = asDictAO["ControlModel"]
931 self.__B = asDictAO["BackgroundError"]
932 self.__R = asDictAO["ObservationError"]
933 self.__Q = asDictAO["EvolutionError"]
935 self.__shape_validate()
# NOTE(review): the run() keyword-argument list is truncated in this extract
# (only Parameters is visible).
937 self.__algorithm.run(
947 Parameters = self.__P,
# Runs the assimilation through a YACS XML scheme inside a SALOME session.
951 def executeYACSScheme(self, FileName=None):
952 "Permet de lancer le calcul d'assimilation"
953 if FileName is None or not os.path.exists(FileName):
954 raise ValueError("a YACS file name has to be given for YACS execution.\n")
955 if not PlatformInfo.has_salome or not PlatformInfo.has_yacs or not PlatformInfo.has_adao:
956 raise ImportError("Unable to get SALOME, YACS or ADAO environnement variables. Please launch SALOME before executing.\n")
# NOTE(review): the imports of pilot/SALOMERuntime/loader are among the
# lines missing from this extract.
962 SALOMERuntime.RuntimeSALOME_setRuntime()
964 r = pilot.getRuntime()
965 xmlLoader = loader.YACSLoader()
966 xmlLoader.registerProcCataLoader()
968 catalogAd = r.loadCatalog("proc", os.path.abspath(FileName))
971 r.addCatalog(catalogAd)
974 p = xmlLoader.load(os.path.abspath(FileName))
975 except IOError as ex:
976 print("IO exception: %s"%(ex,))
978 logger = p.getLogger("parser")
979 if not logger.isEmpty():
980 print("The imported file has errors :")
981 print(logger.getStr())
984 print("Le schéma n'est pas valide et ne peut pas être exécuté")
985 print(p.getErrorReport())
987 info=pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
988 p.checkConsistency(info)
989 if info.areWarningsOrErrors():
990 print("Le schéma n'est pas cohérent et ne peut pas être exécuté")
991 print(info.getGlobalRepr())
993 e = pilot.ExecutorSwig()
995 if p.getEffectiveState() != pilot.DONE:
996 print(p.getErrorReport())
998 raise ValueError("execution error of YACS scheme")
# Lookup helper: algorithm stored variables take precedence over parameters.
1002 def get(self, key = None):
1003 "Vérifie l'existence d'une clé de variable ou de paramètres"
1004 if key in self.__algorithm:
1005 return self.__algorithm.get( key )
1006 elif key in self.__P:
1007 return self.__P[key]
1011 def pop(self, k, d):
1012 "Necessaire pour le pickling"
1013 return self.__algorithm.pop(k, d)
1015 def getAlgorithmRequiredParameters(self, noDetails=True):
1016 "Renvoie la liste des paramètres requis selon l'algorithme"
1017 return self.__algorithm.getRequiredParameters(noDetails)
# Attaches an observer __O with info __I (scheduler __S) to stored variable
# __V; only valid once an algorithm has actually been selected.
1019 def setObserver(self, __V, __O, __I, __S):
1020 if self.__algorithm is None \
1021 or isinstance(self.__algorithm, dict) \
1022 or not hasattr(self.__algorithm,"StoredVariables"):
1023 raise ValueError("No observer can be build before choosing an algorithm.")
1024 if __V not in self.__algorithm:
1025 raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
1027 self.__algorithm.StoredVariables[ __V ].setDataObserver(
1030 HookParameters = __I,
1033 def removeObserver(self, __V, __O, __A = False):
1034 if self.__algorithm is None \
1035 or isinstance(self.__algorithm, dict) \
1036 or not hasattr(self.__algorithm,"StoredVariables"):
1037 raise ValueError("No observer can be removed before choosing an algorithm.")
1038 if __V not in self.__algorithm:
1039 raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
1041 return self.__algorithm.StoredVariables[ __V ].removeDataObserver(
1046 def hasObserver(self, __V):
1047 if self.__algorithm is None \
1048 or isinstance(self.__algorithm, dict) \
1049 or not hasattr(self.__algorithm,"StoredVariables"):
1051 if __V not in self.__algorithm:
1053 return self.__algorithm.StoredVariables[ __V ].hasDataObserver()
1056 return list(self.__algorithm.keys()) + list(self.__P.keys())
1058 def __contains__(self, key=None):
1059 "D.__contains__(k) -> True if D has a key k, else False"
1060 return key in self.__algorithm or key in self.__P
1063 "x.__repr__() <==> repr(x)"
1064 return repr(self.__A)+", "+repr(self.__P)
1067 "x.__str__() <==> str(x)"
1068 return str(self.__A)+", "+str(self.__P)
# Loads the module named `choice` from a "daAlgorithms" subdirectory found on
# sys.path and instantiates its ElementaryAlgorithm class; the choice is
# one-shot and cannot be changed afterwards.
1070 def __setAlgorithm(self, choice = None ):
1072 Permet de sélectionner l'algorithme à utiliser pour mener à bien l'étude
1073 d'assimilation. L'argument est un champ caractère se rapportant au nom
1074 d'un fichier contenu dans "../daAlgorithms" et réalisant l'opération
1075 d'assimilation sur les arguments fixes.
1078 raise ValueError("Error: algorithm choice has to be given")
1079 if self.__algorithmName is not None:
1080 raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
1081 daDirectory = "daAlgorithms"
1083 # Recherche explicitement le fichier complet
1084 # ------------------------------------------
1086 for directory in sys.path:
1087 if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
1088 module_path = os.path.abspath(os.path.join(directory, daDirectory))
1089 if module_path is None:
1090 raise ImportError("No algorithm module named \"%s\" was found in a \"%s\" subdirectory\n The search path is %s"%(choice, daDirectory, sys.path))
1092 # Importe le fichier complet comme un module
1093 # ------------------------------------------
# sys.path is temporarily prepended with the module directory and restored
# afterwards (the enclosing try line is missing from this extract).
1095 sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
1096 self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
1097 self.__algorithmName = str(choice)
1098 sys.path = sys_path_tmp ; del sys_path_tmp
1099 except ImportError as e:
1100 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
1102 # Instancie un objet du type élémentaire du fichier
1103 # -------------------------------------------------
1104 self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()
# Cross-checks the shapes of every input (vectors, covariances, operators)
# before running: each object is normalized to a shape tuple, then pairwise
# compatibility conditions are enforced.
1107 def __shape_validate(self):
1109 Validation de la correspondance correcte des tailles des variables et
1110 des matrices s'il y en a.
1112 if self.__Xb is None: __Xb_shape = (0,)
1113 elif hasattr(self.__Xb,"size"): __Xb_shape = (self.__Xb.size,)
1114 elif hasattr(self.__Xb,"shape"):
1115 if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
1116 else: __Xb_shape = self.__Xb.shape()
1117 else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
1119 if self.__Y is None: __Y_shape = (0,)
1120 elif hasattr(self.__Y,"size"): __Y_shape = (self.__Y.size,)
1121 elif hasattr(self.__Y,"shape"):
1122 if isinstance(self.__Y.shape, tuple): __Y_shape = self.__Y.shape
1123 else: __Y_shape = self.__Y.shape()
1124 else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
1126 if self.__U is None: __U_shape = (0,)
1127 elif hasattr(self.__U,"size"): __U_shape = (self.__U.size,)
1128 elif hasattr(self.__U,"shape"):
1129 if isinstance(self.__U.shape, tuple): __U_shape = self.__U.shape
1130 else: __U_shape = self.__U.shape()
1131 else: raise TypeError("The control (U) has no attribute of shape: problem !")
1133 if self.__B is None: __B_shape = (0,0)
1134 elif hasattr(self.__B,"shape"):
1135 if isinstance(self.__B.shape, tuple): __B_shape = self.__B.shape
1136 else: __B_shape = self.__B.shape()
1137 else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
1139 if self.__R is None: __R_shape = (0,0)
1140 elif hasattr(self.__R,"shape"):
1141 if isinstance(self.__R.shape, tuple): __R_shape = self.__R.shape
1142 else: __R_shape = self.__R.shape()
1143 else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
1145 if self.__Q is None: __Q_shape = (0,0)
1146 elif hasattr(self.__Q,"shape"):
1147 if isinstance(self.__Q.shape, tuple): __Q_shape = self.__Q.shape
1148 else: __Q_shape = self.__Q.shape()
1149 else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
1151 if len(self.__HO) == 0: __HO_shape = (0,0)
1152 elif isinstance(self.__HO, dict): __HO_shape = (0,0)
1153 elif hasattr(self.__HO["Direct"],"shape"):
1154 if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
1155 else: __HO_shape = self.__HO["Direct"].shape()
1156 else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
1158 if len(self.__EM) == 0: __EM_shape = (0,0)
1159 elif isinstance(self.__EM, dict): __EM_shape = (0,0)
1160 elif hasattr(self.__EM["Direct"],"shape"):
1161 if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
1162 else: __EM_shape = self.__EM["Direct"].shape()
1163 else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
1165 if len(self.__CM) == 0: __CM_shape = (0,0)
1166 elif isinstance(self.__CM, dict): __CM_shape = (0,0)
1167 elif hasattr(self.__CM["Direct"],"shape"):
1168 if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
1169 else: __CM_shape = self.__CM["Direct"].shape()
1170 else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
1172 # Vérification des conditions
1173 # ---------------------------
1174 if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
1175 raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
1176 if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
1177 raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
1179 if not( min(__B_shape) == max(__B_shape) ):
1180 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
1181 if not( min(__R_shape) == max(__R_shape) ):
1182 raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
1183 if not( min(__Q_shape) == max(__Q_shape) ):
1184 raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
1185 if not( min(__EM_shape) == max(__EM_shape) ):
1186 raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
1188 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
1189 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
1190 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
1191 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
1192 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
1193 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
1194 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
1195 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
# Special case: for EnsembleBlue a B/Xb size mismatch is resolved by
# reinterpreting Xb as a series of ensemble members.
1197 if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
1198 if self.__algorithmName in ["EnsembleBlue",]:
1199 asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
1200 self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
1201 for member in asPersistentVector:
1202 self.__Xb.store( numpy.matrix( numpy.ravel(member), numpy.float ).T )
1203 __Xb_shape = min(__B_shape)
1205 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
1207 if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
1208 raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
1210 if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
1211 raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
1213 if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
1214 raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
1216 if ("Bounds" in self.__P) \
1217 and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
1218 and (len(self.__P["Bounds"]) != max(__Xb_shape)):
1219 raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
1220 %(len(self.__P["Bounds"]),max(__Xb_shape)))
1224 # ==============================================================================
# Interface class that builds an observer function (from a string, a named
# template, a script, or a ready-made callable) and registers it on one or
# several stored variables of the algorithm object.
# NOTE(review): some original lines are absent from this extract (else
# branches, the ename/einfo bindings in the final loop); confirm against the
# upstream file before editing behavior.
1225 class DataObserver(object):
1227 Classe générale d'interface de type observer
1230 name = "GenericObserver",
1242 self.__name = str(name)
# Normalize the watched variable(s) into tuple self.__V with a parallel
# info tuple self.__I (defaults derive from the variable names).
1247 if onVariable is None:
1248 raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
1249 elif type(onVariable) in (tuple, list):
1250 self.__V = tuple(map( str, onVariable ))
1251 if withInfo is None:
1254 self.__I = (str(withInfo),)*len(self.__V)
1255 elif isinstance(onVariable, str):
1256 self.__V = (onVariable,)
1257 if withInfo is None:
1258 self.__I = (onVariable,)
1260 self.__I = (str(withInfo),)
1262 raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
# Resolve the observer body text by priority: explicit string, then named
# template, then script file.
1264 if asString is not None:
1265 __FunctionText = asString
1266 elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
1267 __FunctionText = Templates.ObserverTemplates[asTemplate]
1268 elif asScript is not None:
1269 __FunctionText = ImportFromScript(asScript).getstring()
1272 __Function = ObserverF(__FunctionText)
# A ready-made observer object wins over the text-built function.
1274 if asObsObject is not None:
1275 self.__O = asObsObject
1277 self.__O = __Function.getfunc()
1279 for k in range(len(self.__V)):
1282 if ename not in withAlgo:
1283 raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
1285 withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)
1288 "x.__repr__() <==> repr(x)"
1289 return repr(self.__V)+"\n"+repr(self.__O)
1292 "x.__str__() <==> str(x)"
1293 return str(self.__V)+"\n"+str(self.__O)
1295 # ==============================================================================
# Interface class for a state: either a single vector (stored as a column
# numpy.matrix) or a series of vectors (stored in a Persistence.OneVector).
# NOTE(review): some original lines are absent from this extract (else
# branches, docstring delimiters, parts of getO); confirm against the
# upstream file before editing behavior.
1296 class State(object):
1298 Classe générale d'interface de type état
1301 name = "GenericVector",
1303 asPersistentVector = None,
1306 toBeChecked = False,
1309 Permet de définir un vecteur :
1310 - asVector : entrée des données, comme un vecteur compatible avec le
1311 constructeur de numpy.matrix, ou "True" si entrée par script.
1312 - asPersistentVector : entrée des données, comme une série de vecteurs
1313 compatible avec le constructeur de numpy.matrix, ou comme un objet de
1314 type Persistence, ou "True" si entrée par script.
1315 - asScript : si un script valide est donné contenant une variable
1316 nommée "name", la variable est de type "asVector" (par défaut) ou
1317 "asPersistentVector" selon que l'une de ces variables est placée à
1320 self.__name = str(name)
1321 self.__check = bool(toBeChecked)
1325 self.__is_vector = False
1326 self.__is_series = False
# Script input: the named variable is read either as a series or a vector;
# otherwise the explicit keyword arguments are used directly.
1328 if asScript is not None:
1329 __Vector, __Series = None, None
1330 if asPersistentVector:
1331 __Series = ImportFromScript(asScript).getvalue( self.__name )
1333 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1335 __Vector, __Series = asVector, asPersistentVector
# Single vector: flattened (.A1) and stored as a float column matrix.
1337 if __Vector is not None:
1338 self.__is_vector = True
1339 self.__V = numpy.matrix( numpy.asmatrix(__Vector).A1, numpy.float ).T
1340 self.shape = self.__V.shape
1341 self.size = self.__V.size
# Series: sequence inputs are copied member-by-member into a OneVector;
# a Persistence-like object is used as-is (its branch lines are partly
# missing from this extract).
1342 elif __Series is not None:
1343 self.__is_series = True
1344 if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix)):
1345 self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
1346 for member in __Series:
1347 self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, numpy.float ).T )
1348 import sys ; sys.stdout.flush()
# shape may be an attribute or a zero-argument method on the stored object;
# 1-D shapes are normalized to a column (n,1).
1351 if isinstance(self.__V.shape, (tuple, list)):
1352 self.shape = self.__V.shape
1354 self.shape = self.__V.shape()
1355 if len(self.shape) == 1:
1356 self.shape = (self.shape[0],1)
1357 self.size = self.shape[0] * self.shape[1]
1359 raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
1361 if scheduledBy is not None:
1362 self.__T = scheduledBy
1364 def getO(self, withScheduler=False):
1366 return self.__V, self.__T
1367 elif self.__T is None:
1373 "Vérification du type interne"
1374 return self.__is_vector
1377 "Vérification du type interne"
1378 return self.__is_series
1381 "x.__repr__() <==> repr(x)"
1382 return repr(self.__V)
1385 "x.__str__() <==> str(x)"
1386 return str(self.__V)
1388 # ==============================================================================
# Interface class for an error-covariance matrix with four internal storage
# kinds: full matrix, diagonal vector, scalar-times-identity, or an opaque
# object providing the matrix protocol. Most operations dispatch on the
# is{scalar,vector,matrix,object}() flags.
# NOTE(review): some original lines are absent from this extract (elif
# chains for script input, try lines around Cholesky, several method `def`
# lines); confirm against the upstream file before editing behavior.
1389 class Covariance(object):
1391 Classe générale d'interface de type covariance
1394 name = "GenericCovariance",
1395 asCovariance = None,
1396 asEyeByScalar = None,
1397 asEyeByVector = None,
1400 toBeChecked = False,
1403 Permet de définir une covariance :
1404 - asCovariance : entrée des données, comme une matrice compatible avec
1405 le constructeur de numpy.matrix
1406 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1407 multiplicatif d'une matrice de corrélation identité, aucune matrice
1408 n'étant donc explicitement à donner
1409 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1410 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1411 n'étant donc explicitement à donner
1412 - asCovObject : entrée des données comme un objet python, qui a les
1413 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1414 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1415 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1416 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1417 pleine doit être vérifié
1419 self.__name = str(name)
1420 self.__check = bool(toBeChecked)
1423 self.__is_scalar = False
1424 self.__is_vector = False
1425 self.__is_matrix = False
1426 self.__is_object = False
# Script input: exactly one of the four representations is read under the
# covariance name (the selecting conditions are missing from this extract).
1428 if asScript is not None:
1429 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1431 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1433 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1435 __Object = ImportFromScript(asScript).getvalue( self.__name )
1437 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1439 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
# Priority of representations: scalar, then vector, then matrix, then
# object. Scalar/vector variances are forced non-negative via numpy.abs.
1441 if __Scalar is not None:
1442 if numpy.matrix(__Scalar).size != 1:
1443 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1444 self.__is_scalar = True
1445 self.__C = numpy.abs( float(__Scalar) )
1448 elif __Vector is not None:
1449 self.__is_vector = True
1450 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1451 self.shape = (self.__C.size,self.__C.size)
1452 self.size = self.__C.size**2
1453 elif __Matrix is not None:
1454 self.__is_matrix = True
1455 self.__C = numpy.matrix( __Matrix, float )
1456 self.shape = self.__C.shape
1457 self.size = self.__C.size
1458 elif __Object is not None:
1459 self.__is_object = True
# Duck-type check: the object must provide the mandatory matrix protocol.
1461 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1462 if not hasattr(self.__C,at):
1463 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1464 if hasattr(self.__C,"shape"):
1465 self.shape = self.__C.shape
1468 if hasattr(self.__C,"size"):
1469 self.size = self.__C.size
1474 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
# Sanity checks: squareness, positivity; the SPD check (via a Cholesky
# factorization attempt) only runs when requested or at verbose log levels.
1478 def __validate(self):
1480 if self.__C is None:
1481 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1482 if self.ismatrix() and min(self.shape) != max(self.shape):
1483 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1484 if self.isobject() and min(self.shape) != max(self.shape):
1485 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1486 if self.isscalar() and self.__C <= 0:
1487 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1488 if self.isvector() and (self.__C <= 0).any():
1489 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1490 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1492 L = numpy.linalg.cholesky( self.__C )
1494 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1495 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1497 L = self.__C.cholesky()
1499 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1502 "Vérification du type interne"
1503 return self.__is_scalar
1506 "Vérification du type interne"
1507 return self.__is_vector
1510 "Vérification du type interne"
1511 return self.__is_matrix
1514 "Vérification du type interne"
1515 return self.__is_object
# Inverse: each representation inverts in its own cheapest form.
1520 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1521 elif self.isvector():
1522 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1523 elif self.isscalar():
1524 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1525 elif self.isobject():
1526 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1528 return None # Indispensable
# Transpose: diagonal and scalar forms are symmetric, so unchanged.
1533 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1534 elif self.isvector():
1535 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1536 elif self.isscalar():
1537 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1538 elif self.isobject():
1539 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
# Cholesky factor: square root of the diagonal/scalar for sparse forms.
1542 "Décomposition de Cholesky"
1544 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1545 elif self.isvector():
1546 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1547 elif self.isscalar():
1548 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1549 elif self.isobject() and hasattr(self.__C,"cholesky"):
1550 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1552 def choleskyI(self):
1553 "Inversion de la décomposition de Cholesky"
1555 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1556 elif self.isvector():
1557 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1558 elif self.isscalar():
1559 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1560 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1561 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
# diag/asfullmatrix/trace: sparse forms need an explicit msize, since the
# scalar representation does not carry a dimension of its own.
1563 def diag(self, msize=None):
1564 "Diagonale de la matrice"
1566 return numpy.diag(self.__C)
1567 elif self.isvector():
1569 elif self.isscalar():
1571 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1573 return self.__C * numpy.ones(int(msize))
1574 elif self.isobject():
1575 return self.__C.diag()
1577 def asfullmatrix(self, msize=None):
1581 elif self.isvector():
1582 return numpy.matrix( numpy.diag(self.__C), float )
1583 elif self.isscalar():
1585 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1587 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1588 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1589 return self.__C.asfullmatrix()
1591 def trace(self, msize=None):
1592 "Trace de la matrice"
1594 return numpy.trace(self.__C)
1595 elif self.isvector():
1596 return float(numpy.sum(self.__C))
1597 elif self.isscalar():
1599 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1601 return self.__C * int(msize)
1602 elif self.isobject():
1603 return self.__C.trace()
1609 "x.__repr__() <==> repr(x)"
1610 return repr(self.__C)
1613 "x.__str__() <==> str(x)"
1614 return str(self.__C)
# Arithmetic: sparse forms act on `other` along its main diagonal using the
# strided flat view _A.reshape(_A.size)[::ncols+1].
1616 def __add__(self, other):
1617 "x.__add__(y) <==> x+y"
1618 if self.ismatrix() or self.isobject():
1619 return self.__C + numpy.asmatrix(other)
1620 elif self.isvector() or self.isscalar():
1621 _A = numpy.asarray(other)
1622 _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
1623 return numpy.asmatrix(_A)
1625 def __radd__(self, other):
1626 "x.__radd__(y) <==> y+x"
1627 raise NotImplementedError("%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other)))
1629 def __sub__(self, other):
1630 "x.__sub__(y) <==> x-y"
1631 if self.ismatrix() or self.isobject():
1632 return self.__C - numpy.asmatrix(other)
1633 elif self.isvector() or self.isscalar():
1634 _A = numpy.asarray(other)
1635 _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
1636 return numpy.asmatrix(_A)
1638 def __rsub__(self, other):
1639 "x.__rsub__(y) <==> y-x"
1640 raise NotImplementedError("%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other)))
1643 "x.__neg__() <==> -x"
# __mul__/__rmul__ distinguish vector-like operands (returned as column
# matrices) from matrix operands, and verify shape compatibility.
1646 def __mul__(self, other):
1647 "x.__mul__(y) <==> x*y"
1648 if self.ismatrix() and isinstance(other,numpy.matrix):
1649 return self.__C * other
1650 elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
1651 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1652 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1653 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1654 return self.__C * numpy.asmatrix(other)
1656 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
1657 elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
1658 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1659 return numpy.asmatrix(self.__C * numpy.ravel(other)).T
1660 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1661 return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
1663 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1664 elif self.isscalar() and isinstance(other,numpy.matrix):
1665 return self.__C * other
1666 elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
1667 if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
1668 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1670 return self.__C * numpy.asmatrix(other)
1671 elif self.isobject():
1672 return self.__C.__mul__(other)
1674 raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
1676 def __rmul__(self, other):
1677 "x.__rmul__(y) <==> y*x"
1678 if self.ismatrix() and isinstance(other,numpy.matrix):
1679 return other * self.__C
1680 elif self.isvector() and isinstance(other,numpy.matrix):
1681 if numpy.ravel(other).size == self.shape[0]: # Vecteur
1682 return numpy.asmatrix(numpy.ravel(other) * self.__C)
1683 elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrice
1684 return numpy.asmatrix(numpy.array(other) * self.__C)
1686 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1687 elif self.isscalar() and isinstance(other,numpy.matrix):
1688 return other * self.__C
1689 elif self.isobject():
1690 return self.__C.__rmul__(other)
1692 raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
1695 "x.__len__() <==> len(x)"
1696 return self.shape[0]
1698 # ==============================================================================
class ObserverF(object):
    """
    Build an observer function from its source text.

    The Python source given in `corps` is executed (via exec) each time the
    observer function is called, with `var` and `info` available in the
    local scope of the call.
    """
    def __init__(self, corps=""):
        # Raw Python source of the observer body.
        self.__corps = corps
    def func(self, var, info):
        "Observation function: runs the stored source with var/info in scope"
        # SECURITY NOTE: exec() runs arbitrary code; `corps` must come from a
        # trusted source (user-written observer text or templates).
        # Restored exec line (lost in extraction).
        exec(self.__corps)
    def getfunc(self):
        "Return the function pointer stored in the object"
        # Restored method (header and return lost in extraction).
        return self.func
1712 # ==============================================================================
class CaseLogger(object):
    """
    Keeps a record of the commands used to build a case.

    Commands are appended to an internal series by register(); dump() renders
    the series through one of the registered viewer classes and load()
    re-reads a case through one of the registered loaders.
    """
    # NOTE(review): several physical lines of this class were lost in the
    # text extraction (the literal contents of the viewers/loaders registry
    # and two control-flow guards); gaps are flagged where they occur.
    def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
        self.__name = str(__name)          # case name, passed to viewers
        self.__objname = str(__objname)    # identifier of the case object in generated code
        self.__logSerie = []               # ordered (command, keys, locals, pre, switchoff) records
        self.__switchoff = False           # when True, register() drops incoming commands
        # Viewers (for dump) and loaders (for load) share a single registry
        # mapping a format name to a handler class.
        self.__viewers = self.__loaders = {
        # NOTE(review): dict literal entries and the closing brace were lost
        # in extraction here (presumably format names mapped to the *Viewer
        # classes defined below) — restore from upstream source.
        if __addViewers is not None:
            self.__viewers.update(dict(__addViewers))
        if __addLoaders is not None:
            self.__loaders.update(dict(__addLoaders))
    def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Recording of an individual command"
        if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
            if "self" in __keys: __keys.remove("self")
            self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
            # NOTE(review): a guard line was lost in extraction before the
            # next statement (presumably "if __switchoff:") — TODO confirm
            self.__switchoff = True
        # NOTE(review): a guard line was lost in extraction before the next
        # statement (presumably "if not __switchoff:") — TODO confirm
        self.__switchoff = False
    def dump(self, __filename=None, __format="TUI"):
        "Normalized restitution of the commands (through a *GenericCaseViewer)"
        if __format in self.__viewers:
            __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
        # NOTE(review): an "else:" line was lost in extraction before this
        # raise — as written the raise would be unconditional; TODO confirm
        raise ValueError("Dumping as \"%s\" is not available"%__format)
        return __formater.dump(__filename)
    def load(self, __filename=None, __format="TUI"):
        "Normalized loading of the commands"
        if __format in self.__loaders:
            __formater = self.__loaders[__format]()
        # NOTE(review): an "else:" line was lost in extraction before this
        # raise — as written the raise would be unconditional; TODO confirm
        raise ValueError("Loading as \"%s\" is not available"%__format)
        return __formater.load(__filename)
1758 # ==============================================================================
class GenericCaseViewer(object):
    """
    Management of the commands building a case view: base class for the
    concrete viewer classes (_TUIViewer, _DICViewer, ...).
    """
    # NOTE(review): some physical lines of this class were lost in the text
    # extraction (two "def" headers and the tails of dump/load/execCase);
    # gaps are flagged inline below.
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        self._name = str(__name)          # case name
        self._objname = str(__objname)    # case object identifier in generated code
        self._lineSerie = []              # generated output lines
        self._switchoff = False           # output suppression flag
        self._numobservers = 2            # counter used to number observers
        self._content = __content         # optional command series to replay
        # Template inserted in generated code when a value is a callable that
        # cannot be serialized: the generated script raises until the user
        # defines the missing variable by hand.
        self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
    # NOTE(review): a "def _append(self, ...):" header was lost in extraction
    # before the next two lines — TODO restore from upstream source
        "Transformation of an individual command into a record"
        raise NotImplementedError()
    # NOTE(review): a "def _extract(self, ...):" header was lost in
    # extraction before the next two lines — TODO restore from upstream source
        "Transformation of a record into an individual command"
        raise NotImplementedError()
    def _interpret(self):
        "Interpretation of a command (to be overridden by subclasses)"
        raise NotImplementedError()
    def _finalize(self):
        "Recording of the closing part"
        # NOTE(review): method body line lost in extraction (presumably
        # "pass") — TODO confirm
    def _addLine(self, line=""):
        "Appends one individual record to the output series"
        self._lineSerie.append(line)
    def dump(self, __filename=None):
        "Normalized restitution of the commands"
        # NOTE(review): line lost in extraction here (presumably a call to
        # self._finalize()) — TODO confirm
        __text = "\n".join(self._lineSerie)
        # NOTE(review): line lost in extraction here
        if __filename is not None:
            __file = os.path.abspath(__filename)
            __fid = open(__file,"w")
            # NOTE(review): write/close/return lines lost in extraction here
    def load(self, __filename=None):
        "Normalized loading of the commands"
        if os.path.exists(__filename):
            self._content = open(__filename, 'r').read()
        __commands = self._extract(self._content)
        # NOTE(review): "return __commands"-style line lost in extraction here
    def execCase(self, __filename=None):
        "Normalized execution of the commands"
        if os.path.exists(__filename):
            self._content = open(__filename, 'r').read()
        __retcode = self._interpret(self._content)
        # NOTE(review): "return __retcode"-style line lost in extraction here
class _TUIViewer(GenericCaseViewer):
    """
    Establishment of the commands building a TUI case: emits a standalone
    Python script that drives adaoBuilder.
    """
    # NOTE(review): some physical lines were lost in the text extraction;
    # gaps are flagged inline below.
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("#\n# Python script for ADAO TUI\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("import adaoBuilder")
        self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
        if self._content is not None:
            # Replay a pre-recorded command series into the output.
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transformation of an individual command into a record"
        if __command is not None and __keys is not None and __local is not None:
            # NOTE(review): the initialization of __text (presumably
            # __text = "") was lost in extraction here — TODO confirm
            if __pre is not None:
                __text += "%s = "%__pre
            __text += "%s.%s( "%(self._objname,str(__command))
            if "self" in __keys: __keys.remove("self")
            if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
            # NOTE(review): the loop header over __keys (binding k and
            # __v = __local[k]) was lost in extraction before the following
            # "continue" statements — TODO confirm
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v,Persistence.Persistence): __v = __v.values()
                # Callables cannot be serialized with repr(): prepend the
                # "missing variable" template so the generated script warns.
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v,dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Widen numpy print options so repr(__v) is a full,
                # un-truncated literal, then restore the defaults.
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "%s=%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # NOTE(review): line(s) closing the generated call text were lost
            # in extraction here — TODO confirm
            self._addLine(__text)
    def _extract(self, __content=""):
        "Transformation of a record into an individual command"
        # NOTE(review): initialization line(s) (presumably __commands = [])
        # were lost in extraction here — TODO confirm
        __content = __content.replace("\r\n","\n")
        for line in __content.split("\n"):
            # Recover the case object name from its creation line.
            if "adaoBuilder" in line and "=" in line:
                self._objname = line.split("=")[0].strip()
            # NOTE(review): line(s) lost in extraction here
            if self._objname+".set" in line:
                __commands.append( line.replace(self._objname+".","",1) )
        # NOTE(review): "return __commands"-style line lost in extraction here
    def _interpret(self, __content=""):
        "Interpretation of a command"
        __content = __content.replace("\r\n","\n")
        # NOTE(review): the execution of __content (presumably via exec) was
        # lost in extraction here — TODO confirm
class _DICViewer(GenericCaseViewer):
    """
    Establishment of the commands building a DIC case: emits the
    "study_config" dictionary script consumed by the ADAO-to-YACS converter.
    """
    # NOTE(review): numerous physical lines of this class were lost in the
    # text extraction; gaps are flagged inline below and the restored
    # indentation is a best-effort reading of the visible statements.
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Input for ADAO converter to YACS\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("study_config = {}")
        self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
        self._addLine("study_config['Name'] = '%s'"%self._name)
        self._addLine("observers = {}")
        self._addLine("study_config['Observers'] = observers")
        self._addLine("inputvariables_config = {}")
        self._addLine("inputvariables_config['Order'] =['adao_default']")
        self._addLine("inputvariables_config['adao_default'] = -1")
        self._addLine("study_config['InputVariables'] = inputvariables_config")
        self._addLine("outputvariables_config = {}")
        self._addLine("outputvariables_config['Order'] = ['adao_default']")
        self._addLine("outputvariables_config['adao_default'] = -1")
        self._addLine("study_config['OutputVariables'] = outputvariables_config")
        if __content is not None:
            # Replay a pre-recorded command series into the output.
            for command in __content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transformation of an individual command into a record"
        # Normalize "setXxx" command names to the bare concept name.
        if __command == "set": __command = __local["Concept"]
        else: __command = __command.replace("set", "", 1)
        # NOTE(review): initialization of __text lost in extraction here
        if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get'):
            # NOTE(review): branch body lost in extraction here (these
            # commands produce no study_config entry) — TODO confirm
        elif __command in ['Debug', 'setDebug']:
            __text = "#\nstudy_config['Debug'] = '1'"
        elif __command in ['NoDebug', 'setNoDebug']:
            __text = "#\nstudy_config['Debug'] = '0'"
        elif __command in ['Observer', 'setObserver']:
            __obs = __local['Variable']
            self._numobservers += 1
            # NOTE(review): initialization of __text lost in extraction here
            __text += "observers['%s'] = {}\n"%__obs
            if __local['String'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
            if __local['Script'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
                __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
            if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
            if __local['Info'] is not None:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
            # NOTE(review): an "else:" line was lost in extraction before the
            # next line (fallback info = variable name) — TODO confirm
            __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
            __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
        elif __local is not None: # __keys is not None and
            # Widen numpy print options so repr() emits full literals.
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            # NOTE(review): initialization of __text lost in extraction here
            __text += "%s_config = {}\n"%__command
            if 'self' in __local: __local.pop('self')
            # Drop None-valued entries before serialization.
            __to_be_removed = []
            for __k,__v in __local.items():
                if __v is None: __to_be_removed.append(__k)
            for __k in __to_be_removed:
                # NOTE(review): loop body lost in extraction here (presumably
                # "__local.pop(__k)") — TODO confirm
            for __k,__v in __local.items():
                if __k == "Concept": continue
                if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
                if __k == 'Algorithm':
                    __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
                elif __k == 'Script':
                    # NOTE(review): line(s) lost in extraction here
                    __v = "'"+repr(__v)+"'"
                    # A Script entry may carry one of the matrix-like
                    # concepts; if so, retag the type accordingly.
                    for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
                        if __lk in __local and __local[__lk]: __k = __lk
                    if __command == "AlgorithmParameters": __k = "Dict"
                    if 'OneFunction' in __local and __local['OneFunction']:
                        __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
                        # NOTE(review): one line lost in extraction here
                        __f = 'ScriptWithOneFunction'
                        __v = '%s_ScriptWithOneFunction'%(__command,)
                    if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
                        __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
                        # NOTE(review): one line lost in extraction here
                        __f = 'ScriptWithFunctions'
                        __v = '%s_ScriptWithFunctions'%(__command,)
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,__f)
                    __text += "%s_config['Data'] = %s\n"%(__command,__v)
                    __text = __text.replace("''","'")
                elif __k in ('Stored', 'Checked'):
                    # NOTE(review): a guard line was lost in extraction here
                    # (presumably emit only when bool(__v)) — TODO confirm
                    __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                elif __k in ('AvoidRC', 'noDetails'):
                    # NOTE(review): a guard line was lost in extraction here
                    # (presumably emit only for non-default values) — TODO confirm
                    __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                # NOTE(review): an "else:" line was lost in extraction before
                # the generic-serialization branch below — TODO confirm
                    if __k == 'Parameters': __k = "Dict"
                    if isinstance(__v,Persistence.Persistence): __v = __v.values()
                    if callable(__v): __text = self._missing%__v.__name__+__text
                    if isinstance(__v,dict):
                        for val in __v.values():
                            if callable(val): __text = self._missing%val.__name__+__text
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,'String')
                    __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
            __text += "study_config['%s'] = %s_config"%(__command,__command)
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # NOTE(review): a guard line was lost in extraction here
            # (presumably "if __switchoff:") — TODO confirm
            self._switchoff = True
        if __text is not None: self._addLine(__text)
        # NOTE(review): a guard line was lost in extraction here
        # (presumably "if not __switchoff:") — TODO confirm
        self._switchoff = False
    def _finalize(self):
        "Records the closing part: the default UserPostAnalysis block"
        self.__loadVariablesByScript()
        # NOTE(review): line lost in extraction here
        self._addLine("Analysis_config = {}")
        self._addLine("Analysis_config['From'] = 'String'")
        self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
        self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
        # NOTE(review): the generated post-analysis uses a Python 2 print
        # statement; the emitted script is not Python 3 compatible — confirm
        # against the target YACS runtime before changing it.
        self._addLine("print 'Analysis:',xa\"\"\"")
        self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
    def __loadVariablesByScript(self):
        # Executes the generated lines to recover study_config, then patches
        # the Algorithm / AlgorithmParameters entries when they come from a
        # script or are missing.
        __ExecVariables = {} # Necessary to recover the variable
        exec("\n".join(self._lineSerie), __ExecVariables)
        study_config = __ExecVariables['study_config']
        self.__hasAlgorithm = bool(study_config['Algorithm'])
        if not self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" in study_config['AlgorithmParameters'] and \
                "Data" in study_config['AlgorithmParameters'] and \
                study_config['AlgorithmParameters']['From'] == 'Script':
            # The algorithm name lives in the referenced script: import it.
            __asScript = study_config['AlgorithmParameters']['Data']
            __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
            __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
            self._addLine(__text)
        if self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" not in study_config['AlgorithmParameters'] and \
                "Data" not in study_config['AlgorithmParameters']:
            # NOTE(review): initialization of __text lost in extraction here
            __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
            __text += "AlgorithmParameters_config['From'] = 'String'\n"
            __text += "AlgorithmParameters_config['Data'] = '{}'\n"
            self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Establishment of the commands building an XML case.

    Placeholder: the XML output format is not implemented, so construction
    always raises NotImplementedError after the common initialization.
    (Docstring delimiters restored — they were lost in extraction.)
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        raise NotImplementedError()
class _YACSViewer(GenericCaseViewer):
    """
    Establishment of the commands building a YACS case: delegates content
    recording to an internal _DICViewer, then converts the dumped DIC file
    to a YACS XML scheme through the external AdaoYacsSchemaCreator tool.
    """
    # NOTE(review): some physical lines were lost in the text extraction;
    # gaps are flagged inline below.
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialization and recording of the header"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        self.__internalDIC = _DICViewer(__name, __objname, __content)
        # Commands are recorded directly by the internal DIC viewer.
        self._append = self.__internalDIC._append
    def dump(self, __filename=None):
        "Normalized restitution of the commands"
        self.__internalDIC._finalize()
        # DIC part: write the intermediate <name>_DIC.py file.
        if __filename is not None:
            __file = os.path.abspath(__filename)
            __DICfile = __file[:__file.rfind(".")] + '_DIC.py'
            __DICdump = self.__internalDIC.dump(__DICfile)
        # NOTE(review): an "else:" line was lost in extraction before this
        # raise — as written the raise would be unconditional; TODO confirm
        raise ValueError("A file name has to be given for YACS XML output.")
        # YACS part: requires a SALOME/ADAO environment.
        if not PlatformInfo.has_salome or \
                not PlatformInfo.has_adao:
            raise ImportError("\n\n"+\
                "Unable to get SALOME or ADAO environnement variables.\n"+\
                "Please load the right environnement before trying to use it.\n")
        # NOTE(review): line(s) lost in extraction here
        if os.path.isfile(__file) or os.path.islink(__file):
            # NOTE(review): branch body lost in extraction (presumably
            # removes the pre-existing output file) — TODO confirm
        __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
        # The conversion runs out of process; list-form args, no shell used.
        __args = ["python", __converterExe, __DICfile, __file]
        # NOTE(review): line(s) lost in extraction here
        __p = subprocess.Popen(__args)
        (__stdoutdata, __stderrdata) = __p.communicate()
        if not os.path.exists(__file):
            __msg = "An error occured during the ADAO YACS Schema build.\n"
            __msg += "Creator applied on the input file:\n"
            __msg += " %s\n"%__DICfile
            __msg += "If SALOME GUI is launched by command line, see errors\n"
            __msg += "details in your terminal.\n"
            raise ValueError(__msg)
        # The intermediate DIC file is only a conversion artifact.
        os.remove(__DICfile)
        # Return of the generated file content.
        __fid = open(__file,"r")
        __text = __fid.read()
        # NOTE(review): close/return lines lost in extraction here
2096 # ==============================================================================
class ImportFromScript(object):
    """
    Obtain a named variable from an imported Python script file.

    The file name may be given with or without the ".py" extension; the
    module is imported once at construction time and its source text is
    kept for getstring().
    """
    def __init__(self, __filename=None):
        "Checks the existence of the file and imports the script"
        # Fix: validate before use — the original called rstrip on
        # __filename first, so a None argument raised AttributeError
        # instead of the intended ValueError.
        if __filename is None:
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        # Fix: strip the ".py" *suffix* only. The original used
        # __filename.rstrip(".py"), which strips any run of trailing
        # '.', 'p' or 'y' characters (e.g. "strategy" -> "strateg").
        if __filename.endswith(".py"):
            __filename = __filename[:-3]
        self.__filename = __filename
        if not os.path.isfile(str(self.__filename)+".py"):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n \"%s\""%self.__filename)
        # Import the script as a module, and keep its raw source text.
        self.__scriptfile = __import__(self.__filename, globals(), locals(), [])
        self.__scriptstring = open(self.__filename+".py",'r').read()
    def getvalue(self, __varname=None, __synonym=None ):
        """
        Return the requested variable from the imported script.

        Falls back to __synonym when __varname is absent; raises ValueError
        when neither name exists in the script.
        """
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__synonym))
            else:
                # Restored branch ("else:" line lost in extraction).
                return getattr(self.__scriptfile, __synonym)
        else:
            # Restored branch ("else:" line lost in extraction).
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Returns the complete script source text"
        return self.__scriptstring
2127 # ==============================================================================
def CostFunction3D(_x,
                   _Hm = None, # To simulate Hm(x) : HO["Direct"].appliedTo
                   _HmX = None, # Already-computed simulation of Hm(x)
                   _arg = None, # Extra arguments for Hm, as a tuple
                   # NOTE(review): several parameter lines were lost in
                   # extraction here — the body below reads _Y, _Xb, _BI and
                   # _RI, which are presumably declared in the lost lines
                   _SIV = False, # To be removed for 8.0
                   _SSC = [], # self._parameters["StoreSupplementaryCalculations"]
                   # NOTE(review): mutable default arguments (_SSC=[],
                   # _SSV={}) are shared across calls — callers must not
                   # mutate them; consider None sentinels upstream
                   _nPS = 0, # nbPreviousSteps
                   _QM = "DA", # QualityMeasure
                   _SSV = {}, # Input and/or output : self.StoredVariables
                   _fRt = False, # Whether or not to return the extended output
                   _sSc = True, # Whether or not to store the SSC
                   # NOTE(review): the "):" closing the signature and the
                   # docstring delimiters were lost in extraction here
    General cost function useful for the static/3D algorithms: 3DVAR, BLUE
    and derivatives, Kalman and derivatives, LeastSquares, SamplingTest, PSO,
    SA, Tabu, DFO, QuantileRegression
    # Wrap the requested storage objects so they can be fed with append().
    for k in ["CostFunctionJ",
              # NOTE(review): several list entries lost in extraction here
              "SimulatedObservationAtCurrentOptimum",
              "SimulatedObservationAtCurrentState",
              # NOTE(review): the list closing and the loop-body guard lines
              # were lost in extraction here
        if hasattr(_SSV[k],"store"):
            _SSV[k].append = _SSV[k].store # To use "append" instead of "store"
    _X = numpy.asmatrix(numpy.ravel( _x )).T
    if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
        _SSV["CurrentState"].append( _X )
    # Either reuse the provided simulation _HmX, or evaluate the operator.
    if _HmX is not None:
        # NOTE(review): lines lost in extraction here (presumably
        # "_HX = _HmX" then an "else:" with a guard on _Hm being None)
        raise ValueError("COSTFUNCTION3D Operator has to be defined.")
        # NOTE(review): line(s) lost in extraction here
        _HX = _Hm( _X, *_arg )
    _HX = numpy.asmatrix(numpy.ravel( _HX )).T
    if "SimulatedObservationAtCurrentState" in _SSC or \
            "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentState"].append( _HX )
    # A NaN in the simulation invalidates the whole cost evaluation.
    if numpy.any(numpy.isnan(_HX)):
        Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
    # NOTE(review): an "else:" line was lost in extraction here — the
    # following quality-measure dispatch is presumably the non-NaN branch
        _Y = numpy.asmatrix(numpy.ravel( _Y )).T
        if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
            if _BI is None or _RI is None:
                raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
            _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
            Jb = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
            Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
            # NOTE(review): guard line lost in extraction here (presumably
            # "if _RI is None:") — TODO confirm
            raise ValueError("Observation error covariance matrix has to be properly defined!")
            # NOTE(review): line lost in extraction here (presumably "Jb = 0.")
            Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["LeastSquares", "LS", "L2"]:
            # NOTE(review): line lost in extraction here (presumably "Jb = 0.")
            Jo = 0.5 * (_Y - _HX).T * (_Y - _HX)
        elif _QM in ["AbsoluteValue", "L1"]:
            # NOTE(review): line lost in extraction here (presumably "Jb = 0.")
            Jo = numpy.sum( numpy.abs(_Y - _HX) )
        elif _QM in ["MaximumError", "ME"]:
            # NOTE(review): line lost in extraction here (presumably "Jb = 0.")
            Jo = numpy.max( numpy.abs(_Y - _HX) )
        elif _QM in ["QR", "Null"]:
            # NOTE(review): branch body lost in extraction here
        # NOTE(review): an "else:" line was lost in extraction before this
        # raise — TODO confirm
            raise ValueError("Unknown asked quality measure!")
        J = float( Jb ) + float( Jo )
    # NOTE(review): line(s) lost in extraction here (presumably the storage
    # guard, e.g. "if _sSc:") — TODO confirm
        _SSV["CostFunctionJb"].append( Jb )
        _SSV["CostFunctionJo"].append( Jo )
        _SSV["CostFunctionJ" ].append( J )
    # Track the optimum over the stored cost series when requested.
    if "IndexOfOptimum" in _SSC or \
            "CurrentOptimum" in _SSC or \
            "SimulatedObservationAtCurrentOptimum" in _SSC:
        IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
        if "IndexOfOptimum" in _SSC:
            _SSV["IndexOfOptimum"].append( IndexMin )
        if "CurrentOptimum" in _SSC:
            _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
        if "SimulatedObservationAtCurrentOptimum" in _SSC:
            _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
    if _QM in ["QR"]: # For QuantileRegression
        # NOTE(review): the return statements of this function were lost in
        # extraction after this line — restore from upstream source
# ==============================================================================
if __name__ == "__main__":
    # Simple self-identification banner when the module is run directly.
    print('\n AUTODIAGNOSTIC \n')