1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelée par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
class CacheManager(object):
    """
    General management of a cache of computed operator values (x -> H(x)).

    NOTE(review): this view of the file is a partially-sampled dump — the
    "def __init__" header, several state initialisations, method endings
    (continue/break/return) and the clearCache/disable/enable method headers
    are not visible.  All visible code lines are kept verbatim.
    """
        toleranceInRedundancy = 1.e-18,
        lenghtOfRedundancy = -1,
        # NOTE(review): the "lenght" typo is part of the public keyword API — do not rename.
        """Tolerance characteristics may be adjusted at creation time."""
        self.__tolerBP = float(toleranceInRedundancy)   # relative tolerance used to detect duplicate points
        self.__lenghtOR = int(lenghtOfRedundancy)       # max number of cached entries (-1: sized later from x)
        self.__initlnOR = self.__lenghtOR               # initial length, kept to restore the cache later
        self.__listOPCV = [] # Operator Previous Calculated Points, Results, Point Norms
        # logging.debug("CM Tolerance de determination des doublons : %.2e", self.__tolerBP)

    def wasCalculatedIn(self, xValue ): #, info="" ):
        "Check whether a calculation matching the given value already exists in the cache"
        # Backward scan over at most the last __lenghtOR cached entries
        for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
            if xValue.size != self.__listOPCV[i][0].size:
                # logging.debug("CM Différence de la taille %s de X et de celle %s du point %i déjà calculé", xValue.shape,i,self.__listOPCP[i].shape)
            # Relative proximity test: ||x - xi|| < tol * ||xi||
            if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
                __HxV = self.__listOPCV[i][1]
                # logging.debug("CM Cas%s déja calculé, portant le numéro %i", info, i)

    def storeValueInX(self, xValue, HxValue ):
        "Store a computed result associated with the given value"
        if self.__lenghtOR < 0:
            # Default cache size derived from the dimension of x
            self.__lenghtOR = 2 * xValue.size + 2
            self.__initlnOR = self.__lenghtOR
        while len(self.__listOPCV) > self.__lenghtOR:
            # logging.debug("CM Réduction de la liste des cas à %i éléments par suppression du premier", self.__lenghtOR)
            self.__listOPCV.pop(0)
        # Entry layout (partially visible): (flattened point, result, point norm)
        self.__listOPCV.append( (
            copy.copy(numpy.ravel(xValue)),
            numpy.linalg.norm(xValue),
        # NOTE(review): the two assignments below belong to the cache
        # disable()/enable() methods whose "def" lines are not visible here.
        self.__initlnOR = self.__lenghtOR
        self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
class Operator(object):
    """
    General interface class for a simple operator, applied either as a
    matrix or as a Python function/method, with optional result caching.

    NOTE(review): the class-level counters (NbCallsAsMatrix, NbCallsAsMethod,
    NbCallsOfCached) and the shared cache attribute "CM" used by the methods
    below are declared on lines not visible in this view.
    """

    def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
        """
        Build the operator object from either of the two keywords:
        - fromMethod : a Python function
        - fromMatrix : any argument accepted by the numpy.matrix constructor
        - avoidingRedundancy : whether redundant evaluations are avoided
        NOTE(review): the final "else:" branch of this constructor (invalid
        arguments) is not visible in this view.
        """
        self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
        self.__AvoidRC = bool( avoidingRedundancy )
        if fromMethod is not None:
            self.__Method = fromMethod # logtimer(fromMethod)
            self.__Type = "Method"
        elif fromMatrix is not None:
            # NOTE(review): numpy.float is removed in NumPy >= 1.24 — confirm
            # the NumPy version this code targets.
            self.__Matrix = numpy.matrix( fromMatrix, numpy.float )
            self.__Type = "Matrix"
    def disableAvoidingRedundancy(self):
        "Disable the shared calculation cache"
        Operator.CM.disable()
138 def enableAvoidingRedundancy(self):
143 Operator.CM.disable()
    def appliedTo(self, xValue, HValue = None):
        """
        Apply the operator to xValue and return the result.  The method only
        applies the operator: the argument is expected to already be of the
        proper type.
        - xValue : argument suitable for applying the operator
        - HValue : optional precomputed result, stored directly in the cache
        NOTE(review): several control-flow lines (the if/else guards on
        self.__AvoidRC, "HxValue = __HxV" on a cache hit, and the final
        "return HxValue") are not visible in this view; visible code is
        kept verbatim.
        """
        if HValue is not None:
            HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
            # stored in the cache (its enabling guard is not visible)
            Operator.CM.storeValueInX(xValue,HxValue)
            __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
            __alreadyCalculated = False
        if __alreadyCalculated:
            self.__addOneCacheCall()
            # cache-miss evaluation path below (its "else:" line is not visible)
            if self.__Matrix is not None:
                self.__addOneMatrixCall()
                HxValue = self.__Matrix * xValue
                self.__addOneMethodCall()
                HxValue = self.__Method( xValue )
                Operator.CM.storeValueInX(xValue,HxValue)
    def appliedControledFormTo(self, paire ):
        """
        Apply the operator to a (xValue, uValue) pair and return the result.
        The method only applies the operator: the arguments are expected to
        already be of the proper type.  If uValue is None, the operator is
        assumed to apply to xValue only.
        - xValue : X argument suitable for applying the operator
        - uValue : U argument suitable for applying the operator
        NOTE(review): the "else:" line introducing the final branch is not
        visible in this view.
        """
        assert len(paire) == 2, "Incorrect number of arguments"
        xValue, uValue = paire
        if self.__Matrix is not None:
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
        elif uValue is not None:
            self.__addOneMethodCall()
            return self.__Method( (xValue, uValue) )
            # no-control branch below
            self.__addOneMethodCall()
            return self.__Method( xValue )
    def appliedInXTo(self, paire ):
        """
        Apply the operator, valid at the point xNominal, to the argument
        xValue.  If the operator is linear (a matrix) it is valid at any
        nominal point and the nominal value is not needed.
        Arguments: a pair containing
        - xNominal : point around which the operator is built before being applied
        - xValue : argument suitable for applying the operator
        NOTE(review): the "else:" line introducing the functional branch is
        not visible in this view.
        """
        assert len(paire) == 2, "Incorrect number of arguments"
        xNominal, xValue = paire
        if self.__Matrix is not None:
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
            # functional branch below
            self.__addOneMethodCall()
            return self.__Method( (xNominal, xValue) )
226 def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
228 Permet de renvoyer l'opérateur sous la forme d'une matrice
230 if self.__Matrix is not None:
231 self.__addOneMatrixCall()
233 elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
234 self.__addOneMethodCall()
235 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
237 raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
        # NOTE(review): the "def shape(self):" header of this method is not
        # visible in this view; the body below is kept verbatim.
        """
        Return the numpy shape if the operator is available in matrix form.
        NOTE(review): the "else:" line before the raise is not visible.
        """
        if self.__Matrix is not None:
            return self.__Matrix.shape
            raise ValueError("Matrix form of the operator is not available, nor the shape")
    def nbcalls(self, which=None):
        """
        Return the numbers of operator evaluations: the per-instance counters
        (total, as matrix, as method, cached) followed by the class-wide
        counters, either as the full tuple (which=None) or one entry by index.
        NOTE(review): the "__nbcalls = (" opening line of the tuple below is
        not visible in this view.
        """
            self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
            self.__NbCallsAsMatrix,
            self.__NbCallsAsMethod,
            self.__NbCallsOfCached,
            Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
            Operator.NbCallsAsMatrix,
            Operator.NbCallsAsMethod,
            Operator.NbCallsOfCached,
        if which is None: return __nbcalls
        else: return __nbcalls[which]
266 def __addOneMatrixCall(self):
267 "Comptabilise un appel"
268 self.__NbCallsAsMatrix += 1 # Decompte local
269 Operator.NbCallsAsMatrix += 1 # Decompte global
271 def __addOneMethodCall(self):
272 "Comptabilise un appel"
273 self.__NbCallsAsMethod += 1 # Decompte local
274 Operator.NbCallsAsMethod += 1 # Decompte global
276 def __addOneCacheCall(self):
277 "Comptabilise un appel"
278 self.__NbCallsOfCached += 1 # Decompte local
279 Operator.NbCallsOfCached += 1 # Decompte global
281 # ==============================================================================
class FullOperator(object):
    """
    General interface class for a full operator
    (Direct, Tangent-Linear, Adjoint).

    NOTE(review): this view is a partially-sampled dump of the file — the
    "def __init__(self," line, several signature entries, "else:"/"elif"
    lines and closing brackets are missing.  All visible code lines are
    kept verbatim.
    """
        name = "GenericFullOperator",
        asOneFunction = None, # Function
        asThreeFunctions = None, # Dictionary of functions
        asDict = None, # Parameters
        self.__name = str(name)
        self.__check = bool(toBeChecked)
        # Collect optional numerical-derivative settings from asDict
        if (asDict is not None) and isinstance(asDict, dict):
            __Parameters.update( asDict )
            if "DifferentialIncrement" in asDict:
                __Parameters["withIncrement"] = asDict["DifferentialIncrement"]
            if "CenteredFiniteDifference" in asDict:
                __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
            if "EnableMultiProcessing" in asDict:
                __Parameters["withmpEnabled"] = asDict["EnableMultiProcessing"]
            if "NumberOfProcesses" in asDict:
                __Parameters["withmpWorkers"] = asDict["NumberOfProcesses"]
        # Definition loaded from a user script
        if asScript is not None:
            __Matrix, __Function = None, None
            __Matrix = ImportFromScript(asScript).getvalue( self.__name )
            __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
            __Function.update({"useApproximatedDerivatives":True})
            __Function.update(__Parameters)
            elif asThreeFunctions:
                "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
                "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
                "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
                __Function.update(__Parameters)
        # Definition given directly as function(s)
        if asOneFunction is not None:
            if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
                if asOneFunction["Direct"] is not None:
                    __Function = asOneFunction
                raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
                __Function = { "Direct":asOneFunction }
                __Function.update({"useApproximatedDerivatives":True})
                __Function.update(__Parameters)
        elif asThreeFunctions is not None:
            if isinstance(asThreeFunctions, dict) and \
                ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
                ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
                (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
                __Function = asThreeFunctions
            elif isinstance(asThreeFunctions, dict) and \
                ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
                __Function = asThreeFunctions
                __Function.update({"useApproximatedDerivatives":True})
                raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
            if "Direct" not in asThreeFunctions:
                __Function["Direct"] = asThreeFunctions["Tangent"]
            __Function.update(__Parameters)
        # if sys.version_info[0] < 3 and isinstance(__Function, dict):
        #     for k in ("Direct", "Tangent", "Adjoint"):
        #         if k in __Function and hasattr(__Function[k],"__class__"):
        #             if type(__Function[k]) is type(self.__init__):
        #                 raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
        # Optional precomputed applications of the operator, e.g. H(Xb)
        if appliedInX is not None and isinstance(appliedInX, dict):
            __appliedInX = appliedInX
        elif appliedInX is not None:
            __appliedInX = {"HXb":appliedInX}
        if scheduledBy is not None:
            self.__T = scheduledBy
        # Case 1: approximated derivatives by finite differences around "Direct"
        if isinstance(__Function, dict) and \
            ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
            ("Direct" in __Function) and (__Function["Direct"] is not None):
            if "withCenteredDF" not in __Function: __Function["withCenteredDF"] = False
            if "withIncrement" not in __Function: __Function["withIncrement"] = 0.01
            if "withdX" not in __Function: __Function["withdX"] = None
            if "withAvoidingRedundancy" not in __Function: __Function["withAvoidingRedundancy"] = True
            if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
            if "withLenghtOfRedundancy" not in __Function: __Function["withLenghtOfRedundancy"] = -1
            if "withmpEnabled" not in __Function: __Function["withmpEnabled"] = False
            if "withmpWorkers" not in __Function: __Function["withmpWorkers"] = None
            from daNumerics.ApproximatedDerivatives import FDApproximation
            FDA = FDApproximation(
                Function = __Function["Direct"],
                centeredDF = __Function["withCenteredDF"],
                increment = __Function["withIncrement"],
                dX = __Function["withdX"],
                avoidingRedundancy = __Function["withAvoidingRedundancy"],
                toleranceInRedundancy = __Function["withToleranceInRedundancy"],
                lenghtOfRedundancy = __Function["withLenghtOfRedundancy"],
                mpEnabled = __Function["withmpEnabled"],
                mpWorkers = __Function["withmpWorkers"],
            self.__FO["Direct"] = Operator( fromMethod = FDA.DirectOperator, avoidingRedundancy = avoidRC )
            self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
            self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
        # Case 2: the three functions are given explicitly
        elif isinstance(__Function, dict) and \
            ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
            (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
            self.__FO["Direct"] = Operator( fromMethod = __Function["Direct"], avoidingRedundancy = avoidRC )
            self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
            self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
        # Case 3: a matrix is given; the tangent is the matrix, the adjoint its transpose
        elif asMatrix is not None:
            # NOTE(review): numpy.float is removed in NumPy >= 1.24 — confirm
            # the NumPy version this code targets.
            __matrice = numpy.matrix( __Matrix, numpy.float )
            self.__FO["Direct"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
            self.__FO["Tangent"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
            self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
            raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
        # Normalize the optional precomputed values to column numpy.matrix form
        if __appliedInX is not None:
            self.__FO["AppliedInX"] = {}
            if not isinstance(__appliedInX, dict):
                raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
            for key in list(__appliedInX.keys()):
                if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
                    # Case of a true matrix
                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
                elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
                    # Case of a vector stored as a 2-dimensional array
                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
            self.__FO["AppliedInX"] = None

        # NOTE(review): the "def __repr__(self):" header line is not visible.
        "x.__repr__() <==> repr(x)"
        return repr(self.__V)

        # NOTE(review): the "def __str__(self):" header line is not visible.
        "x.__str__() <==> str(x)"
446 # ==============================================================================
class Algorithm(object):
    """
    General interface class for an assimilation algorithm.

    It provides a frame for writing an elementary algorithm class, supplying
    a container (dictionary) of initialised persistent variables, and access
    methods to these stored variables.

    An elementary algorithm class must implement the "run" method.
    """
    def __init__(self, name):
        """
        The present initialisation builds storage variables generically
        available in elementary algorithms.  These storage variables are then
        kept in a dictionary internal to the object, accessed through the
        "get" method.

        The planned variables are:
        - CostFunctionJ : global cost function, sum of the two following parts
        - CostFunctionJb : background part of the cost function
        - CostFunctionJo : observation part of the cost function
        - GradientOfCostFunctionJ : gradient of the global cost function
        - GradientOfCostFunctionJb : gradient of the background part
        - GradientOfCostFunctionJo : gradient of the observation part
        - CurrentState : current state along iterations
        - Analysis : the analysis Xa
        - SimulatedObservationAtBackground : observed state H(Xb) at background
        - SimulatedObservationAtCurrentState : observed state H(X) at current state
        - SimulatedObservationAtOptimum : observed state H(Xa) at the optimum
        - Innovation : the innovation: d = Y - H(X)
        - InnovationAtCurrentState : innovation at current state: dn = Y - H(Xn)
        - SigmaObs2 : optimal-correction indicator for observation errors
        - SigmaBck2 : optimal-correction indicator for background errors
        - MahalanobisConsistency : covariance consistency indicator
        - OMA : Observation minus Analysis : Y - Xa
        - OMB : Observation minus Background : Y - Xb
        - AMB : Analysis minus Background : Xa - Xb
          (NOTE(review): the stored key below is "BMA" — confirm the naming)
        - APosterioriCovariance : matrix A
        - APosterioriVariances : variances of matrix A
        - APosterioriStandardDeviations : standard deviations of matrix A
        - APosterioriCorrelations : correlations of matrix A
        - Residu : for checking algorithms
        More variables to store may be added in the initialisation of the
        elementary algorithm inheriting from this class.
        """
        logging.debug("%s Initialisation", str(name))
        self._m = PlatformInfo.SystemUsage()
        self._name = str( name )
        self._parameters = {"StoreSupplementaryCalculations":[]}
        self.__required_parameters = {}
        self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
        # One persistence container per named diagnostic
        self.StoredVariables = {}
        self.StoredVariables["CostFunctionJ"] = Persistence.OneScalar(name = "CostFunctionJ")
        self.StoredVariables["CostFunctionJb"] = Persistence.OneScalar(name = "CostFunctionJb")
        self.StoredVariables["CostFunctionJo"] = Persistence.OneScalar(name = "CostFunctionJo")
        self.StoredVariables["CostFunctionJAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
        self.StoredVariables["CostFunctionJbAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
        self.StoredVariables["CostFunctionJoAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
        self.StoredVariables["GradientOfCostFunctionJ"] = Persistence.OneVector(name = "GradientOfCostFunctionJ")
        self.StoredVariables["GradientOfCostFunctionJb"] = Persistence.OneVector(name = "GradientOfCostFunctionJb")
        self.StoredVariables["GradientOfCostFunctionJo"] = Persistence.OneVector(name = "GradientOfCostFunctionJo")
        self.StoredVariables["CurrentState"] = Persistence.OneVector(name = "CurrentState")
        self.StoredVariables["Analysis"] = Persistence.OneVector(name = "Analysis")
        self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
        self.StoredVariables["CurrentOptimum"] = Persistence.OneVector(name = "CurrentOptimum")
        self.StoredVariables["SimulatedObservationAtBackground"] = Persistence.OneVector(name = "SimulatedObservationAtBackground")
        self.StoredVariables["SimulatedObservationAtCurrentState"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
        self.StoredVariables["SimulatedObservationAtOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
        self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
        self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
        self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
        self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
        self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
        self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
        self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
        self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
        self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
        self.StoredVariables["APosterioriCovariance"] = Persistence.OneMatrix(name = "APosterioriCovariance")
        self.StoredVariables["APosterioriVariances"] = Persistence.OneVector(name = "APosterioriVariances")
        self.StoredVariables["APosterioriStandardDeviations"] = Persistence.OneVector(name = "APosterioriStandardDeviations")
        self.StoredVariables["APosterioriCorrelations"] = Persistence.OneMatrix(name = "APosterioriCorrelations")
        self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
        self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")
    def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
        """
        Generic pre-run actions: parameter intake, required-input checking,
        bounds and optimizer-verbosity setup.
        NOTE(review): several "if argument is None:" guards and "else:" lines
        are not visible in this view; visible code is kept verbatim.
        """
        logging.debug("%s Lancement", self._name)
        logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
        #
        # Update self._parameters with Parameters
        self.__setParameters(Parameters)
        #
        # Corrections and complements
        def __test_vvalue( argument, variable, argname):
            # Check one vector input against the declared requirements
            if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
                raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
            elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
                logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
            logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
            logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
        __test_vvalue( Xb, "Xb", "Background or initial state" )
        __test_vvalue( Y, "Y", "Observation" )
        def __test_cvalue( argument, variable, argname):
            # Check one covariance-matrix input against the declared requirements
            if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
                raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
            elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
                logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
            logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
            logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
        __test_cvalue( R, "R", "Observation" )
        __test_cvalue( B, "B", "Background" )
        __test_cvalue( Q, "Q", "Evolution" )
        # Bounds handling
        if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
            self._parameters["Bounds"] = None
        # Optimizer verbosity follows the logging level
        if logging.getLogger().level < logging.WARNING:
            self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
            if PlatformInfo.has_scipy:
                import scipy.optimize
                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
            self._parameters["optmessages"] = 15
            # quiet branch below (its "else:" lines are not visible)
            self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
            if PlatformInfo.has_scipy:
                import scipy.optimize
                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
            self._parameters["optmessages"] = 15
    def _post_run(self,_oH=None):
        """
        Generic post-run actions: derive a-posteriori diagnostics and log the
        operator call counts and memory usage.
        NOTE(review): an "if _oH is not None:" guard before the nbcalls
        logging below is presumably present but not visible in this view —
        confirm, since _oH defaults to None.
        """
        # For each stored a-posteriori covariance matrix, derive the
        # requested variance / standard-deviation / correlation diagnostics
        if ("StoreSupplementaryCalculations" in self._parameters) and \
            "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
            for _A in self.StoredVariables["APosterioriCovariance"]:
                if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
                    self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
                if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
                    self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
                if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
                    # Correlation matrix C = D^-1/2 A D^-1/2 with D = diag(A)
                    _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
                    _C = numpy.dot(_EI, numpy.dot(_A, _EI))
                    self.StoredVariables["APosterioriCorrelations"].store( _C )
        logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
        logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
        logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
        logging.debug("%s Terminé", self._name)
    def get(self, key=None):
        """
        Return the stored variable identified by the key, or the dictionary
        of all available variables when no key is given.  The variables are
        returned as objects, so the individual access methods are those of
        the persistence classes.
        NOTE(review): the "if key is not None:"/"else:" lines separating the
        two returns below are not visible in this view.
        """
            return self.StoredVariables[key]
            return self.StoredVariables
623 def __contains__(self, key=None):
624 "D.__contains__(k) -> True if D has a key k, else False"
625 return key in self.StoredVariables
628 "D.keys() -> list of D's keys"
629 if hasattr(self, "StoredVariables"):
630 return self.StoredVariables.keys()
635 "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636 if hasattr(self, "StoredVariables"):
637 return self.StoredVariables.pop(k, d)
642 raise TypeError("pop expected at least 1 arguments, got 0")
643 "If key is not found, d is returned if given, otherwise KeyError is raised"
    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
        """
        Must implement the elementary assimilation calculation in its most
        natural mathematical form.  To be overridden by concrete algorithms.
        """
        raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
    def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
        """
        Declare a required algorithm parameter and its default
        characteristics (default value, typecast, bounds, allowed values).
        NOTE(review): the "if name is None:" guard and part of the stored
        characteristics dictionary are not visible in this view.
        """
            raise ValueError("A name is mandatory to define a required parameter.")
        self.__required_parameters[name] = {
            "typecast" : typecast,
        logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
    def getRequiredParameters(self, noDetails=True):
        """
        Return the sorted list of required-parameter names, or directly the
        dictionary of required parameters.
        NOTE(review): the "if noDetails:"/"else:" lines separating the two
        returns below are not visible in this view.
        """
            return sorted(self.__required_parameters.keys())
            return self.__required_parameters
    def setParameterValue(self, name=None, value=None):
        """
        Return the value of a required parameter in a controlled way:
        apply the declared typecast, then validate against minval, maxval
        and listval.
        NOTE(review): a few lines (the no-value branch, an "else:", the
        "for v in __val:" loop over list values and the final
        "return __val") are not visible in this view.
        """
        default = self.__required_parameters[name]["default"]
        typecast = self.__required_parameters[name]["typecast"]
        minval = self.__required_parameters[name]["minval"]
        maxval = self.__required_parameters[name]["maxval"]
        listval = self.__required_parameters[name]["listval"]
        # Select the value (explicit value wins over the declared default)
        if value is None and default is None:
        elif value is None and default is not None:
            if typecast is None: __val = default
            else: __val = typecast( default )
            if typecast is None: __val = value
            else: __val = typecast( value )
        # Validate the selected value
        if minval is not None and (numpy.array(__val, float) < minval).any():
            raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
        if maxval is not None and (numpy.array(__val, float) > maxval).any():
            raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
        if listval is not None:
            if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
                raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
            elif __val not in listval:
                raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
    def requireInputArguments(self, mandatory=(), optional=()):
        """
        Declare which input arguments are mandatory and which are optional
        for the algorithm.
        """
        self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
        self.__required_inputs["RequiredInputValues"]["optional"] = tuple( optional )
    def __setParameters(self, fromDico={}):
        """
        Store the received parameters into the internal dictionary.
        NOTE(review): mutable default "fromDico={}" is shared across calls —
        harmless as long as it is only read; confirm.
        NOTE(review): the "else:" line before the default-value branch is
        not visible in this view.
        """
        self._parameters.update( fromDico )
        for k in self.__required_parameters.keys():
            if k in fromDico.keys():
                self._parameters[k] = self.setParameterValue(k,fromDico[k])
                self._parameters[k] = self.setParameterValue(k)
            logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
class AlgorithmAndParameters(object):
    """
    General action-interface class binding an algorithm and its parameters.
    NOTE(review): the "def __init__(self," line and several signature and
    body lines are not visible in this view; visible code is kept verbatim.
    """
        name = "GenericAlgorithm",
        self.__name = str(name)
        self.__algorithm = {}
        self.__algorithmFile = None
        self.__algorithmName = None
        self.updateParameters( asDict, asScript )
        # Optional algorithm selection coming from a user script
        if asScript is not None:
            __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
        if __Algo is not None:
            self.__A = str(__Algo)
            self.__P.update( {"Algorithm":self.__A} )
            self.__setAlgorithm( self.__A )
    def updateParameters(self,
                         # NOTE(review): the parameter lines of this
                         # signature (asDict/asScript) are not visible.
        "Update the stored parameters"
        if asScript is not None:
            __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
        if __Dict is not None:
            self.__P.update( dict(__Dict) )
    def executePythonScheme(self, asDictAO = None):
        "Run the assimilation calculation"
        Operator.CM.clearCache()
        if not isinstance(asDictAO, dict):
            raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
        # Unpack the inputs, unwrapping UI objects through getO() when available
        if hasattr(asDictAO["Background"],"getO"): self.__Xb = asDictAO["Background"].getO()
        elif hasattr(asDictAO["CheckingPoint"],"getO"): self.__Xb = asDictAO["CheckingPoint"].getO()
        else: self.__Xb = None
        if hasattr(asDictAO["Observation"],"getO"): self.__Y = asDictAO["Observation"].getO()
        else: self.__Y = asDictAO["Observation"]
        if hasattr(asDictAO["ControlInput"],"getO"): self.__U = asDictAO["ControlInput"].getO()
        else: self.__U = asDictAO["ControlInput"]
        if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
        else: self.__HO = asDictAO["ObservationOperator"]
        if hasattr(asDictAO["EvolutionModel"],"getO"): self.__EM = asDictAO["EvolutionModel"].getO()
        else: self.__EM = asDictAO["EvolutionModel"]
        if hasattr(asDictAO["ControlModel"],"getO"): self.__CM = asDictAO["ControlModel"].getO()
        else: self.__CM = asDictAO["ControlModel"]
        self.__B = asDictAO["BackgroundError"]
        self.__R = asDictAO["ObservationError"]
        self.__Q = asDictAO["EvolutionError"]
        # Consistency check of the operand shapes before running
        self.__shape_validate()
        # NOTE(review): the other keyword arguments of run() and the closing
        # parenthesis are not visible in this view.
        self.__algorithm.run(
            Parameters = self.__P,
    def executeYACSScheme(self, FileName=None):
        "Run the assimilation calculation"
        if FileName is None or not os.path.exists(FileName):
            raise ValueError("a YACS file name has to be given for YACS execution.\n")
        if not PlatformInfo.has_salome or not PlatformInfo.has_yacs or not PlatformInfo.has_adao:
            raise ImportError("Unable to get SALOME, YACS or ADAO environnement variables. Please launch SALOME before executing.\n")
        # NOTE(review): the SALOME/YACS module import lines and several
        # try/except/if lines are not visible in this view.
        SALOMERuntime.RuntimeSALOME_setRuntime()
        r = pilot.getRuntime()
        xmlLoader = loader.YACSLoader()
        xmlLoader.registerProcCataLoader()
        # Register the scheme's own catalog when present
        catalogAd = r.loadCatalog("proc", os.path.abspath(FileName))
        r.addCatalog(catalogAd)
        # Load and check the XML scheme before execution
        p = xmlLoader.load(os.path.abspath(FileName))
        except IOError as ex:
            print("The YACS XML schema file can not be loaded: %s"%(ex,))
        logger = p.getLogger("parser")
        if not logger.isEmpty():
            print("The imported YACS XML schema has errors on parsing:")
            print(logger.getStr())
        # invalid-scheme branch (its "if not p.isValid():" line is not visible)
        print("The YACS XML schema is not valid and will not be executed:")
        print(p.getErrorReport())
        info=pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
        p.checkConsistency(info)
        if info.areWarningsOrErrors():
            print("The YACS XML schema is not coherent and will not be executed:")
            print(info.getGlobalRepr())
        e = pilot.ExecutorSwig()
        # NOTE(review): the executor run call is not visible in this view.
        if p.getEffectiveState() != pilot.DONE:
            print(p.getErrorReport())
869 def get(self, key = None):
870 "Vérifie l'existence d'une clé de variable ou de paramètres"
871 if key in self.__algorithm:
872 return self.__algorithm.get( key )
873 elif key in self.__P:
879 "Necessaire pour le pickling"
880 return self.__algorithm.pop(k, d)
882 def getAlgorithmRequiredParameters(self, noDetails=True):
883 "Renvoie la liste des paramètres requis selon l'algorithme"
884 return self.__algorithm.getRequiredParameters(noDetails)
886 def setObserver(self, __V, __O, __I, __S):
887 if self.__algorithm is None \
888 or isinstance(self.__algorithm, dict) \
889 or not hasattr(self.__algorithm,"StoredVariables"):
890 raise ValueError("No observer can be build before choosing an algorithm.")
891 if __V not in self.__algorithm:
892 raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
894 self.__algorithm.StoredVariables[ __V ].setDataObserver(
897 HookParameters = __I,
900 def removeObserver(self, __V, __O, __A = False):
901 if self.__algorithm is None \
902 or isinstance(self.__algorithm, dict) \
903 or not hasattr(self.__algorithm,"StoredVariables"):
904 raise ValueError("No observer can be removed before choosing an algorithm.")
905 if __V not in self.__algorithm:
906 raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
908 return self.__algorithm.StoredVariables[ __V ].removeDataObserver(
913 def hasObserver(self, __V):
914 if self.__algorithm is None \
915 or isinstance(self.__algorithm, dict) \
916 or not hasattr(self.__algorithm,"StoredVariables"):
918 if __V not in self.__algorithm:
920 return self.__algorithm.StoredVariables[ __V ].hasDataObserver()
923 return list(self.__algorithm.keys()) + list(self.__P.keys())
925 def __contains__(self, key=None):
926 "D.__contains__(k) -> True if D has a key k, else False"
927 return key in self.__algorithm or key in self.__P
930 "x.__repr__() <==> repr(x)"
931 return repr(self.__A)+", "+repr(self.__P)
934 "x.__str__() <==> str(x)"
935 return str(self.__A)+", "+str(self.__P)
937 def __setAlgorithm(self, choice = None ):
939 Permet de sélectionner l'algorithme à utiliser pour mener à bien l'étude
940 d'assimilation. L'argument est un champ caractère se rapportant au nom
941 d'un algorithme réalisant l'opération sur les arguments fixes.
944 raise ValueError("Error: algorithm choice has to be given")
945 if self.__algorithmName is not None:
946 raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
947 daDirectory = "daAlgorithms"
949 # Recherche explicitement le fichier complet
950 # ------------------------------------------
952 for directory in sys.path:
953 if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
954 module_path = os.path.abspath(os.path.join(directory, daDirectory))
955 if module_path is None:
956 raise ImportError("No algorithm module named \"%s\" has been found in the search path.\n The search path is %s"%(choice, sys.path))
958 # Importe le fichier complet comme un module
959 # ------------------------------------------
961 sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
962 self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
963 if not hasattr(self.__algorithmFile, "ElementaryAlgorithm"):
964 raise ImportError("this module does not define a valid elementary algorithm.")
965 self.__algorithmName = str(choice)
966 sys.path = sys_path_tmp ; del sys_path_tmp
967 except ImportError as e:
968 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
970 # Instancie un objet du type élémentaire du fichier
971 # -------------------------------------------------
972 self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()
975 def __shape_validate(self):
977 Validation de la correspondance correcte des tailles des variables et
978 des matrices s'il y en a.
980 if self.__Xb is None: __Xb_shape = (0,)
981 elif hasattr(self.__Xb,"size"): __Xb_shape = (self.__Xb.size,)
982 elif hasattr(self.__Xb,"shape"):
983 if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
984 else: __Xb_shape = self.__Xb.shape()
985 else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
987 if self.__Y is None: __Y_shape = (0,)
988 elif hasattr(self.__Y,"size"): __Y_shape = (self.__Y.size,)
989 elif hasattr(self.__Y,"shape"):
990 if isinstance(self.__Y.shape, tuple): __Y_shape = self.__Y.shape
991 else: __Y_shape = self.__Y.shape()
992 else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
994 if self.__U is None: __U_shape = (0,)
995 elif hasattr(self.__U,"size"): __U_shape = (self.__U.size,)
996 elif hasattr(self.__U,"shape"):
997 if isinstance(self.__U.shape, tuple): __U_shape = self.__U.shape
998 else: __U_shape = self.__U.shape()
999 else: raise TypeError("The control (U) has no attribute of shape: problem !")
1001 if self.__B is None: __B_shape = (0,0)
1002 elif hasattr(self.__B,"shape"):
1003 if isinstance(self.__B.shape, tuple): __B_shape = self.__B.shape
1004 else: __B_shape = self.__B.shape()
1005 else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
1007 if self.__R is None: __R_shape = (0,0)
1008 elif hasattr(self.__R,"shape"):
1009 if isinstance(self.__R.shape, tuple): __R_shape = self.__R.shape
1010 else: __R_shape = self.__R.shape()
1011 else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
1013 if self.__Q is None: __Q_shape = (0,0)
1014 elif hasattr(self.__Q,"shape"):
1015 if isinstance(self.__Q.shape, tuple): __Q_shape = self.__Q.shape
1016 else: __Q_shape = self.__Q.shape()
1017 else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
1019 if len(self.__HO) == 0: __HO_shape = (0,0)
1020 elif isinstance(self.__HO, dict): __HO_shape = (0,0)
1021 elif hasattr(self.__HO["Direct"],"shape"):
1022 if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
1023 else: __HO_shape = self.__HO["Direct"].shape()
1024 else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
1026 if len(self.__EM) == 0: __EM_shape = (0,0)
1027 elif isinstance(self.__EM, dict): __EM_shape = (0,0)
1028 elif hasattr(self.__EM["Direct"],"shape"):
1029 if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
1030 else: __EM_shape = self.__EM["Direct"].shape()
1031 else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
1033 if len(self.__CM) == 0: __CM_shape = (0,0)
1034 elif isinstance(self.__CM, dict): __CM_shape = (0,0)
1035 elif hasattr(self.__CM["Direct"],"shape"):
1036 if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
1037 else: __CM_shape = self.__CM["Direct"].shape()
1038 else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
1040 # Vérification des conditions
1041 # ---------------------------
1042 if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
1043 raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
1044 if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
1045 raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
1047 if not( min(__B_shape) == max(__B_shape) ):
1048 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
1049 if not( min(__R_shape) == max(__R_shape) ):
1050 raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
1051 if not( min(__Q_shape) == max(__Q_shape) ):
1052 raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
1053 if not( min(__EM_shape) == max(__EM_shape) ):
1054 raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
1056 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
1057 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
1058 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
1059 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
1060 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
1061 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
1062 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
1063 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
1065 if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
1066 if self.__algorithmName in ["EnsembleBlue",]:
1067 asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
1068 self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
1069 for member in asPersistentVector:
1070 self.__Xb.store( numpy.matrix( numpy.ravel(member), numpy.float ).T )
1071 __Xb_shape = min(__B_shape)
1073 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
1075 if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
1076 raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
1078 if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
1079 raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
1081 if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
1082 raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
1084 if ("Bounds" in self.__P) \
1085 and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
1086 and (len(self.__P["Bounds"]) != max(__Xb_shape)):
1087 raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
1088 %(len(self.__P["Bounds"]),max(__Xb_shape)))
1092 # ==============================================================================
1093 class DataObserver(object):
1095 Classe générale d'interface de type observer
1098 name = "GenericObserver",
1110 self.__name = str(name)
1115 if onVariable is None:
1116 raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
1117 elif type(onVariable) in (tuple, list):
1118 self.__V = tuple(map( str, onVariable ))
1119 if withInfo is None:
1122 self.__I = (str(withInfo),)*len(self.__V)
1123 elif isinstance(onVariable, str):
1124 self.__V = (onVariable,)
1125 if withInfo is None:
1126 self.__I = (onVariable,)
1128 self.__I = (str(withInfo),)
1130 raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
1132 if asString is not None:
1133 __FunctionText = asString
1134 elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
1135 __FunctionText = Templates.ObserverTemplates[asTemplate]
1136 elif asScript is not None:
1137 __FunctionText = ImportFromScript(asScript).getstring()
1140 __Function = ObserverF(__FunctionText)
1142 if asObsObject is not None:
1143 self.__O = asObsObject
1145 self.__O = __Function.getfunc()
1147 for k in range(len(self.__V)):
1150 if ename not in withAlgo:
1151 raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
1153 withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)
1156 "x.__repr__() <==> repr(x)"
1157 return repr(self.__V)+"\n"+repr(self.__O)
1160 "x.__str__() <==> str(x)"
1161 return str(self.__V)+"\n"+str(self.__O)
1163 # ==============================================================================
1164 class State(object):
1166 Classe générale d'interface de type état
1169 name = "GenericVector",
1171 asPersistentVector = None,
1174 toBeChecked = False,
1177 Permet de définir un vecteur :
1178 - asVector : entrée des données, comme un vecteur compatible avec le
1179 constructeur de numpy.matrix, ou "True" si entrée par script.
1180 - asPersistentVector : entrée des données, comme une série de vecteurs
1181 compatible avec le constructeur de numpy.matrix, ou comme un objet de
1182 type Persistence, ou "True" si entrée par script.
1183 - asScript : si un script valide est donné contenant une variable
1184 nommée "name", la variable est de type "asVector" (par défaut) ou
1185 "asPersistentVector" selon que l'une de ces variables est placée à
1188 self.__name = str(name)
1189 self.__check = bool(toBeChecked)
1193 self.__is_vector = False
1194 self.__is_series = False
1196 if asScript is not None:
1197 __Vector, __Series = None, None
1198 if asPersistentVector:
1199 __Series = ImportFromScript(asScript).getvalue( self.__name )
1201 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1203 __Vector, __Series = asVector, asPersistentVector
1205 if __Vector is not None:
1206 self.__is_vector = True
1207 self.__V = numpy.matrix( numpy.asmatrix(__Vector).A1, numpy.float ).T
1208 self.shape = self.__V.shape
1209 self.size = self.__V.size
1210 elif __Series is not None:
1211 self.__is_series = True
1212 if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix)):
1213 self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
1214 for member in __Series:
1215 self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, numpy.float ).T )
1216 import sys ; sys.stdout.flush()
1219 if isinstance(self.__V.shape, (tuple, list)):
1220 self.shape = self.__V.shape
1222 self.shape = self.__V.shape()
1223 if len(self.shape) == 1:
1224 self.shape = (self.shape[0],1)
1225 self.size = self.shape[0] * self.shape[1]
1227 raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
1229 if scheduledBy is not None:
1230 self.__T = scheduledBy
1232 def getO(self, withScheduler=False):
1234 return self.__V, self.__T
1235 elif self.__T is None:
1241 "Vérification du type interne"
1242 return self.__is_vector
1245 "Vérification du type interne"
1246 return self.__is_series
1249 "x.__repr__() <==> repr(x)"
1250 return repr(self.__V)
1253 "x.__str__() <==> str(x)"
1254 return str(self.__V)
1256 # ==============================================================================
1257 class Covariance(object):
1259 Classe générale d'interface de type covariance
1262 name = "GenericCovariance",
1263 asCovariance = None,
1264 asEyeByScalar = None,
1265 asEyeByVector = None,
1268 toBeChecked = False,
1271 Permet de définir une covariance :
1272 - asCovariance : entrée des données, comme une matrice compatible avec
1273 le constructeur de numpy.matrix
1274 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1275 multiplicatif d'une matrice de corrélation identité, aucune matrice
1276 n'étant donc explicitement à donner
1277 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1278 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1279 n'étant donc explicitement à donner
1280 - asCovObject : entrée des données comme un objet python, qui a les
1281 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1282 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1283 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1284 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1285 pleine doit être vérifié
1287 self.__name = str(name)
1288 self.__check = bool(toBeChecked)
1291 self.__is_scalar = False
1292 self.__is_vector = False
1293 self.__is_matrix = False
1294 self.__is_object = False
1296 if asScript is not None:
1297 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1299 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1301 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1303 __Object = ImportFromScript(asScript).getvalue( self.__name )
1305 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1307 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
1309 if __Scalar is not None:
1310 if numpy.matrix(__Scalar).size != 1:
1311 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1312 self.__is_scalar = True
1313 self.__C = numpy.abs( float(__Scalar) )
1316 elif __Vector is not None:
1317 self.__is_vector = True
1318 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1319 self.shape = (self.__C.size,self.__C.size)
1320 self.size = self.__C.size**2
1321 elif __Matrix is not None:
1322 self.__is_matrix = True
1323 self.__C = numpy.matrix( __Matrix, float )
1324 self.shape = self.__C.shape
1325 self.size = self.__C.size
1326 elif __Object is not None:
1327 self.__is_object = True
1329 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1330 if not hasattr(self.__C,at):
1331 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1332 if hasattr(self.__C,"shape"):
1333 self.shape = self.__C.shape
1336 if hasattr(self.__C,"size"):
1337 self.size = self.__C.size
1342 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
1346 def __validate(self):
1348 if self.__C is None:
1349 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1350 if self.ismatrix() and min(self.shape) != max(self.shape):
1351 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1352 if self.isobject() and min(self.shape) != max(self.shape):
1353 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1354 if self.isscalar() and self.__C <= 0:
1355 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1356 if self.isvector() and (self.__C <= 0).any():
1357 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1358 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1360 L = numpy.linalg.cholesky( self.__C )
1362 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1363 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1365 L = self.__C.cholesky()
1367 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1370 "Vérification du type interne"
1371 return self.__is_scalar
1374 "Vérification du type interne"
1375 return self.__is_vector
1378 "Vérification du type interne"
1379 return self.__is_matrix
1382 "Vérification du type interne"
1383 return self.__is_object
1388 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1389 elif self.isvector():
1390 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1391 elif self.isscalar():
1392 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1393 elif self.isobject():
1394 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1396 return None # Indispensable
1401 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1402 elif self.isvector():
1403 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1404 elif self.isscalar():
1405 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1406 elif self.isobject():
1407 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
1410 "Décomposition de Cholesky"
1412 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1413 elif self.isvector():
1414 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1415 elif self.isscalar():
1416 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1417 elif self.isobject() and hasattr(self.__C,"cholesky"):
1418 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1420 def choleskyI(self):
1421 "Inversion de la décomposition de Cholesky"
1423 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1424 elif self.isvector():
1425 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1426 elif self.isscalar():
1427 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1428 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1429 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
1431 def diag(self, msize=None):
1432 "Diagonale de la matrice"
1434 return numpy.diag(self.__C)
1435 elif self.isvector():
1437 elif self.isscalar():
1439 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1441 return self.__C * numpy.ones(int(msize))
1442 elif self.isobject():
1443 return self.__C.diag()
1445 def asfullmatrix(self, msize=None):
1449 elif self.isvector():
1450 return numpy.matrix( numpy.diag(self.__C), float )
1451 elif self.isscalar():
1453 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1455 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1456 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1457 return self.__C.asfullmatrix()
1459 def trace(self, msize=None):
1460 "Trace de la matrice"
1462 return numpy.trace(self.__C)
1463 elif self.isvector():
1464 return float(numpy.sum(self.__C))
1465 elif self.isscalar():
1467 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1469 return self.__C * int(msize)
1470 elif self.isobject():
1471 return self.__C.trace()
1477 "x.__repr__() <==> repr(x)"
1478 return repr(self.__C)
1481 "x.__str__() <==> str(x)"
1482 return str(self.__C)
    def __add__(self, other):
        "x.__add__(y) <==> x+y"
        # Dense storage (full matrix or user object): plain matrix addition.
        if self.ismatrix() or self.isobject():
            return self.__C + numpy.asmatrix(other)
        elif self.isvector() or self.isscalar():
            # Sparse storage: add the stored variance(s) onto the diagonal
            # of `other` instead of materializing the full covariance.
            # NOTE(review): numpy.asarray does NOT copy when `other` is
            # already an ndarray, so the in-place `+=` below also mutates
            # the caller's array — confirm this aliasing is intended.
            _A = numpy.asarray(other)
            # The stride `shape[1]+1` over the flattened array selects the
            # diagonal elements; assumes `other` is a square 2D array —
            # TODO confirm with callers.
            _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
            return numpy.asmatrix(_A)
1493 def __radd__(self, other):
1494 "x.__radd__(y) <==> y+x"
1495 raise NotImplementedError("%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other)))
    def __sub__(self, other):
        "x.__sub__(y) <==> x-y"
        # Dense storage (full matrix or user object): plain matrix subtraction.
        if self.ismatrix() or self.isobject():
            return self.__C - numpy.asmatrix(other)
        elif self.isvector() or self.isscalar():
            # Sparse storage: replace the diagonal of `other` by
            # (variance - diagonal), leaving off-diagonal entries untouched.
            # NOTE(review): numpy.asarray does NOT copy when `other` is
            # already an ndarray, so the diagonal assignment below also
            # mutates the caller's array — confirm this aliasing is intended.
            _A = numpy.asarray(other)
            # Stride `shape[1]+1` selects the diagonal of the flattened
            # square matrix; assumes `other` is a square 2D array — TODO confirm.
            _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
            return numpy.asmatrix(_A)
1506 def __rsub__(self, other):
1507 "x.__rsub__(y) <==> y-x"
1508 raise NotImplementedError("%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other)))
1511 "x.__neg__() <==> -x"
1514 def __mul__(self, other):
1515 "x.__mul__(y) <==> x*y"
1516 if self.ismatrix() and isinstance(other,numpy.matrix):
1517 return self.__C * other
1518 elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
1519 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1520 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1521 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1522 return self.__C * numpy.asmatrix(other)
1524 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
1525 elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
1526 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1527 return numpy.asmatrix(self.__C * numpy.ravel(other)).T
1528 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1529 return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
1531 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1532 elif self.isscalar() and isinstance(other,numpy.matrix):
1533 return self.__C * other
1534 elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
1535 if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
1536 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1538 return self.__C * numpy.asmatrix(other)
1539 elif self.isobject():
1540 return self.__C.__mul__(other)
1542 raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
1544 def __rmul__(self, other):
1545 "x.__rmul__(y) <==> y*x"
1546 if self.ismatrix() and isinstance(other,numpy.matrix):
1547 return other * self.__C
1548 elif self.isvector() and isinstance(other,numpy.matrix):
1549 if numpy.ravel(other).size == self.shape[0]: # Vecteur
1550 return numpy.asmatrix(numpy.ravel(other) * self.__C)
1551 elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrice
1552 return numpy.asmatrix(numpy.array(other) * self.__C)
1554 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1555 elif self.isscalar() and isinstance(other,numpy.matrix):
1556 return other * self.__C
1557 elif self.isobject():
1558 return self.__C.__rmul__(other)
1560 raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
1563 "x.__len__() <==> len(x)"
1564 return self.shape[0]
1566 # ==============================================================================
1567 class ObserverF(object):
1569 Creation d'une fonction d'observateur a partir de son texte
1571 def __init__(self, corps=""):
1572 self.__corps = corps
1573 def func(self,var,info):
1574 "Fonction d'observation"
1577 "Restitution du pointeur de fonction dans l'objet"
1580 # ==============================================================================
1581 class CaseLogger(object):
1583 Conservation des commandes de creation d'un cas
1585 def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
1586 self.__name = str(__name)
1587 self.__objname = str(__objname)
1588 self.__logSerie = []
1589 self.__switchoff = False
1590 self.__viewers = self.__loaders = {
1596 if __addViewers is not None:
1597 self.__viewers.update(dict(__addViewers))
1598 if __addLoaders is not None:
1599 self.__loaders.update(dict(__addLoaders))
1601 def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
1602 "Enregistrement d'une commande individuelle"
1603 if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
1604 if "self" in __keys: __keys.remove("self")
1605 self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
1607 self.__switchoff = True
1609 self.__switchoff = False
1611 def dump(self, __filename=None, __format="TUI"):
1612 "Restitution normalisée des commandes (par les *GenericCaseViewer)"
1613 if __format in self.__viewers:
1614 __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
1616 raise ValueError("Dumping as \"%s\" is not available"%__format)
1617 return __formater.dump(__filename)
1619 def load(self, __filename=None, __format="TUI"):
1620 "Chargement normalisé des commandes"
1621 if __format in self.__loaders:
1622 __formater = self.__loaders[__format]()
1624 raise ValueError("Loading as \"%s\" is not available"%__format)
1625 return __formater.load(__filename)
1627 # ==============================================================================
1628 class GenericCaseViewer(object):
# Abstract base class for all case viewers: it accumulates generated text
# lines (self._lineSerie) and provides dump/load; subclasses implement
# _append (command -> record) and _extract (record -> commands).
# NOTE(review): this extract dropped original lines (gaps in the embedded
# numbering, e.g. 1629/1631 docstring delimiters); remaining lines are
# kept byte-identical below.
1630 Gestion des commandes de creation d'une vue de cas
1632 def __init__(self, __name="", __objname="case", __content=None):
1633 "Initialisation et enregistrement de l'entete"
# Case name, target object name, and the accumulated output lines.
1634 self._name = str(__name)
1635 self._objname = str(__objname)
1636 self._lineSerie = []
1637 self._switchoff = False
1638 self._numobservers = 2
1639 self._content = __content
# Stub inserted in generated scripts when a value is a callable that
# cannot be serialized: it raises with an explicit message at run time.
1640 self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
# NOTE(review): the `def _append(...)` header (orig line 1641) is missing
# from this extract; only its docstring and body remain.
1642 "Transformation de commande individuelle en enregistrement"
1643 raise NotImplementedError()
# NOTE(review): the `def _extract(...)` header (orig line 1644) is missing
# from this extract.
1645 "Transformation d'enregistrement en commande individuelle"
1646 raise NotImplementedError()
1647 def _finalize(self):
1648 "Enregistrement du final"
# NOTE(review): orig line 1649 (presumably `pass`) is missing.
1650 def _addLine(self, line=""):
1651 "Ajoute un enregistrement individuel"
1652 self._lineSerie.append(line)
1653 def dump(self, __filename=None):
1654 "Restitution normalisée des commandes"
# NOTE(review): orig lines 1655 and 1657 are missing from this extract.
1656 __text = "\n".join(self._lineSerie)
# Optional write of the joined text to an absolute file path.
1658 if __filename is not None:
1659 __file = os.path.abspath(__filename)
1660 __fid = open(__file,"w")
# NOTE(review): orig lines 1661-1663 (presumably write/close and the
# `return __text`) are missing from this extract.
1664 def load(self, __filename=None):
1665 "Chargement normalisé des commandes"
# Read the file content if it exists, then delegate parsing to _extract.
1666 if os.path.exists(__filename):
1667 self._content = open(__filename, 'r').read()
1668 __commands = self._extract(self._content)
# NOTE(review): orig lines 1669-1670 (presumably `return __commands`) are
# missing from this extract.
1671 # --> Inutile d'accrocher l'interpretation au cas
1672 # def _interpret(self):
1673 # "Interprétation d'une commande"
1674 # raise NotImplementedError()
1675 # def execCase(self, __filename=None):
1676 # "Exécution normalisée des commandes"
1677 # if os.path.exists(__filename):
1678 # self._content = open(__filename, 'r').read()
1679 # __retcode = self._interpret(self._content)
1682 class _TUIViewer(GenericCaseViewer):
# Viewer that renders the logged case as an executable ADAO TUI (Python)
# script, and extracts commands back from such a script.
# NOTE(review): this extract dropped original lines (gaps in embedded
# numbering); remaining lines are kept byte-identical below.
1684 Etablissement des commandes d'un cas TUI
1686 def __init__(self, __name="", __objname="case", __content=None):
1687 "Initialisation et enregistrement de l'entete"
1688 GenericCaseViewer.__init__(self, __name, __objname, __content)
# Script header: coding line, banner, imports, and case creation line.
1689 self._addLine("# -*- coding: utf-8 -*-")
1690 self._addLine("#\n# Python script for ADAO TUI\n#")
1691 self._addLine("from numpy import array, matrix")
1692 self._addLine("import adaoBuilder")
1693 self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
# Replay an already recorded command series, if one was given.
1694 if self._content is not None:
1695 for command in self._content:
1696 self._append(*command)
1697 def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
1698 "Transformation d'une commande individuelle en un enregistrement"
1699 if __command is not None and __keys is not None and __local is not None:
# NOTE(review): orig line 1700 (presumably `__text = ""`) is missing.
1701 if __pre is not None:
1702 __text += "%s = "%__pre
1703 __text += "%s.%s( "%(self._objname,str(__command))
# Drop bookkeeping keys that must not appear as call arguments.
1704 if "self" in __keys: __keys.remove("self")
1705 if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
# NOTE(review): orig lines 1706-1707 (presumably the `for k in __keys:`
# loop header and `__v = __local[k]`) are missing from this extract.
1708 if __v is None: continue
# Skip options left at their default value so the generated call stays short.
1709 if k == "Checked" and not __v: continue
1710 if k == "Stored" and not __v: continue
1711 if k == "AvoidRC" and __v: continue
1712 if k == "noDetails": continue
1713 if isinstance(__v,Persistence.Persistence): __v = __v.values()
# A callable cannot be serialized: prepend the self._missing stub so the
# generated script fails with an explicit message when executed.
1714 if callable(__v): __text = self._missing%__v.__name__+__text
1715 if isinstance(__v,dict):
1716 for val in __v.values():
1717 if callable(val): __text = self._missing%val.__name__+__text
# Temporarily widen numpy printing so repr(__v) is complete, then restore.
1718 numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
1719 __text += "%s=%s, "%(k,repr(__v))
1720 numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
# NOTE(review): orig lines 1721-1722 (closing of the generated call) are
# missing from this extract.
1723 self._addLine(__text)
1724 def _extract(self, __content=""):
1725 "Transformation un enregistrement en une commande individuelle"
# NOTE(review): orig lines 1726-1727 (initializations, presumably
# `__commands = []` at least) are missing from this extract.
1728 __content = __content.replace("\r\n","\n")
1729 for line in __content.split("\n"):
# Recover the case object name from the "xxx = adaoBuilder.New(...)" line.
1730 if "adaoBuilder.New" in line and "=" in line:
1731 self._objname = line.split("=")[0].strip()
# NOTE(review): orig lines 1732-1735 are missing from this extract.
1736 if self._objname+".set" in line:
# Keep only the "set...( ... )" part of each command of the case object.
1737 __commands.append( line.replace(self._objname+".","",1) )
# NOTE(review): orig lines 1738-1739 (presumably `return __commands`) are
# missing from this extract.
1740 # def _interpret(self, __content=""):
1741 # "Interprétation d'une commande"
1742 # __content = __content.replace("\r\n","\n")
1746 class _DCTViewer(GenericCaseViewer):
# Viewer that renders the logged case as a DCT (dictionary of commands)
# Python script, and extracts commands back from such a dictionary.
# NOTE(review): this extract dropped original lines (gaps in embedded
# numbering); remaining lines are kept byte-identical below.
1748 Etablissement des commandes d'un cas DCT
1750 def __init__(self, __name="", __objname="case", __content=None):
1751 "Initialisation et enregistrement de l'entete"
1752 GenericCaseViewer.__init__(self, __name, __objname, __content)
# Counter used to disambiguate multiple observers on the same variable.
1753 self._observerIndex = 0
1754 self._addLine("# -*- coding: utf-8 -*-")
1755 self._addLine("#\n# Python script for ADAO DCT\n#")
1756 self._addLine("from numpy import array, matrix")
1758 self._addLine("%s = {}"%__objname)
1759 if self._content is not None:
1760 for command in self._content:
1761 self._append(*command)
1762 def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
1763 "Transformation d'une commande individuelle en un enregistrement"
1764 if __command is not None and __keys is not None and __local is not None:
# NOTE(review): orig line 1765 (presumably `__text = ""`) is missing.
# Execution commands have no dictionary counterpart: nothing to record.
1766 if "execute" in __command: return
1767 if "self" in __keys: __keys.remove("self")
# Dictionary key is either the explicit Concept or derived from set/get name.
1768 if __command in ("set","get") and "Concept" in __keys:
1769 __key = __local["Concept"]
1770 __keys.remove("Concept")
# NOTE(review): orig line 1771 (presumably `else:`) is missing.
1772 __key = __command.replace("set","").replace("get","")
# Observers are numbered so several on one variable get distinct keys.
1773 if "Observer" in __key and 'Variable' in __keys:
1774 self._observerIndex += 1
1775 __key += "_%i"%self._observerIndex
1776 __text += "%s['%s'] = {"%(self._objname,str(__key))
# NOTE(review): orig lines 1777-1778 (presumably the `for k in __keys:`
# loop header and `__v = __local[k]`) are missing from this extract.
1779 if __v is None: continue
1780 if k == "Checked" and not __v: continue
1781 if k == "Stored" and not __v: continue
1782 if k == "AvoidRC" and __v: continue
1783 if k == "noDetails": continue
1784 if isinstance(__v,Persistence.Persistence): __v = __v.values()
1785 if callable(__v): __text = self._missing%__v.__name__+__text
1786 if isinstance(__v,dict):
1787 for val in __v.values():
1788 if callable(val): __text = self._missing%val.__name__+__text
1789 numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
1790 __text += "'%s':%s, "%(k,repr(__v))
1791 numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
# NOTE(review): BUG — str.rstrip returns a new string and the result is
# discarded here; this line is a no-op. Likely intended
# `__text = __text.rstrip(", ").rstrip()`.
1792 __text.rstrip(", ").rstrip()
# NOTE(review): orig line 1793 (presumably `__text += "}"`) is missing.
1794 if __text[-2:] == "{}": return # Supprime les *Debug et les variables
1795 self._addLine(__text)
1796 def _extract(self, __content=""):
1797 "Transformation un enregistrement en une commande individuelle"
# NOTE(review): orig lines 1798-1799 (presumably `__commands = []` etc.)
# are missing from this extract.
1800 __content = __content.replace("\r\n","\n")
# Execute the DCT script and look for the dict holding the case
# (recognized by its 'AlgorithmParameters' key) among the local names.
# NOTE(review): orig line 1801 (presumably the `exec(__content)` call) is
# missing from this extract.
1802 self._objdata = None
1803 __getlocals = locals()
1804 for k in __getlocals:
# NOTE(review): orig lines 1805, 1807, 1809-1810 (try/except wrappers)
# are missing from this extract.
1806 if 'AlgorithmParameters' in __getlocals[k] and type(__getlocals[k]) is dict:
1808 self._objdata = __getlocals[k]
1811 if self._objdata is None:
1812 raise ValueError("Impossible to load given content as a ADAO DCT one (no 'AlgorithmParameters' key found).")
# Rebuild one "set( Concept=..., ... )" command per dictionary entry;
# numbered Observer_<i> keys are collapsed back to their base command.
1813 for k in self._objdata:
1814 if 'Observer_' in k:
1815 __command = k.split('_',1)[0]
# NOTE(review): orig lines 1816-1817 (presumably `else:` and
# `__command = k`) are missing from this extract.
1818 __arguments = ["%s = %s"%(k,repr(v)) for k,v in self._objdata[k].items()]
1819 __commands.append( "set( Concept='%s', %s )"%(__command, ", ".join(__arguments)))
1820 __commands.sort() # Pour commencer par 'AlgorithmParameters'
1823 class _SCDViewer(GenericCaseViewer):
# Viewer that renders the logged case as an SCD (Study Config Dictionary)
# Python script, the input format of the ADAO-to-YACS converter.
# NOTE(review): this extract dropped original lines (gaps in embedded
# numbering); remaining lines are kept byte-identical below.
1825 Etablissement des commandes d'un cas SCD (Study Config Dictionary)
1827 def __init__(self, __name="", __objname="case", __content=None):
1828 "Initialisation et enregistrement de l'entete"
1829 GenericCaseViewer.__init__(self, __name, __objname, __content)
# Fixed SCD preamble: study type/name, observers dict, and the default
# input/output variable blocks expected by the converter.
1830 self._addLine("# -*- coding: utf-8 -*-")
1831 self._addLine("#\n# Input for ADAO converter to YACS\n#")
1832 self._addLine("from numpy import array, matrix")
1834 self._addLine("study_config = {}")
1835 self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
1836 self._addLine("study_config['Name'] = '%s'"%self._name)
1837 self._addLine("observers = {}")
1838 self._addLine("study_config['Observers'] = observers")
1840 self._addLine("inputvariables_config = {}")
1841 self._addLine("inputvariables_config['Order'] =['adao_default']")
1842 self._addLine("inputvariables_config['adao_default'] = -1")
1843 self._addLine("study_config['InputVariables'] = inputvariables_config")
1845 self._addLine("outputvariables_config = {}")
1846 self._addLine("outputvariables_config['Order'] = ['adao_default']")
1847 self._addLine("outputvariables_config['adao_default'] = -1")
1848 self._addLine("study_config['OutputVariables'] = outputvariables_config")
1849 if __content is not None:
1850 for command in __content:
1851 self._append(*command)
1852 def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
1853 "Transformation d'une commande individuelle en un enregistrement"
# Normalize the command name: "set" uses the Concept value, "setXxx"
# becomes "Xxx".
1854 if __command == "set": __command = __local["Concept"]
1855 else: __command = __command.replace("set", "", 1)
# NOTE(review): orig lines 1856-1857 (presumably `__text = None`) are
# missing from this extract.
# Commands with no SCD counterpart are ignored.
1858 if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get'):
# NOTE(review): orig line 1859 (the branch body, presumably `return`) is
# missing from this extract.
1860 elif __command in ['Debug', 'setDebug']:
1861 __text = "#\nstudy_config['Debug'] = '1'"
1862 elif __command in ['NoDebug', 'setNoDebug']:
1863 __text = "#\nstudy_config['Debug'] = '0'"
# Observers: emit one observers['<var>'] dict with nodetype/source/info.
1864 elif __command in ['Observer', 'setObserver']:
1865 __obs = __local['Variable']
1866 self._numobservers += 1
# NOTE(review): orig line 1867 (presumably `__text = "#\n"`) is missing.
1868 __text += "observers['%s'] = {}\n"%__obs
1869 if __local['String'] is not None:
1870 __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
1871 __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
1872 if __local['Script'] is not None:
1873 __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
1874 __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
1875 if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
1876 __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
1877 __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
1878 if __local['Info'] is not None:
1879 __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
# NOTE(review): orig line 1880 (presumably `else:`) is missing.
1881 __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
1882 __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
# General case: build the <command>_config dict (Type/From/Data).
1883 elif __local is not None: # __keys is not None and
# Widen numpy printing so repr() of large arrays is complete; restored
# at orig line 1948 below.
1884 numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
# NOTE(review): orig line 1885 (presumably `__text = "#\n"`) is missing.
1886 __text += "%s_config = {}\n"%__command
1887 if 'self' in __local: __local.pop('self')
# Two passes: collect None-valued keys first, then remove them, to avoid
# mutating the dict while iterating it.
1888 __to_be_removed = []
1889 for __k,__v in __local.items():
1890 if __v is None: __to_be_removed.append(__k)
1891 for __k in __to_be_removed:
# NOTE(review): orig line 1892 (presumably `__local.pop(__k)`) is missing.
1893 for __k,__v in __local.items():
1894 if __k == "Concept": continue
# A Script source overrides any inline matrix/function data.
1895 if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
1896 if __k == 'Algorithm':
1897 __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
1898 elif __k == 'Script':
# NOTE(review): orig lines 1899-1900 (presumably `__k = 'Script'` and
# `__f = 'Script'`) are missing from this extract.
1901 __v = "'"+repr(__v)+"'"
# The scripted data keeps the concrete matrix kind as its Type.
1902 for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
1903 if __lk in __local and __local[__lk]: __k = __lk
1904 if __command == "AlgorithmParameters": __k = "Dict"
# One user function: generate the ScriptWithOneFunction block reusing
# the same script for Direct/Tangent/Adjoint.
1905 if 'OneFunction' in __local and __local['OneFunction']:
1906 __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
1907 __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
1908 __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
1909 __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
1910 __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
1911 __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
1912 __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
1913 __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
# NOTE(review): orig line 1914 (presumably `__k = 'Function'`) is missing.
1915 __f = 'ScriptWithOneFunction'
1916 __v = '%s_ScriptWithOneFunction'%(__command,)
# Three user functions: ScriptWithFunctions block, same structure.
1917 if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
1918 __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
1919 __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
1920 __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
1921 __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
1922 __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
1923 __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
# NOTE(review): orig line 1924 (presumably `__k = 'Function'`) is missing.
1925 __f = 'ScriptWithFunctions'
1926 __v = '%s_ScriptWithFunctions'%(__command,)
1927 __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
1928 __text += "%s_config['From'] = '%s'\n"%(__command,__f)
1929 __text += "%s_config['Data'] = %s\n"%(__command,__v)
1930 __text = __text.replace("''","'")
1931 elif __k in ('Stored', 'Checked'):
# NOTE(review): orig line 1932 (presumably `if bool(__v):`) is missing.
1933 __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
1934 elif __k in ('AvoidRC', 'noDetails'):
# NOTE(review): orig line 1935 (presumably `if not bool(__v):`) is missing.
1936 __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
# NOTE(review): orig line 1937 (presumably `else:`) is missing.
1938 if __k == 'Parameters': __k = "Dict"
1939 if isinstance(__v,Persistence.Persistence): __v = __v.values()
# Callables cannot be serialized: prepend the self._missing stub so the
# generated SCD fails with an explicit message when executed.
1940 if callable(__v): __text = self._missing%__v.__name__+__text
1941 if isinstance(__v,dict):
1942 for val in __v.values():
1943 if callable(val): __text = self._missing%val.__name__+__text
1944 __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
1945 __text += "%s_config['From'] = '%s'\n"%(__command,'String')
1946 __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
1947 __text += "study_config['%s'] = %s_config"%(__command,__command)
1948 numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
# NOTE(review): orig line 1949 (presumably `if __switchoff:`) is missing.
1950 self._switchoff = True
1951 if __text is not None: self._addLine(__text)
# NOTE(review): orig line 1952 (presumably `if not __switchoff:`) is missing.
1953 self._switchoff = False
1954 def _finalize(self):
# Append the default UserPostAnalysis block, after resolving variables
# that were given through scripts.
1955 self.__loadVariablesByScript()
# NOTE(review): orig line 1956 (presumably `self._addLine("#")`) is missing.
1957 self._addLine("Analysis_config = {}")
1958 self._addLine("Analysis_config['From'] = 'String'")
1959 self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
1960 self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
# NOTE(review): the generated post-analysis uses Python-2 `print`
# statement syntax; it will not run under a Python-3 interpreter.
1961 self._addLine("print 'Analysis:',xa\"\"\"")
1962 self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
1963 def __loadVariablesByScript(self):
# Execute the SCD generated so far to inspect the study_config dict,
# and backfill the Algorithm / AlgorithmParameters entries when they
# were only provided through an external script.
1964 __ExecVariables = {} # Necessaire pour recuperer la variable
1965 exec("\n".join(self._lineSerie), __ExecVariables)
1966 study_config = __ExecVariables['study_config']
1967 # Pour Python 3 : self.__hasAlgorithm = bool(study_config['Algorithm'])
1968 if 'Algorithm' in study_config:
1969 self.__hasAlgorithm = True
# NOTE(review): orig line 1970 (presumably `else:`) is missing.
1971 self.__hasAlgorithm = False
# Algorithm only available through a script: import it to get the name.
1972 if not self.__hasAlgorithm and \
1973 "AlgorithmParameters" in study_config and \
1974 isinstance(study_config['AlgorithmParameters'], dict) and \
1975 "From" in study_config['AlgorithmParameters'] and \
1976 "Data" in study_config['AlgorithmParameters'] and \
1977 study_config['AlgorithmParameters']['From'] == 'Script':
1978 __asScript = study_config['AlgorithmParameters']['Data']
1979 __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
1980 __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
1981 self._addLine(__text)
# Algorithm present but no parameters block: emit an empty Dict one.
1982 if self.__hasAlgorithm and \
1983 "AlgorithmParameters" in study_config and \
1984 isinstance(study_config['AlgorithmParameters'], dict) and \
1985 "From" not in study_config['AlgorithmParameters'] and \
1986 "Data" not in study_config['AlgorithmParameters']:
# NOTE(review): orig line 1987 (presumably `__text = "#\n..."`) is missing.
1988 __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
1989 __text += "AlgorithmParameters_config['From'] = 'String'\n"
1990 __text += "AlgorithmParameters_config['Data'] = '{}'\n"
1991 self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Builder of the commands for an XML case view — not implemented yet.
    """
    def __init__(self, __name="", __objname="case", __content=None):
        "Initialisation et enregistrement de l'entete"
        GenericCaseViewer.__init__(self, __name, __objname, __content)
        # XML rendering is unavailable: fail loudly on any attempt to use it.
        raise NotImplementedError()
2003 class _YACSViewer(GenericCaseViewer):
# Viewer that produces a YACS XML scheme: it delegates recording to an
# internal _SCDViewer and converts the resulting SCD to YACS, either
# in memory or through the external AdaoYacsSchemaCreator script.
# NOTE(review): this extract dropped original lines (gaps in embedded
# numbering); remaining lines are kept byte-identical below.
2005 Etablissement des commandes de creation d'un cas YACS
2007 def __init__(self, __name="", __objname="case", __content=None):
2008 "Initialisation et enregistrement de l'entete"
2009 GenericCaseViewer.__init__(self, __name, __objname, __content)
# All command recording is forwarded to the internal SCD viewer.
2010 self.__internalSCD = _SCDViewer(__name, __objname, __content)
2011 self._append = self.__internalSCD._append
2012 def dump(self, __filename=None, __convertSCDinMemory=True):
2013 "Restitution normalisée des commandes"
2014 self.__internalSCD._finalize()
# A YACS scheme is a file by nature: an output name is mandatory.
2016 if __filename is None:
2017 raise ValueError("A file name has to be given for YACS XML output.")
# NOTE(review): orig line 2018 (presumably a comment separator) is missing.
2019 if not PlatformInfo.has_salome or \
2020 not PlatformInfo.has_adao:
2021 raise ImportError("\n\n"+\
2022 "Unable to get SALOME or ADAO environnement variables.\n"+\
2023 "Please load the right environnement before trying to use it.\n")
# In-memory path: convert the SCD text directly with the schema creator.
2024 elif __convertSCDinMemory:
2025 __file = os.path.abspath(__filename)
2026 __SCDdump = self.__internalSCD.dump()
2027 if os.path.isfile(__file) or os.path.islink(__file):
# NOTE(review): orig line 2028 (presumably `os.remove(__file)`) is missing.
2029 from daYacsSchemaCreator.run import create_schema_from_content
2030 create_schema_from_content(__SCDdump, __file)
# File path: dump the SCD next to the target and run the converter script.
# NOTE(review): orig line 2031 (presumably `else:`) is missing.
2032 __file = os.path.abspath(__filename)
2033 __SCDfile = __file[:__file.rfind(".")] + '_SCD.py'
2034 __SCDdump = self.__internalSCD.dump(__SCDfile)
2035 if os.path.isfile(__file) or os.path.islink(__file):
# NOTE(review): orig line 2036 (presumably `os.remove(__file)`) is missing.
2037 __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
2038 __args = ["python", __converterExe, __SCDfile, __file]
# NOTE(review): orig line 2039 is missing from this extract.
2040 __p = subprocess.Popen(__args)
2041 (__stdoutdata, __stderrdata) = __p.communicate()
# Remove the intermediate SCD file once the conversion is done.
2043 os.remove(__SCDfile)
# NOTE(review): orig lines 2042 and 2044 (presumably a comment separator)
# are missing from this extract.
2045 if not os.path.exists(__file):
2046 # logging.debug("-- Error YacsSchemaCreator with convert SCD in memory=%s --"%__convertSCDinMemory)
2047 # logging.debug("-- Content of the file : --")
2048 # logging.debug(__SCDdump)
2049 __msg = "An error occured during the ADAO YACS Schema build for\n"
2050 __msg += "the target output file:\n"
2051 __msg += " %s\n"%__file
2052 __msg += "See errors details in your launching terminal log.\n"
2053 raise ValueError(__msg)
# NOTE(review): orig line 2054 (presumably `else:`) is missing.
2055 __fid = open(__file,"r")
2056 __text = __fid.read()
# NOTE(review): orig lines 2057-2059 (presumably close and
# `return __text`) are missing from this extract.
# ==============================================================================
class ImportFromScript(object):
    """
    Import of a named variable from a user Python script file.

    The file name may be given with or without the trailing ".py"
    extension; the script is imported as a module (it must be reachable
    through sys.path or the current directory) and its source text is
    kept for later retrieval.
    """
    def __init__(self, __filename=None):
        "Check the script file existence and import it"
        # Validate BEFORE touching the value: the previous code called
        # __filename.rstrip(...) first, so a None argument raised
        # AttributeError instead of the intended explicit ValueError.
        if __filename is None:
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        # Strip only a genuine trailing ".py" extension. The previous
        # __filename.rstrip(".py") removed any trailing run of '.', 'p'
        # or 'y' characters, mangling names like "happy.py" into "ha".
        if __filename.endswith(".py"):
            self.__filename = __filename[:-3]
        else:
            self.__filename = __filename
        if not os.path.isfile(str(self.__filename)+".py"):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n \"%s\""%self.__filename)
        self.__scriptfile = __import__(self.__filename, globals(), locals(), [])
        # Read the raw source with a context manager to avoid leaking the
        # file handle (the previous code never closed it).
        with open(self.__filename+".py",'r') as __fid:
            self.__scriptstring = __fid.read()
    def getvalue(self, __varname=None, __synonym=None ):
        "Return the requested variable, falling back on the synonym name"
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            # Primary name absent: try the synonym, with explicit errors.
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__filename)+".py",__synonym))
            else:
                return getattr(self.__scriptfile, __synonym)
        else:
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Return the whole script source text"
        return self.__scriptstring
2091 # ==============================================================================
2092 def CostFunction3D(_x,
# General cost function J(x) = Jb + Jo for static/3D algorithms; the
# measure used for Jo is selected by _QM, and optional intermediate
# quantities are stored into _SSV according to the _SSC selection list.
# NOTE(review): this extract dropped original lines (gaps in embedded
# numbering), including signature lines 2096-2099 which presumably
# declared _Y, _Xb, _BI and _RI used below. Remaining lines are kept
# byte-identical. Also note the mutable default arguments (_SSC = [],
# _SSV = {}): shared across calls — here tolerated because they are
# only read/updated through caller-provided objects, but fragile.
2093 _Hm = None, # Pour simuler Hm(x) : HO["Direct"].appliedTo
2094 _HmX = None, # Simulation déjà faite de Hm(x)
2095 _arg = None, # Arguments supplementaires pour Hm, sous la forme d'un tuple
2100 _SIV = False, # A résorber pour la 8.0
2101 _SSC = [], # self._parameters["StoreSupplementaryCalculations"]
2102 _nPS = 0, # nbPreviousSteps
2103 _QM = "DA", # QualityMeasure
2104 _SSV = {}, # Entrée et/ou sortie : self.StoredVariables
2105 _fRt = False, # Restitue ou pas la sortie étendue
2106 _sSc = True, # Stocke ou pas les SSC
2109 Fonction-coût générale utile pour les algorithmes statiques/3D : 3DVAR, BLUE
2110 et dérivés, Kalman et dérivés, LeastSquares, SamplingTest, PSO, SA, Tabu,
2111 DFO, QuantileRegression
# Give the listed storage objects an "append" alias so plain lists and
# Persistence objects (which use "store") can be driven uniformly.
2117 for k in ["CostFunctionJ",
2123 "SimulatedObservationAtCurrentOptimum",
2124 "SimulatedObservationAtCurrentState",
# NOTE(review): orig lines 2118-2122 and 2125-2127 (the other storage
# names and the list closing) are missing from this extract.
2128 if hasattr(_SSV[k],"store"):
2129 _SSV[k].append = _SSV[k].store # Pour utiliser "append" au lieu de "store"
# Work on the current state as a column matrix.
2131 _X = numpy.asmatrix(numpy.ravel( _x )).T
2132 if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
2133 _SSV["CurrentState"].append( _X )
# Use the already computed simulation when provided, otherwise apply _Hm.
2135 if _HmX is not None:
# NOTE(review): orig lines 2136-2138 and 2140-2142 (the _HX assignment,
# `else:` branches and the operator check) are missing from this extract.
2139 raise ValueError("COSTFUNCTION3D Operator has to be defined.")
2143 _HX = _Hm( _X, *_arg )
2144 _HX = numpy.asmatrix(numpy.ravel( _HX )).T
2146 if "SimulatedObservationAtCurrentState" in _SSC or \
2147 "SimulatedObservationAtCurrentOptimum" in _SSC:
2148 _SSV["SimulatedObservationAtCurrentState"].append( _HX )
# A NaN simulation makes the cost undefined.
2150 if numpy.any(numpy.isnan(_HX)):
2151 Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
# NOTE(review): orig line 2152 (presumably `else:`) is missing.
2153 _Y = numpy.asmatrix(numpy.ravel( _Y )).T
# Quality measure selection: each branch defines Jb and Jo.
2154 if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
2155 if _BI is None or _RI is None:
2156 raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
2157 _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
2158 Jb = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
2159 Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
2160 elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
# NOTE(review): orig lines 2161 and 2163 (the `_RI is None` test and the
# `Jb = 0.` assignment) are missing from this extract.
2162 raise ValueError("Observation error covariance matrix has to be properly defined!")
2164 Jo = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
2165 elif _QM in ["LeastSquares", "LS", "L2"]:
# NOTE(review): orig line 2166 (presumably `Jb = 0.`) is missing.
2167 Jo = 0.5 * (_Y - _HX).T * (_Y - _HX)
2168 elif _QM in ["AbsoluteValue", "L1"]:
# NOTE(review): orig line 2169 (presumably `Jb = 0.`) is missing.
2170 Jo = numpy.sum( numpy.abs(_Y - _HX) )
2171 elif _QM in ["MaximumError", "ME"]:
# NOTE(review): orig line 2172 (presumably `Jb = 0.`) is missing.
2173 Jo = numpy.max( numpy.abs(_Y - _HX) )
2174 elif _QM in ["QR", "Null"]:
# NOTE(review): orig lines 2175-2177 and 2179 (the QR/Null branch body
# and the final `else:`) are missing from this extract.
2178 raise ValueError("Unknown asked quality measure!")
# Total cost: scalar sum of background and observation terms.
2180 J = float( Jb ) + float( Jo )
# Optional storage of the cost terms.
# NOTE(review): orig lines 2181-2182 (presumably the storage condition)
# are missing from this extract.
2183 _SSV["CostFunctionJb"].append( Jb )
2184 _SSV["CostFunctionJo"].append( Jo )
2185 _SSV["CostFunctionJ" ].append( J )
# Track the running optimum over the stored cost series (offset by the
# _nPS previously recorded steps).
2187 if "IndexOfOptimum" in _SSC or \
2188 "CurrentOptimum" in _SSC or \
2189 "SimulatedObservationAtCurrentOptimum" in _SSC:
2190 IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
2191 if "IndexOfOptimum" in _SSC:
2192 _SSV["IndexOfOptimum"].append( IndexMin )
2193 if "CurrentOptimum" in _SSC:
2194 _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
2195 if "SimulatedObservationAtCurrentOptimum" in _SSC:
2196 _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
# NOTE(review): orig lines 2197-2200 are missing from this extract.
2201 if _QM in ["QR"]: # Pour le QuantileRegression
# NOTE(review): orig lines 2202-2205 (the QR-specific and default return
# statements) are missing from this extract.
2206 # ==============================================================================
# Module self-test hook: only prints a banner when the file is run directly.
2207 if __name__ == "__main__":
2208 print('\n AUTODIAGNOSTIC \n')