1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 Définit les outils généraux élémentaires.
26 Ce module est destiné à être appelée par AssimilationStudy.
28 __author__ = "Jean-Philippe ARGAUD"
36 from daCore import Persistence
37 from daCore import PlatformInfo
38 from daCore import Templates
40 # ==============================================================================
41 class CacheManager(object):
43 Classe générale de gestion d'un cache de calculs
46 toleranceInRedundancy = 1.e-18,
47 lenghtOfRedundancy = -1,
50 Les caractéristiques de tolérance peuvent être modifées à la création.
52 self.__tolerBP = float(toleranceInRedundancy)
53 self.__lenghtOR = int(lenghtOfRedundancy)
54 self.__initlnOR = self.__lenghtOR
59 self.__listOPCV = [] # Operator Previous Calculated Points, Results, Point Norms
60 # logging.debug("CM Tolerance de determination des doublons : %.2e", self.__tolerBP)
    def wasCalculatedIn(self, xValue ): #, info="" ):
        "Checks for the existence of a cached calculation matching the value"
        # Walk the cache of (point, result, norm) triplets backwards, most
        # recent first, looking at most at the last self.__lenghtOR entries.
        for i in range(min(len(self.__listOPCV),self.__lenghtOR)-1,-1,-1):
            if xValue.size != self.__listOPCV[i][0].size:
                # Different vector sizes: this cached point cannot match xValue.
                # logging.debug("CM Différence de la taille %s de X et de celle %s du point %i déjà calculé", xValue.shape,i,self.__listOPCP[i].shape)
            # Relative proximity test: ||x - x_i|| < tolerance * ||x_i||, using
            # the norm stored as third element of the cached triplet.
            if numpy.linalg.norm(numpy.ravel(xValue) - self.__listOPCV[i][0]) < self.__tolerBP * self.__listOPCV[i][2]:
                # Cache hit: the stored result is the second element of the triplet.
                __HxV = self.__listOPCV[i][1]
                # logging.debug("CM Cas%s déja calculé, portant le numéro %i", info, i)
    def storeValueInX(self, xValue, HxValue ):
        "Stores a calculation matching the given value"
        # A negative capacity means "auto-size": keep 2*n+2 entries for a
        # state of size n, and remember that as the nominal capacity.
        if self.__lenghtOR < 0:
            self.__lenghtOR = 2 * xValue.size + 2
            self.__initlnOR = self.__lenghtOR
        # Evict the oldest entries (FIFO) while the cache exceeds capacity.
        while len(self.__listOPCV) > self.__lenghtOR:
            # logging.debug("CM Réduction de la liste des cas à %i éléments par suppression du premier", self.__lenghtOR)
            self.__listOPCV.pop(0)
        # Append a (flattened point, result, point norm) triplet; the norm is
        # kept so wasCalculatedIn can test proximity relative to the point size.
        self.__listOPCV.append( (
            copy.copy(numpy.ravel(xValue)),
            numpy.linalg.norm(xValue),
93 self.__initlnOR = self.__lenghtOR
98 self.__lenghtOR = self.__initlnOR
100 # ==============================================================================
101 class Operator(object):
103 Classe générale d'interface de type opérateur simple
    def __init__(self, fromMethod=None, fromMatrix=None, avoidingRedundancy = True):
        An object of this type is built by providing, through one of the two
        keywords, either a python function or a matrix.
        - fromMethod : argument of Python function type
        - fromMatrix : argument suitable for the numpy.matrix constructor
        - avoidingRedundancy : whether or not to avoid redundant calculations
        # Per-instance call counters: as matrix, as method, served from cache.
        self.__NbCallsAsMatrix, self.__NbCallsAsMethod, self.__NbCallsOfCached = 0, 0, 0
        self.__AvoidRC = bool( avoidingRedundancy )
        if fromMethod is not None:
            # Functional form: the operator is a callable applied to the state.
            self.__Method = fromMethod # logtimer(fromMethod)
            self.__Type = "Method"
        elif fromMatrix is not None:
            # Linear form: the operator is stored as a dense numpy matrix.
            # NOTE(review): numpy.float is the builtin-float alias, removed in
            # numpy >= 1.24 — fine for the numpy versions of this code's era.
            self.__Matrix = numpy.matrix( fromMatrix, numpy.float )
            self.__Type = "Matrix"
    def disableAvoidingRedundancy(self):
        # Turn off the calculation cache shared by all Operator instances
        # (CM is a class-level CacheManager).
        Operator.CM.disable()
    def enableAvoidingRedundancy(self):
        # NOTE(review): only a disable() call is visible in this excerpt;
        # enabling is presumably conditional on self.__AvoidRC, with this
        # line being the fallback branch — confirm against the full source.
        Operator.CM.disable()
    def appliedTo(self, xValue, HValue = None):
        Returns the result of applying the operator to an argument xValue.
        This method simply applies the operator; its argument is assumed to
        already be of the proper type.
        - xValue : argument suitable for applying the operator
        if HValue is not None:
            # The result is already known: normalize it to a column matrix and
            # store it in the shared cache instead of recomputing.
            HxValue = numpy.asmatrix( numpy.ravel( HValue ) ).T
                Operator.CM.storeValueInX(xValue,HxValue)
            # Look the point up in the shared cache of previous evaluations.
            __alreadyCalculated, __HxV = Operator.CM.wasCalculatedIn(xValue)
            __alreadyCalculated = False
        if __alreadyCalculated:
            # Cache hit: count it and reuse the stored result.
            self.__addOneCacheCall()
            # Cache miss: evaluate either the matrix form or the method form.
            if self.__Matrix is not None:
                self.__addOneMatrixCall()
                HxValue = self.__Matrix * xValue
                self.__addOneMethodCall()
                HxValue = self.__Method( xValue )
            Operator.CM.storeValueInX(xValue,HxValue)
    def appliedControledFormTo(self, paire ):
        Returns the result of applying the operator to a pair
        (xValue, uValue). This method simply applies the operator; its
        argument is assumed to already be of the proper type. If uValue is
        None, the operator is assumed to apply to xValue only.
        - xValue : argument X suitable for applying the operator
        - uValue : argument U suitable for applying the operator
        assert len(paire) == 2, "Incorrect number of arguments"
        xValue, uValue = paire
        if self.__Matrix is not None:
            # Linear case: the matrix form applies to the state alone, the
            # control uValue is not used.
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
        elif uValue is not None:
            # Controlled case: the method receives the (state, control) pair.
            self.__addOneMethodCall()
            return self.__Method( (xValue, uValue) )
            # Uncontrolled case: the method receives the state alone.
            self.__addOneMethodCall()
            return self.__Method( xValue )
    def appliedInXTo(self, paire ):
        Returns the result of applying the operator to an argument xValue,
        knowing that the operator is valid at xNominal. This method simply
        applies the operator; its argument is assumed to already be of the
        proper type. If the operator is linear (because it is a matrix),
        then it is valid at every nominal point and it is not necessary
        Arguments : a list containing
        - xNominal : argument giving the point at which the operator is
          built, to be applied afterwards
        - xValue : argument suitable for applying the operator
        assert len(paire) == 2, "Incorrect number of arguments"
        xNominal, xValue = paire
        if self.__Matrix is not None:
            # Linear operator: independent of the nominal point.
            self.__addOneMatrixCall()
            return self.__Matrix * xValue
            # Nonlinear operator: the method receives (nominal point, state).
            self.__addOneMethodCall()
            return self.__Method( (xNominal, xValue) )
226 def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"):
228 Permet de renvoyer l'opérateur sous la forme d'une matrice
230 if self.__Matrix is not None:
231 self.__addOneMatrixCall()
233 elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None"
234 self.__addOneMethodCall()
235 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) )
237 raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.")
241 Renvoie la taille sous forme numpy si l'opérateur est disponible sous
242 la forme d'une matrice
244 if self.__Matrix is not None:
245 return self.__Matrix.shape
247 raise ValueError("Matrix form of the operator is not available, nor the shape")
    def nbcalls(self, which=None):
        Returns the numbers of evaluations of the operator
        # Tuple layout: local counters (total, as-matrix, as-method, cached)
        # followed by the same four aggregated over all Operator instances
        # (class-level attributes).
        self.__NbCallsAsMatrix+self.__NbCallsAsMethod,
        self.__NbCallsAsMatrix,
        self.__NbCallsAsMethod,
        self.__NbCallsOfCached,
        Operator.NbCallsAsMatrix+Operator.NbCallsAsMethod,
        Operator.NbCallsAsMatrix,
        Operator.NbCallsAsMethod,
        Operator.NbCallsOfCached,
        # which=None returns the whole tuple, otherwise one counter by index.
        if which is None: return __nbcalls
        else: return __nbcalls[which]
266 def __addOneMatrixCall(self):
267 "Comptabilise un appel"
268 self.__NbCallsAsMatrix += 1 # Decompte local
269 Operator.NbCallsAsMatrix += 1 # Decompte global
271 def __addOneMethodCall(self):
272 "Comptabilise un appel"
273 self.__NbCallsAsMethod += 1 # Decompte local
274 Operator.NbCallsAsMethod += 1 # Decompte global
276 def __addOneCacheCall(self):
277 "Comptabilise un appel"
278 self.__NbCallsOfCached += 1 # Decompte local
279 Operator.NbCallsOfCached += 1 # Decompte global
281 # ==============================================================================
282 class FullOperator(object):
284 Classe générale d'interface de type opérateur complet
285 (Direct, Linéaire Tangent, Adjoint)
288 name = "GenericFullOperator",
290 asOneFunction = None, # Fonction
291 asThreeFunctions = None, # Fonctions dictionary
292 asScript = None, # Fonction(s) script
293 asDict = None, # Parameters
300 self.__name = str(name)
301 self.__check = bool(toBeChecked)
306 if (asDict is not None) and isinstance(asDict, dict):
307 __Parameters.update( asDict )
308 if "DifferentialIncrement" in asDict:
309 __Parameters["withIncrement"] = asDict["DifferentialIncrement"]
310 if "CenteredFiniteDifference" in asDict:
311 __Parameters["withCenteredDF"] = asDict["CenteredFiniteDifference"]
312 if "EnableMultiProcessing" in asDict:
313 __Parameters["withmpEnabled"] = asDict["EnableMultiProcessing"]
314 if "NumberOfProcesses" in asDict:
315 __Parameters["withmpWorkers"] = asDict["NumberOfProcesses"]
317 if asScript is not None:
318 __Matrix, __Function = None, None
320 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
322 __Function = { "Direct":ImportFromScript(asScript).getvalue( "DirectOperator" ) }
323 __Function.update({"useApproximatedDerivatives":True})
324 __Function.update(__Parameters)
325 elif asThreeFunctions:
327 "Direct" :ImportFromScript(asScript).getvalue( "DirectOperator" ),
328 "Tangent":ImportFromScript(asScript).getvalue( "TangentOperator" ),
329 "Adjoint":ImportFromScript(asScript).getvalue( "AdjointOperator" ),
331 __Function.update(__Parameters)
334 if asOneFunction is not None:
335 if isinstance(asOneFunction, dict) and "Direct" in asOneFunction:
336 if asOneFunction["Direct"] is not None:
337 __Function = asOneFunction
339 raise ValueError("The function has to be given in a dictionnary which have 1 key (\"Direct\")")
341 __Function = { "Direct":asOneFunction }
342 __Function.update({"useApproximatedDerivatives":True})
343 __Function.update(__Parameters)
344 elif asThreeFunctions is not None:
345 if isinstance(asThreeFunctions, dict) and \
346 ("Tangent" in asThreeFunctions) and (asThreeFunctions["Tangent"] is not None) and \
347 ("Adjoint" in asThreeFunctions) and (asThreeFunctions["Adjoint"] is not None) and \
348 (("useApproximatedDerivatives" not in asThreeFunctions) or not bool(asThreeFunctions["useApproximatedDerivatives"])):
349 __Function = asThreeFunctions
350 elif isinstance(asThreeFunctions, dict) and \
351 ("Direct" in asThreeFunctions) and (asThreeFunctions["Direct"] is not None):
352 __Function = asThreeFunctions
353 __Function.update({"useApproximatedDerivatives":True})
355 raise ValueError("The functions has to be given in a dictionnary which have either 1 key (\"Direct\") or 3 keys (\"Direct\" (optionnal), \"Tangent\" and \"Adjoint\")")
356 if "Direct" not in asThreeFunctions:
357 __Function["Direct"] = asThreeFunctions["Tangent"]
358 __Function.update(__Parameters)
362 # if sys.version_info[0] < 3 and isinstance(__Function, dict):
363 # for k in ("Direct", "Tangent", "Adjoint"):
364 # if k in __Function and hasattr(__Function[k],"__class__"):
365 # if type(__Function[k]) is type(self.__init__):
366 # raise TypeError("can't use a class method (%s) as a function for the \"%s\" operator. Use a real function instead."%(type(__Function[k]),k))
368 if appliedInX is not None and isinstance(appliedInX, dict):
369 __appliedInX = appliedInX
370 elif appliedInX is not None:
371 __appliedInX = {"HXb":appliedInX}
375 if scheduledBy is not None:
376 self.__T = scheduledBy
378 if isinstance(__Function, dict) and \
379 ("useApproximatedDerivatives" in __Function) and bool(__Function["useApproximatedDerivatives"]) and \
380 ("Direct" in __Function) and (__Function["Direct"] is not None):
381 if "withCenteredDF" not in __Function: __Function["withCenteredDF"] = False
382 if "withIncrement" not in __Function: __Function["withIncrement"] = 0.01
383 if "withdX" not in __Function: __Function["withdX"] = None
384 if "withAvoidingRedundancy" not in __Function: __Function["withAvoidingRedundancy"] = True
385 if "withToleranceInRedundancy" not in __Function: __Function["withToleranceInRedundancy"] = 1.e-18
386 if "withLenghtOfRedundancy" not in __Function: __Function["withLenghtOfRedundancy"] = -1
387 if "withmpEnabled" not in __Function: __Function["withmpEnabled"] = False
388 if "withmpWorkers" not in __Function: __Function["withmpWorkers"] = None
389 from daNumerics.ApproximatedDerivatives import FDApproximation
390 FDA = FDApproximation(
391 Function = __Function["Direct"],
392 centeredDF = __Function["withCenteredDF"],
393 increment = __Function["withIncrement"],
394 dX = __Function["withdX"],
395 avoidingRedundancy = __Function["withAvoidingRedundancy"],
396 toleranceInRedundancy = __Function["withToleranceInRedundancy"],
397 lenghtOfRedundancy = __Function["withLenghtOfRedundancy"],
398 mpEnabled = __Function["withmpEnabled"],
399 mpWorkers = __Function["withmpWorkers"],
401 self.__FO["Direct"] = Operator( fromMethod = FDA.DirectOperator, avoidingRedundancy = avoidRC )
402 self.__FO["Tangent"] = Operator( fromMethod = FDA.TangentOperator, avoidingRedundancy = avoidRC )
403 self.__FO["Adjoint"] = Operator( fromMethod = FDA.AdjointOperator, avoidingRedundancy = avoidRC )
404 elif isinstance(__Function, dict) and \
405 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
406 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
407 self.__FO["Direct"] = Operator( fromMethod = __Function["Direct"], avoidingRedundancy = avoidRC )
408 self.__FO["Tangent"] = Operator( fromMethod = __Function["Tangent"], avoidingRedundancy = avoidRC )
409 self.__FO["Adjoint"] = Operator( fromMethod = __Function["Adjoint"], avoidingRedundancy = avoidRC )
410 elif asMatrix is not None:
411 __matrice = numpy.matrix( __Matrix, numpy.float )
412 self.__FO["Direct"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
413 self.__FO["Tangent"] = Operator( fromMatrix = __matrice, avoidingRedundancy = avoidRC )
414 self.__FO["Adjoint"] = Operator( fromMatrix = __matrice.T, avoidingRedundancy = avoidRC )
417 raise ValueError("Improperly defined observation operator, it requires at minima either a matrix, a Direct for approximate derivatives or a Tangent/Adjoint pair.")
419 if __appliedInX is not None:
420 self.__FO["AppliedInX"] = {}
421 if not isinstance(__appliedInX, dict):
422 raise ValueError("Error: observation operator defined by \"AppliedInX\" need a dictionary as argument.")
423 for key in list(__appliedInX.keys()):
424 if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
425 # Pour le cas où l'on a une vraie matrice
426 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
427 elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
428 # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
429 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
431 self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
433 self.__FO["AppliedInX"] = None
439 "x.__repr__() <==> repr(x)"
440 return repr(self.__V)
443 "x.__str__() <==> str(x)"
446 # ==============================================================================
447 class Algorithm(object):
449 Classe générale d'interface de type algorithme
451 Elle donne un cadre pour l'écriture d'une classe élémentaire d'algorithme
452 d'assimilation, en fournissant un container (dictionnaire) de variables
453 persistantes initialisées, et des méthodes d'accès à ces variables stockées.
455 Une classe élémentaire d'algorithme doit implémenter la méthode "run".
457 def __init__(self, name):
459 L'initialisation présente permet de fabriquer des variables de stockage
460 disponibles de manière générique dans les algorithmes élémentaires. Ces
461 variables de stockage sont ensuite conservées dans un dictionnaire
462 interne à l'objet, mais auquel on accède par la méthode "get".
464 Les variables prévues sont :
465 - CostFunctionJ : fonction-cout globale, somme des deux parties suivantes
466 - CostFunctionJb : partie ébauche ou background de la fonction-cout
467 - CostFunctionJo : partie observations de la fonction-cout
468 - GradientOfCostFunctionJ : gradient de la fonction-cout globale
469 - GradientOfCostFunctionJb : gradient de la partie ébauche de la fonction-cout
470 - GradientOfCostFunctionJo : gradient de la partie observations de la fonction-cout
471 - CurrentState : état courant lors d'itérations
472 - Analysis : l'analyse Xa
473 - SimulatedObservationAtBackground : l'état observé H(Xb) à l'ébauche
474 - SimulatedObservationAtCurrentState : l'état observé H(X) à l'état courant
475 - SimulatedObservationAtOptimum : l'état observé H(Xa) à l'optimum
476 - Innovation : l'innovation : d = Y - H(X)
477 - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
478 - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
479 - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
480 - MahalanobisConsistency : indicateur de consistance des covariances
481 - OMA : Observation moins Analysis : Y - Xa
482 - OMB : Observation moins Background : Y - Xb
483 - AMB : Analysis moins Background : Xa - Xb
484 - APosterioriCovariance : matrice A
485 - APosterioriVariances : variances de la matrice A
486 - APosterioriStandardDeviations : écart-types de la matrice A
487 - APosterioriCorrelations : correlations de la matrice A
488 - Residu : dans le cas des algorithmes de vérification
489 On peut rajouter des variables à stocker dans l'initialisation de
490 l'algorithme élémentaire qui va hériter de cette classe
492 logging.debug("%s Initialisation", str(name))
493 self._m = PlatformInfo.SystemUsage()
495 self._name = str( name )
496 self._parameters = {"StoreSupplementaryCalculations":[]}
497 self.__required_parameters = {}
498 self.__required_inputs = {"RequiredInputValues":{"mandatory":(), "optional":()}}
500 self.StoredVariables = {}
501 self.StoredVariables["CostFunctionJ"] = Persistence.OneScalar(name = "CostFunctionJ")
502 self.StoredVariables["CostFunctionJb"] = Persistence.OneScalar(name = "CostFunctionJb")
503 self.StoredVariables["CostFunctionJo"] = Persistence.OneScalar(name = "CostFunctionJo")
504 self.StoredVariables["CostFunctionJAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJAtCurrentOptimum")
505 self.StoredVariables["CostFunctionJbAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJbAtCurrentOptimum")
506 self.StoredVariables["CostFunctionJoAtCurrentOptimum"] = Persistence.OneScalar(name = "CostFunctionJoAtCurrentOptimum")
507 self.StoredVariables["GradientOfCostFunctionJ"] = Persistence.OneVector(name = "GradientOfCostFunctionJ")
508 self.StoredVariables["GradientOfCostFunctionJb"] = Persistence.OneVector(name = "GradientOfCostFunctionJb")
509 self.StoredVariables["GradientOfCostFunctionJo"] = Persistence.OneVector(name = "GradientOfCostFunctionJo")
510 self.StoredVariables["CurrentState"] = Persistence.OneVector(name = "CurrentState")
511 self.StoredVariables["Analysis"] = Persistence.OneVector(name = "Analysis")
512 self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
513 self.StoredVariables["CurrentOptimum"] = Persistence.OneVector(name = "CurrentOptimum")
514 self.StoredVariables["SimulatedObservationAtBackground"] = Persistence.OneVector(name = "SimulatedObservationAtBackground")
515 self.StoredVariables["SimulatedObservationAtCurrentState"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
516 self.StoredVariables["SimulatedObservationAtOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtOptimum")
517 self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
518 self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
519 self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
520 self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
521 self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
522 self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
523 self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
524 self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
525 self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
526 self.StoredVariables["APosterioriCovariance"] = Persistence.OneMatrix(name = "APosterioriCovariance")
527 self.StoredVariables["APosterioriVariances"] = Persistence.OneVector(name = "APosterioriVariances")
528 self.StoredVariables["APosterioriStandardDeviations"] = Persistence.OneVector(name = "APosterioriStandardDeviations")
529 self.StoredVariables["APosterioriCorrelations"] = Persistence.OneMatrix(name = "APosterioriCorrelations")
530 self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
531 self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")
533 def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
535 logging.debug("%s Lancement", self._name)
536 logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
538 # Mise a jour de self._parameters avec Parameters
539 self.__setParameters(Parameters)
541 # Corrections et complements
542 def __test_vvalue( argument, variable, argname):
544 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
545 raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
546 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
547 logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
549 logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
551 logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
552 __test_vvalue( Xb, "Xb", "Background or initial state" )
553 __test_vvalue( Y, "Y", "Observation" )
554 def __test_cvalue( argument, variable, argname):
556 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
557 raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
558 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
559 logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
561 logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
563 logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
564 __test_cvalue( R, "R", "Observation" )
565 __test_cvalue( B, "B", "Background" )
566 __test_cvalue( Q, "Q", "Evolution" )
568 if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
569 logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
571 self._parameters["Bounds"] = None
573 if logging.getLogger().level < logging.WARNING:
574 self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
575 if PlatformInfo.has_scipy:
576 import scipy.optimize
577 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
579 self._parameters["optmessages"] = 15
581 self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
582 if PlatformInfo.has_scipy:
583 import scipy.optimize
584 self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
586 self._parameters["optmessages"] = 15
    def _post_run(self,_oH=None):
        "Post-processing: derive a-posteriori diagnostics and log call statistics"
        if ("StoreSupplementaryCalculations" in self._parameters) and \
            "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
            for _A in self.StoredVariables["APosterioriCovariance"]:
                # Variances: diagonal of the a-posteriori covariance matrix A.
                if "APosterioriVariances" in self._parameters["StoreSupplementaryCalculations"]:
                    self.StoredVariables["APosterioriVariances"].store( numpy.diag(_A) )
                # Standard deviations: square roots of the variances.
                if "APosterioriStandardDeviations" in self._parameters["StoreSupplementaryCalculations"]:
                    self.StoredVariables["APosterioriStandardDeviations"].store( numpy.sqrt(numpy.diag(_A)) )
                # Correlations: C = D^-1/2 A D^-1/2 with D = diag(A).
                if "APosterioriCorrelations" in self._parameters["StoreSupplementaryCalculations"]:
                    _EI = numpy.diag(1./numpy.sqrt(numpy.diag(_A)))
                    _C = numpy.dot(_EI, numpy.dot(_A, _EI))
                    self.StoredVariables["APosterioriCorrelations"].store( _C )
        # Log operator-call statistics: nbcalls(0) is the total evaluation
        # count, nbcalls(3) the cached-call count (see Operator.nbcalls).
        logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint.: %i/%i/%i", self._name, _oH["Direct"].nbcalls(0),_oH["Tangent"].nbcalls(0),_oH["Adjoint"].nbcalls(0))
        logging.debug("%s Nombre d'appels au cache d'opérateur d'observation direct/tangent/adjoint..: %i/%i/%i", self._name, _oH["Direct"].nbcalls(3),_oH["Tangent"].nbcalls(3),_oH["Adjoint"].nbcalls(3))
        logging.debug("%s Taille mémoire utilisée de %.0f Mio", self._name, self._m.getUsedMemory("Mio"))
        logging.debug("%s Terminé", self._name)
    def get(self, key=None):
        Returns one of the stored variables identified by the key, or the
        whole dictionary of available variables when no key is given. The
        variables are returned directly as objects, so the individual
        access methods are those of the persistence classes.
        # Keyed access: a single Persistence object from StoredVariables.
        return self.StoredVariables[key]
        # No key: the full name -> Persistence-object dictionary.
        return self.StoredVariables
623 def __contains__(self, key=None):
624 "D.__contains__(k) -> True if D has a key k, else False"
625 return key in self.StoredVariables
628 "D.keys() -> list of D's keys"
629 if hasattr(self, "StoredVariables"):
630 return self.StoredVariables.keys()
635 "D.pop(k[,d]) -> v, remove specified key and return the corresponding value"
636 if hasattr(self, "StoredVariables"):
637 return self.StoredVariables.pop(k, d)
642 raise TypeError("pop expected at least 1 arguments, got 0")
643 "If key is not found, d is returned if given, otherwise KeyError is raised"
    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
        Must implement the elementary assimilation calculation in its most
        natural mathematical form.
        # Abstract method: every concrete algorithm subclass overrides this.
        raise NotImplementedError("Mathematical assimilation calculation has not been implemented!")
656 def defineRequiredParameter(self, name = None, default = None, typecast = None, message = None, minval = None, maxval = None, listval = None):
658 Permet de définir dans l'algorithme des paramètres requis et leurs
659 caractéristiques par défaut.
662 raise ValueError("A name is mandatory to define a required parameter.")
664 self.__required_parameters[name] = {
666 "typecast" : typecast,
672 logging.debug("%s %s (valeur par défaut = %s)", self._name, message, self.setParameterValue(name))
    def getRequiredParameters(self, noDetails=True):
        Returns the list of required parameter names, or directly the
        dictionary of required parameters.
        # noDetails=True: sorted names only; otherwise the full descriptor
        # dictionary built by defineRequiredParameter.
        return sorted(self.__required_parameters.keys())
        return self.__required_parameters
    def setParameterValue(self, name=None, value=None):
        Returns the value of a required parameter in a controlled manner
        # Descriptor fields declared for this parameter by defineRequiredParameter.
        default = self.__required_parameters[name]["default"]
        typecast = self.__required_parameters[name]["typecast"]
        minval = self.__required_parameters[name]["minval"]
        maxval = self.__required_parameters[name]["maxval"]
        listval = self.__required_parameters[name]["listval"]
        # Choose the raw value: an explicit value wins over the declared
        # default; either is passed through the optional typecast.
        if value is None and default is None:
        elif value is None and default is not None:
            if typecast is None: __val = default
            else: __val = typecast( default )
            if typecast is None: __val = value
            else: __val = typecast( value )
        # Bound checks are element-wise through numpy, so they also work for
        # vector-valued parameters.
        if minval is not None and (numpy.array(__val, float) < minval).any():
            raise ValueError("The parameter named \"%s\" of value \"%s\" can not be less than %s."%(name, __val, minval))
        if maxval is not None and (numpy.array(__val, float) > maxval).any():
            raise ValueError("The parameter named \"%s\" of value \"%s\" can not be greater than %s."%(name, __val, maxval))
        # Allowed-values check: element-wise for list/tuple parameters,
        # direct membership otherwise.
        if listval is not None:
            if typecast is list or typecast is tuple or isinstance(__val,list) or isinstance(__val,tuple):
                raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%(v, name, listval))
            elif __val not in listval:
                raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
    def requireInputArguments(self, mandatory=(), optional=()):
        Allows imposing required input arguments
        # Record which named inputs (Xb, Y, R, B, ...) are mandatory vs
        # optional; _pre_run checks argument presence against these tuples.
        self.__required_inputs["RequiredInputValues"]["mandatory"] = tuple( mandatory )
        self.__required_inputs["RequiredInputValues"]["optional"] = tuple( optional )
    def __setParameters(self, fromDico={}):
        Stores the received parameters into the internal dictionary.
        self._parameters.update( fromDico )
        # Re-validate every declared parameter: values present in fromDico
        # are checked/typecast by setParameterValue, the others fall back to
        # their declared default.
        for k in self.__required_parameters.keys():
            if k in fromDico.keys():
                self._parameters[k] = self.setParameterValue(k,fromDico[k])
                self._parameters[k] = self.setParameterValue(k)
            logging.debug("%s %s : %s", self._name, self.__required_parameters[k]["message"], self._parameters[k])
735 # ==============================================================================
736 class AlgorithmAndParameters(object):
738 Classe générale d'interface d'action pour l'algorithme et ses paramètres
741 name = "GenericAlgorithm",
748 self.__name = str(name)
752 self.__algorithm = {}
753 self.__algorithmFile = None
754 self.__algorithmName = None
756 self.updateParameters( asDict, asScript )
758 if asAlgorithm is None and asScript is not None:
759 __Algo = ImportFromScript(asScript).getvalue( "Algorithm" )
763 if __Algo is not None:
764 self.__A = str(__Algo)
765 self.__P.update( {"Algorithm":self.__A} )
767 self.__setAlgorithm( self.__A )
    def updateParameters(self,
        "Update of the parameters"
        # When no explicit dictionary is given, try loading the "Parameters"
        # variable from the given script file.
        if asDict is None and asScript is not None:
            __Dict = ImportFromScript(asScript).getvalue( self.__name, "Parameters" )
        # Merge whatever was obtained into the internal parameter store.
        if __Dict is not None:
            self.__P.update( dict(__Dict) )
782 def executePythonScheme(self, asDictAO = None):
783 "Permet de lancer le calcul d'assimilation"
784 Operator.CM.clearCache()
786 if not isinstance(asDictAO, dict):
787 raise ValueError("The objects for algorithm calculation have to be given together as a dictionnary, and they are not")
788 if hasattr(asDictAO["Background"],"getO"): self.__Xb = asDictAO["Background"].getO()
789 elif hasattr(asDictAO["CheckingPoint"],"getO"): self.__Xb = asDictAO["CheckingPoint"].getO()
790 else: self.__Xb = None
791 if hasattr(asDictAO["Observation"],"getO"): self.__Y = asDictAO["Observation"].getO()
792 else: self.__Y = asDictAO["Observation"]
793 if hasattr(asDictAO["ControlInput"],"getO"): self.__U = asDictAO["ControlInput"].getO()
794 else: self.__U = asDictAO["ControlInput"]
795 if hasattr(asDictAO["ObservationOperator"],"getO"): self.__HO = asDictAO["ObservationOperator"].getO()
796 else: self.__HO = asDictAO["ObservationOperator"]
797 if hasattr(asDictAO["EvolutionModel"],"getO"): self.__EM = asDictAO["EvolutionModel"].getO()
798 else: self.__EM = asDictAO["EvolutionModel"]
799 if hasattr(asDictAO["ControlModel"],"getO"): self.__CM = asDictAO["ControlModel"].getO()
800 else: self.__CM = asDictAO["ControlModel"]
801 self.__B = asDictAO["BackgroundError"]
802 self.__R = asDictAO["ObservationError"]
803 self.__Q = asDictAO["EvolutionError"]
805 self.__shape_validate()
807 self.__algorithm.run(
817 Parameters = self.__P,
821 def executeYACSScheme(self, FileName=None):
822 "Permet de lancer le calcul d'assimilation"
823 if FileName is None or not os.path.exists(FileName):
824 raise ValueError("a YACS file name has to be given for YACS execution.\n")
825 if not PlatformInfo.has_salome or \
826 not PlatformInfo.has_yacs or \
827 not PlatformInfo.has_adao:
828 raise ImportError("\n\n"+\
829 "Unable to get SALOME, YACS or ADAO environnement variables.\n"+\
830 "Please load the right environnement before trying to use it.\n")
835 SALOMERuntime.RuntimeSALOME_setRuntime()
837 r = pilot.getRuntime()
838 xmlLoader = loader.YACSLoader()
839 xmlLoader.registerProcCataLoader()
841 catalogAd = r.loadCatalog("proc", os.path.abspath(FileName))
842 r.addCatalog(catalogAd)
847 p = xmlLoader.load(os.path.abspath(FileName))
848 except IOError as ex:
849 print("The YACS XML schema file can not be loaded: %s"%(ex,))
851 logger = p.getLogger("parser")
852 if not logger.isEmpty():
853 print("The imported YACS XML schema has errors on parsing:")
854 print(logger.getStr())
857 print("The YACS XML schema is not valid and will not be executed:")
858 print(p.getErrorReport())
860 info=pilot.LinkInfo(pilot.LinkInfo.ALL_DONT_STOP)
861 p.checkConsistency(info)
862 if info.areWarningsOrErrors():
863 print("The YACS XML schema is not coherent and will not be executed:")
864 print(info.getGlobalRepr())
866 e = pilot.ExecutorSwig()
868 if p.getEffectiveState() != pilot.DONE:
869 print(p.getErrorReport())
873 def get(self, key = None):
874 "Vérifie l'existence d'une clé de variable ou de paramètres"
875 if key in self.__algorithm:
876 return self.__algorithm.get( key )
877 elif key in self.__P:
883 "Necessaire pour le pickling"
884 return self.__algorithm.pop(k, d)
def getAlgorithmRequiredParameters(self, noDetails=True):
    "Return the list of parameters required by the selected algorithm."
    # The elementary algorithm object owns its own parameter
    # specification; "noDetails" restricts the answer to bare names.
    required = self.__algorithm.getRequiredParameters(noDetails)
    return required
890 def setObserver(self, __V, __O, __I, __S):
891 if self.__algorithm is None \
892 or isinstance(self.__algorithm, dict) \
893 or not hasattr(self.__algorithm,"StoredVariables"):
894 raise ValueError("No observer can be build before choosing an algorithm.")
895 if __V not in self.__algorithm:
896 raise ValueError("An observer requires to be set on a variable named %s which does not exist."%__V)
898 self.__algorithm.StoredVariables[ __V ].setDataObserver(
901 HookParameters = __I,
904 def removeObserver(self, __V, __O, __A = False):
905 if self.__algorithm is None \
906 or isinstance(self.__algorithm, dict) \
907 or not hasattr(self.__algorithm,"StoredVariables"):
908 raise ValueError("No observer can be removed before choosing an algorithm.")
909 if __V not in self.__algorithm:
910 raise ValueError("An observer requires to be removed on a variable named %s which does not exist."%__V)
912 return self.__algorithm.StoredVariables[ __V ].removeDataObserver(
917 def hasObserver(self, __V):
918 if self.__algorithm is None \
919 or isinstance(self.__algorithm, dict) \
920 or not hasattr(self.__algorithm,"StoredVariables"):
922 if __V not in self.__algorithm:
924 return self.__algorithm.StoredVariables[ __V ].hasDataObserver()
927 return list(self.__algorithm.keys()) + list(self.__P.keys())
def __contains__(self, key=None):
    "D.__contains__(k) -> True if D has a key k, else False"
    # A key is recognized if it names either a stored algorithm
    # variable or one of the configuration parameters.
    if key in self.__algorithm:
        return True
    return key in self.__P
934 "x.__repr__() <==> repr(x)"
935 return repr(self.__A)+", "+repr(self.__P)
938 "x.__str__() <==> str(x)"
939 return str(self.__A)+", "+str(self.__P)
941 def __setAlgorithm(self, choice = None ):
943 Permet de sélectionner l'algorithme à utiliser pour mener à bien l'étude
944 d'assimilation. L'argument est un champ caractère se rapportant au nom
945 d'un algorithme réalisant l'opération sur les arguments fixes.
948 raise ValueError("Error: algorithm choice has to be given")
949 if self.__algorithmName is not None:
950 raise ValueError("Error: algorithm choice has already been done as \"%s\", it can't be changed."%self.__algorithmName)
951 daDirectory = "daAlgorithms"
953 # Recherche explicitement le fichier complet
954 # ------------------------------------------
956 for directory in sys.path:
957 if os.path.isfile(os.path.join(directory, daDirectory, str(choice)+'.py')):
958 module_path = os.path.abspath(os.path.join(directory, daDirectory))
959 if module_path is None:
960 raise ImportError("No algorithm module named \"%s\" has been found in the search path.\n The search path is %s"%(choice, sys.path))
962 # Importe le fichier complet comme un module
963 # ------------------------------------------
965 sys_path_tmp = sys.path ; sys.path.insert(0,module_path)
966 self.__algorithmFile = __import__(str(choice), globals(), locals(), [])
967 if not hasattr(self.__algorithmFile, "ElementaryAlgorithm"):
968 raise ImportError("this module does not define a valid elementary algorithm.")
969 self.__algorithmName = str(choice)
970 sys.path = sys_path_tmp ; del sys_path_tmp
971 except ImportError as e:
972 raise ImportError("The module named \"%s\" was found, but is incorrect at the import stage.\n The import error message is: %s"%(choice,e))
974 # Instancie un objet du type élémentaire du fichier
975 # -------------------------------------------------
976 self.__algorithm = self.__algorithmFile.ElementaryAlgorithm()
979 def __shape_validate(self):
981 Validation de la correspondance correcte des tailles des variables et
982 des matrices s'il y en a.
984 if self.__Xb is None: __Xb_shape = (0,)
985 elif hasattr(self.__Xb,"size"): __Xb_shape = (self.__Xb.size,)
986 elif hasattr(self.__Xb,"shape"):
987 if isinstance(self.__Xb.shape, tuple): __Xb_shape = self.__Xb.shape
988 else: __Xb_shape = self.__Xb.shape()
989 else: raise TypeError("The background (Xb) has no attribute of shape: problem !")
991 if self.__Y is None: __Y_shape = (0,)
992 elif hasattr(self.__Y,"size"): __Y_shape = (self.__Y.size,)
993 elif hasattr(self.__Y,"shape"):
994 if isinstance(self.__Y.shape, tuple): __Y_shape = self.__Y.shape
995 else: __Y_shape = self.__Y.shape()
996 else: raise TypeError("The observation (Y) has no attribute of shape: problem !")
998 if self.__U is None: __U_shape = (0,)
999 elif hasattr(self.__U,"size"): __U_shape = (self.__U.size,)
1000 elif hasattr(self.__U,"shape"):
1001 if isinstance(self.__U.shape, tuple): __U_shape = self.__U.shape
1002 else: __U_shape = self.__U.shape()
1003 else: raise TypeError("The control (U) has no attribute of shape: problem !")
1005 if self.__B is None: __B_shape = (0,0)
1006 elif hasattr(self.__B,"shape"):
1007 if isinstance(self.__B.shape, tuple): __B_shape = self.__B.shape
1008 else: __B_shape = self.__B.shape()
1009 else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !")
1011 if self.__R is None: __R_shape = (0,0)
1012 elif hasattr(self.__R,"shape"):
1013 if isinstance(self.__R.shape, tuple): __R_shape = self.__R.shape
1014 else: __R_shape = self.__R.shape()
1015 else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !")
1017 if self.__Q is None: __Q_shape = (0,0)
1018 elif hasattr(self.__Q,"shape"):
1019 if isinstance(self.__Q.shape, tuple): __Q_shape = self.__Q.shape
1020 else: __Q_shape = self.__Q.shape()
1021 else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !")
1023 if len(self.__HO) == 0: __HO_shape = (0,0)
1024 elif isinstance(self.__HO, dict): __HO_shape = (0,0)
1025 elif hasattr(self.__HO["Direct"],"shape"):
1026 if isinstance(self.__HO["Direct"].shape, tuple): __HO_shape = self.__HO["Direct"].shape
1027 else: __HO_shape = self.__HO["Direct"].shape()
1028 else: raise TypeError("The observation operator (H) has no attribute of shape: problem !")
1030 if len(self.__EM) == 0: __EM_shape = (0,0)
1031 elif isinstance(self.__EM, dict): __EM_shape = (0,0)
1032 elif hasattr(self.__EM["Direct"],"shape"):
1033 if isinstance(self.__EM["Direct"].shape, tuple): __EM_shape = self.__EM["Direct"].shape
1034 else: __EM_shape = self.__EM["Direct"].shape()
1035 else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !")
1037 if len(self.__CM) == 0: __CM_shape = (0,0)
1038 elif isinstance(self.__CM, dict): __CM_shape = (0,0)
1039 elif hasattr(self.__CM["Direct"],"shape"):
1040 if isinstance(self.__CM["Direct"].shape, tuple): __CM_shape = self.__CM["Direct"].shape
1041 else: __CM_shape = self.__CM["Direct"].shape()
1042 else: raise TypeError("The control model (CM) has no attribute of shape: problem !")
1044 # Vérification des conditions
1045 # ---------------------------
1046 if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ):
1047 raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,))
1048 if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ):
1049 raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,))
1051 if not( min(__B_shape) == max(__B_shape) ):
1052 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,))
1053 if not( min(__R_shape) == max(__R_shape) ):
1054 raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,))
1055 if not( min(__Q_shape) == max(__Q_shape) ):
1056 raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,))
1057 if not( min(__EM_shape) == max(__EM_shape) ):
1058 raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
1060 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[1] == max(__Xb_shape) ):
1061 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
1062 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and not( __HO_shape[0] == max(__Y_shape) ):
1063 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
1064 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
1065 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
1066 if len(self.__HO) > 0 and not isinstance(self.__HO, dict) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
1067 raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
1069 if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
1070 if self.__algorithmName in ["EnsembleBlue",]:
1071 asPersistentVector = self.__Xb.reshape((-1,min(__B_shape)))
1072 self.__Xb = Persistence.OneVector("Background", basetype=numpy.matrix)
1073 for member in asPersistentVector:
1074 self.__Xb.store( numpy.matrix( numpy.ravel(member), numpy.float ).T )
1075 __Xb_shape = min(__B_shape)
1077 raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape))
1079 if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
1080 raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
1082 if self.__EM is not None and len(self.__EM) > 0 and not isinstance(self.__EM, dict) and not( __EM_shape[1] == max(__Xb_shape) ):
1083 raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
1085 if self.__CM is not None and len(self.__CM) > 0 and not isinstance(self.__CM, dict) and not( __CM_shape[1] == max(__U_shape) ):
1086 raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
1088 if ("Bounds" in self.__P) \
1089 and (isinstance(self.__P["Bounds"], list) or isinstance(self.__P["Bounds"], tuple)) \
1090 and (len(self.__P["Bounds"]) != max(__Xb_shape)):
1091 raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself." \
1092 %(len(self.__P["Bounds"]),max(__Xb_shape)))
1096 # ==============================================================================
1097 class DataObserver(object):
1099 Classe générale d'interface de type observer
1102 name = "GenericObserver",
1114 self.__name = str(name)
1119 if onVariable is None:
1120 raise ValueError("setting an observer has to be done over a variable name or a list of variable names, not over None.")
1121 elif type(onVariable) in (tuple, list):
1122 self.__V = tuple(map( str, onVariable ))
1123 if withInfo is None:
1126 self.__I = (str(withInfo),)*len(self.__V)
1127 elif isinstance(onVariable, str):
1128 self.__V = (onVariable,)
1129 if withInfo is None:
1130 self.__I = (onVariable,)
1132 self.__I = (str(withInfo),)
1134 raise ValueError("setting an observer has to be done over a variable name or a list of variable names.")
1136 if asString is not None:
1137 __FunctionText = asString
1138 elif (asTemplate is not None) and (asTemplate in Templates.ObserverTemplates):
1139 __FunctionText = Templates.ObserverTemplates[asTemplate]
1140 elif asScript is not None:
1141 __FunctionText = ImportFromScript(asScript).getstring()
1144 __Function = ObserverF(__FunctionText)
1146 if asObsObject is not None:
1147 self.__O = asObsObject
1149 self.__O = __Function.getfunc()
1151 for k in range(len(self.__V)):
1154 if ename not in withAlgo:
1155 raise ValueError("An observer is asked to be set on a variable named %s which does not exist."%ename)
1157 withAlgo.setObserver(ename, self.__O, einfo, scheduledBy)
1160 "x.__repr__() <==> repr(x)"
1161 return repr(self.__V)+"\n"+repr(self.__O)
1164 "x.__str__() <==> str(x)"
1165 return str(self.__V)+"\n"+str(self.__O)
1167 # ==============================================================================
1168 class State(object):
1170 Classe générale d'interface de type état
1173 name = "GenericVector",
1175 asPersistentVector = None,
1178 toBeChecked = False,
1181 Permet de définir un vecteur :
1182 - asVector : entrée des données, comme un vecteur compatible avec le
1183 constructeur de numpy.matrix, ou "True" si entrée par script.
1184 - asPersistentVector : entrée des données, comme une série de vecteurs
1185 compatible avec le constructeur de numpy.matrix, ou comme un objet de
1186 type Persistence, ou "True" si entrée par script.
1187 - asScript : si un script valide est donné contenant une variable
1188 nommée "name", la variable est de type "asVector" (par défaut) ou
1189 "asPersistentVector" selon que l'une de ces variables est placée à
1192 self.__name = str(name)
1193 self.__check = bool(toBeChecked)
1197 self.__is_vector = False
1198 self.__is_series = False
1200 if asScript is not None:
1201 __Vector, __Series = None, None
1202 if asPersistentVector:
1203 __Series = ImportFromScript(asScript).getvalue( self.__name )
1205 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1207 __Vector, __Series = asVector, asPersistentVector
1209 if __Vector is not None:
1210 self.__is_vector = True
1211 self.__V = numpy.matrix( numpy.asmatrix(__Vector).A1, numpy.float ).T
1212 self.shape = self.__V.shape
1213 self.size = self.__V.size
1214 elif __Series is not None:
1215 self.__is_series = True
1216 if isinstance(__Series, (tuple, list, numpy.ndarray, numpy.matrix, str)):
1217 self.__V = Persistence.OneVector(self.__name, basetype=numpy.matrix)
1218 if isinstance(__Series, str): __Series = eval(__Series)
1219 for member in __Series:
1220 self.__V.store( numpy.matrix( numpy.asmatrix(member).A1, numpy.float ).T )
1221 import sys ; sys.stdout.flush()
1224 if isinstance(self.__V.shape, (tuple, list)):
1225 self.shape = self.__V.shape
1227 self.shape = self.__V.shape()
1228 if len(self.shape) == 1:
1229 self.shape = (self.shape[0],1)
1230 self.size = self.shape[0] * self.shape[1]
1232 raise ValueError("The %s object is improperly defined, it requires at minima either a vector, a list/tuple of vectors or a persistent object. Please check your vector input."%self.__name)
1234 if scheduledBy is not None:
1235 self.__T = scheduledBy
1237 def getO(self, withScheduler=False):
1239 return self.__V, self.__T
1240 elif self.__T is None:
1246 "Vérification du type interne"
1247 return self.__is_vector
1250 "Vérification du type interne"
1251 return self.__is_series
1254 "x.__repr__() <==> repr(x)"
1255 return repr(self.__V)
1258 "x.__str__() <==> str(x)"
1259 return str(self.__V)
1261 # ==============================================================================
1262 class Covariance(object):
1264 Classe générale d'interface de type covariance
1267 name = "GenericCovariance",
1268 asCovariance = None,
1269 asEyeByScalar = None,
1270 asEyeByVector = None,
1273 toBeChecked = False,
1276 Permet de définir une covariance :
1277 - asCovariance : entrée des données, comme une matrice compatible avec
1278 le constructeur de numpy.matrix
1279 - asEyeByScalar : entrée des données comme un seul scalaire de variance,
1280 multiplicatif d'une matrice de corrélation identité, aucune matrice
1281 n'étant donc explicitement à donner
1282 - asEyeByVector : entrée des données comme un seul vecteur de variance,
1283 à mettre sur la diagonale d'une matrice de corrélation, aucune matrice
1284 n'étant donc explicitement à donner
1285 - asCovObject : entrée des données comme un objet python, qui a les
1286 methodes obligatoires "getT", "getI", "diag", "trace", "__add__",
1287 "__sub__", "__neg__", "__mul__", "__rmul__" et facultatives "shape",
1288 "size", "cholesky", "choleskyI", "asfullmatrix", "__repr__", "__str__"
1289 - toBeChecked : booléen indiquant si le caractère SDP de la matrice
1290 pleine doit être vérifié
1292 self.__name = str(name)
1293 self.__check = bool(toBeChecked)
1296 self.__is_scalar = False
1297 self.__is_vector = False
1298 self.__is_matrix = False
1299 self.__is_object = False
1301 if asScript is not None:
1302 __Matrix, __Scalar, __Vector, __Object = None, None, None, None
1304 __Scalar = ImportFromScript(asScript).getvalue( self.__name )
1306 __Vector = ImportFromScript(asScript).getvalue( self.__name )
1308 __Object = ImportFromScript(asScript).getvalue( self.__name )
1310 __Matrix = ImportFromScript(asScript).getvalue( self.__name )
1312 __Matrix, __Scalar, __Vector, __Object = asCovariance, asEyeByScalar, asEyeByVector, asCovObject
1314 if __Scalar is not None:
1315 if numpy.matrix(__Scalar).size != 1:
1316 raise ValueError(' The diagonal multiplier given to define a sparse matrix is not a unique scalar value.\n Its actual measured size is %i. Please check your scalar input.'%numpy.matrix(__Scalar).size)
1317 self.__is_scalar = True
1318 self.__C = numpy.abs( float(__Scalar) )
1321 elif __Vector is not None:
1322 self.__is_vector = True
1323 self.__C = numpy.abs( numpy.array( numpy.ravel( numpy.matrix(__Vector, float ) ) ) )
1324 self.shape = (self.__C.size,self.__C.size)
1325 self.size = self.__C.size**2
1326 elif __Matrix is not None:
1327 self.__is_matrix = True
1328 self.__C = numpy.matrix( __Matrix, float )
1329 self.shape = self.__C.shape
1330 self.size = self.__C.size
1331 elif __Object is not None:
1332 self.__is_object = True
1334 for at in ("getT","getI","diag","trace","__add__","__sub__","__neg__","__mul__","__rmul__"):
1335 if not hasattr(self.__C,at):
1336 raise ValueError("The matrix given for %s as an object has no attribute \"%s\". Please check your object input."%(self.__name,at))
1337 if hasattr(self.__C,"shape"):
1338 self.shape = self.__C.shape
1341 if hasattr(self.__C,"size"):
1342 self.size = self.__C.size
1347 # raise ValueError("The %s covariance matrix has to be specified either as a matrix, a vector for its diagonal or a scalar multiplying an identity matrix."%self.__name)
1351 def __validate(self):
1353 if self.__C is None:
1354 raise UnboundLocalError("%s covariance matrix value has not been set!"%(self.__name,))
1355 if self.ismatrix() and min(self.shape) != max(self.shape):
1356 raise ValueError("The given matrix for %s is not a square one, its shape is %s. Please check your matrix input."%(self.__name,self.shape))
1357 if self.isobject() and min(self.shape) != max(self.shape):
1358 raise ValueError("The matrix given for \"%s\" is not a square one, its shape is %s. Please check your object input."%(self.__name,self.shape))
1359 if self.isscalar() and self.__C <= 0:
1360 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your scalar input %s."%(self.__name,self.__C))
1361 if self.isvector() and (self.__C <= 0).any():
1362 raise ValueError("The \"%s\" covariance matrix is not positive-definite. Please check your vector input."%(self.__name,))
1363 if self.ismatrix() and (self.__check or logging.getLogger().level < logging.WARNING):
1365 L = numpy.linalg.cholesky( self.__C )
1367 raise ValueError("The %s covariance matrix is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1368 if self.isobject() and (self.__check or logging.getLogger().level < logging.WARNING):
1370 L = self.__C.cholesky()
1372 raise ValueError("The %s covariance object is not symmetric positive-definite. Please check your matrix input."%(self.__name,))
1375 "Vérification du type interne"
1376 return self.__is_scalar
1379 "Vérification du type interne"
1380 return self.__is_vector
1383 "Vérification du type interne"
1384 return self.__is_matrix
1387 "Vérification du type interne"
1388 return self.__is_object
1393 return Covariance(self.__name+"I", asCovariance = self.__C.I )
1394 elif self.isvector():
1395 return Covariance(self.__name+"I", asEyeByVector = 1. / self.__C )
1396 elif self.isscalar():
1397 return Covariance(self.__name+"I", asEyeByScalar = 1. / self.__C )
1398 elif self.isobject():
1399 return Covariance(self.__name+"I", asCovObject = self.__C.getI() )
1401 return None # Indispensable
1406 return Covariance(self.__name+"T", asCovariance = self.__C.T )
1407 elif self.isvector():
1408 return Covariance(self.__name+"T", asEyeByVector = self.__C )
1409 elif self.isscalar():
1410 return Covariance(self.__name+"T", asEyeByScalar = self.__C )
1411 elif self.isobject():
1412 return Covariance(self.__name+"T", asCovObject = self.__C.getT() )
1415 "Décomposition de Cholesky"
1417 return Covariance(self.__name+"C", asCovariance = numpy.linalg.cholesky(self.__C) )
1418 elif self.isvector():
1419 return Covariance(self.__name+"C", asEyeByVector = numpy.sqrt( self.__C ) )
1420 elif self.isscalar():
1421 return Covariance(self.__name+"C", asEyeByScalar = numpy.sqrt( self.__C ) )
1422 elif self.isobject() and hasattr(self.__C,"cholesky"):
1423 return Covariance(self.__name+"C", asCovObject = self.__C.cholesky() )
1425 def choleskyI(self):
1426 "Inversion de la décomposition de Cholesky"
1428 return Covariance(self.__name+"H", asCovariance = numpy.linalg.cholesky(self.__C).I )
1429 elif self.isvector():
1430 return Covariance(self.__name+"H", asEyeByVector = 1.0 / numpy.sqrt( self.__C ) )
1431 elif self.isscalar():
1432 return Covariance(self.__name+"H", asEyeByScalar = 1.0 / numpy.sqrt( self.__C ) )
1433 elif self.isobject() and hasattr(self.__C,"choleskyI"):
1434 return Covariance(self.__name+"H", asCovObject = self.__C.choleskyI() )
1436 def diag(self, msize=None):
1437 "Diagonale de la matrice"
1439 return numpy.diag(self.__C)
1440 elif self.isvector():
1442 elif self.isscalar():
1444 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1446 return self.__C * numpy.ones(int(msize))
1447 elif self.isobject():
1448 return self.__C.diag()
1450 def asfullmatrix(self, msize=None):
1454 elif self.isvector():
1455 return numpy.matrix( numpy.diag(self.__C), float )
1456 elif self.isscalar():
1458 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1460 return numpy.matrix( self.__C * numpy.eye(int(msize)), float )
1461 elif self.isobject() and hasattr(self.__C,"asfullmatrix"):
1462 return self.__C.asfullmatrix()
1464 def trace(self, msize=None):
1465 "Trace de la matrice"
1467 return numpy.trace(self.__C)
1468 elif self.isvector():
1469 return float(numpy.sum(self.__C))
1470 elif self.isscalar():
1472 raise ValueError("the size of the %s covariance matrix has to be given in case of definition as a scalar over the diagonal."%(self.__name,))
1474 return self.__C * int(msize)
1475 elif self.isobject():
1476 return self.__C.trace()
1482 "x.__repr__() <==> repr(x)"
1483 return repr(self.__C)
1486 "x.__str__() <==> str(x)"
1487 return str(self.__C)
def __add__(self, other):
    "x.__add__(y) <==> x+y"
    # Full matrix or object covariance: plain matrix addition.
    if self.ismatrix() or self.isobject():
        return self.__C + numpy.asmatrix(other)
    elif self.isvector() or self.isscalar():
        # Diagonal (vector) or scalar covariance: add the stored
        # variance(s) onto the diagonal of "other".  The strided 1D view
        # [::shape[1]+1] selects exactly the diagonal entries of a 2D array.
        # NOTE(review): numpy.asarray does not copy when "other" already is
        # an ndarray, so "other" is updated in place — confirm callers do
        # not rely on "other" staying unchanged.  Also assumes "other" is
        # 2D and contiguous so that reshape returns a writable view.
        _A = numpy.asarray(other)
        _A.reshape(_A.size)[::_A.shape[1]+1] += self.__C
        return numpy.asmatrix(_A)
    # Any other internal representation falls through and returns None.
def __radd__(self, other):
    "x.__radd__(y) <==> y+x"
    # Right-hand addition is deliberately unsupported: the covariance
    # object must always be the left operand of "+".
    message = "%s covariance matrix __radd__ method not available for %s type!"%(self.__name,type(other))
    raise NotImplementedError(message)
def __sub__(self, other):
    "x.__sub__(y) <==> x-y"
    # Full matrix or object covariance: plain matrix subtraction.
    if self.ismatrix() or self.isobject():
        return self.__C - numpy.asmatrix(other)
    elif self.isvector() or self.isscalar():
        # Diagonal (vector) or scalar covariance: replace the diagonal of
        # "other" by C - diag(other) through a strided view of the array.
        # NOTE(review): the off-diagonal entries of "other" are kept with
        # their original sign, i.e. the result is NOT diag(C) - other
        # elementwise off the diagonal — verify this is intended.
        # NOTE(review): "other" is modified in place when it is an ndarray
        # (numpy.asarray does not copy) — confirm callers expect this.
        _A = numpy.asarray(other)
        _A.reshape(_A.size)[::_A.shape[1]+1] = self.__C - _A.reshape(_A.size)[::_A.shape[1]+1]
        return numpy.asmatrix(_A)
    # Any other internal representation falls through and returns None.
def __rsub__(self, other):
    "x.__rsub__(y) <==> y-x"
    # Right-hand subtraction is deliberately unsupported: the covariance
    # object must always be the left operand of "-".
    message = "%s covariance matrix __rsub__ method not available for %s type!"%(self.__name,type(other))
    raise NotImplementedError(message)
1516 "x.__neg__() <==> -x"
1519 def __mul__(self, other):
1520 "x.__mul__(y) <==> x*y"
1521 if self.ismatrix() and isinstance(other,numpy.matrix):
1522 return self.__C * other
1523 elif self.ismatrix() and isinstance(other, (list, numpy.ndarray, tuple)):
1524 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1525 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1526 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1527 return self.__C * numpy.asmatrix(other)
1529 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.asmatrix(other).shape,self.__name))
1530 elif self.isvector() and isinstance(other, (list, numpy.matrix, numpy.ndarray, tuple)):
1531 if numpy.ravel(other).size == self.shape[1]: # Vecteur
1532 return numpy.asmatrix(self.__C * numpy.ravel(other)).T
1533 elif numpy.asmatrix(other).shape[0] == self.shape[1]: # Matrice
1534 return numpy.asmatrix((self.__C * (numpy.asarray(other).transpose())).transpose())
1536 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1537 elif self.isscalar() and isinstance(other,numpy.matrix):
1538 return self.__C * other
1539 elif self.isscalar() and isinstance(other, (list, numpy.ndarray, tuple)):
1540 if len(numpy.asarray(other).shape) == 1 or numpy.asarray(other).shape[1] == 1 or numpy.asarray(other).shape[0] == 1:
1541 return self.__C * numpy.asmatrix(numpy.ravel(other)).T
1543 return self.__C * numpy.asmatrix(other)
1544 elif self.isobject():
1545 return self.__C.__mul__(other)
1547 raise NotImplementedError("%s covariance matrix __mul__ method not available for %s type!"%(self.__name,type(other)))
1549 def __rmul__(self, other):
1550 "x.__rmul__(y) <==> y*x"
1551 if self.ismatrix() and isinstance(other,numpy.matrix):
1552 return other * self.__C
1553 elif self.isvector() and isinstance(other,numpy.matrix):
1554 if numpy.ravel(other).size == self.shape[0]: # Vecteur
1555 return numpy.asmatrix(numpy.ravel(other) * self.__C)
1556 elif numpy.asmatrix(other).shape[1] == self.shape[0]: # Matrice
1557 return numpy.asmatrix(numpy.array(other) * self.__C)
1559 raise ValueError("operands could not be broadcast together with shapes %s %s in %s matrix"%(self.shape,numpy.ravel(other).shape,self.__name))
1560 elif self.isscalar() and isinstance(other,numpy.matrix):
1561 return other * self.__C
1562 elif self.isobject():
1563 return self.__C.__rmul__(other)
1565 raise NotImplementedError("%s covariance matrix __rmul__ method not available for %s type!"%(self.__name,type(other)))
1568 "x.__len__() <==> len(x)"
1569 return self.shape[0]
1571 # ==============================================================================
1572 class ObserverF(object):
1574 Creation d'une fonction d'observateur a partir de son texte
1576 def __init__(self, corps=""):
1577 self.__corps = corps
1578 def func(self,var,info):
1579 "Fonction d'observation"
1582 "Restitution du pointeur de fonction dans l'objet"
1585 # ==============================================================================
1586 class CaseLogger(object):
1588 Conservation des commandes de creation d'un cas
1590 def __init__(self, __name="", __objname="case", __addViewers=None, __addLoaders=None):
1591 self.__name = str(__name)
1592 self.__objname = str(__objname)
1593 self.__logSerie = []
1594 self.__switchoff = False
1595 self.__viewers = self.__loaders = {
1602 if __addViewers is not None:
1603 self.__viewers.update(dict(__addViewers))
1604 if __addLoaders is not None:
1605 self.__loaders.update(dict(__addLoaders))
1607 def register(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
1608 "Enregistrement d'une commande individuelle"
1609 if __command is not None and __keys is not None and __local is not None and not self.__switchoff:
1610 if "self" in __keys: __keys.remove("self")
1611 self.__logSerie.append( (str(__command), __keys, __local, __pre, __switchoff) )
1613 self.__switchoff = True
1615 self.__switchoff = False
1617 def dump(self, __filename=None, __format="TUI", __upa=""):
1618 "Restitution normalisée des commandes (par les *GenericCaseViewer)"
1619 if __format in self.__viewers:
1620 __formater = self.__viewers[__format](self.__name, self.__objname, self.__logSerie)
1622 raise ValueError("Dumping as \"%s\" is not available"%__format)
1623 return __formater.dump(__filename, __upa)
1625 def load(self, __filename=None, __content=None, __object=None, __format="TUI"):
1626 "Chargement normalisé des commandes"
1627 if __format in self.__loaders:
1628 __formater = self.__loaders[__format]()
1630 raise ValueError("Loading as \"%s\" is not available"%__format)
1631 return __formater.load(__filename, __content, __object)
1633 # ==============================================================================
1634 class GenericCaseViewer(object):
1636 Gestion des commandes de creation d'une vue de cas
def __init__(self, __name="", __objname="case", __content=None, __object=None):
    "Initialize the viewer state and register the case identity."
    # Identity of the case, used when generating the output script.
    self._name = str(__name)
    self._objname = str(__objname)
    # Serialized input (commands or object) to be replayed or dumped.
    self._content = __content
    self._object = __object
    # Accumulated output lines and bookkeeping flags.
    self._lineSerie = []
    self._switchoff = False
    self._numobservers = 2
    # Code template emitted verbatim when a required external variable is
    # missing from the dumped case (it is written out as Python source).
    self._missing = """raise ValueError("This case requires beforehand to import or define the variable named <%s>. When corrected, remove this command, correct and uncomment the following one.")\n# """
def _append(self, *args):
    "Transform one individual command into a recorded line (abstract)."
    # Must be overridden by concrete viewers (e.g. the TUI viewer below).
    raise NotImplementedError()
def _extract(self, *args):
    "Transform a recorded case back into individual commands (abstract)."
    # Must be overridden by concrete viewers that support loading a case.
    raise NotImplementedError()
def _finalize(self, __upa=None):
    "Append the final execution statement and the user post-analysis, if any."
    # Nothing to finalize when no post-analysis text was supplied.
    if __upa is None or len(__upa) == 0:
        return
    self._lineSerie.append("%s.execute()"%(self._objname,))
    self._lineSerie.append(__upa)
1659 def _addLine(self, line=""):
1660 "Ajoute un enregistrement individuel"
1661 self._lineSerie.append(line)
1662 def _get_objname(self):
1663 return self._objname
1664 def dump(self, __filename=None, __upa=None):
1665 "Restitution normalisée des commandes"
1666 self._finalize(__upa)
1667 __text = "\n".join(self._lineSerie)
1669 if __filename is not None:
1670 __file = os.path.abspath(__filename)
1671 __fid = open(__file,"w")
1675 def load(self, __filename=None, __content=None, __object=None):
1676 "Chargement normalisé des commandes"
1677 if __filename is not None and os.path.exists(__filename):
1678 self._content = open(__filename, 'r').read()
1679 elif __content is not None and type(__content) is str:
1680 self._content = __content
1681 elif __object is not None and type(__object) is dict:
1682 self._object = copy.deepcopy(__object)
1684 pass # use "self._content" from initialization
1685 __commands = self._extract(self._content, self._object)
class _TUIViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas ADAO TUI (Cas<->TUI)
    """
    # NOTE(review): this listing appears to have lost some statements (e.g.
    # the "__text" initialisation and the "for k in __keys" loop header in
    # _append, the "__commands" initialisation and return in _extract);
    # confirm against the upstream ADAO source before relying on it.
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        # Emit the standard TUI script preamble, then replay any recorded
        # command series through _append.
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO TUI\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("import adaoBuilder")
        self._addLine("%s = adaoBuilder.New('%s')"%(self._objname, self._name))
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transformation d'une commande individuelle en un enregistrement"
        if __command is not None and __keys is not None and __local is not None:
            # Build "<objname>.<command>( key=value, ... )", optionally
            # preceded by "<pre> = " when the call result is assigned.
            if __pre is not None:
                __text += "%s = "%__pre
            __text += "%s.%s( "%(self._objname,str(__command))
            if "self" in __keys: __keys.remove("self")
            if __command not in ("set","get") and "Concept" in __keys: __keys.remove("Concept")
            # Per-keyword filtering: skip unset values and defaulted flags,
            # expand Persistence objects, and prefix the line with the
            # "missing variable" template when a value is a callable that
            # cannot be serialized as text.
            if __v is None: continue
            if k == "Checked" and not __v: continue
            if k == "Stored" and not __v: continue
            if k == "AvoidRC" and __v: continue
            if k == "noDetails": continue
            if isinstance(__v,Persistence.Persistence): __v = __v.values()
            if callable(__v): __text = self._missing%__v.__name__+__text
            if isinstance(__v,dict):
                for val in __v.values():
                    if callable(val): __text = self._missing%val.__name__+__text
            # Temporarily widen numpy print options so repr(__v) of large
            # arrays is not truncated, then restore the defaults.
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            __text += "%s=%s, "%(k,repr(__v))
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            self._addLine(__text)
    def _extract(self, __multilines="", __object=None):
        "Transformation un enregistrement en une commande individuelle"
        # Scan a TUI script text: find the "adaoBuilder.New" assignment to
        # recover the case object name, then collect every ".set..." call
        # made on that object.
        __multilines = __multilines.replace("\r\n","\n")
        for line in __multilines.split("\n"):
            if "adaoBuilder.New" in line and "=" in line:
                self._objname = line.split("=")[0].strip()
                logging.debug("TUI Extracting commands of '%s' object..."%(self._objname,))
            if self._objname+".set" in line:
                __commands.append( line.replace(self._objname+".","",1) )
                logging.debug("TUI Extracted command: %s"%(__commands[-1],))
class _EPDViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas EPD (Eficas Python Dictionnary/Cas<-EPD)
    """
    # NOTE(review): several statements of this listing appear elided (loop
    # headers, "else:" branches, the "__commands" initialisation and return);
    # confirm against the upstream ADAO source before relying on it.
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        self._observerIndex = 0
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO EPD\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("%s = {}"%__objname)
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _extract(self, __multilines=None, __object=None):
        "Transformation un enregistrement en une commande individuelle"
        # Locate the study dictionary either by executing the given text and
        # searching locals(), or directly in the given dictionary object.
        if __multilines is not None:
            __multilines = __multilines.replace("\r\n","\n")
            self._objdata = None
            __getlocals = locals()
            for k in __getlocals:
                if type(__getlocals[k]) is dict:
                    if 'ASSIMILATION_STUDY' in __getlocals[k]:
                        self._objdata = __getlocals[k]['ASSIMILATION_STUDY']
                    if 'CHECKING_STUDY' in __getlocals[k]:
                        self._objdata = __getlocals[k]['CHECKING_STUDY']
        elif __multilines is None and __object is not None and type(__object) is dict:
            self._objname = "case"
            self._objdata = None
            if 'ASSIMILATION_STUDY' in __object:
                self._objdata = __object['ASSIMILATION_STUDY']
            if 'CHECKING_STUDY' in __object:
                self._objdata = __object['CHECKING_STUDY']
            self._objdata = None
        if self._objdata is None or not(type(self._objdata) is dict) or not('AlgorithmParameters' in self._objdata):
            raise ValueError("Impossible to load given content as a ADAO EPD one (no dictionnary or no 'AlgorithmParameters' key found).")
        # ----------------------------------------------------------------------
        logging.debug("EPD Extracting commands of '%s' object..."%(self._objname,))
        __UserPostAnalysis = ""
        # Translate each EPD entry into the equivalent "set(...)" command.
        for k,r in self._objdata.items():
            logging.debug("EPD Extracted command: %s:%s"%(k, r))
            if __command == "StudyName" and len(str(r))>0:
                __commands.append( "set( Concept='Name', String='%s')"%(str(r),) )
            elif __command == "StudyRepertory":
                __commands.append( "set( Concept='Directory', String='%s')"%(str(r),) )
            elif __command == "UserPostAnalysis" and type(r) is dict:
                # Post-analysis may come as inline text, a script file, or a
                # named template; "ADD" placeholders are rebound to the case.
                if 'STRING_DATA' in r:
                    __UserPostAnalysis = r['STRING_DATA']['STRING']
                elif 'SCRIPT_DATA' in r and os.path.exists(r['SCRIPT_DATA']['SCRIPT_FILE']):
                    __UserPostAnalysis = open(r['SCRIPT_DATA']['SCRIPT_FILE'],'r').read()
                elif 'TEMPLATE_DATA' in r:
                    # AnalysisPrinter...
                    __itempl = r['TEMPLATE_DATA']['Template']
                    __UserPostAnalysis = r['TEMPLATE_DATA'][__itempl]['ValueTemplate']
                    __UserPostAnalysis = ""
                __UserPostAnalysis = __UserPostAnalysis.replace("ADD",self._objname)
            elif __command == "AlgorithmParameters" and type(r) is dict and 'Algorithm' in r:
                # NOTE(review): eval() is applied to data read from the case
                # file; only load EPD content from trusted sources.
                if 'Parameters%s'%(r['Algorithm'],) in r and r['Parameters'] == 'Defaults':
                    __Dict = r['Parameters%s'%(r['Algorithm'],)]
                    if 'SetSeed' in __Dict:__Dict['SetSeed'] = int(__Dict['SetSeed'])
                    if 'BoxBounds' in __Dict and type(__Dict['BoxBounds']) is str:
                        __Dict['BoxBounds'] = eval(__Dict['BoxBounds'])
                    __parameters = ', Parameters=%s'%(repr(__Dict),)
                elif 'Dict' in r and r['Parameters'] == 'Dict':
                    __from = r['Dict']['data']
                    if 'STRING_DATA' in __from:
                        __parameters = ", Parameters=%s"%(repr(eval(__from['STRING_DATA']['STRING'])),)
                    elif 'SCRIPT_DATA' in __from and os.path.exists(__from['SCRIPT_DATA']['SCRIPT_FILE']):
                        __parameters = ", Script='%s'"%(__from['SCRIPT_DATA']['SCRIPT_FILE'],)
                __commands.append( "set( Concept='AlgorithmParameters', Algorithm='%s'%s )"%(r['Algorithm'],__parameters) )
            elif __command == "Observers" and type(r) is dict and 'SELECTION' in r:
                # One "set(Concept='Observer', ...)" per selected variable.
                if type(r['SELECTION']) is str:
                    __selection = (r['SELECTION'],)
                    __selection = tuple(r['SELECTION'])
                for sk in __selection:
                    __idata = r[sk]['%s_data'%sk]
                    if __idata['NodeType'] == 'Template' and 'Template' in __idata['ObserverTemplate']:
                        __template =__idata['ObserverTemplate']['Template']
                        __commands.append( "set( Concept='Observer', Variable='%s', Template='%s' )"%(sk,__template) )
                    if __idata['NodeType'] == 'String' and 'Value' in __idata:
                        __value =__idata['Value']
                        __commands.append( "set( Concept='Observer', Variable='%s', String='%s' )"%(sk,__value) )
            # Background, ObservationError, ObservationOperator...
            elif type(r) is dict:
                # Generic data entry: rebuild the keyword list according to
                # the declared input type and its origin (string, script,
                # one-function or three-function definitions).
                __argumentsList = []
                if 'Stored' in r and bool(r['Stored']):
                    __argumentsList.append(['Stored',True])
                if 'INPUT_TYPE' in r and r['INPUT_TYPE'] in r:
                    # Vector, Matrix, ScalarSparseMatrix, DiagonalSparseMatrix, Function
                    __itype = r['INPUT_TYPE']
                    __idata = r[__itype]['data']
                    if 'FROM' in __idata and __idata['FROM'].upper()+'_DATA' in __idata:
                        # String, Script, Template, ScriptWithOneFunction, ScriptWithFunctions
                        __ifrom = __idata['FROM']
                        if __ifrom == 'String' or __ifrom == 'Template':
                            __argumentsList.append([__itype,__idata['STRING_DATA']['STRING']])
                        if __ifrom == 'Script':
                            __argumentsList.append([__itype,True])
                            __argumentsList.append(['Script',__idata['SCRIPT_DATA']['SCRIPT_FILE']])
                        if __ifrom == 'ScriptWithOneFunction':
                            __argumentsList.append(['OneFunction',True])
                            __argumentsList.append(['Script',__idata['SCRIPTWITHONEFUNCTION_DATA'].pop('SCRIPTWITHONEFUNCTION_FILE')])
                            if len(__idata['SCRIPTWITHONEFUNCTION_DATA'])>0:
                                __argumentsList.append(['Parameters',__idata['SCRIPTWITHONEFUNCTION_DATA']])
                        if __ifrom == 'ScriptWithFunctions':
                            __argumentsList.append(['ThreeFunctions',True])
                            __argumentsList.append(['Script',__idata['SCRIPTWITHFUNCTIONS_DATA'].pop('SCRIPTWITHFUNCTIONS_FILE')])
                            if len(__idata['SCRIPTWITHFUNCTIONS_DATA'])>0:
                                __argumentsList.append(['Parameters',__idata['SCRIPTWITHFUNCTIONS_DATA']])
                __arguments = ["%s = %s"%(k,repr(v)) for k,v in __argumentsList]
                __commands.append( "set( Concept='%s', %s )"%(__command, ", ".join(__arguments)))
        # ----------------------------------------------------------------------
        __commands.sort() # Pour commencer par 'AlgorithmParameters'
        __commands.append(__UserPostAnalysis)
class _DCTViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas DCT (Cas<->DCT)
    """
    # NOTE(review): some statements of this listing appear elided ("else:"
    # branches, loop headers, "__commands"/"__text" initialisations and the
    # final return); confirm against the upstream ADAO source.
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        self._observerIndex = 0
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Python script for ADAO DCT\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("%s = {}"%__objname)
        if self._content is not None:
            for command in self._content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transformation d'une commande individuelle en un enregistrement"
        if __command is not None and __keys is not None and __local is not None:
            if "execute" in __command: return
            if "self" in __keys: __keys.remove("self")
            # The dictionary key is the concept name, either given explicitly
            # to set/get or derived from the method name; observers get a
            # numeric suffix so successive ones do not overwrite each other.
            if __command in ("set","get") and "Concept" in __keys:
                __key = __local["Concept"]
                __keys.remove("Concept")
                __key = __command.replace("set","").replace("get","")
            if "Observer" in __key and 'Variable' in __keys:
                self._observerIndex += 1
                __key += "_%i"%self._observerIndex
            __text += "%s['%s'] = {"%(self._objname,str(__key))
            # Per-keyword filtering, as in the TUI viewer.
                if __v is None: continue
                if k == "Checked" and not __v: continue
                if k == "Stored" and not __v: continue
                if k == "AvoidRC" and __v: continue
                if k == "noDetails": continue
                if isinstance(__v,Persistence.Persistence): __v = __v.values()
                if callable(__v): __text = self._missing%__v.__name__+__text
                if isinstance(__v,dict):
                    for val in __v.values():
                        if callable(val): __text = self._missing%val.__name__+__text
                # Widen numpy print options so repr(__v) is not truncated.
                numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
                __text += "'%s':%s, "%(k,repr(__v))
                numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            # NOTE(review): the result of rstrip() is discarded here (strings
            # are immutable) — presumably "__text = __text.rstrip(...)" was
            # intended; confirm against upstream.
            __text.rstrip(", ").rstrip()
            if __text[-2:] == "{}": return # Supprime les *Debug et les variables
            self._addLine(__text)
    def _extract(self, __multilines="", __object=None):
        "Transformation un enregistrement en une commande individuelle"
        # Execute the DCT text, find the resulting dictionary holding
        # 'AlgorithmParameters' in locals(), then rebuild the set(...) calls.
        __multilines = __multilines.replace("\r\n","\n")
        self._objdata = None
        __getlocals = locals()
        for k in __getlocals:
            if 'AlgorithmParameters' in __getlocals[k] and type(__getlocals[k]) is dict:
                self._objdata = __getlocals[k]
        if self._objdata is None:
            raise ValueError("Impossible to load given content as a ADAO DCT one (no 'AlgorithmParameters' key found).")
        for k in self._objdata:
            # Observer entries carry the "_<n>" suffix added by _append.
            if 'Observer_' in k:
                __command = k.split('_',1)[0]
                __arguments = ["%s = %s"%(k,repr(v)) for k,v in self._objdata[k].items()]
                __commands.append( "set( Concept='%s', %s )"%(__command, ", ".join(__arguments)))
        __commands.sort() # Pour commencer par 'AlgorithmParameters'
class _SCDViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas SCD (Study Config Dictionary/Cas->SCD)
    """
    # NOTE(review): this listing appears to have lost some statements
    # ("__text" initialisations, "else:" branches, a "__local.pop" body and
    # some "if bool(__v):" guards); confirm against the upstream ADAO source.
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        # Emit the fixed SCD preamble: study identification, observers and
        # default input/output variable configuration.
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        self._addLine("# -*- coding: utf-8 -*-")
        self._addLine("#\n# Input for ADAO converter to YACS\n#")
        self._addLine("from numpy import array, matrix")
        self._addLine("study_config = {}")
        self._addLine("study_config['StudyType'] = 'ASSIMILATION_STUDY'")
        self._addLine("study_config['Name'] = '%s'"%self._name)
        self._addLine("observers = {}")
        self._addLine("study_config['Observers'] = observers")
        self._addLine("inputvariables_config = {}")
        self._addLine("inputvariables_config['Order'] =['adao_default']")
        self._addLine("inputvariables_config['adao_default'] = -1")
        self._addLine("study_config['InputVariables'] = inputvariables_config")
        self._addLine("outputvariables_config = {}")
        self._addLine("outputvariables_config['Order'] = ['adao_default']")
        self._addLine("outputvariables_config['adao_default'] = -1")
        self._addLine("study_config['OutputVariables'] = outputvariables_config")
        if __content is not None:
            for command in __content:
                self._append(*command)
    def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
        "Transformation d'une commande individuelle en un enregistrement"
        # Normalize the command name to the bare concept name.
        if __command == "set": __command = __local["Concept"]
        else: __command = __command.replace("set", "", 1)
        if __command in (None, 'execute', 'executePythonScheme', 'executeYACSScheme', 'get', 'Name'):
        elif __command in ['Debug', 'setDebug']:
            __text  = "#\nstudy_config['Debug'] = '1'"
        elif __command in ['NoDebug', 'setNoDebug']:
            __text  = "#\nstudy_config['Debug'] = '0'"
        elif __command in ['Observer', 'setObserver']:
            # One observers[...] sub-dictionary per observed variable; the
            # node type is String/Script, templates being expanded to strings.
            __obs   = __local['Variable']
            self._numobservers += 1
            __text += "observers['%s'] = {}\n"%__obs
            if __local['String'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, __local['String'])
            if __local['Script'] is not None:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'Script')
                __text += "observers['%s']['Script'] = \"%s\"\n"%(__obs, __local['Script'])
            if __local['Template'] is not None and __local['Template'] in Templates.ObserverTemplates:
                __text += "observers['%s']['nodetype'] = '%s'\n"%(__obs, 'String')
                __text += "observers['%s']['String'] = \"\"\"%s\"\"\"\n"%(__obs, Templates.ObserverTemplates[__local['Template']])
            if __local['Info'] is not None:
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __local['Info'])
                __text += "observers['%s']['info'] = \"\"\"%s\"\"\"\n"%(__obs, __obs)
            __text += "observers['%s']['number'] = %s"%(__obs, self._numobservers)
        elif __local is not None: # __keys is not None and
            # Generic concept: emit a "<concept>_config" dictionary with
            # Type/From/Data keys derived from the call keywords.
            numpy.set_printoptions(precision=15,threshold=1000000,linewidth=1000*15)
            __text += "%s_config = {}\n"%__command
            if 'self' in __local: __local.pop('self')
            __to_be_removed = []
            for __k,__v in __local.items():
                if __v is None: __to_be_removed.append(__k)
            for __k in __to_be_removed:
            for __k,__v in __local.items():
                if __k == "Concept": continue
                if __k in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix','OneFunction','ThreeFunctions'] and 'Script' in __local: continue
                if __k == 'Algorithm':
                    __text += "study_config['Algorithm'] = %s\n"%(repr(__v))
                elif __k == 'Script':
                    # Script-based input: determine the effective data kind,
                    # expanding one-function/three-function wrappers into the
                    # dedicated "<concept>_ScriptWith..." sub-dictionaries.
                    __v = "'"+repr(__v)+"'"
                    for __lk in ['ScalarSparseMatrix','DiagonalSparseMatrix','Matrix']:
                        if __lk in __local and __local[__lk]: __k = __lk
                    if __command == "AlgorithmParameters": __k = "Dict"
                    if 'OneFunction' in __local and __local['OneFunction']:
                        __text += "%s_ScriptWithOneFunction = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithOneFunction['DifferentialIncrement'] = 1e-06\n"%(__command,)
                        __text += "%s_ScriptWithOneFunction['CenteredFiniteDifference'] = 0\n"%(__command,)
                        __f = 'ScriptWithOneFunction'
                        __v = '%s_ScriptWithOneFunction'%(__command,)
                    if 'ThreeFunctions' in __local and __local['ThreeFunctions']:
                        __text += "%s_ScriptWithFunctions = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Function'] = ['Direct', 'Tangent', 'Adjoint']\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script'] = {}\n"%(__command,)
                        __text += "%s_ScriptWithFunctions['Script']['Direct'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Tangent'] = %s\n"%(__command,__v)
                        __text += "%s_ScriptWithFunctions['Script']['Adjoint'] = %s\n"%(__command,__v)
                        __f = 'ScriptWithFunctions'
                        __v = '%s_ScriptWithFunctions'%(__command,)
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,__f)
                    __text += "%s_config['Data'] = %s\n"%(__command,__v)
                    __text = __text.replace("''","'")
                elif __k in ('Stored', 'Checked'):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                elif __k in ('AvoidRC', 'noDetails'):
                        __text += "%s_config['%s'] = '%s'\n"%(__command,__k,int(bool(__v)))
                    if __k == 'Parameters': __k = "Dict"
                    if isinstance(__v,Persistence.Persistence): __v = __v.values()
                    if callable(__v): __text = self._missing%__v.__name__+__text
                    if isinstance(__v,dict):
                        for val in __v.values():
                            if callable(val): __text = self._missing%val.__name__+__text
                    __text += "%s_config['Type'] = '%s'\n"%(__command,__k)
                    __text += "%s_config['From'] = '%s'\n"%(__command,'String')
                    __text += "%s_config['Data'] = \"\"\"%s\"\"\"\n"%(__command,repr(__v))
            __text += "study_config['%s'] = %s_config"%(__command,__command)
            numpy.set_printoptions(precision=8,threshold=1000,linewidth=75)
            self._switchoff = True
        if __text is not None: self._addLine(__text)
            self._switchoff = False
    def _finalize(self, *__args):
        "Enregistrement du final"
        # Append the default user post-analysis (print of the Analysis).
        self.__loadVariablesByScript()
        self._addLine("Analysis_config = {}")
        self._addLine("Analysis_config['From'] = 'String'")
        self._addLine("Analysis_config['Data'] = \"\"\"import numpy")
        self._addLine("xa=numpy.ravel(ADD.get('Analysis')[-1])")
        self._addLine("print 'Analysis:',xa\"\"\"")
        self._addLine("study_config['UserPostAnalysis'] = Analysis_config")
    def __loadVariablesByScript(self):
        # Execute the generated SCD text to recover "study_config", then
        # complete the Algorithm/AlgorithmParameters entries when one of the
        # two was only given indirectly (e.g. through a script).
        __ExecVariables = {} # Necessaire pour recuperer la variable
        exec("\n".join(self._lineSerie), __ExecVariables)
        study_config = __ExecVariables['study_config']
        # Pour Python 3 : self.__hasAlgorithm = bool(study_config['Algorithm'])
        if 'Algorithm' in study_config:
            self.__hasAlgorithm = True
            self.__hasAlgorithm = False
        if not self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" in study_config['AlgorithmParameters'] and \
                "Data" in study_config['AlgorithmParameters'] and \
                study_config['AlgorithmParameters']['From'] == 'Script':
            __asScript = study_config['AlgorithmParameters']['Data']
            __var = ImportFromScript(__asScript).getvalue( "Algorithm" )
            __text = "#\nstudy_config['Algorithm'] = '%s'"%(__var,)
            self._addLine(__text)
        if self.__hasAlgorithm and \
                "AlgorithmParameters" in study_config and \
                isinstance(study_config['AlgorithmParameters'], dict) and \
                "From" not in study_config['AlgorithmParameters'] and \
                "Data" not in study_config['AlgorithmParameters']:
            __text += "AlgorithmParameters_config['Type'] = 'Dict'\n"
            __text += "AlgorithmParameters_config['From'] = 'String'\n"
            __text += "AlgorithmParameters_config['Data'] = '{}'\n"
            self._addLine(__text)
class _XMLViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas XML

    Placeholder: the XML view is not implemented yet, so any attempt to
    instantiate this viewer raises NotImplementedError.
    """
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        raise NotImplementedError()
class _YACSViewer(GenericCaseViewer):
    """
    Etablissement des commandes d'un cas YACS (Cas->SCD->YACS)
    """
    # NOTE(review): some statements of this listing appear elided (file
    # removal, "else:" branches, the final close/return); confirm against
    # the upstream ADAO source.
    def __init__(self, __name="", __objname="case", __content=None, __object=None):
        "Initialisation et enregistrement de l'entete"
        # The YACS view is produced by first building an SCD view, then
        # converting it; _append is therefore delegated to the SCD viewer.
        GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
        self.__internalSCD = _SCDViewer(__name, __objname, __content, __object)
        self._append = self.__internalSCD._append
    def dump(self, __filename=None, __convertSCDinMemory=True):
        "Restitution normalisée des commandes"
        self.__internalSCD._finalize()
        # A target file is mandatory: YACS output is an XML scheme on disk.
        if __filename is None:
            raise ValueError("A file name has to be given for YACS XML output.")
        # Conversion needs a full SALOME/ADAO environment.
        if not PlatformInfo.has_salome or \
            not PlatformInfo.has_adao:
            raise ImportError("\n\n"+\
                "Unable to get SALOME or ADAO environnement variables.\n"+\
                "Please load the right environnement before trying to use it.\n")
        elif __convertSCDinMemory:
            # In-memory conversion: dump the SCD text and feed it directly
            # to the schema creator.
            __file    = os.path.abspath(__filename)
            __SCDdump = self.__internalSCD.dump()
            if os.path.isfile(__file) or os.path.islink(__file):
            from daYacsSchemaCreator.run import create_schema_from_content
            create_schema_from_content(__SCDdump, __file)
            # File-based conversion: write an intermediate "_SCD.py" file and
            # run the external AdaoYacsSchemaCreator on it.
            __file    = os.path.abspath(__filename)
            __SCDfile = __file[:__file.rfind(".")] + '_SCD.py'
            __SCDdump = self.__internalSCD.dump(__SCDfile)
            if os.path.isfile(__file) or os.path.islink(__file):
            __converterExe = os.path.join(os.environ["ADAO_ROOT_DIR"], "bin/salome", "AdaoYacsSchemaCreator.py")
            __args = ["python", __converterExe, __SCDfile, __file]
            __p = subprocess.Popen(__args)
            (__stdoutdata, __stderrdata) = __p.communicate()
            os.remove(__SCDfile)
        # Whatever the conversion path, the target file must now exist.
        if not os.path.exists(__file):
            __msg  = "An error occured during the ADAO YACS Schema build for\n"
            __msg += "the target output file:\n"
            __msg += "  %s\n"%__file
            __msg += "See errors details in your launching terminal log.\n"
            raise ValueError(__msg)
        __fid = open(__file,"r")
        __text = __fid.read()
2195 # ==============================================================================
class ImportFromScript(object):
    """
    Obtention d'une variable nommee depuis un fichier script importe

    The script's directory is temporarily prepended to sys.path so that
    __import__ can load it as a module; named variables are then read back
    through getvalue()/getstring().
    """
    def __init__(self, __filename=None):
        "Verifie l'existence et importe le script"
        if __filename is None:
            raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
        if not os.path.isfile(__filename):
            raise ValueError("The file containing the variable to be imported doesn't seem to exist. Please check the file. The given file name is:\n  \"%s\""%__filename)
        if os.path.dirname(__filename) != '':
            sys.path.insert(0, os.path.dirname(__filename))
            __basename = os.path.basename(__filename)
        else:
            __basename = __filename
        # Fix: the original "rstrip('.py')" strips any trailing '.', 'p' or
        # 'y' characters (e.g. "happy.py" -> "ha"), breaking the import of
        # such scripts; remove the exact ".py" extension instead.
        if __basename.endswith(".py"):
            __basename = __basename[:-3]
        self.__basename = __basename
        self.__scriptfile = __import__(__basename, globals(), locals(), [])
        self.__scriptstring = open(__filename,'r').read()
    def getvalue(self, __varname=None, __synonym=None ):
        "Renvoie la variable demandee"
        if __varname is None:
            raise ValueError("The name of the variable to be read has to be specified. Please check the content of the file and the syntax.")
        if not hasattr(self.__scriptfile, __varname):
            # Fall back on the synonym when the primary name is absent.
            if __synonym is None:
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__basename)+".py",__varname))
            elif not hasattr(self.__scriptfile, __synonym):
                raise ValueError("The imported script file \"%s\" doesn't contain the mandatory variable \"%s\" to be read. Please check the content of the file and the syntax."%(str(self.__basename)+".py",__synonym))
            else:
                return getattr(self.__scriptfile, __synonym)
        else:
            return getattr(self.__scriptfile, __varname)
    def getstring(self):
        "Renvoie le script complet"
        return self.__scriptstring
2231 # ==============================================================================
def CostFunction3D(_x,
                   _Hm  = None,  # Pour simuler Hm(x) : HO["Direct"].appliedTo
                   _HmX = None,  # Simulation déjà faite de Hm(x)
                   _arg = None,  # Arguments supplementaires pour Hm, sous la forme d'un tuple
                   _SIV = False, # A résorber pour la 8.0
                   _SSC = [],    # self._parameters["StoreSupplementaryCalculations"]
                   _nPS = 0,     # nbPreviousSteps
                   _QM  = "DA",  # QualityMeasure
                   _SSV = {},    # Entrée et/ou sortie : self.StoredVariables
                   _fRt = False, # Restitue ou pas la sortie étendue
                   _sSc = True,  # Stocke ou pas les SSC
                  ):
    """
    Fonction-coût générale utile pour les algorithmes statiques/3D : 3DVAR, BLUE
    et dérivés, Kalman et dérivés, LeastSquares, SamplingTest, PSO, SA, Tabu,
    DFO, QuantileRegression

    NOTE(review): several parameter lines of the original signature (the ones
    carrying _Y, _Xb and the _BI/_RI inverse covariances referenced below)
    and some statements (operator checks, returns) appear elided in this
    listing; confirm against the upstream ADAO source.

    NOTE(review): the mutable defaults _SSC=[] and _SSV={} are shared across
    calls — here _SSV is deliberately used as an in/out argument, so do not
    "fix" this without checking the callers.
    """
    # Let stored variables accept "append" as an alias of "store".
    for k in ["CostFunctionJ",
              "SimulatedObservationAtCurrentOptimum",
              "SimulatedObservationAtCurrentState",
             ]:
        if hasattr(_SSV[k],"store"):
            _SSV[k].append = _SSV[k].store # Pour utiliser "append" au lieu de "store"
    # Current state as a column matrix.
    _X  = numpy.asmatrix(numpy.ravel( _x )).T
    if _SIV or "CurrentState" in _SSC or "CurrentOptimum" in _SSC:
        _SSV["CurrentState"].append( _X )
    # Use the precomputed simulation when available, otherwise apply _Hm.
    if _HmX is not None:
        raise ValueError("COSTFUNCTION3D Operator has to be defined.")
    _HX = _Hm( _X, *_arg )
    _HX = numpy.asmatrix(numpy.ravel( _HX )).T
    if "SimulatedObservationAtCurrentState" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentState"].append( _HX )
    # Compute the quality measure Jb + Jo according to _QM.
    if numpy.any(numpy.isnan(_HX)):
        Jb, Jo, J = numpy.nan, numpy.nan, numpy.nan
        _Y = numpy.asmatrix(numpy.ravel( _Y )).T
        if _QM in ["AugmentedWeightedLeastSquares", "AWLS", "AugmentedPonderatedLeastSquares", "APLS", "DA"]:
            if _BI is None or _RI is None:
                raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
            _Xb = numpy.asmatrix(numpy.ravel( _Xb )).T
            Jb  = 0.5 * (_X - _Xb).T * _BI * (_X - _Xb)
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["WeightedLeastSquares", "WLS", "PonderatedLeastSquares", "PLS"]:
            raise ValueError("Observation error covariance matrix has to be properly defined!")
            Jo  = 0.5 * (_Y - _HX).T * _RI * (_Y - _HX)
        elif _QM in ["LeastSquares", "LS", "L2"]:
            Jo  = 0.5 * (_Y - _HX).T * (_Y - _HX)
        elif _QM in ["AbsoluteValue", "L1"]:
            Jo  = numpy.sum( numpy.abs(_Y - _HX) )
        elif _QM in ["MaximumError", "ME"]:
            Jo  = numpy.max( numpy.abs(_Y - _HX) )
        elif _QM in ["QR", "Null"]:
            raise ValueError("Unknown asked quality measure!")
        J   = float( Jb ) + float( Jo )
    # Store the cost function values and, on request, the optimum so far.
    _SSV["CostFunctionJb"].append( Jb )
    _SSV["CostFunctionJo"].append( Jo )
    _SSV["CostFunctionJ" ].append( J )
    if "IndexOfOptimum" in _SSC or \
       "CurrentOptimum" in _SSC or \
       "SimulatedObservationAtCurrentOptimum" in _SSC:
        IndexMin = numpy.argmin( _SSV["CostFunctionJ"][_nPS:] ) + _nPS
    if "IndexOfOptimum" in _SSC:
        _SSV["IndexOfOptimum"].append( IndexMin )
    if "CurrentOptimum" in _SSC:
        _SSV["CurrentOptimum"].append( _SSV["CurrentState"][IndexMin] )
    if "SimulatedObservationAtCurrentOptimum" in _SSC:
        _SSV["SimulatedObservationAtCurrentOptimum"].append( _SSV["SimulatedObservationAtCurrentState"][IndexMin] )
    if _QM in ["QR"]: # Pour le QuantileRegression
2346 # ==============================================================================
if __name__ == "__main__":
    # Simple self-identification banner when the module is run as a script.
    banner = '\n AUTODIAGNOSTIC \n'
    print(banner)