X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FBlue.py;h=5f46aa72d3a175730b3ab70ccfd2619753d81068;hb=671c7e1161d628a4dd90aa1eae9188e02ac35835;hp=2f6c2d87eebee8afc820a7c5f0255e0193f22e0e;hpb=41a17018c0720c4270debd7de5778088156fe935;p=modules%2Fadao.git diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index 2f6c2d8..5f46aa7 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -1,6 +1,6 @@ -#-*-coding:iso-8859-1-*- +# -*- coding: utf-8 -*- # -# Copyright (C) 2008-2017 EDF R&D +# Copyright (C) 2008-2019 EDF R&D # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public @@ -32,14 +32,40 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): name = "StoreInternalVariables", default = False, typecast = bool, - message = "Stockage des variables internes ou intermédiaires du calcul", + message = "Stockage des variables internes ou intermédiaires du calcul", ) self.defineRequiredParameter( name = "StoreSupplementaryCalculations", default = [], typecast = tuple, - message = "Liste de calculs supplémentaires à stocker et/ou effectuer", - listval = ["APosterioriCorrelations", "APosterioriCovariance", "APosterioriStandardDeviations", "APosterioriVariances", "BMA", "OMA", "OMB", "CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "Innovation", "SigmaBck2", "SigmaObs2", "MahalanobisConsistency", "SimulationQuantiles", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"] + message = "Liste de calculs supplémentaires à stocker et/ou effectuer", + listval = [ + "Analysis", + "APosterioriCorrelations", + "APosterioriCovariance", + "APosterioriStandardDeviations", + "APosterioriVariances", + "BMA", + "CostFunctionJ", + "CostFunctionJAtCurrentOptimum", + "CostFunctionJb", + "CostFunctionJbAtCurrentOptimum", + "CostFunctionJo", + "CostFunctionJoAtCurrentOptimum", + "CurrentOptimum", + "CurrentState", + "Innovation", + "MahalanobisConsistency", + "OMA", + "OMB", + "SigmaBck2", + "SigmaObs2", + "SimulatedObservationAtBackground", + "SimulatedObservationAtCurrentOptimum", + "SimulatedObservationAtCurrentState", + "SimulatedObservationAtOptimum", + "SimulationQuantiles", + ] ) self.defineRequiredParameter( name = "Quantiles", @@ -52,13 +78,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.defineRequiredParameter( name = "SetSeed", typecast = numpy.random.seed, - message = "Graine fixée pour le générateur aléatoire", + message = "Graine fixée pour le générateur aléatoire", ) self.defineRequiredParameter( name = "NumberOfSamplesForQuantiles", default = 100, typecast = int, - message = "Nombre d'échantillons simulés pour le calcul des quantiles", + message = "Nombre d'échantillons simulés pour le calcul des quantiles", minval = 1, ) self.defineRequiredParameter( @@ -68,25 +94,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): message = "Type de simulation pour l'estimation des quantiles", listval = ["Linear", "NonLinear"] ) + self.requireInputArguments( + mandatory= ("Xb", "Y", "HO", "R", "B"), + ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() - # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) + self._pre_run(Parameters, Xb, Y, R, B, Q) # - # Opérateurs - # ---------- Hm = HO["Tangent"].asMatrix(Xb) Hm = 
Hm.reshape(Y.size,Xb.size) # ADAO & check shape Ha = HO["Adjoint"].asMatrix(Xb) Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape # - # Utilisation éventuelle d'un vecteur H(Xb) précalculé (sans cout) - # ---------------------------------------------------------------- - if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"): - HXb = HO["AppliedToX"]["HXb"] + # Utilisation éventuelle d'un vecteur H(Xb) précalculé + # ---------------------------------------------------- + if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]: + HXb = HO["AppliedInX"]["HXb"] else: HXb = Hm * Xb HXb = numpy.asmatrix(numpy.ravel( HXb )).T @@ -95,7 +118,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) # - # Précalcul des inversions de B et R + # Précalcul des inversions de B et R # ---------------------------------- BI = B.getI() RI = R.getI() @@ -107,45 +130,51 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul de la matrice de gain et de l'analyse # -------------------------------------------- if Y.size <= Xb.size: - _A = R + Hm * B * Ha + _A = R + numpy.dot(Hm, B * Ha) _u = numpy.linalg.solve( _A , d ) Xa = Xb + B * Ha * _u else: - _A = BI + Ha * RI * Hm - _u = numpy.linalg.solve( _A , Ha * RI * d ) + _A = BI + numpy.dot(Ha, RI * Hm) + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * d) ) Xa = Xb + _u self.StoredVariables["Analysis"].store( Xa.A1 ) # - # Calcul de la fonction coût + # Calcul de la fonction coût # -------------------------- if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("OMA") or \ + self._toStore("SigmaObs2") or \ + self._toStore("MahalanobisConsistency") or \ + self._toStore("SimulatedObservationAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtOptimum") or \ + self._toStore("SimulationQuantiles"): HXa = Hm * Xa oma = Y - HXa if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: - # + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("MahalanobisConsistency"): Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) ) Jo = float( 0.5 * oma.T * RI * oma ) J = Jb + Jo - # 
self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) + self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb ) + self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo ) + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J ) # # Calcul de la covariance d'analyse # --------------------------------- - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: - if (Y.size <= Xb.size): K = B * Ha * (R + Hm * B * Ha).I - elif (Y.size > Xb.size): K = (BI + Ha * RI * Hm).I * Ha * RI + if self._toStore("APosterioriCovariance") or \ + self._toStore("SimulationQuantiles"): + if (Y.size <= Xb.size): K = B * Ha * (R + numpy.dot(Hm, B * Ha)).I + elif (Y.size > Xb.size): K = (BI + numpy.dot(Ha, RI * Hm)).I * Ha * RI A = B - K * Hm * B if min(A.shape) != max(A.shape): raise ValueError("The %s a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape))) @@ -158,27 +187,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(self._name,)) self.StoredVariables["APosterioriCovariance"].store( A ) # - # Calculs et/ou stockages supplémentaires + # Calculs et/ou stockages supplémentaires # --------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): + self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) ) + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(oma) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaObs2"): TraceR = R.trace(Y.size) self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(oma)).T)) ) / TraceR ) - if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaBck2"): self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) ) - if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("MahalanobisConsistency"): self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) ) - if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: - Qtls = map(float, self._parameters["Quantiles"]) + if self._toStore("SimulationQuantiles"): nech = self._parameters["NumberOfSamplesForQuantiles"] YfQ = None for i in range(nech): 
@@ -195,17 +225,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): YfQ = numpy.hstack((YfQ,Yr)) YfQ.sort(axis=-1) YQ = None - for quantile in Qtls: - if not (0. <= quantile <= 1.): continue - indice = int(nech * quantile - 1./nech) + for quantile in self._parameters["Quantiles"]: + if not (0. <= float(quantile) <= 1.): continue + indice = int(nech * float(quantile) - 1./nech) if YQ is None: YQ = YfQ[:,indice] else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentOptimum"): + self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) ) + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) @@ -213,4 +245,4 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' + print('\n AUTODIAGNOSTIC\n')
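# ==============================================================================
# Illustration only, not part of the patch above: the hunks modify ADAO's BLUE
# algorithm, which computes the analysis Xa = Xb + K d with gain
# K = B Ha (R + Hm B Ha)^-1 (or its dual form when Y.size > Xb.size), the cost
# function J = Jb + Jo, the a posteriori covariance A = B - K Hm B, and
# Monte-Carlo quantiles of the simulated observations. The sketch below is a
# minimal, self-contained NumPy rendition of those formulas, assuming dense
# arrays and a linear observation operator; the names blue_analysis and
# simulation_quantiles are illustrative and are not ADAO API, and numpy.quantile
# is used in place of the rank-based index of the code above.

import numpy as np

def blue_analysis(xb, y, H, B, R):
    """Return the BLUE analysis xa, the a posteriori covariance A and J = Jb + Jo."""
    d  = y - H @ xb                                  # innovation
    K  = B @ H.T @ np.linalg.inv(R + H @ B @ H.T)    # gain matrix (Y.size <= Xb.size branch)
    xa = xb + K @ d                                  # analysis state
    A  = B - K @ H @ B                               # a posteriori covariance
    Jb = 0.5 * float((xa - xb) @ np.linalg.inv(B) @ (xa - xb))        # background term
    Jo = 0.5 * float((y - H @ xa) @ np.linalg.inv(R) @ (y - H @ xa))  # observation term
    return xa, A, Jb + Jo

def simulation_quantiles(H, xa, A, quantiles, nsamples=100, rng=None):
    """Monte-Carlo quantiles of H(X) for X drawn around xa with covariance A (linear case)."""
    rng = np.random.default_rng() if rng is None else rng
    draws = np.array([H @ rng.multivariate_normal(xa, A) for _ in range(nsamples)])
    return np.quantile(draws, quantiles, axis=0)

# Small usage example with a directly observed two-component state.
xb = np.array([1.0, 2.0])
y  = np.array([1.5, 1.5])
H  = np.eye(2)
B  = 0.5 * np.eye(2)
R  = 0.1 * np.eye(2)
xa, A, J = blue_analysis(xb, y, H, B, R)
yq = simulation_quantiles(H, xa, A, quantiles=[0.25, 0.75])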