Update internal settings
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index 5c2e7618194953d8c2aaee0aba1e618d5cf1e990..f93ace8d941dbefa06ea360060ea3c88466d91b3 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2017 EDF R&D
+# Copyright (C) 2008-2019 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -45,20 +45,25 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "APosterioriStandardDeviations",
                 "APosterioriVariances",
                 "BMA",
-                "OMA",
-                "OMB",
-                "CurrentState",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
                 "Innovation",
+                "MahalanobisConsistency",
+                "OMA",
+                "OMB",
                 "SigmaBck2",
                 "SigmaObs2",
-                "MahalanobisConsistency",
-                "SimulationQuantiles",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
+                "SimulationQuantiles",
                 ]
             )
         self.defineRequiredParameter(
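
The list above enumerates the names accepted by the "StoreSupplementaryCalculations" parameter of the Blue algorithm; this change sorts it alphabetically and adds "CurrentOptimum" and the "...AtCurrentOptimum" outputs. A minimal usage sketch, assuming the adaoBuilder textual interface described in the ADAO documentation (every numerical value below is a placeholder, not taken from this commit):

    # Sketch only: assumes the adaoBuilder textual interface; data are placeholders.
    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set( 'Background',          Vector = [0., 1., 2.] )
    case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    case.set( 'Observation',         Vector = [0.5, 1.5, 2.5] )
    case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    case.set( 'ObservationOperator', Matrix = '1 0 0;0 1 0;0 0 1' )
    case.set( 'AlgorithmParameters',
        Algorithm  = 'Blue',
        Parameters = {
            "StoreSupplementaryCalculations": [
                "CurrentOptimum",
                "CostFunctionJAtCurrentOptimum",
                "SimulatedObservationAtCurrentOptimum",
                ],
            },
        )
    case.execute()
    print( case.get('CurrentOptimum')[-1] )
    print( case.get('CostFunctionJAtCurrentOptimum')[-1] )

Each requested name becomes a stored series that can be read back with case.get(name), the last value being the one at the optimum.
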
@@ -124,43 +129,51 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Gain matrix and analysis computation
         # ------------------------------------
         if Y.size <= Xb.size:
-            _A = R + Hm * B * Ha
+            _A = R + numpy.dot(Hm, B * Ha)
             _u = numpy.linalg.solve( _A , d )
             Xa = Xb + B * Ha * _u
         else:
-            _A = BI + Ha * RI * Hm
-            _u = numpy.linalg.solve( _A , Ha * RI * d )
+            _A = BI + numpy.dot(Ha, RI * Hm)
+            _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * d) )
             Xa = Xb + _u
         self.StoredVariables["Analysis"].store( Xa.A1 )
         #
         # Cost function computation
         # -------------------------
         if self._parameters["StoreInternalVariables"] or \
-           "CostFunctionJ"                      in self._parameters["StoreSupplementaryCalculations"] or \
-           "OMA"                                in self._parameters["StoreSupplementaryCalculations"] or \
-           "SigmaObs2"                          in self._parameters["StoreSupplementaryCalculations"] or \
-           "MahalanobisConsistency"             in self._parameters["StoreSupplementaryCalculations"] or \
-           "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
-           "SimulatedObservationAtOptimum"      in self._parameters["StoreSupplementaryCalculations"] or \
-           "SimulationQuantiles"                in self._parameters["StoreSupplementaryCalculations"]:
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
+            self._toStore("OMA") or \
+            self._toStore("SigmaObs2") or \
+            self._toStore("MahalanobisConsistency") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
+            self._toStore("SimulatedObservationAtCurrentState") or \
+            self._toStore("SimulatedObservationAtOptimum") or \
+            self._toStore("SimulationQuantiles"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-           "CostFunctionJ"                 in self._parameters["StoreSupplementaryCalculations"] or \
-           "MahalanobisConsistency"        in self._parameters["StoreSupplementaryCalculations"]:
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
+            self._toStore("MahalanobisConsistency"):
             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo  = float( 0.5 * oma.T * RI * oma )
             J   = Jb + Jo
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Analysis covariance computation
         # -------------------------------
-        if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \
-           "SimulationQuantiles"   in self._parameters["StoreSupplementaryCalculations"]:
-            if   (Y.size <= Xb.size): K  = B * Ha * (R + Hm * B * Ha).I
-            elif (Y.size >  Xb.size): K = (BI + Ha * RI * Hm).I * Ha * RI
+        if self._toStore("APosterioriCovariance") or \
+            self._toStore("SimulationQuantiles"):
+            if   (Y.size <= Xb.size): K  = B * Ha * (R + numpy.dot(Hm, B * Ha)).I
+            elif (Y.size >  Xb.size): K = (BI + numpy.dot(Ha, RI * Hm)).I * Ha * RI
             A = B - K * Hm * B
             if min(A.shape) != max(A.shape):
                 raise ValueError("The %s a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape)))
@@ -175,24 +188,26 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Supplementary calculations and/or storage
         # -----------------------------------------
-        if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
-        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
+        if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
-        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
-        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("OMA"):
             self.StoredVariables["OMA"].store( numpy.ravel(oma) )
-        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("OMB"):
             self.StoredVariables["OMB"].store( numpy.ravel(d) )
-        if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SigmaObs2"):
             TraceR = R.trace(Y.size)
             self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(oma)).T)) ) / TraceR )
-        if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SigmaBck2"):
             self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) )
-        if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("MahalanobisConsistency"):
             self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) )
-        if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SimulationQuantiles"):
             nech = self._parameters["NumberOfSamplesForQuantiles"]
             YfQ  = None
             for i in range(nech):
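
Among the diagnostics stored in the hunk above, "MahalanobisConsistency" is 2*J(Xa)/d.size. As a standalone, hedged illustration (arbitrary small matrices and sizes, not taken from ADAO), its average is close to 1 when B and R are consistent with the actual background and observation error statistics:

    # Illustrative Monte Carlo check of the consistency indicator 2*J(xa)/p;
    # all sizes and covariances below are arbitrary, not from the commit.
    import numpy

    rng = numpy.random.default_rng(0)
    n, p, nsample = 4, 6, 5000
    B = numpy.eye(n)
    R = 0.25 * numpy.eye(p)
    H = rng.standard_normal((p, n))

    values = []
    for _ in range(nsample):
        xt = rng.standard_normal(n)                         # "true" state
        xb = xt + rng.multivariate_normal(numpy.zeros(n), B)
        y  = H @ xt + rng.multivariate_normal(numpy.zeros(p), R)
        d  = y - H @ xb                                     # innovation
        K  = B @ H.T @ numpy.linalg.inv(R + H @ B @ H.T)    # gain
        xa = xb + K @ d                                     # analysis
        Jb = 0.5 * (xa - xb) @ numpy.linalg.inv(B) @ (xa - xb)
        Jo = 0.5 * (y - H @ xa) @ numpy.linalg.inv(R) @ (y - H @ xa)
        values.append(2.0 * (Jb + Jo) / p)
    print(numpy.mean(values))   # expected to be close to 1.0
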
@@ -215,11 +230,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 if YQ is None: YQ = YfQ[:,indice]
                 else:          YQ = numpy.hstack((YQ,YfQ[:,indice]))
             self.StoredVariables["SimulationQuantiles"].store( YQ )
-        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SimulatedObservationAtBackground"):
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
-        if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
-        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
         self._post_run(HO)
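
For reference, a compact, self-contained numpy transcription of the BLUE formulas used in this file (primal branch, Y.size <= Xb.size): innovation, gain-based analysis, the two cost terms, and the a posteriori covariance A = B - K*Hm*B. The data are purely illustrative; this is a sketch of the algebra, not of the ADAO implementation, which works with numpy.matrix objects and covariance classes.

    # Sketch of the BLUE algebra (primal form); illustrative data only.
    import numpy

    Xb = numpy.array([1.0, 2.0, 3.0])            # background
    B  = numpy.diag([1.0, 1.0, 1.0])             # background error covariance
    Hm = numpy.array([[1.0, 0.0, 0.0],
                      [0.0, 1.0, 0.0]])          # linear observation operator
    Ha = Hm.T                                    # its adjoint
    Y  = numpy.array([1.5, 1.8])                 # observations
    R  = numpy.diag([0.5, 0.5])                  # observation error covariance

    d  = Y - Hm @ Xb                             # innovation
    _A = R + Hm @ (B @ Ha)                       # R + Hm B Ha
    _u = numpy.linalg.solve(_A, d)
    Xa = Xb + B @ Ha @ _u                        # analysis

    BI, RI = numpy.linalg.inv(B), numpy.linalg.inv(R)
    oma = Y - Hm @ Xa                            # observation minus analysis
    Jb  = float(0.5 * (Xa - Xb) @ BI @ (Xa - Xb))
    Jo  = float(0.5 * oma @ RI @ oma)
    J   = Jb + Jo                                # cost function at the analysis

    K = B @ Ha @ numpy.linalg.inv(R + Hm @ B @ Ha)   # gain
    A = B - K @ Hm @ B                               # a posteriori covariance
    print(Xa, J, numpy.diag(A))
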