Minor print update
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index b1ea9b9a0c1e287d11866e1c3ddfb05273dda865..5f46aa72d3a175730b3ab70ccfd2619753d81068 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2018 EDF R&D
+# Copyright (C) 2008-2019 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -40,25 +40,31 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
             listval  = [
+                "Analysis",
                 "APosterioriCorrelations",
                 "APosterioriCovariance",
                 "APosterioriStandardDeviations",
                 "APosterioriVariances",
                 "BMA",
-                "OMA",
-                "OMB",
-                "CurrentState",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
                 "Innovation",
+                "MahalanobisConsistency",
+                "OMA",
+                "OMB",
                 "SigmaBck2",
                 "SigmaObs2",
-                "MahalanobisConsistency",
-                "SimulationQuantiles",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
+                "SimulationQuantiles",
                 ]
             )
         self.defineRequiredParameter(
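
The list above enumerates the supplementary quantities that can be requested from the Blue algorithm in addition to the analysis itself (the enclosing keyword, declared just above this hunk, is presumably StoreSupplementaryCalculations). As a rough, hedged sketch of how the newly added names would be requested through the ADAO textual interface (adaoBuilder) — the vectors, matrices and exact keyword spellings below are illustrative assumptions, not taken from this commit:

    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set('AlgorithmParameters', Algorithm='Blue', Parameters={
        "StoreSupplementaryCalculations": [
            "CurrentOptimum",                        # added in this revision
            "CostFunctionJAtCurrentOptimum",         # added in this revision
            "SimulatedObservationAtCurrentOptimum",  # added in this revision
        ],
    })
    case.set('Background',          Vector=[0., 1., 2.])
    case.set('BackgroundError',     ScalarSparseMatrix=1.)
    case.set('Observation',         Vector=[0.5, 1.5, 2.5])
    case.set('ObservationError',    ScalarSparseMatrix=1.)
    case.set('ObservationOperator', Matrix="1 0 0;0 1 0;0 0 1")
    case.execute()
    print(case.get("CurrentOptimum")[-1])  # for Blue, identical to the final Analysis
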
@@ -124,29 +130,34 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la matrice de gain et de l'analyse
         # --------------------------------------------
         if Y.size <= Xb.size:
-            _A = R + Hm * B * Ha
+            _A = R + numpy.dot(Hm, B * Ha)
             _u = numpy.linalg.solve( _A , d )
             Xa = Xb + B * Ha * _u
         else:
-            _A = BI + Ha * RI * Hm
-            _u = numpy.linalg.solve( _A , Ha * RI * d )
+            _A = BI + numpy.dot(Ha, RI * Hm)
+            _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * d) )
             Xa = Xb + _u
         self.StoredVariables["Analysis"].store( Xa.A1 )
         #
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
             self._toStore("SigmaObs2") or \
             self._toStore("MahalanobisConsistency") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
             self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum") or \
             self._toStore("SimulationQuantiles"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("MahalanobisConsistency"):
             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo  = float( 0.5 * oma.T * RI * oma )
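
The two branches of the analysis step above are the dual forms of the BLUE estimator: when the observation vector is not larger than the state (Y.size <= Xb.size) the code solves a system of the size of the observation space, otherwise a system of the size of the state space; both yield the same analysis Xa = Xb + K d. A minimal self-contained numpy check of that equivalence (array sizes and covariances below are illustrative placeholders, not values from the module):

    import numpy as np

    rng = np.random.default_rng(0)
    n, p = 4, 3                        # state and observation dimensions (illustrative)
    H  = rng.standard_normal((p, n))   # linear observation operator (Hm); H.T plays the role of Ha
    B  = 0.5 * np.eye(n)               # background error covariance
    R  = 0.1 * np.eye(p)               # observation error covariance
    xb = rng.standard_normal(n)        # background state
    y  = rng.standard_normal(p)        # observations
    d  = y - H @ xb                    # innovation

    # Observation-space form (branch taken when Y.size <= Xb.size): solve a p x p system
    u   = np.linalg.solve(R + H @ B @ H.T, d)
    xa1 = xb + B @ H.T @ u

    # State-space form (branch taken when Y.size > Xb.size): solve an n x n system
    BI, RI = np.linalg.inv(B), np.linalg.inv(R)
    xa2 = xb + np.linalg.solve(BI + H.T @ RI @ H, H.T @ RI @ d)

    assert np.allclose(xa1, xa2)       # both formulations give the same BLUE analysis
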
@@ -154,13 +165,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calcul de la covariance d'analyse
         # ---------------------------------
         if self._toStore("APosterioriCovariance") or \
-           self._toStore("SimulationQuantiles"):
-            if   (Y.size <= Xb.size): K  = B * Ha * (R + Hm * B * Ha).I
-            elif (Y.size >  Xb.size): K = (BI + Ha * RI * Hm).I * Ha * RI
+            self._toStore("SimulationQuantiles"):
+            if   (Y.size <= Xb.size): K  = B * Ha * (R + numpy.dot(Hm, B * Ha)).I
+            elif (Y.size >  Xb.size): K = (BI + numpy.dot(Ha, RI * Hm)).I * Ha * RI
             A = B - K * Hm * B
             if min(A.shape) != max(A.shape):
                 raise ValueError("The %s a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape)))
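
For reference, the cost function terms stored just above and the gain and a posteriori covariance computed here are the standard BLUE expressions; in the code's notation (Hm the linear observation operator, Ha its adjoint, BI = B^{-1}, RI = R^{-1}):

    J(x_a) \;=\; J_b + J_o
           \;=\; \tfrac{1}{2}\,(x_a - x_b)^{\mathrm T} B^{-1} (x_a - x_b)
           \;+\; \tfrac{1}{2}\,(y - H_m x_a)^{\mathrm T} R^{-1} (y - H_m x_a),

    K \;=\; B\,H_a\,(R + H_m B H_a)^{-1} \;=\; (B^{-1} + H_a R^{-1} H_m)^{-1} H_a R^{-1},
    \qquad
    A \;=\; B - K\,H_m\,B .
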
@@ -177,6 +191,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if self._toStore("BMA"):
@@ -219,6 +235,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
@@ -227,4 +245,4 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
 
 # ==============================================================================
 if __name__ == "__main__":
-    print('\n AUTODIAGNOSTIC \n')
+    print('\n AUTODIAGNOSTIC\n')