SALOME platform Git repositories - modules/adao.git/commitdiff
Minor documentation and source corrections for observers
author     Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Tue, 13 Oct 2015 14:07:34 +0000 (16:07 +0200)
committer  Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Tue, 13 Oct 2015 14:07:34 +0000 (16:07 +0200)
doc/en/ref_algorithm_DerivativeFreeOptimization.rst
doc/fr/ref_algorithm_DerivativeFreeOptimization.rst
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daAlgorithms/Blue.py
src/daComposant/daAlgorithms/DerivativeFreeOptimization.py

index 2152a067112b15765bc0c3cac370bcfd29e19067..86ff57d0ad47a6230968aa22cdce0c2f03c43aed 100644 (file)
@@ -167,10 +167,12 @@ The options of the algorithm are the following:
     calculations or memory consumptions. The default is a void list, none of
     these variables being calculated and stored by default. The possible names
     are in the following list: ["CurrentState", "CostFunctionJ",
-    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
-    "SimulatedObservationAtOptimum"].
+    "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+    "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+    "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
 
-    Example : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+    Example : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
 
 Information and variables available at the end of the algorithm
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
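
For orientation, these supplementary names are requested through the algorithm's ``Parameters`` dictionary when building a case. A minimal sketch follows, assuming the adaoBuilder textual interface; the vectors, covariances and step count are purely illustrative and do not come from this commit:

    # Minimal sketch (assumed adaoBuilder API, illustrative values) of how the
    # extended "StoreSupplementaryCalculations" names can be requested and read.
    from numpy import array
    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters',
        Algorithm  = 'DerivativeFreeOptimization',
        Parameters = {
            "MaximumNumberOfSteps"           : 50,
            "StoreSupplementaryCalculations" : ["BMA", "OMB", "CurrentOptimum"],
            },
        )
    case.set( 'Background',          Vector = [0., 1., 2.] )
    case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    case.set( 'Observation',         Vector = array([0.5, 1.5, 2.5]) )
    case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    case.set( 'ObservationOperator', Matrix = '1 0 0;0 2 0;0 0 3' )
    case.execute()

    print( case.get("CurrentOptimum")[-1] )  # best state found so far
    print( case.get("OMB")[-1] )             # observation minus background
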
index 48e2f3770cc5daba81ef7ab4464b9da7b646055a..d5710166a8c96e188740b550d9e7ff481ec8a5d6 100644 (file)
@@ -170,10 +170,12 @@ The options of the algorithm are the following:
     calculations or memory consumptions. The default is a void list, none of
     these variables being calculated and stored by default. The possible names
     are in the following list: ["CurrentState", "CostFunctionJ",
-    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
-    "SimulatedObservationAtOptimum"].
+    "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+    "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+    "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
 
-    Example : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+    Example : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
 
 Information and variables available at the end of the algorithm
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
index f63f1902f057a1f8e2861ff5041130551cb1dd80..05cf2e969a7ae788bf547ff7f0df57ba5ae51dbb 100644 (file)
@@ -134,21 +134,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         Hm = HO["Direct"].appliedTo
         Ha = HO["Adjoint"].appliedInXTo
         #
-        # Optional use of a precomputed H(Xb) vector
-        # -------------------------------------------
+        # Optional use of a precomputed H(Xb) vector (at no extra cost)
+        # ---------------------------------------------------------------
         if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
             HXb = HO["AppliedToX"]["HXb"]
         else:
             HXb = Hm( Xb )
         HXb = numpy.asmatrix(numpy.ravel( HXb )).T
-        #
-        # Computation of the innovation
-        # -----------------------------
         if Y.size != HXb.size:
             raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
         if max(Y.shape) != max(HXb.shape):
             raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
-        d  = Y - HXb
         #
         # Precomputation of the B and R inverses
         # ---------------------------------------
@@ -170,10 +166,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
             if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
-                self.StoredVariables["InnovationAtCurrentState"].store( _HX )
+                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+            #
             Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
             Jo  = 0.5 * _Innovation.T * RI * _Innovation
             J   = float( Jb ) + float( Jo )
+            #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -205,10 +203,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Starting point of the optimization: Xini = Xb
         # ----------------------------------------------
-        if type(Xb) is type(numpy.matrix([])):
-            Xini = Xb.A1.tolist()
-        else:
-            Xini = list(Xb)
+        Xini = numpy.ravel(Xb)
         #
         # Minimization of the cost functional
         # ------------------------------------
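
The replacement of the explicit type dispatch by ``numpy.ravel(Xb)`` works because, with recent NumPy versions, ravel flattens a ``numpy.matrix`` and a plain sequence alike into a 1-D ndarray, which the minimizer accepts as a starting point. A quick check:

    # numpy.ravel gives a flat 1-D array for either input type, which is why
    # the explicit matrix/list dispatch could be removed.
    import numpy

    print( numpy.ravel(numpy.matrix([[1.], [2.], [3.]])) )  # -> [1. 2. 3.]
    print( numpy.ravel([1., 2., 3.]) )                      # -> [1. 2. 3.]
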
@@ -332,6 +327,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Supplementary calculations and/or storage
         # ------------------------------------------
+        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
+            "OMB" in self._parameters["StoreSupplementaryCalculations"] or \
+            "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \
+            "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+            d  = Y - HXb
         if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
index 0ad3195e07839f1e9f75fcab7bcf803de5d20ef9..0fe119afee2b91ca304f5856a450d63cc5471fbf 100644 (file)
@@ -76,20 +76,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ----------------------
         self.setParameters(Parameters)
         #
-        # Observation operator
-        # ---------------------
+        # Operators
+        # ----------
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(Xb)
         Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape
         #
-        # Optional use of a precomputed H(Xb) vector
-        # -------------------------------------------
+        # Optional use of a precomputed H(Xb) vector (at no extra cost)
+        # ---------------------------------------------------------------
         if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
             HXb = HO["AppliedToX"]["HXb"]
         else:
             HXb = Hm * Xb
         HXb = numpy.asmatrix(numpy.ravel( HXb )).T
+        if Y.size != HXb.size:
+            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+        if max(Y.shape) != max(HXb.shape):
+            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
         #
         # Precomputation of the B and R inverses
         # ---------------------------------------
@@ -98,10 +102,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Computation of the innovation
         # ------------------------------
-        if Y.size != HXb.size:
-            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
-        if max(Y.shape) != max(HXb.shape):
-            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
         d  = Y - HXb
         #
         # Calcul de la matrice de gain et de l'analyse
@@ -131,9 +131,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._parameters["StoreInternalVariables"] or \
            "CostFunctionJ"                 in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency"        in self._parameters["StoreSupplementaryCalculations"]:
+            #
             Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
             Jo  = 0.5 * oma.T * RI * oma
             J   = float( Jb ) + float( Jo )
+            #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
index e2f900fe8f7ddf951325fe4e525958c6a9501b87..69a76f1e47723a76c0d0e265ce00a81330b49f3e 100644 (file)
@@ -83,7 +83,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = [],
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval  = ["CurrentState", "CostFunctionJ", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
+            listval  = ["CurrentState", "CostFunctionJ", "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum", "InnovationAtCurrentState", "BMA", "OMA", "OMB", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
@@ -96,8 +96,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Control parameters
         # -------------------
         self.setParameters(Parameters)
-#         self.setParameterValue("StoreInternalVariables",True)
-#         print self._parameters["StoreInternalVariables"]
         #
         # Operators
         # ----------
@@ -115,34 +113,53 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["CurrentState"].store( _X )
             _HX = Hm( _X )
             _HX = numpy.asmatrix(numpy.ravel( _HX )).T
-            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+            _Innovation = Y - _HX
+            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
+               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
+            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
             #
             if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                 if BI is None or RI is None:
                     raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
-                Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
             elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.
-                Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb  = 0.
-                Jo  = 0.5 * (Y - _HX).T * (Y - _HX)
+                Jo  = 0.5 * (_Innovation).T * (_Innovation)
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb  = 0.
-                Jo  = numpy.sum( numpy.abs(Y - _HX) )
+                Jo  = numpy.sum( numpy.abs(_Innovation) )
             elif QualityMeasure in ["MaximumError","ME"]:
                 Jb  = 0.
-                Jo  = numpy.max( numpy.abs(Y - _HX) )
+                Jo  = numpy.max( numpy.abs(_Innovation) )
             #
             J   = float( Jb ) + float( Jo )
             #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
+            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
+            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
+            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
+            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
+                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             return J
         #
         # Starting point of the optimization: Xini = Xb
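
The block added above tracks the best state seen so far by taking an argmin over the stored values of J, offset by nbPreviousSteps so that evaluations stored before this run are ignored. A self-contained sketch of the same bookkeeping, with hypothetical names standing in for the StoredVariables machinery:

    # Self-contained sketch of the running-optimum bookkeeping added above
    # (hypothetical lists; in ADAO this lives in self.StoredVariables).
    import numpy

    cost_J          = []   # one J value per evaluated state
    current_states  = []   # the matching states
    nbPreviousSteps = 0    # evaluations stored before this run started

    def record(state, J):
        current_states.append(state)
        cost_J.append(J)
        # index of the best state seen so far in this run
        index_min = numpy.argmin(cost_J[nbPreviousSteps:]) + nbPreviousSteps
        return index_min, current_states[index_min]

    record(numpy.array([0.0, 1.0]), 3.0)
    record(numpy.array([0.1, 0.9]), 1.5)
    idx, best = record(numpy.array([0.2, 1.2]), 2.5)
    print(idx, best)   # -> 1 [0.1 0.9]
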
@@ -190,10 +207,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )
         #
+        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
+           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
+            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
+            else:
+                HXa = Hm(Xa)
+        #
+        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["OMB"].store( numpy.ravel(d) )
+        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
         if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
         if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(Hm(Xa)) )
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
         self._post_run()
         return 0
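
The OMA/SimulatedObservationAtOptimum block above avoids one extra evaluation of the observation operator when a suitable simulated observation is already stored. The fallback order can be sketched as follows, with hypothetical dict-based stand-ins for the stored-variable access:

    # Hedged sketch of the HXa selection fallback (hypothetical stand-ins for
    # self.StoredVariables and the requested-names list).
    def simulated_observation_at_optimum(stored, requested, Hm, Xa, index_min):
        if "SimulatedObservationAtCurrentState" in requested:
            # reuse the simulated observation stored at the optimal iterate
            return stored["SimulatedObservationAtCurrentState"][index_min]
        if "SimulatedObservationAtCurrentOptimum" in requested:
            # the last stored running optimum is the final optimum
            return stored["SimulatedObservationAtCurrentOptimum"][-1]
        # otherwise pay one extra (possibly costly) operator evaluation
        return Hm(Xa)
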