Minor internal modifications and bounds corrections

- Update the copyright years to 2008-2021.
- Require non-negative values (minval = 0.) for the cost-decrement and gradient stopping tolerances.
- Sort the "StoreSupplementaryCalculations" list alphabetically and add the "Analysis" and "CurrentIterationNumber" entries.
- Add an "InitializationPoint" parameter so the optimization can start from a user-supplied state instead of the background Xb.
- Tag the algorithm as "Optimization", "NonLinear", "Variational".
- Store the current iteration number at each cost function evaluation, pass the full operator set (U, HO, EM, CM) to _pre_run, and drop the TNC-specific workaround on the returned minimum.
- Select the L-BFGS-B implementation according to the installed SciPy version (the bundled lbfgsbhlt module for 0.19 through 1.1.0, the standard scipy.optimize otherwise).
diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index cc73276c56f09539f7b65d553a447c10da9af0e6..aad66fc737b92a4dac264863d8522024f8a2e278 100644
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2019 EDF R&D
+# Copyright (C) 2008-2021 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -22,7 +22,7 @@
 
 import logging
 from daCore import BasicObjects
-import numpy, scipy.optimize
+import numpy, scipy.optimize, scipy.version
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
@@ -47,6 +47,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = 1.e-7,
             typecast = float,
             message  = "Diminution relative minimale du coût lors de l'arrêt",
+            minval   = 0.,
             )
         self.defineRequiredParameter(
             name     = "ProjectedGradientTolerance",
@@ -60,6 +61,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = 1.e-05,
             typecast = float,
             message  = "Maximum des composantes du gradient lors de l'arrêt",
+            minval   = 0.,
             )
         self.defineRequiredParameter(
             name     = "StoreInternalVariables",
@@ -73,40 +75,48 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
             listval  = [
+                "Analysis",
                 "BMA",
-                "OMA",
-                "OMB",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
-                "CurrentState",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentIterationNumber",
                 "CurrentOptimum",
+                "CurrentState",
                 "IndexOfOptimum",
                 "Innovation",
                 "InnovationAtCurrentState",
-                "CostFunctionJAtCurrentOptimum",
-                "CostFunctionJbAtCurrentOptimum",
-                "CostFunctionJoAtCurrentOptimum",
+                "OMA",
+                "OMB",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
-                "SimulatedObservationAtCurrentOptimum",
                 ]
             )
         self.defineRequiredParameter( # Pas de type
             name     = "Bounds",
             message  = "Liste des valeurs de bornes",
             )
+        self.defineRequiredParameter(
+            name     = "InitializationPoint",
+            typecast = numpy.ravel,
+            message  = "État initial imposé (par défaut, c'est l'ébauche si None)",
+            )
         self.requireInputArguments(
             mandatory= ("Xb", "Y", "HO", "R"),
             )
+        self.setAttributes(tags=(
+            "Optimization",
+            "NonLinear",
+            "Variational",
+            ))
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
-        #
-        # Correction pour pallier a un bug de TNC sur le retour du Minimum
-        if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
-            self.setParameterValue("StoreInternalVariables",True)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Opérateurs
         # ----------
@@ -152,6 +162,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
             J   = Jb + Jo
             #
+            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -168,12 +179,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
             if self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
-            if self._toStore("CostFunctionJAtCurrentOptimum"):
-                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("CostFunctionJbAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
             if self._toStore("CostFunctionJoAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+            if self._toStore("CostFunctionJAtCurrentOptimum"):
+                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             return J
         #
         def GradientOfCostFunction(x):
@@ -213,7 +224,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
-        Xini = numpy.ravel(Xb)
+        Xini = self._parameters["InitializationPoint"]
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
@@ -221,8 +232,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         if self._parameters["Minimizer"] == "LBFGSB":
             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
-            import lbfgsbhlt
-            Minimum, J_optimal, Informations = lbfgsbhlt.fmin_l_bfgs_b(
+            if "0.19" <= scipy.version.version <= "1.1.0":
+                import lbfgsbhlt as optimiseur
+            else:
+                import scipy.optimize as optimiseur
+            Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
                 func        = CostFunction,
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
@@ -342,4 +356,4 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
 
 # ==============================================================================
 if __name__ == "__main__":
-    print('\n AUTODIAGNOSTIC \n')
+    print('\n AUTODIAGNOSTIC\n')
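
For reference, a minimal standalone sketch (not the commit's code) of the SciPy version gate added in the L-BFGS-B branch above. The commit compares scipy.version.version as strings, which is a lexicographic ordering; parsing the version into an integer tuple, as this sketch does with a hypothetical version_tuple helper, gives a slightly more robust comparison:

    # Sketch only: reproduce the version-dependent choice of optimizer.
    import scipy.version

    def version_tuple(text):
        """Turn a version string like '1.1.0' into the comparable tuple (1, 1, 0)."""
        return tuple(int(part) for part in text.split(".")[:3] if part.isdigit())

    if (0, 19) <= version_tuple(scipy.version.version) <= (1, 1, 0):
        # For this SciPy range, ADAO substitutes its bundled lbfgsbhlt module.
        import lbfgsbhlt as optimiseur
    else:
        # Outside it, the standard SciPy routine is used directly.
        import scipy.optimize as optimiseur

    # Both modules expose the same entry point used by the algorithm:
    # Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
    #     func=CostFunction, x0=Xini, fprime=GradientOfCostFunction, ...)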
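The new "InitializationPoint" parameter decouples the optimizer's starting point from the background: Xini was previously hard-coded to numpy.ravel(Xb) and can now be supplied by the user (the background is still used when it is None). A hedged usage sketch through ADAO's textual interface, assuming the usual adaoBuilder entry points; the vectors, bounds and observation operator below are illustrative, not taken from the commit:

    # Illustrative data only; the inputs provided match the commit's
    # mandatory arguments ("Xb", "Y", "HO", "R").
    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set('AlgorithmParameters',
        Algorithm  = 'NonLinearLeastSquares',
        Parameters = {
            'Bounds'                         : [[0., 10.], [0., 10.]],  # per-variable [min, max]
            'InitializationPoint'            : [5., 5.],                # start here, not at Xb
            'StoreSupplementaryCalculations' : ['CurrentIterationNumber', 'CostFunctionJ'],
            },
        )
    case.set('Background',          Vector = [0., 0.])              # Xb
    case.set('Observation',         Vector = [2., 3.])              # Y
    case.set('ObservationError',    ScalarSparseMatrix = 1.)        # R
    case.set('ObservationOperator', Matrix = [[1., 0.], [0., 1.]])  # HO
    case.execute()
    print(case.get('Analysis')[-1])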