# -*- coding: utf-8 -*-
#
-# Copyright (C) 2008-2019 EDF R&D
+# Copyright (C) 2008-2021 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
import logging
from daCore import BasicObjects
-import numpy, scipy.optimize
+import numpy, scipy.optimize, scipy.version
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
default = 1.e-7,
typecast = float,
message = "Diminution relative minimale du coût lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "ProjectedGradientTolerance",
default = 1.e-05,
typecast = float,
message = "Maximum des composantes du gradient lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "StoreInternalVariables",
typecast = tuple,
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
listval = [
+ "Analysis",
"BMA",
- "OMA",
- "OMB",
"CostFunctionJ",
+ "CostFunctionJAtCurrentOptimum",
"CostFunctionJb",
+ "CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
- "CurrentState",
+ "CostFunctionJoAtCurrentOptimum",
+ "CurrentIterationNumber",
"CurrentOptimum",
+ "CurrentState",
"IndexOfOptimum",
"Innovation",
"InnovationAtCurrentState",
- "CostFunctionJAtCurrentOptimum",
- "CostFunctionJbAtCurrentOptimum",
- "CostFunctionJoAtCurrentOptimum",
+ "OMA",
+ "OMB",
"SimulatedObservationAtBackground",
+ "SimulatedObservationAtCurrentOptimum",
"SimulatedObservationAtCurrentState",
"SimulatedObservationAtOptimum",
- "SimulatedObservationAtCurrentOptimum",
]
)
self.defineRequiredParameter( # Pas de type
name = "Bounds",
message = "Liste des valeurs de bornes",
)
+ self.defineRequiredParameter(
+ name = "InitializationPoint",
+ typecast = numpy.ravel,
+ message = "État initial imposé (par défaut, c'est l'ébauche si None)",
+ )
self.requireInputArguments(
mandatory= ("Xb", "Y", "HO", "R"),
)
+ self.setAttributes(tags=(
+ "Optimization",
+ "NonLinear",
+ "Variational",
+ ))
def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
- self._pre_run(Parameters, Xb, Y, R, B, Q)
- #
- # Correction pour pallier a un bug de TNC sur le retour du Minimum
- if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
- self.setParameterValue("StoreInternalVariables",True)
+ self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
#
# Opérateurs
# ----------
Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
J = Jb + Jo
#
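+ # The current iteration number is taken as the number of cost function values already stored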
+ self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
if self._toStore("SimulatedObservationAtCurrentOptimum"):
self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
- if self._toStore("CostFunctionJAtCurrentOptimum"):
- self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
if self._toStore("CostFunctionJbAtCurrentOptimum"):
self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
if self._toStore("CostFunctionJoAtCurrentOptimum"):
self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+ if self._toStore("CostFunctionJAtCurrentOptimum"):
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
return J
#
def GradientOfCostFunction(x):
#
- # Point de démarrage de l'optimisation : Xini = Xb
+ # Point de démarrage de l'optimisation (InitializationPoint, par défaut l'ébauche Xb)
# ------------------------------------
- Xini = numpy.ravel(Xb)
+ Xini = self._parameters["InitializationPoint"]
#
# Minimisation de la fonctionnelle
# --------------------------------
#
if self._parameters["Minimizer"] == "LBFGSB":
# Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
- import lbfgsbhlt
- Minimum, J_optimal, Informations = lbfgsbhlt.fmin_l_bfgs_b(
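+ # Select the L-BFGS-B implementation according to the SciPy version: the locally provided lbfgsbhlt variant for the versions below, the standard scipy.optimize one otherwise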
+ if "0.19" <= scipy.version.version <= "1.1.0":
+ import lbfgsbhlt as optimiseur
+ else:
+ import scipy.optimize as optimiseur
+ Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
func = CostFunction,
x0 = Xini,
fprime = GradientOfCostFunction,
# ==============================================================================
if __name__ == "__main__":
- print('\n AUTODIAGNOSTIC \n')
+ print('\n AUTODIAGNOSTIC\n')