diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index a77b09f..aad66fc 100644
--- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2019 EDF R&D
+# Copyright (C) 2008-2021 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -83,6 +83,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             "CostFunctionJbAtCurrentOptimum",
             "CostFunctionJo",
             "CostFunctionJoAtCurrentOptimum",
+            "CurrentIterationNumber",
             "CurrentOptimum",
             "CurrentState",
             "IndexOfOptimum",
@@ -100,16 +101,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             name = "Bounds",
             message = "Liste des valeurs de bornes",
             )
+        self.defineRequiredParameter(
+            name = "InitializationPoint",
+            typecast = numpy.ravel,
+            message = "État initial imposé (par défaut, c'est l'ébauche si None)",
+            )
         self.requireInputArguments(
             mandatory= ("Xb", "Y", "HO", "R"),
             )
+        self.setAttributes(tags=(
+            "Optimization",
+            "NonLinear",
+            "Variational",
+            ))
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
-        #
-        # Correction pour pallier a un bug de TNC sur le retour du Minimum
-        if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
-            self.setParameterValue("StoreInternalVariables",True)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Opérateurs
         # ----------
@@ -155,6 +162,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
                 J = Jb + Jo
             #
+            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -216,7 +224,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
-        Xini = numpy.ravel(Xb)
+        Xini = self._parameters["InitializationPoint"]
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
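
Note on the change above: the patch adds two user-visible features to the NonLinearLeastSquares algorithm, an "InitializationPoint" parameter that replaces the background Xb as the optimizer's starting point (per its message, the background is still used when the parameter is left at None), and a "CurrentIterationNumber" variable stored at each cost-function evaluation. What follows is a minimal usage sketch through the ADAO textual interface (adaoBuilder), not part of the patch; the toy operator, vectors and parameter values are invented for illustration, and it is assumed that "CurrentIterationNumber" has to be requested via "StoreSupplementaryCalculations" to be retrievable afterwards.

# Hypothetical example, not part of the patch: all numerical values are illustrative only.
import numpy
from adao import adaoBuilder

case = adaoBuilder.New()
case.set( 'AlgorithmParameters',
    Algorithm  = 'NonLinearLeastSquares',
    Parameters = {
        "Minimizer"                      : "LBFGSB",
        "MaximumNumberOfSteps"           : 50,
        "InitializationPoint"            : [1., 1., 1.],  # new: starting point used instead of Xb
        "StoreSupplementaryCalculations" : ["CurrentIterationNumber", "CurrentState"],
        },
    )
case.set( 'Background',          Vector = [0., 0., 0.] )                  # Xb (mandatory)
case.set( 'Observation',         Vector = numpy.array([0.5, 1.5, 2.5]) )  # Y  (mandatory)
case.set( 'ObservationError',    ScalarSparseMatrix = 1. )                # R  (mandatory)
case.set( 'ObservationOperator', Matrix = "1 0 0 ; 0 2 0 ; 0 0 3" )       # HO (mandatory)
case.execute()

print( "Analysis..............:", case.get('Analysis')[-1] )
print( "Iterations performed..:", case.get('CurrentIterationNumber')[-1] )

Because the new parameter is typecast with numpy.ravel, the initialization point may be given as a list, tuple or array of any shape; it is flattened before being handed to the optimizer.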