# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2024 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

__doc__ = """
    Non Linear Least Squares
"""
__author__ = "Jean-Philippe ARGAUD"

import numpy, scipy, scipy.optimize, scipy.version
from daCore.PlatformInfo import vt, vfloat

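# vt and vfloat are helpers from daCore.PlatformInfo: vt makes version strings
# comparable, and vfloat reduces a 1x1 matrix product to a plain float
# (description inferred from their use below).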
# ==============================================================================
def ecwnlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
    """
    Correction
    """
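    # Note: this correction step minimizes the observation-only quadratic cost
    #     J(x) = 0.5 * (Y - H(x))^T * R^{-1} * (Y - H(x)).
    # There is no background term, so B, U and CM are accepted for interface
    # uniformity but are not used in the body below.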
    #
    # Initialisations
    # ---------------
    Hm = HO["Direct"].appliedTo
    Ha = HO["Adjoint"].appliedInXTo
    #
    if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
        HXb = numpy.asarray(Hm( Xb, HO["AppliedInX"]["HXb"] ))
    else:
        HXb = numpy.asarray(Hm( Xb ))
    HXb = HXb.reshape((-1, 1))
    if Y.size != HXb.size:
        raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size, HXb.size))  # noqa: E501
    if max(Y.shape) != max(HXb.shape):
        raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape, HXb.shape))  # noqa: E501
    #
    RI = R.getI()
    if selfA._parameters["Minimizer"] == "LM":
        RdemiI = R.choleskyI()
    #
    Xini = selfA._parameters["InitializationPoint"]
    #
    # Definition of the cost function
    # -------------------------------

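    # CostFunction evaluates J(x) = Jb + Jo with Jb kept at 0 (no background
    # term in NLLS) and Jo = 0.5 * (Y - H(x))^T * R^{-1} * (Y - H(x)), storing
    # the requested diagnostics at each evaluation. It reads nbPreviousSteps,
    # which is assigned further below before the minimizer is started, so the
    # closure is only evaluated once that variable exists.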
    def CostFunction(x):
        _X  = numpy.asarray(x).reshape((-1, 1))
        if selfA._parameters["StoreInternalVariables"] or \
                selfA._toStore("CurrentState") or \
                selfA._toStore("CurrentOptimum"):
            selfA.StoredVariables["CurrentState"].store( _X )
        _HX = numpy.asarray(Hm( _X )).reshape((-1, 1))
        _Innovation = Y - _HX
        if selfA._toStore("SimulatedObservationAtCurrentState") or \
                selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
        if selfA._toStore("InnovationAtCurrentState"):
            selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
        #
        Jb  = 0.
        Jo  = vfloat( 0.5 * _Innovation.T * (RI * _Innovation) )
        J   = Jb + Jo
        #
        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
        selfA.StoredVariables["CostFunctionJb"].store( Jb )
        selfA.StoredVariables["CostFunctionJo"].store( Jo )
        selfA.StoredVariables["CostFunctionJ" ].store( J )
        if selfA._toStore("IndexOfOptimum") or \
                selfA._toStore("CurrentOptimum") or \
                selfA._toStore("CostFunctionJAtCurrentOptimum") or \
                selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
                selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
                selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        if selfA._toStore("IndexOfOptimum"):
            selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
        if selfA._toStore("CurrentOptimum"):
            selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
        if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )  # noqa: E501
        if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
            selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )  # noqa: E501
        if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
            selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )  # noqa: E501
        if selfA._toStore("CostFunctionJAtCurrentOptimum"):
            selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )  # noqa: E501
        return J

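    # GradientOfCostFunction returns the gradient of J, namely
    # -H'(x)^T * R^{-1} * (Y - H(x)), computed through the adjoint operator.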
    def GradientOfCostFunction(x):
        _X      = numpy.asarray(x).reshape((-1, 1))
        _HX     = numpy.asarray(Hm( _X )).reshape((-1, 1))
        GradJb  = 0.
        GradJo  = - Ha( (_X, RI * (Y - _HX)) )
        GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
        return GradJ

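    # CostFunctionLM is the residual form used by the Levenberg-Marquardt
    # minimizer: scipy.optimize.leastsq expects the residual vector, here
    # R^{-1/2} * (Y - H(x)) built with the inverse Cholesky factor of R, whose
    # sum of squares equals 2 * Jo. The scalar costs are still stored for the
    # usual diagnostics.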
    def CostFunctionLM(x):
        _X  = numpy.ravel( x ).reshape((-1, 1))
        _HX = Hm( _X ).reshape((-1, 1))
        _Innovation = Y - _HX
        Jb  = 0.
        Jo  = vfloat( 0.5 * _Innovation.T * (RI * _Innovation) )
        J   = Jb + Jo
        if selfA._parameters["StoreInternalVariables"] or \
                selfA._toStore("CurrentState"):
            selfA.StoredVariables["CurrentState"].store( _X )
        selfA.StoredVariables["CostFunctionJb"].store( Jb )
        selfA.StoredVariables["CostFunctionJo"].store( Jo )
        selfA.StoredVariables["CostFunctionJ" ].store( J )
        #
        return numpy.ravel( RdemiI * _Innovation )

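    # GradientOfCostFunctionLM returns the Jacobian of the residual vector,
    # -R^{-1/2} * H'(x), built from the tangent linear operator as a matrix.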
    def GradientOfCostFunctionLM(x):
        _X      = x.reshape((-1, 1))
        return - RdemiI * HO["Tangent"].asMatrix( _X )
    #
    # Minimization of the functional
    # ------------------------------
    nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
    #
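    # The L-BFGS-B case selects a bundled wrapper matched to the installed
    # SciPy version (the lbfgsb*hlt modules shipped with ADAO, presumably to
    # keep the stopping behaviour consistent across SciPy releases), and falls
    # back to the stock scipy.optimize for any other version.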
    if selfA._parameters["Minimizer"] == "LBFGSB":
        if vt("0.19")  <= vt(scipy.version.version) <= vt("1.4.99"):
            import daAlgorithms.Atoms.lbfgsb14hlt as optimiseur
        elif vt("1.5.0") <= vt(scipy.version.version) <= vt("1.7.99"):
            import daAlgorithms.Atoms.lbfgsb17hlt as optimiseur
        elif vt("1.8.0") <= vt(scipy.version.version) <= vt("1.8.99"):
            import daAlgorithms.Atoms.lbfgsb18hlt as optimiseur
        elif vt("1.9.0") <= vt(scipy.version.version) <= vt("1.10.99"):
            import daAlgorithms.Atoms.lbfgsb19hlt as optimiseur
        elif vt("1.11.0") <= vt(scipy.version.version) <= vt("1.11.99"):
            import daAlgorithms.Atoms.lbfgsb111hlt as optimiseur
        elif vt("1.12.0") <= vt(scipy.version.version) <= vt("1.12.99"):
            import daAlgorithms.Atoms.lbfgsb112hlt as optimiseur
        elif vt("1.13.0") <= vt(scipy.version.version) <= vt("1.13.99"):
            import daAlgorithms.Atoms.lbfgsb113hlt as optimiseur
        elif vt("1.14.0") <= vt(scipy.version.version) <= vt("1.14.99"):
            import daAlgorithms.Atoms.lbfgsb114hlt as optimiseur
        else:
            import scipy.optimize as optimiseur
        Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
            func        = CostFunction,
            x0          = Xini,
            fprime      = GradientOfCostFunction,
            args        = (),
            bounds      = selfA._parameters["Bounds"],
            maxfun      = selfA._parameters["MaximumNumberOfIterations"] - 1,
            factr       = selfA._parameters["CostDecrementTolerance"] * 1.e14,
            pgtol       = selfA._parameters["ProjectedGradientTolerance"],
            iprint      = selfA._parameters["optiprint"],
        )
        # nfeval = Informations['funcalls']
        # rc     = Informations['warnflag']
    elif selfA._parameters["Minimizer"] == "TNC":
        Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
            func        = CostFunction,
            x0          = Xini,
            fprime      = GradientOfCostFunction,
            args        = (),
            bounds      = selfA._parameters["Bounds"],
            maxfun      = selfA._parameters["MaximumNumberOfIterations"],
            pgtol       = selfA._parameters["ProjectedGradientTolerance"],
            ftol        = selfA._parameters["CostDecrementTolerance"],
            messages    = selfA._parameters["optmessages"],
        )
    elif selfA._parameters["Minimizer"] == "CG":
        Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
            f           = CostFunction,
            x0          = Xini,
            fprime      = GradientOfCostFunction,
            args        = (),
            maxiter     = selfA._parameters["MaximumNumberOfIterations"],
            gtol        = selfA._parameters["GradientNormTolerance"],
            disp        = selfA._parameters["optdisp"],
            full_output = True,
        )
    elif selfA._parameters["Minimizer"] == "NCG":
        Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
            f           = CostFunction,
            x0          = Xini,
            fprime      = GradientOfCostFunction,
            args        = (),
            maxiter     = selfA._parameters["MaximumNumberOfIterations"],
            avextol     = selfA._parameters["CostDecrementTolerance"],
            disp        = selfA._parameters["optdisp"],
            full_output = True,
        )
    elif selfA._parameters["Minimizer"] == "BFGS":
        Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
            f           = CostFunction,
            x0          = Xini,
            fprime      = GradientOfCostFunction,
            args        = (),
            maxiter     = selfA._parameters["MaximumNumberOfIterations"],
            gtol        = selfA._parameters["GradientNormTolerance"],
            disp        = selfA._parameters["optdisp"],
            full_output = True,
        )
    elif selfA._parameters["Minimizer"] == "LM":
        Minimum, cov_x, infodict, mesg, rc = scipy.optimize.leastsq(
            func        = CostFunctionLM,
            x0          = Xini,
            Dfun        = GradientOfCostFunctionLM,
            args        = (),
            ftol        = selfA._parameters["CostDecrementTolerance"],
            maxfev      = selfA._parameters["MaximumNumberOfIterations"],
            gtol        = selfA._parameters["GradientNormTolerance"],
            full_output = True,
        )
        # nfeval = infodict['nfev']
    else:
        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
    #
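    # Index of the best stored iterate (smallest J) among this run's steps,
    # used below to recover the optimal state and the associated diagnostics.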
    IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
    #
    # Correction to work around a TNC bug on the returned Minimum
    # -----------------------------------------------------------
    if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
        Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
    #
    Xa = Minimum
    if __storeState:
        selfA._setInternalState("Xn", Xa)
    # --------------------------
    #
    selfA.StoredVariables["Analysis"].store( Xa )
    #
    if selfA._toStore("OMA") or \
            selfA._toStore("InnovationAtCurrentAnalysis") or \
            selfA._toStore("SimulatedObservationAtOptimum"):
        if selfA._toStore("SimulatedObservationAtCurrentState"):
            HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
        elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
        else:
            HXa = Hm( Xa )
        oma = Y - numpy.asarray(HXa).reshape((-1, 1))
    #
    # Additional computations and/or storage
    # --------------------------------------
    if selfA._toStore("Innovation") or \
            selfA._toStore("OMB"):
        Innovation  = Y - HXb
    if selfA._toStore("Innovation"):
        selfA.StoredVariables["Innovation"].store( Innovation )
    if selfA._toStore("BMA"):
        selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
    if selfA._toStore("OMA"):
        selfA.StoredVariables["OMA"].store( oma )
    if selfA._toStore("InnovationAtCurrentAnalysis"):
        selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( oma )
    if selfA._toStore("OMB"):
        selfA.StoredVariables["OMB"].store( Innovation )
    if selfA._toStore("SimulatedObservationAtBackground"):
        selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
    if selfA._toStore("SimulatedObservationAtOptimum"):
        selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
    #
    return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')