# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2024 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import numpy, logging, scipy.optimize
from daCore import BasicObjects, PlatformInfo
from daCore.NumericObjects import ApplyBounds, ForceNumericBounds
from daCore.PlatformInfo import vfloat
lpi = PlatformInfo.PlatformInfo()
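# lpi.has_nlopt indicates whether the optional NLopt bindings can be imported;
# it selects, in run() below, the NLopt or SciPy implementation of each minimizer.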

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = [
                "BOBYQA",
                "COBYLA",
                "NEWUOA",
                "POWELL",
                "SIMPLEX",
                "SUBPLEX",
            ],
        )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfIterations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            oldname  = "MaximumNumberOfSteps",
        )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
        )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
        )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du cout lors de l'arrêt",
        )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = [
                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                "WeightedLeastSquares", "WLS",
                "LeastSquares", "LS", "L2",
                "AbsoluteValue", "L1",
                "MaximumError", "ME", "Linf",
            ],
        )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
        )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
            ]
        )
        self.defineRequiredParameter(  # No type
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
        )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
        )
        self.setAttributes(
            tags=(
                "Optimization",
                "NonLinear",
                "MetaHeuristic",
            ),
            features=(
                "NonLocalOptimization",
                "DerivativeFree",
                "ParallelFree",
                "ConvergenceOnBoth",
            ),
        )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if not lpi.has_nlopt and self._parameters["Minimizer"] not in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.warning(
                "%s Minimization by SIMPLEX is forced because %s "%(self._name, self._parameters["Minimizer"]) + \
                "is unavailable (COBYLA, POWELL are also available)")
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        Hm = HO["Direct"].appliedTo
        #
        BI = B.getI()
        RI = R.getI()

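        # The cost (quality) functional evaluated at each trial state x is
        # J(x) = Jb(x) + Jo(x), where, depending on "QualityCriterion":
        #   AWLS : Jb = 0.5 (x-Xb)' B^-1 (x-Xb), Jo = 0.5 (Y-H(x))' R^-1 (Y-H(x))
        #   WLS  : Jb = 0,                       Jo = 0.5 (Y-H(x))' R^-1 (Y-H(x))
        #   L2   : Jb = 0,                       Jo = 0.5 ||Y-H(x)||^2
        #   L1   : Jb = 0,                       Jo = sum |Y-H(x)|
        #   Linf : Jb = 0,                       Jo = max |Y-H(x)|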
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.ravel( x ).reshape((-1, 1))
            _HX = numpy.ravel( Hm( _X ) ).reshape((-1, 1))
            _Innovation = Y - _HX
            self.StoredVariables["CurrentState"].store( _X )
            if self._toStore("SimulatedObservationAtCurrentState") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares", "AWLS", "DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = vfloat(0.5 * (_X - Xb).T @ (BI @ (_X - Xb)))
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["WeightedLeastSquares", "WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["LeastSquares", "LS", "L2"]:
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ _Innovation)
            elif QualityMeasure in ["AbsoluteValue", "L1"]:
                Jb  = 0.
                Jo  = vfloat(numpy.sum( numpy.abs(_Innovation) ))
            elif QualityMeasure in ["MaximumError", "ME", "Linf"]:
                Jb  = 0.
                Jo  = vfloat(numpy.max( numpy.abs(_Innovation) ))
            #
            J   = Jb + Jo
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                    self._toStore("CurrentOptimum") or \
                    self._toStore("CostFunctionJAtCurrentOptimum") or \
                    self._toStore("CostFunctionJbAtCurrentOptimum") or \
                    self._toStore("CostFunctionJoAtCurrentOptimum") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store(
                    self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store(
                    self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
                )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store(
                    self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store(
                    self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store(
                    self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError(
                "The minimizer %s "%self._parameters["Minimizer"] + \
                "cannot be used when the optimization state dimension " + \
                "is 1. Please choose another minimizer.")
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
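        # nbPreviousSteps counts the J values already stored before this minimization,
        # so that the argmin searches below only consider values from the current run.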
        #
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"] - 1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "COBYLA" and not lpi.has_nlopt:
            def make_constraints(bounds):
                # fmin_cobyla expects constraint functions that are >= 0 at
                # feasible points, so each bounds pair (a, b) on component i
                # gives the two constraints x[i] - a >= 0 and b - x[i] >= 0.
                # Default arguments bind i, a, b at definition time; a plain
                # closure would make every lambda use the last loop values.
                constraints = []
                for (i, (a, b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a  # noqa: E731
                    upper = lambda x, i=i, b=b: b - x[i]  # noqa: E731
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (),  # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2. * self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "COBYLA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
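                # NLopt requires one finite or infinite bound per component:
                # bounds left undefined (NaN after ForceNumericBounds) are
                # mapped to -inf/+inf below.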
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not lpi.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"] - 1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "SIMPLEX" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        else:
            raise ValueError("Error in minimizer name: %s is unknown"%self._parameters["Minimizer"])
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = Minimum
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or \
                self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
            HXa = HXa.reshape((-1, 1))
        if self._toStore("Innovation") or \
                self._toStore("OMB") or \
                self._toStore("SimulatedObservationAtBackground"):
            HXb = Hm(Xb).reshape((-1, 1))
            Innovation = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( Innovation )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( Innovation )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( Y - HXa )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
        #
        self._post_run(HO, EM)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print("\n AUTODIAGNOSTIC\n")
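
# ==============================================================================
# A minimal usage sketch, kept as a comment so it is not executed on import. It
# assumes the standard ADAO textual interface "adaoBuilder"; the case settings
# shown are illustrative values only, not part of this module:
#
#   from adao import adaoBuilder
#   case = adaoBuilder.New()
#   case.set( 'AlgorithmParameters',
#             Algorithm  = 'DerivativeFreeOptimization',
#             Parameters = {"Minimizer": "BOBYQA", "MaximumNumberOfFunctionEvaluations": 100},
#             )
#   case.set( 'Background',          Vector = [0., 1., 2.] )
#   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#   case.set( 'Observation',         Vector = [0.5, 1.5, 2.5] )
#   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#   case.set( 'ObservationOperator', Matrix = "1 0 0;0 2 0;0 0 3" )
#   case.execute()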