# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2024 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import numpy, logging, scipy.optimize
from daCore import BasicObjects, PlatformInfo
from daCore.NumericObjects import ApplyBounds, ForceNumericBounds
from daCore.PlatformInfo import vfloat
lpi = PlatformInfo.PlatformInfo()
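# lpi is queried below to know whether the optional "nlopt" package is
# available, which determines the minimizers actually usable at run time.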

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = [
                "BOBYQA",
                "COBYLA",
                "NEWUOA",
                "POWELL",
                "SIMPLEX",
                "SUBPLEX",
            ],
        )
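        # Availability note (see run below): BOBYQA, NEWUOA and SUBPLEX need
        # the optional nlopt package; COBYLA and SIMPLEX have a SciPy
        # fallback, and POWELL always relies on SciPy.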
        self.defineRequiredParameter(
            name     = "MaximumNumberOfIterations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            oldname  = "MaximumNumberOfSteps",
        )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
        )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
        )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du cout lors de l'arrêt",
        )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = [
                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                "WeightedLeastSquares", "WLS",
                "LeastSquares", "LS", "L2",
                "AbsoluteValue", "L1",
                "MaximumError", "ME", "Linf",
            ],
        )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
        )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
            ]
        )
        self.defineRequiredParameter(  # No type imposed
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
        )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
        )
        self.setAttributes(
            tags=(
                "Optimization",
                "NonLinear",
                "MetaHeuristic",
            ),
            features=(
                "NonLocalOptimization",
                "DerivativeFree",
                "ParallelFree",
            ),
        )

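    # run() proceeds in four steps: fall back to SIMPLEX if the requested
    # minimizer needs the missing nlopt package, define the cost function to
    # be minimized, call the selected minimizer, then take the best stored
    # state as the analysis Xa and perform the optional supplementary storages.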
    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if not lpi.has_nlopt and self._parameters["Minimizer"] not in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.warning(
                "%s Minimization by SIMPLEX is forced because %s "%(self._name, self._parameters["Minimizer"]) + \
                "is unavailable (COBYLA, POWELL are also available)")
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        Hm = HO["Direct"].appliedTo
        #
        BI = B.getI()
        RI = R.getI()

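        # CostFunction evaluates the selected quality criterion for a state x.
        # With the default "AugmentedWeightedLeastSquares" (AWLS/DA) choice it
        # is the classical variational functional
        #   J(x) = Jb + Jo
        #        = 1/2 (x - Xb)^T B^{-1} (x - Xb) + 1/2 (Y - H(x))^T R^{-1} (Y - H(x))
        # Every evaluated state is stored, so the optimum can be recovered
        # afterwards from the stored series (see the end of run).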
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.ravel( x ).reshape((-1, 1))
            _HX = numpy.ravel( Hm( _X ) ).reshape((-1, 1))
            _Innovation = Y - _HX
            self.StoredVariables["CurrentState"].store( _X )
            if self._toStore("SimulatedObservationAtCurrentState") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares", "AWLS", "DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = vfloat(0.5 * (_X - Xb).T @ (BI @ (_X - Xb)))
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["WeightedLeastSquares", "WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["LeastSquares", "LS", "L2"]:
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ _Innovation)
            elif QualityMeasure in ["AbsoluteValue", "L1"]:
                Jb  = 0.
                Jo  = vfloat(numpy.sum( numpy.abs(_Innovation) ))
            elif QualityMeasure in ["MaximumError", "ME", "Linf"]:
                Jb  = 0.
                Jo  = vfloat(numpy.max( numpy.abs(_Innovation) ))
            #
            J   = Jb + Jo
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                    self._toStore("CurrentOptimum") or \
                    self._toStore("CostFunctionJAtCurrentOptimum") or \
                    self._toStore("CostFunctionJbAtCurrentOptimum") or \
                    self._toStore("CostFunctionJoAtCurrentOptimum") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store(
                    self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store(
                    self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
                )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store(
                    self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store(
                    self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store(
                    self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError(
                "The minimizer %s "%self._parameters["Minimizer"] + \
                "cannot be used when the optimisation state dimension " + \
                "is 1. Please choose another minimizer.")
        #
        # Minimization of the functional
        # ------------------------------
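        # Cost values stored before this run are skipped when locating the
        # optimum among the states evaluated here.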
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"] - 1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "COBYLA" and not lpi.has_nlopt:
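            # SciPy's fmin_cobyla has no native bound support: each (lower,
            # upper) pair is turned into the two inequality constraints
            # x[i] - lower >= 0 and upper - x[i] >= 0 that COBYLA expects.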
            def make_constraints(bounds):
                constraints = []
                for (i, (a, b)) in enumerate(bounds):
                    # Default arguments freeze i, a and b at definition time,
                    # so each constraint keeps its own bound values.
                    lower = lambda x, i=i, a=a: x[i] - a  # noqa: E731
                    upper = lambda x, i=i, b=b: b - x[i]  # noqa: E731
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (),  # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2. * self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "COBYLA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
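            # Bounds are passed to nlopt as lower/upper vectors; a NaN entry
            # (no bound on that component) is replaced by -inf/+inf so the
            # component remains unconstrained. The same handling is repeated
            # for the other nlopt-based minimizers below.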
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not lpi.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"] - 1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
            )
        elif self._parameters["Minimizer"] == "SIMPLEX" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and lpi.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)

            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            self._parameters["Bounds"] = ForceNumericBounds( self._parameters["Bounds"] )
            Xini = ApplyBounds( Xini, self._parameters["Bounds"] )
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"], dtype=float).reshape((Xini.size, 2))
                lb = lub[:, 0]; lb[numpy.isnan(lb)] = -float('inf')  # noqa: E702
                ub = lub[:, 1]; ub[numpy.isnan(ub)] = +float('inf')  # noqa: E702
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(), ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(), lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2. * self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(), Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(), opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(), opt.last_optimize_result()))
        else:
            raise ValueError("Error in minimizer name: %s is unknown"%self._parameters["Minimizer"])
        #
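        # The analysis is taken as the best state actually evaluated (and
        # stored) during this run, rather than the raw value returned by the
        # minimizer, which may differ slightly for some of them.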
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Getting the analysis
        # --------------------
        Xa = Minimum
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storages
        # ------------------------------------------
        if self._toStore("OMA") or \
                self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
            HXa = HXa.reshape((-1, 1))
        if self._toStore("Innovation") or \
                self._toStore("OMB") or \
                self._toStore("SimulatedObservationAtBackground"):
            HXb = Hm(Xb).reshape((-1, 1))
            Innovation = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( Innovation )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( Innovation )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( Y - HXa )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
        #
        self._post_run(HO, EM)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print("\n AUTODIAGNOSTIC\n")
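    # A minimal usage sketch, kept as a comment so that the autodiagnostic
    # stays side-effect free. It assumes the standard ADAO textual interface
    # ("adaoBuilder"); the names and values below are illustrative only:
    #
    #   from adao import adaoBuilder
    #   case = adaoBuilder.New()
    #   case.set( 'AlgorithmParameters',
    #             Algorithm  = 'DerivativeFreeOptimization',
    #             Parameters = {"Minimizer": "SIMPLEX", "MaximumNumberOfIterations": 100} )
    #   case.set( 'Background',          Vector = [0., 0., 0.] )
    #   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    #   case.set( 'Observation',         Vector = [0.5, 1.5, 2.5] )
    #   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    #   case.set( 'ObservationOperator', Matrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]] )
    #   case.execute()
    #   print( case.get('Analysis')[-1] )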