1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2024 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects, NumericObjects, PlatformInfo
25 mpr = PlatformInfo.PlatformInfo().MachinePrecision()
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
29 class ElementaryAlgorithm(BasicObjects.Algorithm):
# Gradient-checking algorithm ("GRADIENTTEST"): for a simulation operator F and
# a checking point X, it evaluates a Taylor-type residue R(Alpha) over a range
# of decreasing increments Alpha*dX and prints a stability table.
# NOTE(review): this chunk is gap-sampled — e.g. the "def __init__(self):"
# header expected before the next statement is not visible here; code below is
# kept byte-identical and only comments are added.
31 BasicObjects.Algorithm.__init__(self, "GRADIENTTEST")
# Residue formula selector; the three supported variants are described in run().
32 self.defineRequiredParameter(
33 name = "ResiduFormula",
36 message = "Formule de résidu utilisée",
37 listval = ["Norm", "TaylorOnNorm", "Taylor"],
# Smallest power of 10 used as increment multiplier (loop goes 10**i up to 10**0).
39 self.defineRequiredParameter(
40 name = "EpsilonMinimumExponent",
43 message = "Exposant minimal en puissance de 10 pour le multiplicateur d'incrément",
# Initial direction dX0 of the directional derivative around the nominal point.
47 self.defineRequiredParameter(
48 name = "InitialDirection",
51 message = "Direction initiale de la dérivée directionnelle autour du point nominal",
# Scalar amplitude applied to the initial direction.
53 self.defineRequiredParameter(
54 name = "AmplitudeOfInitialDirection",
57 message = "Amplitude de la direction initiale de la dérivée directionnelle autour du point nominal",
# Perturbation amplitude used when evaluating the tangent (linearized) form.
59 self.defineRequiredParameter(
60 name = "AmplitudeOfTangentPerturbation",
63 message = "Amplitude de la perturbation pour le calcul de la forme tangente",
# Fixed RNG seed: typecast routes the value straight into numpy.random.seed.
67 self.defineRequiredParameter(
69 typecast = numpy.random.seed,
70 message = "Graine fixée pour le générateur aléatoire",
# Number of digits printed for floating-point values in the report.
72 self.defineRequiredParameter(
73 name = "NumberOfPrintedDigits",
76 message = "Nombre de chiffres affichés pour les impressions de réels",
# Title shown above the table and on the figure.
79 self.defineRequiredParameter(
83 message = "Titre du tableau et de la figure",
# Label of the curve drawn in the figure.
85 self.defineRequiredParameter(
89 message = "Label de la courbe tracée dans la figure",
# Base name (without extension) of the result files; defaults to "<name>_result_file".
91 self.defineRequiredParameter(
93 default = self._name + "_result_file",
95 message = "Nom de base (hors extension) des fichiers de sauvegarde des résultats",
# Whether to plot and save the results (drives the Gnuplot section of run()).
97 self.defineRequiredParameter(
101 message = "Trace et sauve les résultats",
# Optional supplementary quantities to compute/store during the test.
103 self.defineRequiredParameter(
104 name = "StoreSupplementaryCalculations",
107 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
111 "SimulatedObservationAtCurrentState",
# Only the background Xb and the observation operator HO are mandatory inputs.
114 self.requireInputArguments(
115 mandatory= ("Xb", "HO"),
123 "ParallelDerivativesOnly",
127 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
# Execute the gradient test around Xb and emit a formatted report.
# NOTE(review): several statements of this method (e.g. the initialization of
# msgs/__marge, the X_plus_dX assignment, the Residu formula for "Norm") fall
# in sampled-out gaps; comments below only describe what is visible.
128 self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
# Direct operator F; the tangent form Ht is only needed for Taylor residues.
130 Hm = HO["Direct"].appliedTo
131 if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
132 Ht = HO["Tangent"].appliedInXTo
# Checking point flattened to a column vector.
134 X0 = numpy.ravel( Xb ).reshape((-1, 1))
137 __p = self._parameters["NumberOfPrintedDigits"]
140 __flech = 3 * "=" + "> "
# Optional user title, framed by "=" rulers.
142 if len(self._parameters["ResultTitle"]) > 0:
143 __rt = str(self._parameters["ResultTitle"])
144 msgs += (__marge + "====" + "=" * len(__rt) + "====\n")
145 msgs += (__marge + " " + __rt + "\n")
146 msgs += (__marge + "====" + "=" * len(__rt) + "====\n")
148 msgs += (__marge + "%s\n"%self._name)
149 msgs += (__marge + "%s\n"%("=" * len(self._name),))
# General description of the check.
152 msgs += (__marge + "This test allows to analyze the numerical stability of the gradient of some\n")
153 msgs += (__marge + "given simulation operator F, applied to one single vector argument x.\n")
154 msgs += (__marge + "The output shows simple statistics related to its stability for various\n")
155 msgs += (__marge + "increments, around an input checking point X.\n")
157 msgs += (__flech + "Information before launching:\n")
158 msgs += (__marge + "-----------------------------\n")
# Simple statistics on the input vector, printed with __p digits.
160 msgs += (__marge + "Characteristics of input vector X, internally converted:\n")
161 msgs += (__marge + " Type...............: %s\n")%type( X0 )
162 msgs += (__marge + " Length of vector...: %i\n")%max(numpy.ravel( X0 ).shape)
163 msgs += (__marge + " Minimum value......: %." + str(__p) + "e\n")%numpy.min( X0 )
164 msgs += (__marge + " Maximum value......: %." + str(__p) + "e\n")%numpy.max( X0 )
# mean/std accumulated at maximum available precision (mfp) to limit round-off.
165 msgs += (__marge + " Mean of vector.....: %." + str(__p) + "e\n")%numpy.mean( X0, dtype=mfp )
166 msgs += (__marge + " Standard error.....: %." + str(__p) + "e\n")%numpy.std( X0, dtype=mfp )
167 msgs += (__marge + " L2 norm of vector..: %." + str(__p) + "e\n")%numpy.linalg.norm( X0 )
169 msgs += (__marge + "%s\n\n"%("-" * 75,))
170 msgs += (__flech + "Numerical quality indicators:\n")
171 msgs += (__marge + "-----------------------------\n")
173 msgs += (__marge + "Using the \"%s\" formula, one observes the residue R which is the\n"%self._parameters["ResiduFormula"]) # noqa: E501
174 msgs += (__marge + "following ratio or comparison:\n")
# "Taylor": ||F(X+a*dX) - F(X) - a*GradF_X(dX)|| / ||F(X)|| — quadratic decay
# in Alpha means the gradient is correct and F is nonlinear.
177 if self._parameters["ResiduFormula"] == "Taylor":
178 msgs += (__marge + " || F(X+Alpha*dX) - F(X) - Alpha * GradientF_X(dX) ||\n")
179 msgs += (__marge + " R(Alpha) = ----------------------------------------------------\n")
180 msgs += (__marge + " || F(X) ||\n")
182 msgs += (__marge + "If the residue decreases and if the decay is in Alpha**2 according to\n")
183 msgs += (__marge + "Alpha, it means that the gradient is well calculated up to the stopping\n")
184 msgs += (__marge + "precision of the quadratic decay, and that F is not linear.\n")
186 msgs += (__marge + "If the residue decreases and if the decay is done in Alpha according\n")
187 msgs += (__marge + "to Alpha, until a certain threshold after which the residue is small\n")
188 msgs += (__marge + "and constant, it means that F is linear and that the residue decreases\n")
189 msgs += (__marge + "from the error made in the calculation of the GradientF_X term.\n")
191 __entete = u" i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R )" # noqa: E501
# "TaylorOnNorm": same numerator normalized by Alpha**2 instead of ||F(X)||.
193 if self._parameters["ResiduFormula"] == "TaylorOnNorm":
194 msgs += (__marge + " || F(X+Alpha*dX) - F(X) - Alpha * GradientF_X(dX) ||\n")
195 msgs += (__marge + " R(Alpha) = ----------------------------------------------------\n")
196 msgs += (__marge + " Alpha**2\n")
198 msgs += (__marge + "It is a residue essentially similar to the classical Taylor criterion,\n")
199 msgs += (__marge + "but its behavior may differ depending on the numerical properties of\n")
200 msgs += (__marge + "the calculations of its various terms.\n")
202 msgs += (__marge + "If the residue is constant up to a certain threshold and increasing\n")
203 msgs += (__marge + "afterwards, it means that the gradient is well computed up to this\n")
204 msgs += (__marge + "stopping precision, and that F is not linear.\n")
206 msgs += (__marge + "If the residue is systematically increasing starting from a small\n")
207 msgs += (__marge + "value compared to ||F(X)||, it means that F is (quasi-)linear and that\n")
208 msgs += (__marge + "the calculation of the gradient is correct until the residue is of the\n")
209 msgs += (__marge + "order of magnitude of ||F(X)||.\n")
211 __entete = u" i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R )" # noqa: E501
# "Norm": first-order difference quotient, expected constant down to machine precision.
213 if self._parameters["ResiduFormula"] == "Norm":
214 msgs += (__marge + " || F(X+Alpha*dX) - F(X) ||\n")
215 msgs += (__marge + " R(Alpha) = --------------------------\n")
216 msgs += (__marge + " Alpha\n")
218 msgs += (__marge + "which must remain constant until the accuracy of the calculation is\n")
219 msgs += (__marge + "reached.\n")
221 __entete = u" i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R )" # noqa: E501
224 msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
# If the gradient operator is itself finite-difference based, recall its increment.
225 if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
227 msgs += (__marge + "Reminder: gradient operator is obtained internally by finite differences,\n")
228 msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
230 msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
# Decreasing multipliers 10**0, 10**-1, ..., down to 10**EpsilonMinimumExponent.
233 Perturbations = [ 10**i for i in range(self._parameters["EpsilonMinimumExponent"], 1) ]
234 Perturbations.reverse()
# Reference evaluation F(X0) and reference norms.
236 FX = numpy.ravel( Hm( X0 ) ).reshape((-1, 1))
237 NormeX = numpy.linalg.norm( X0 )
238 NormeFX = numpy.linalg.norm( FX )
241 if self._toStore("CurrentState"):
242 self.StoredVariables["CurrentState"].store( X0 )
243 if self._toStore("SimulatedObservationAtCurrentState"):
244 self.StoredVariables["SimulatedObservationAtCurrentState"].store( FX )
# Build the (possibly random) initial direction dX0, scaled by its amplitude.
246 dX0 = NumericObjects.SetInitialDirection(
247 self._parameters["InitialDirection"],
248 self._parameters["AmplitudeOfInitialDirection"],
# Tangent evaluation GradF_X(dX0): perturb by a small amplitude, apply the
# tangent operator, then rescale back by 1/amplitude.
252 if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
253 dX1 = float(self._parameters["AmplitudeOfTangentPerturbation"]) * dX0
254 GradFxdX = Ht( (X0, dX1) )
255 GradFxdX = numpy.ravel( GradFxdX ).reshape((-1, 1))
256 GradFxdX = float(1. / self._parameters["AmplitudeOfTangentPerturbation"]) * GradFxdX
# Loop over the perturbation amplitudes
# -------------------------------------
260 __nbtirets = len(__entete) + 2
262 msgs += "\n" + __marge + "-" * __nbtirets
263 msgs += "\n" + __marge + __entete
264 msgs += "\n" + __marge + "-" * __nbtirets
274 for ip, amplitude in enumerate(Perturbations):
275 dX = amplitude * dX0.reshape((-1, 1))
278 FX_plus_dX = Hm( X_plus_dX )
279 FX_plus_dX = numpy.ravel( FX_plus_dX ).reshape((-1, 1))
281 if self._toStore("CurrentState"):
282 self.StoredVariables["CurrentState"].store( X_plus_dX )
283 if self._toStore("SimulatedObservationAtCurrentState"):
284 self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) )
# Norms used by the table columns and the residue formulas.
286 NormedX = numpy.linalg.norm( dX )
287 NormeFXdX = numpy.linalg.norm( FX_plus_dX )
288 NormedFX = numpy.linalg.norm( FX_plus_dX - FX )
289 NormedFXsdX = NormedFX / NormedX
# Taylor remainder: deviation from the first-order (linearized) prediction.
291 if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
292 NormedFXGdX = numpy.linalg.norm( FX_plus_dX - FX - amplitude * GradFxdX )
294 NormedFXsAm = NormedFX / amplitude
296 # if numpy.abs(NormedFX) < 1.e-20:
# Accumulate per-amplitude series (used later for plotting/reference curves).
299 NormesdX.append( NormedX )
300 NormesFXdX.append( NormeFXdX )
301 NormesdFX.append( NormedFX )
302 if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
303 NormesdFXGdX.append( NormedFXGdX )
304 NormesdFXsdX.append( NormedFXsdX )
305 NormesdFXsAm.append( NormedFXsAm )
# Residue according to the selected formula (the "Norm" assignment is in a
# sampled-out line here).
307 if self._parameters["ResiduFormula"] == "Taylor":
308 Residu = NormedFXGdX / NormeFX
309 elif self._parameters["ResiduFormula"] == "TaylorOnNorm":
310 Residu = NormedFXGdX / (amplitude * amplitude)
311 elif self._parameters["ResiduFormula"] == "Norm":
314 self.StoredVariables["Residu"].store( Residu )
# One formatted table row; log10 guarded against Residu == 0 via the 1.e-99 floor.
315 ttsep = " %2i %5.0e %9.3e %9.3e %9.3e %9.3e %9.3e | %9.3e | %9.3e %4.0f\n"%(ip, amplitude, NormeX, NormeFX, NormeFXdX, NormedX, NormedFX, NormedFXsdX, Residu, math.log10(max(1.e-99, Residu))) # noqa: E501
316 msgs += __marge + ttsep
# Closing ruler and summary line of the report.
318 msgs += (__marge + "-" * __nbtirets + "\n\n")
319 msgs += (__marge + "End of the \"%s\" verification by the \"%s\" formula.\n\n"%(self._name, self._parameters["ResiduFormula"])) # noqa: E501
320 msgs += (__marge + "%s\n"%("-" * 75,))
# Optional plotting and text/PostScript saving of the residue curve.
# NOTE(review): the file handle opened here is closed in a sampled-out line —
# verify against the full file.
323 if self._parameters["PlotAndSave"]:
324 f = open(str(self._parameters["ResultFile"]) + ".txt", 'a')
# Residues of this run only (last len(Perturbations) stored values).
328 Residus = self.StoredVariables["Residu"][-len(Perturbations):]
329 if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
# Ideal Alpha**2 reference curve, aligned with the Perturbations ordering.
330 PerturbationsCarre = [ 10**(2 * i) for i in range(-len(NormesdFXGdX) + 1, 1) ]
331 PerturbationsCarre.reverse()
335 titre = self._parameters["ResultTitle"],
336 label = self._parameters["ResultLabel"],
339 filename = str(self._parameters["ResultFile"]) + ".ps",
340 YRef = PerturbationsCarre,
341 normdY0 = numpy.log10( NormesdFX[0] ),
343 elif self._parameters["ResiduFormula"] == "Norm":
347 titre = self._parameters["ResultTitle"],
348 label = self._parameters["ResultLabel"],
351 filename = str(self._parameters["ResultFile"]) + ".ps",
354 self._post_run(HO, EM)
357 # ==============================================================================
# Tail of a Gnuplot-based plotting helper; its "def" line and several branch
# headers fall in sampled-out gaps, so only the visible statements are
# annotated and the code is kept byte-identical.
368 YRef = None, # Reference vector to compare to Y
369 recalYRef = True, # Shift of YRef's point 0 onto Y[0]
370 normdY0 = 0.): # Norm of DeltaY[0]
# Interactive Gnuplot session kept alive after the script ends (persist=1).
373 __g = __gnuplot.Gnuplot(persist=1) # persist=1
374 # __g('set terminal '+__gnuplot.GnuplotOpts.default_term)
375 __g('set style data lines')
376 __g('set title "' + titre + '"')
379 # __g('set range [] reverse')
380 # __g('set yrange [0:2]')
# X axis: log10 of the increment multipliers (the plain-scale label below is
# presumably the "else" branch of a condition in a sampled-out line).
383 steps = numpy.log10( X )
384 __g('set xlabel "Facteur multiplicatif de dX, en echelle log10"')
387 __g('set xlabel "Facteur multiplicatif de dX"')
# Y axis: log10 of the residue amplitudes (same remark for the plain label).
390 values = numpy.log10( Y )
391 __g('set ylabel "Amplitude du residu, en echelle log10"')
394 __g('set ylabel "Amplitude du residu"')
# Main residue curve.
396 __g.plot( __gnuplot.Data( steps, values, title=label, with_='lines lw 3' ) )
# Reference curve (e.g. the ideal Alpha**2 slope), vertically shifted so it
# starts at the first residue point — or at normdY0 when all residues are
# already at machine-zero level (all log-values below -8).
399 valuesRef = numpy.log10( YRef )
402 if recalYRef and not numpy.all(values < -8):
403 valuesRef = valuesRef + values[0]
404 elif recalYRef and numpy.all(values < -8):
405 valuesRef = valuesRef + normdY0
408 __g.replot( __gnuplot.Data( steps, valuesRef, title="Reference", with_='lines lw 1' ) )
# Save the figure to PostScript, then block until the user presses return.
# NOTE(review): eval() around input() is unnecessary and unsafe on arbitrary
# input; plain input() would suffice — flagged only, not changed here.
411 __g.hardcopy( filename, color=1)
413 eval(input('Please press return to continue...\n'))
415 # ==============================================================================
# Self-test entry point: prints a banner when the module is run directly
# (the rest of the guard body, if any, is outside this sampled view).
416 if __name__ == "__main__":
417 print("\n AUTODIAGNOSTIC\n")