1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2023 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
23 import numpy, copy, logging
24 from daCore import BasicObjects, PlatformInfo
25 mpr = PlatformInfo.PlatformInfo().MachinePrecision()
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
29 class ElementaryAlgorithm(BasicObjects.Algorithm):
31 BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
32 self.defineRequiredParameter(
33 name = "ShowElementarySummary",
36 message = "Calcule et affiche un résumé à chaque évaluation élémentaire",
38 self.defineRequiredParameter(
39 name = "NumberOfPrintedDigits",
42 message = "Nombre de chiffres affichés pour les impressions de réels",
45 self.defineRequiredParameter(
46 name = "NumberOfRepetition",
49 message = "Nombre de fois où l'exécution de la fonction est répétée",
52 self.defineRequiredParameter(
56 message = "Titre du tableau et de la figure",
58 self.defineRequiredParameter(
62 message = "Activation du mode debug lors de l'exécution",
64 self.defineRequiredParameter(
65 name = "StoreSupplementaryCalculations",
68 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
71 "SimulatedObservationAtCurrentState",
74 self.requireInputArguments(
75 mandatory= ("Xb", "HO"),
77 self.setAttributes(tags=(
81 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
82 self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
84 Hm = HO["Direct"].appliedTo
89 __s = self._parameters["ShowElementarySummary"]
90 __p = self._parameters["NumberOfPrintedDigits"]
91 __r = self._parameters["NumberOfRepetition"]
96 if len(self._parameters["ResultTitle"]) > 0:
97 __rt = str(self._parameters["ResultTitle"])
98 msgs += (__marge + "====" + "="*len(__rt) + "====\n")
99 msgs += (__marge + " " + __rt + "\n")
100 msgs += (__marge + "====" + "="*len(__rt) + "====\n")
102 msgs += (__marge + "%s\n"%self._name)
103 msgs += (__marge + "%s\n"%("="*len(self._name),))
106 msgs += (__marge + "This test allows to analyze the (repetition of the) launch of some\n")
107 msgs += (__marge + "given simulation operator F, applied to one single vector argument x,\n")
108 msgs += (__marge + "in a sequential way.\n")
109 msgs += (__marge + "The output shows simple statistics related to its successful execution,\n")
110 msgs += (__marge + "or related to the similarities of repetition of its execution.\n")
112 msgs += (__flech + "Information before launching:\n")
113 msgs += (__marge + "-----------------------------\n")
115 msgs += (__marge + "Characteristics of input vector X, internally converted:\n")
116 msgs += (__marge + " Type...............: %s\n")%type( X0 )
117 msgs += (__marge + " Length of vector...: %i\n")%max(numpy.ravel( X0 ).shape)
118 msgs += (__marge + " Minimum value......: %."+str(__p)+"e\n")%numpy.min( X0 )
119 msgs += (__marge + " Maximum value......: %."+str(__p)+"e\n")%numpy.max( X0 )
120 msgs += (__marge + " Mean of vector.....: %."+str(__p)+"e\n")%numpy.mean( X0, dtype=mfp )
121 msgs += (__marge + " Standard error.....: %."+str(__p)+"e\n")%numpy.std( X0, dtype=mfp )
122 msgs += (__marge + " L2 norm of vector..: %."+str(__p)+"e\n")%numpy.linalg.norm( X0 )
124 msgs += (__marge + "%s\n\n"%("-"*75,))
126 if self._parameters["SetDebug"]:
127 CUR_LEVEL = logging.getLogger().getEffectiveLevel()
128 logging.getLogger().setLevel(logging.DEBUG)
130 msgs += (__flech + "Beginning of repeated evaluation, activating debug\n")
132 msgs += (__flech + "Beginning of evaluation, activating debug\n")
135 msgs += (__flech + "Beginning of repeated evaluation, without activating debug\n")
137 msgs += (__flech + "Beginning of evaluation, without activating debug\n")
141 HO["Direct"].disableAvoidingRedundancy()
145 if self._toStore("CurrentState"):
146 self.StoredVariables["CurrentState"].store( X0 )
148 msgs = (__marge + "%s\n"%("-"*75,)) # 2-1
151 msgs += (__flech + "Repetition step number %i on a total of %i\n"%(i+1,__r))
153 msgs += (__flech + "Launching operator sequential evaluation\n")
160 msgs += (__flech + "End of operator sequential evaluation\n")
162 msgs += (__flech + "Information after evaluation:\n")
164 msgs += (__marge + "Characteristics of simulated output vector Y=F(X), to compare to others:\n")
165 msgs += (__marge + " Type...............: %s\n")%type( Yn )
166 msgs += (__marge + " Length of vector...: %i\n")%max(numpy.ravel( Yn ).shape)
167 msgs += (__marge + " Minimum value......: %."+str(__p)+"e\n")%numpy.min( Yn )
168 msgs += (__marge + " Maximum value......: %."+str(__p)+"e\n")%numpy.max( Yn )
169 msgs += (__marge + " Mean of vector.....: %."+str(__p)+"e\n")%numpy.mean( Yn, dtype=mfp )
170 msgs += (__marge + " Standard error.....: %."+str(__p)+"e\n")%numpy.std( Yn, dtype=mfp )
171 msgs += (__marge + " L2 norm of vector..: %."+str(__p)+"e\n")%numpy.linalg.norm( Yn )
173 if self._toStore("SimulatedObservationAtCurrentState"):
174 self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
176 Ys.append( copy.copy( numpy.ravel(
180 HO["Direct"].enableAvoidingRedundancy()
183 msgs = (__marge + "%s\n\n"%("-"*75,)) # 3
184 if self._parameters["SetDebug"]:
186 msgs += (__flech + "End of repeated evaluation, deactivating debug if necessary\n")
188 msgs += (__flech + "End of evaluation, deactivating debug if necessary\n")
189 logging.getLogger().setLevel(CUR_LEVEL)
192 msgs += (__flech + "End of repeated evaluation, without deactivating debug\n")
194 msgs += (__flech + "End of evaluation, without deactivating debug\n")
196 msgs += (__marge + "%s\n"%("-"*75,))
200 msgs += (__flech + "Launching statistical summary calculation for %i states\n"%__r)
202 msgs += (__marge + "%s\n"%("-"*75,))
204 msgs += (__flech + "Statistical analysis of the outputs obtained through sequential repeated evaluations\n")
206 msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
208 Yy = numpy.array( Ys )
209 msgs += (__marge + "Number of evaluations...........................: %i\n")%len( Ys )
211 msgs += (__marge + "Characteristics of the whole set of outputs Y:\n")
212 msgs += (__marge + " Size of each of the outputs...................: %i\n")%Ys[0].size
213 msgs += (__marge + " Minimum value of the whole set of outputs.....: %."+str(__p)+"e\n")%numpy.min( Yy )
214 msgs += (__marge + " Maximum value of the whole set of outputs.....: %."+str(__p)+"e\n")%numpy.max( Yy )
215 msgs += (__marge + " Mean of vector of the whole set of outputs....: %."+str(__p)+"e\n")%numpy.mean( Yy, dtype=mfp )
216 msgs += (__marge + " Standard error of the whole set of outputs....: %."+str(__p)+"e\n")%numpy.std( Yy, dtype=mfp )
218 Ym = numpy.mean( numpy.array( Ys ), axis=0, dtype=mfp )
219 msgs += (__marge + "Characteristics of the vector Ym, mean of the outputs Y:\n")
220 msgs += (__marge + " Size of the mean of the outputs...............: %i\n")%Ym.size
221 msgs += (__marge + " Minimum value of the mean of the outputs......: %."+str(__p)+"e\n")%numpy.min( Ym )
222 msgs += (__marge + " Maximum value of the mean of the outputs......: %."+str(__p)+"e\n")%numpy.max( Ym )
223 msgs += (__marge + " Mean of the mean of the outputs...............: %."+str(__p)+"e\n")%numpy.mean( Ym, dtype=mfp )
224 msgs += (__marge + " Standard error of the mean of the outputs.....: %."+str(__p)+"e\n")%numpy.std( Ym, dtype=mfp )
226 Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0, dtype=mfp )
227 msgs += (__marge + "Characteristics of the mean of the differences between the outputs Y and their mean Ym:\n")
228 msgs += (__marge + " Size of the mean of the differences...........: %i\n")%Ye.size
229 msgs += (__marge + " Minimum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.min( Ye )
230 msgs += (__marge + " Maximum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.max( Ye )
231 msgs += (__marge + " Mean of the mean of the differences...........: %."+str(__p)+"e\n")%numpy.mean( Ye, dtype=mfp )
232 msgs += (__marge + " Standard error of the mean of the differences.: %."+str(__p)+"e\n")%numpy.std( Ye, dtype=mfp )
234 msgs += (__marge + "%s\n"%("-"*75,))
237 msgs += (__marge + "End of the \"%s\" verification\n\n"%self._name)
238 msgs += (__marge + "%s\n"%("-"*75,))
244 # ==============================================================================
245 if __name__ == "__main__":
# Self-diagnostic entry point: prints a banner when the module is executed
# directly. NOTE(review): this extract ends here — the original file
# presumably continues after the banner line; confirm against the full file.
246 print('\n AUTODIAGNOSTIC\n')