1 #-*-coding:iso-8859-1-*-
3 # Copyright (C) 2008-2017 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects, PlatformInfo
# Machine precision (epsilon): used in the report to flag values that are
# numerically indistinguishable from zero (see the "%.0e" remark message below).
mpr = PlatformInfo.PlatformInfo().MachinePrecision()
# Widest available floating-point type: passed as `dtype` to numpy.mean/std
# below so the statistical reductions are accumulated at maximum precision.
mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
29 # ==============================================================================
# Elementary checking algorithm "FUNCTIONTEST": repeatedly evaluates the direct
# operator H on a given state X and prints characteristics/statistics of the
# resulting outputs Y=H(X).
# NOTE(review): this chunk is line-sampled — the "def __init__(self):" header
# and several keyword arguments of the defineRequiredParameter(...) calls
# (default values, typecast, bounds) are not visible here. Comments only;
# visible code left byte-identical.
class ElementaryAlgorithm(BasicObjects.Algorithm):
# Register this algorithm under the name "FUNCTIONTEST" in the framework.
BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
# Number of significant digits used when printing real values in the report.
self.defineRequiredParameter(
name = "NumberOfPrintedDigits",
message = "Nombre de chiffres affichés pour les impressions de réels",
# Number of times the direct-operator evaluation is repeated.
self.defineRequiredParameter(
name = "NumberOfRepetition",
message = "Nombre de fois où l'exécution de la fonction est répétée",
# Optional title printed as a banner above the result table/figure.
self.defineRequiredParameter(
message = "Titre du tableau et de la figure",
# When true, forces DEBUG-level logging around the operator evaluation.
self.defineRequiredParameter(
message = "Activation du mode debug lors de l'exécution",
# Names of supplementary quantities to store during the run; only the two
# values listed in `listval` are accepted.
self.defineRequiredParameter(
name = "StoreSupplementaryCalculations",
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
listval = ["CurrentState", "SimulatedObservationAtCurrentState"]
def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
"""
Run a direct-operator check: evaluate Y=H(X) one or more times, print the
characteristics of the (converted) input vector X and of each output vector
Y=H(X), and — when NumberOfRepetition > 1 — print simple statistics over
the whole set of outputs.

NOTE(review): this chunk is line-sampled. Several interior statements are
not visible here (e.g. the construction of the internal vector Xn from Xb,
the `Ys = []` accumulator initialization, the actual evaluation
`Yn = Hm(Xn)`, the `print(msg)` calls that emit the built messages, and the
final `self._post_run(...)`/`return`). Comments only; visible code is left
byte-identical.
"""
self._pre_run(Parameters)
# Direct operator as a plain callable: Hm(X) -> Y.
Hm = HO["Direct"].appliedTo
# Number of printed digits for all "%.<p>e" formats built below.
_p = self._parameters["NumberOfPrintedDigits"]
# Optional title banner, framed by "====" lines sized to the title.
if len(self._parameters["ResultTitle"]) > 0:
msg = " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
msg += " " + self._parameters["ResultTitle"] + "\n"
msg += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
# Pre-launch report: characteristics of the input vector Xn.
# NOTE(review): Xn is presumably Xb converted to the internal vector type —
# its construction is not visible in this chunk; confirm against full file.
msg = ("===> Information before launching:\n")
msg += (" -----------------------------\n")
msg += (" Characteristics of input vector X, internally converted:\n")
msg += (" Type...............: %s\n")%type( Xn )
# NOTE(review): "Lenght" is a typo for "Length" in the printed output; it is
# a runtime string and is deliberately left unchanged here.
msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Xn ).shape)
msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Xn )
msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Xn )
# Mean/std accumulated in the widest float type (mfp) for accuracy.
msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn, dtype=mfp )
msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn, dtype=mfp )
msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Xn )
# Optional debug mode: remember the current logger level so it can be
# restored after the evaluation, then force DEBUG.
if self._parameters["SetDebug"]:
CUR_LEVEL = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.DEBUG)
print("===> Beginning of evaluation, activating debug\n")
print("===> Beginning of evaluation, without activating debug\n")
# Presumably disables the operator's result cache so that each repetition
# performs a real evaluation — TODO confirm against the operator API.
HO["Direct"].disableAvoidingRedundancy()
# Repeated evaluation loop: one direct-operator call per iteration.
for i in range(self._parameters["NumberOfRepetition"]):
if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) )
print(" %s\n"%("-"*75,))
if self._parameters["NumberOfRepetition"] > 1:
print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
print("===> Launching direct operator evaluation\n")
# NOTE(review): the actual call producing Yn (e.g. Yn = Hm(Xn)) is not
# visible in this chunk.
print("\n===> End of direct operator evaluation\n")
# Post-evaluation report: characteristics of the output vector Yn.
msg = ("===> Information after evaluation:\n")
msg += ("\n Characteristics of simulated output vector Y=H(X), to compare to others:\n")
msg += (" Type...............: %s\n")%type( Yn )
# NOTE(review): same "Lenght" typo as above — runtime string, unchanged.
msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape)
msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
# Keep a flattened copy of each output for the statistics section below.
# NOTE(review): the `Ys = []` initialization and the closing of this call
# are not visible in this chunk.
Ys.append( copy.copy( numpy.ravel(
# Re-enable the operator's redundancy-avoidance once the loop is done.
HO["Direct"].enableAvoidingRedundancy()
print(" %s\n"%("-"*75,))
# Restore the logger level saved before the evaluation, if debug was forced.
if self._parameters["SetDebug"]:
print("===> End evaluation, deactivating debug if necessary\n")
logging.getLogger().setLevel(CUR_LEVEL)
# Statistics over the set of outputs, only meaningful for repeated runs.
if self._parameters["NumberOfRepetition"] > 1:
msg = (" %s\n"%("-"*75,))
# NOTE(review): "throught" is a typo for "through" in the printed output;
# runtime string, left unchanged.
msg += ("\n===> Statistical analysis of the outputs obtained throught repeated evaluations\n")
# mpr = machine precision: values below it are effectively zero.
msg += ("\n (Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
Yy = numpy.array( Ys )
# Global statistics over all repetitions pooled together.
msg += ("\n Characteristics of the whole set of outputs Y:\n")
msg += (" Number of evaluations.........................: %i\n")%len( Ys )
msg += (" Minimum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.min( Yy )
msg += (" Maximum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.max( Yy )
msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy, dtype=mfp )
msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy, dtype=mfp )
# Ym: component-wise mean over the repetitions (axis=0).
Ym = numpy.mean( numpy.array( Ys ), axis=0, dtype=mfp )
msg += ("\n Characteristics of the vector Ym, mean of the outputs Y:\n")
msg += (" Size of the mean of the outputs...............: %i\n")%Ym.size
msg += (" Minimum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.min( Ym )
msg += (" Maximum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.max( Ym )
msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym, dtype=mfp )
msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym, dtype=mfp )
# Ye: mean of the deviations of each output from Ym (should be ~0).
Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0, dtype=mfp )
msg += "\n Characteristics of the mean of the differences between the outputs Y and their mean Ym:\n"
msg += (" Size of the mean of the differences...........: %i\n")%Ym.size
msg += (" Minimum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.min( Ye )
msg += (" Maximum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.max( Ye )
msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye, dtype=mfp )
msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye, dtype=mfp )
msg += ("\n %s\n"%("-"*75,))
173 # ==============================================================================
# Minimal self-identification when the module is executed directly rather than
# imported by the framework.
# NOTE(review): likely truncated by the chunking — the full file presumably
# prints more after this banner.
if __name__ == "__main__":
print('\n AUTODIAGNOSTIC \n')