1 #-*-coding:iso-8859-1-*-
3 # Copyright (C) 2008-2016 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
from daCore import BasicObjects, PlatformInfo
# NOTE(review): the imports of logging, copy and numpy used below appear to be
# elided from this excerpt — confirm against the full file.

# Maximum usable floating-point precision on this platform; used below as the
# dtype of the numpy mean/std accumulations to limit round-off error.
mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    # Checking algorithm "FUNCTIONTEST": evaluates the direct operator H on a
    # given state X (possibly several times) and prints elementary statistics
    # on X and on the outputs Y = H(X).
    # NOTE(review): this excerpt is incomplete — the "def __init__(self):"
    # line and several arguments of the defineRequiredParameter(...) calls
    # (typeCast, default values, closing parentheses) are elided from view.
        BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
        # Number of digits used for printing real values in the reports below.
        self.defineRequiredParameter(
            name = "NumberOfPrintedDigits",
            # User-facing message (French, runtime string — left untouched):
            # "Number of digits displayed when printing reals".
            message = "Nombre de chiffres affichés pour les impressions de réels",
        # How many times the function evaluation is repeated.
        self.defineRequiredParameter(
            name = "NumberOfRepetition",
            # "Number of times the function execution is repeated".
            message = "Nombre de fois où l'exécution de la fonction est répétée",
        # Title of the result table and of the figure.
        self.defineRequiredParameter(
            message = "Titre du tableau et de la figure",
        # Whether debug logging is activated during the execution.
        self.defineRequiredParameter(
            message = "Activation du mode debug lors de l'exécution",
        # Supplementary quantities that can be stored and/or computed.
        self.defineRequiredParameter(
            name = "StoreSupplementaryCalculations",
            # "List of supplementary calculations to store and/or perform".
            message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval = ["CurrentState", "SimulatedObservationAtCurrentState"]

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        # Run the function test: repeatedly evaluate the direct operator and
        # print characteristics of the input and output vectors.
        # Only HO (the operator container) and the input state are actually
        # used here; the other arguments follow the generic run() signature
        # shared by every ADAO algorithm — TODO confirm against base class.
        self.setParameters(Parameters)
        # Direct operator exposed as a plain callable.
        Hm = HO["Direct"].appliedTo
        # NOTE(review): the construction of Xn (internal copy/conversion of
        # the input state, presumably from Xb) is elided from this excerpt.
        _p = self._parameters["NumberOfPrintedDigits"]
        # Optional framed title banner around the results.
        if len(self._parameters["ResultTitle"]) > 0:
            msg = " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
            msg += " " + self._parameters["ResultTitle"] + "\n"
            msg += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
        # Report on the input vector before launching the operator.
        # NOTE(review): "Lenght" in the output strings below is a typo for
        # "Length"; left untouched here because these are runtime strings.
        msg = ("===> Information before launching:\n")
        msg += (" -----------------------------\n")
        msg += (" Characteristics of input vector X, internally converted:\n")
        msg += (" Type...............: %s\n")%type( Xn )
        msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Xn ).shape)
        msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Xn )
        msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Xn )
        # dtype=mfp: accumulate at the platform's maximum precision.
        msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn, dtype=mfp )
        msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn, dtype=mfp )
        msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Xn )
        # Optionally raise the root logger to DEBUG for the evaluation; the
        # previous level is saved in CUR_LEVEL and restored afterwards.
        if self._parameters["SetDebug"]:
            CUR_LEVEL = logging.getLogger().getEffectiveLevel()
            logging.getLogger().setLevel(logging.DEBUG)
            print("===> Beginning of evaluation, activating debug\n")
            # NOTE(review): the "else:" introducing the following branch is
            # elided from this excerpt.
            print("===> Beginning of evaluation, without activating debug\n")
        # Disable result caching so each repetition really calls the operator.
        HO["Direct"].disableAvoidingRedundancy()
        # NOTE(review): the initialization of Ys (list collecting the outputs
        # of each repetition) is elided from this excerpt.
        for i in range(self._parameters["NumberOfRepetition"]):
            if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) )
            print(" %s\n"%("-"*75,))
            if self._parameters["NumberOfRepetition"] > 1:
                print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
            print("===> Launching direct operator evaluation\n")
            # NOTE(review): the actual evaluation (presumably Yn = Hm( Xn ))
            # is elided from this excerpt — confirm against the full file.
            print("\n===> End of direct operator evaluation\n")
            # Report on the simulated output vector for this repetition.
            msg = ("===> Information after evaluation:\n")
            msg += ("\n Characteristics of simulated output vector Y=H(X), to compare to others:\n")
            msg += (" Type...............: %s\n")%type( Yn )
            msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape)
            msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
            msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
            msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
            msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
            msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
            # Keep a private copy of each output for the final statistics.
            # NOTE(review): the end of this call (argument and closing
            # parentheses) is elided from this excerpt.
            Ys.append( copy.copy( numpy.ravel(
        # Restore the operator's caching behaviour.
        HO["Direct"].enableAvoidingRedundancy()
        print(" %s\n"%("-"*75,))
        if self._parameters["SetDebug"]:
            print("===> End evaluation, deactivating debug if necessary\n")
            # Restore the logging level saved before the evaluation.
            logging.getLogger().setLevel(CUR_LEVEL)
        # Statistical summary across the repeated evaluations (only relevant
        # when more than one evaluation was performed).
        # NOTE(review): "throught" in the string below is a typo for
        # "through"; left untouched here because it is a runtime string.
        if self._parameters["NumberOfRepetition"] > 1:
            msg = (" %s\n"%("-"*75,))
            msg += ("\n===> Statistical analysis of the outputs obtained throught repeated evaluations\n")
            msg += ("\n (Remark: numbers that are (about) under 1.e-16 represent 0 to machine precision)\n")
            Yy = numpy.array( Ys )
            msg += ("\n Characteristics of the whole set of outputs Y:\n")
            msg += (" Number of evaluations.........................: %i\n")%len( Ys )
            msg += (" Minimum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.min( Yy )
            msg += (" Maximum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.max( Yy )
            msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy, dtype=mfp )
            msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy, dtype=mfp )
            # Ym: pointwise mean of the outputs over all repetitions.
            Ym = numpy.mean( numpy.array( Ys ), axis=0, dtype=mfp )
            msg += ("\n Characteristics of the vector Ym, mean of the outputs Y:\n")
            msg += (" Size of the mean of the outputs...............: %i\n")%Ym.size
            msg += (" Minimum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.min( Ym )
            msg += (" Maximum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.max( Ym )
            msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym, dtype=mfp )
            msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym, dtype=mfp )
            # Ye: mean of the deviations of each output from Ym; ~0 (to
            # machine precision) when the operator is deterministic.
            Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0, dtype=mfp )
            msg += "\n Characteristics of the mean of the differences between the outputs Y and their mean Ym:\n"
            msg += (" Size of the mean of the differences...........: %i\n")%Ym.size
            msg += (" Minimum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.min( Ye )
            msg += (" Maximum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.max( Ye )
            msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye, dtype=mfp )
            msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye, dtype=mfp )
            msg += ("\n %s\n"%("-"*75,))
            # NOTE(review): the final print of msg and the method's return
            # statement are elided from this excerpt.
174 # ==============================================================================
if __name__ == "__main__":
    # Self-test marker printed when this module is executed directly.
    # Fixed: the original used the Python 2 print statement
    # (print '\n AUTODIAGNOSTIC \n'), a syntax error under Python 3 and
    # inconsistent with the print() function calls used elsewhere in the file.
    print("\n AUTODIAGNOSTIC \n")