#-*-coding:iso-8859-1-*-
#
# Copyright (C) 2008-2014 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
import numpy
from daCore import BasicObjects, PlatformInfo
m = PlatformInfo.SystemUsage()

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "QUANTILEREGRESSION")
        self.defineRequiredParameter(
            name     = "Quantile",
            default  = 0.5,
            typecast = float,
            message  = "Quantile for the quantile regression",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "MMQR",
            typecast = str,
            message  = "Minimizer used",
            listval  = ["MMQR"],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of optimization steps",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-6,
            typecast = float,
            message  = "Maximum variation of the estimation function at stopping",
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storing of internal or intermediate calculation variables",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = ["BMA", "OMA", "OMB", "Innovation"]
            )
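        # As an illustration only (these values are arbitrary choices, not
        # defaults of this module), the driving dictionary later given to
        # "run" could look like:
        #     Parameters = {
        #         "Quantile"                       : 0.75,
        #         "MaximumNumberOfSteps"           : 100,
        #         "StoreSupplementaryCalculations" : ["Innovation", "OMA"],
        #         }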

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        logging.debug("%s Launching"%self._name)
        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("M")))
        #
        # Driving parameters
        # ------------------
        self.setParameters(Parameters)
        #
        if "Bounds" in self._parameters and isinstance(self._parameters["Bounds"], (list, tuple)) and len(self._parameters["Bounds"]) > 0:
            Bounds = self._parameters["Bounds"]
            logging.debug("%s Bounds taken into account"%(self._name,))
        else:
            Bounds = None
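        # As an indication, "Bounds" is expected as one (min, max) pair per
        # control variable, e.g. Bounds = [(0., 1.), (None, None)] for two
        # variables, a None meaning no bound on that side (illustrative values).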
        #
        # Observation operator
        # --------------------
        Hm = HO["Direct"].appliedTo
        #
        # Possible use of a precomputed H(Xb) vector
        # ------------------------------------------
        if HO["AppliedToX"] is not None and "HXb" in HO["AppliedToX"]:
            HXb = HO["AppliedToX"]["HXb"]
        else:
            HXb = Hm( Xb )
        HXb = numpy.asmatrix(numpy.ravel( HXb )).T
        #
        # Computation of the innovation
        # -----------------------------
        if Y.size != HXb.size:
            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
        if max(Y.shape) != max(HXb.shape):
            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
        d  = Y - HXb
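        # The innovation d = Y - H(Xb) is the observation-minus-background
        # misfit; it is reused below for the "Innovation" and "OMB" storages.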
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            Jb  = 0.
            Jo  = 0.
            J   = Jb + Jo
            if self._parameters["StoreInternalVariables"]:
                self.StoredVariables["CurrentState"].store( _X )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            return _HX
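        # For reference, quantile regression minimizes the asymmetric "pinball"
        # (or check) loss rho_tau(u) = u * (tau - 1_{u<0}) of the residual
        # u = Y - H(X): CostFunction only returns H(X) because the criterion
        # itself is evaluated inside the MMQR solver. A minimal illustrative
        # sketch of this loss (an assumption for documentation, never called
        # by the solver):
        def _PinballLoss(residual, tau):
            "Illustrative pinball loss value of a residual vector."
            _u = numpy.ravel( numpy.asarray( residual ) )
            return float( numpy.sum( _u * (tau - (_u < 0.)) ) )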
        #
        def GradientOfCostFunction(x):
            _X = numpy.asmatrix(numpy.ravel( x )).T
            Hg = HO["Tangent"].asMatrix( _X )
            return Hg
        #
        # Optimization starting point: Xini = Xb
        # --------------------------------------
        if isinstance(Xb, numpy.matrix):
            Xini = Xb.A1.tolist()
        else:
            Xini = list(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        if self._parameters["Minimizer"] == "MMQR":
            import mmqr
            Minimum, J_optimal, Informations = mmqr.mmqr(
                func        = CostFunction,
                x0          = Xini,
                fprime      = GradientOfCostFunction,
                bounds      = Bounds,
                quantile    = self._parameters["Quantile"],
                maxfun      = self._parameters["MaximumNumberOfSteps"],
                toler       = self._parameters["CostDecrementTolerance"],
                y           = Y,
                )
            nfeval = Informations[2]
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
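        # The MMQR minimizer, as its name suggests, follows a Majorize-Minimize
        # scheme for quantile regression: at each step the non-smooth pinball
        # loss is replaced by a smooth majorant that is easy to minimize, and
        # the iterations stop on the "CostDecrementTolerance" or the
        # "MaximumNumberOfSteps" criterion.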
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        # Supplementary calculations and/or storages
        # ------------------------------------------
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb - Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y - Hm(Xa)) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        #
        logging.debug("%s Number of evaluation(s) of the direct/tangent/adjoint observation operator: %i/%i/%i"%(self._name, HO["Direct"].nbcalls(0),HO["Tangent"].nbcalls(0),HO["Adjoint"].nbcalls(0)))
        logging.debug("%s Number of calls to the direct/tangent/adjoint observation operator cache: %i/%i/%i"%(self._name, HO["Direct"].nbcalls(3),HO["Tangent"].nbcalls(3),HO["Adjoint"].nbcalls(3)))
        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("M")))
        logging.debug("%s Finished"%self._name)
        #
        return 0

# ==============================================================================
if __name__ == "__main__":
    print '\n AUTODIAGNOSTIC \n'
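    # A minimal smoke-test sketch (assumption: the daCore framework is
    # importable from here); it only checks that the algorithm instantiates:
    try:
        a = ElementaryAlgorithm()
        print 'Algorithm "%s" correctly instantiated'%(a._name,)
    except Exception as e:
        print 'Instantiation failed: %s'%(e,)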