# -*- coding: iso-8859-1 -*-
#
# Copyright (C) 2008-2012 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
import logging
import numpy

from daCore import BasicObjects, PlatformInfo
m = PlatformInfo.SystemUsage()
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "QUANTILEREGRESSION")
        self.defineRequiredParameter(
            name = "Quantile", default = 0.5, typecast = float,
            message = "Quantile for the quantile regression (between 0 and 1)",
            )
        self.defineRequiredParameter(
            name = "Minimizer", default = "MMQR", typecast = str,
            message = "Minimizer used",
            )
        self.defineRequiredParameter(
            name = "MaximumNumberOfSteps", default = 15000, typecast = int,
            message = "Maximum number of optimization steps",
            )
        self.defineRequiredParameter(
            name = "CostDecrementTolerance", default = 1.e-6, typecast = float,
            message = "Maximum variation of the cost function used as the stopping criterion",
            )
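        #
        # For illustration (hypothetical values), a caller would select these
        # parameters through the "Parameters" dictionary handed to run():
        #     Parameters = {
        #         "Quantile"             : 0.1,
        #         "MaximumNumberOfSteps" : 100,
        #         }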
    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
        """
        Computes the parameters defining the quantile.
        """
        logging.debug("%s Starting"%self._name)
        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("Mo")))
        #
        # Control parameters
        # ------------------
        self.setParameters(Parameters)
        #
        # Observation operator
        # --------------------
        Hm = H["Direct"].appliedTo
        #
        # Possible use of a precomputed H(Xb) vector
        # ------------------------------------------
        if H["AppliedToX"] is not None and "HXb" in H["AppliedToX"]:
            logging.debug("%s Using precomputed HXb"%self._name)
            HXb = H["AppliedToX"]["HXb"]
        else:
            logging.debug("%s Computing Hm(Xb)"%self._name)
            HXb = Hm( Xb )
        HXb = numpy.asmatrix(HXb).flatten().T
        #
        # Computing the innovation
        # ------------------------
        if Y.size != HXb.size:
            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
        if max(Y.shape) != max(HXb.shape):
            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
        d = Y - HXb
        logging.debug("%s Innovation d = %s"%(self._name, d))
        #
        # Defining the cost function
        # --------------------------
        def CostFunction(x):
            _X  = numpy.asmatrix(x).flatten().T
            logging.debug("%s CostFunction X  = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
            _HX = Hm( _X )
            _HX = numpy.asmatrix(_HX).flatten().T
            Jb  = 0.
            Jo  = 0.
            J   = Jb + Jo
            logging.debug("%s CostFunction Jb = %s"%(self._name, Jb))
            logging.debug("%s CostFunction Jo = %s"%(self._name, Jo))
            logging.debug("%s CostFunction J  = %s"%(self._name, J))
            self.StoredVariables["CurrentState"].store( _X.A1 )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            return _HX
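        #
        # Note on the cost decomposition: Jb and Jo are kept at zero and
        # CostFunction returns H(X) itself, because with the MMQR minimizer
        # used below the quantile ("check" or "pinball") loss is evaluated
        # internally from the residual r = Y - H(X). For a quantile level tau,
        # that loss reads L(r) = tau*r if r >= 0, and (tau - 1)*r otherwise,
        # so tau = 0.5 recovers (half) the absolute error, i.e. the median.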
        def GradientOfCostFunction(x):
            _X = numpy.asmatrix(x).flatten().T
            logging.debug("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
            Hg = H["Tangent"].asMatrix( _X )
            return Hg
        #
        # Optimization starting point: Xini = Xb
        # --------------------------------------
        if isinstance(Xb, numpy.matrix):
            Xini = Xb.A1.tolist()
        else:
            Xini = list(Xb)
        logging.debug("%s Starting point Xini = %s"%(self._name, Xini))
        #
        # Minimizing the functional
        # -------------------------
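        # Only the "MMQR" minimizer is supported here. Judging by its name, it
        # implements quantile regression through a Majorize-Minimize (MM)
        # iteration, each step minimizing a smooth majorant of the
        # non-differentiable quantile loss; the actual implementation lives in
        # the external mmqr module imported below.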
        if self._parameters["Minimizer"] == "MMQR":
            import mmqr
            Minimum, J_optimal, Informations = mmqr.mmqr(
                func     = CostFunction,
                x0       = Xini,
                fprime   = GradientOfCostFunction,
                quantile = self._parameters["Quantile"],
                maxfun   = self._parameters["MaximumNumberOfSteps"],
                toler    = self._parameters["CostDecrementTolerance"],
                y        = Y,
                )
            nfeval = Informations[2]
            rc     = Informations[4]
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
        logging.debug("%s %s Step of min cost = %s"%(self._name, self._parameters["Minimizer"], nfeval))
        logging.debug("%s %s Minimum cost     = %s"%(self._name, self._parameters["Minimizer"], J_optimal))
        logging.debug("%s %s Minimum state    = %s"%(self._name, self._parameters["Minimizer"], Minimum))
        logging.debug("%s %s Nb of F          = %s"%(self._name, self._parameters["Minimizer"], nfeval))
        logging.debug("%s %s RetCode          = %s"%(self._name, self._parameters["Minimizer"], rc))
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.asmatrix(Minimum).T
        logging.debug("%s Analysis Xa = %s"%(self._name, Xa))
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        self.StoredVariables["Innovation"].store( d.A1 )
        #
        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("Mo")))
        logging.debug("%s Finished"%self._name)
        #
        return 0
# ==============================================================================
if __name__ == "__main__":
    print("\n AUTODIAGNOSTIC \n")
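    #
    # A minimal, self-contained sketch of the quantile ("pinball") loss that
    # quantile regression minimizes. It does not exercise the algorithm above
    # (which needs the full H operator machinery), and the helper name
    # "pinball_loss" is ours for illustration, not part of ADAO.
    def pinball_loss(residuals, quantile):
        # tau*r on positive residuals, (tau - 1)*r on negative ones
        return numpy.where(residuals >= 0., quantile*residuals, (quantile - 1.)*residuals)
    #
    r = numpy.array([-2., -1., 0., 1., 2.])
    print("Pinball loss at quantile 0.5: %s"%pinball_loss(r, 0.5))
    print("Pinball loss at quantile 0.9: %s"%pinball_loss(r, 0.9))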