X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FQuantileRegression.py;h=4b0dd223528f1ad1c12dab21206bc9de2e779e0c;hb=67a9e7898a4e0455f469b63898f42e965da0b33e;hp=616b41ae76c2a75c8e701f26d32edfd43b074a7c;hpb=4c6d99976fc4966797d1951ac9ad4b57c516a19b;p=modules%2Fadao.git

diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py
index 616b41a..4b0dd22 100644
--- a/src/daComposant/daAlgorithms/QuantileRegression.py
+++ b/src/daComposant/daAlgorithms/QuantileRegression.py
@@ -1,28 +1,27 @@
-#-*-coding:iso-8859-1-*-
+# -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2012 EDF R&D
+# Copyright (C) 2008-2019 EDF R&D
 #
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
 #
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
 #
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
 #
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

 import logging
-from daCore import BasicObjects, PlatformInfo
-m = PlatformInfo.SystemUsage()
-
+from daCore import BasicObjects, NumericObjects
 import numpy

 # ==============================================================================
@@ -41,7 +40,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             name = "Minimizer",
             default = "MMQR",
             typecast = str,
-            message = "Minimiseur utilisé",
+            message = "Minimiseur utilisé",
             listval = ["MMQR"],
             )
         self.defineRequiredParameter(
             name = "MaximumNumberOfSteps",
             default = 15000,
             typecast = int,
             message = "Nombre maximal de pas d'optimisation",
             minval = 1,
             )
         self.defineRequiredParameter(
@@ -55,39 +54,54 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             name = "CostDecrementTolerance",
             default = 1.e-6,
             typecast = float,
-            message = "Maximum de variation de la fonction d'estimation lors de l'arrêt",
+            message = "Maximum de variation de la fonction d'estimation lors de l'arrêt",
             )
         self.defineRequiredParameter(
             name = "StoreInternalVariables",
             default = False,
             typecast = bool,
-            message = "Stockage des variables internes ou intermédiaires du calcul",
+            message = "Stockage des variables internes ou intermédiaires du calcul",
+            )
+        self.defineRequiredParameter(
+            name = "StoreSupplementaryCalculations",
+            default = [],
+            typecast = tuple,
+            message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
effectuer", + listval = [ + "Analysis", + "BMA", + "CostFunctionJ", + "CostFunctionJb", + "CostFunctionJo", + "CurrentState", + "Innovation", + "OMA", + "OMB", + "SimulatedObservationAtBackground", + "SimulatedObservationAtCurrentState", + "SimulatedObservationAtOptimum", + ] + ) + self.defineRequiredParameter( # Pas de type + name = "Bounds", + message = "Liste des valeurs de bornes", + ) + self.requireInputArguments( + mandatory= ("Xb", "Y", "HO" ), ) - def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None): - """ - Calcul des parametres definissant le quantile - """ - logging.debug("%s Lancement"%self._name) - logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo"))) + def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): + self._pre_run(Parameters, Xb, Y, R, B, Q) # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) - # - # Opérateur d'observation - # ----------------------- - Hm = H["Direct"].appliedTo + Hm = HO["Direct"].appliedTo # - # Utilisation éventuelle d'un vecteur H(Xb) précalculé + # Utilisation éventuelle d'un vecteur H(Xb) précalculé # ---------------------------------------------------- - if H["AppliedToX"] is not None and H["AppliedToX"].has_key("HXb"): - logging.debug("%s Utilisation de HXb"%self._name) - HXb = H["AppliedToX"]["HXb"] + if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]: + HXb = Hm( Xb, HO["AppliedInX"]["HXb"]) else: - logging.debug("%s Calcul de Hm(Xb)"%self._name) HXb = Hm( Xb ) - HXb = numpy.asmatrix(HXb).flatten().T + HXb = numpy.asmatrix(numpy.ravel( HXb )).T # # Calcul de l'innovation # ---------------------- @@ -96,50 +110,45 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) d = Y - HXb - logging.debug("%s Innovation d = %s"%(self._name, d)) # - # Définition de la fonction-coût + # Définition de la fonction-coût # ------------------------------ def CostFunction(x): - _X = numpy.asmatrix(x).flatten().T - logging.debug("%s CostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten())) + _X = numpy.asmatrix(numpy.ravel( x )).T + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): + self.StoredVariables["CurrentState"].store( _X ) _HX = Hm( _X ) - _HX = numpy.asmatrix(_HX).flatten().T + _HX = numpy.asmatrix(numpy.ravel( _HX )).T + if self._toStore("SimulatedObservationAtCurrentState"): + self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) Jb = 0. Jo = 0. 
             J = Jb + Jo
-            logging.debug("%s CostFunction Jb = %s"%(self._name, Jb))
-            logging.debug("%s CostFunction Jo = %s"%(self._name, Jo))
-            logging.debug("%s CostFunction J = %s"%(self._name, J))
-            if self._parameters["StoreInternalVariables"]:
-                self.StoredVariables["CurrentState"].store( _X.A1 )
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
             return _HX
         #
         def GradientOfCostFunction(x):
-            _X = numpy.asmatrix(x).flatten().T
-            logging.debug("%s GradientOfCostFunction X = %s"%(self._name, _X.A1))
-            Hg = H["Tangent"].asMatrix( _X )
+            _X = numpy.asmatrix(numpy.ravel( x )).T
+            Hg = HO["Tangent"].asMatrix( _X )
             return Hg
         #
-        # Point de démarrage de l'optimisation : Xini = Xb
+        # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
-        if type(Xb) is type(numpy.matrix([])):
+        if isinstance(Xb, type(numpy.matrix([]))):
             Xini = Xb.A1.tolist()
         else:
             Xini = list(Xb)
-        logging.debug("%s Point de démarrage Xini = %s"%(self._name, Xini))
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
         if self._parameters["Minimizer"] == "MMQR":
-            import mmqr
-            Minimum, J_optimal, Informations = mmqr.mmqr(
+            Minimum, J_optimal, Informations = NumericObjects.mmqr(
                 func = CostFunction,
                 x0 = Xini,
                 fprime = GradientOfCostFunction,
+                bounds = self._parameters["Bounds"],
                 quantile = self._parameters["Quantile"],
                 maxfun = self._parameters["MaximumNumberOfSteps"],
                 toler = self._parameters["CostDecrementTolerance"],
@@ -150,25 +159,34 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         else:
             raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
         #
-        logging.debug("%s %s Step of min cost = %s"%(self._name, self._parameters["Minimizer"], nfeval))
-        logging.debug("%s %s Minimum cost = %s"%(self._name, self._parameters["Minimizer"], J_optimal))
-        logging.debug("%s %s Minimum state = %s"%(self._name, self._parameters["Minimizer"], Minimum))
-        logging.debug("%s %s Nb of F = %s"%(self._name, self._parameters["Minimizer"], nfeval))
-        logging.debug("%s %s RetCode = %s"%(self._name, self._parameters["Minimizer"], rc))
-        #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(Minimum).flatten().T
-        logging.debug("%s Analyse Xa = %s"%(self._name, Xa))
+        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )
-        self.StoredVariables["Innovation"].store( d.A1 )
         #
-        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("MB")))
-        logging.debug("%s Terminé"%self._name)
+        if self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtOptimum"):
+            HXa = Hm(Xa)
+        #
+        # Calculs et/ou stockages supplémentaires
+        # ---------------------------------------
+        if self._toStore("Innovation"):
+            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+        if self._toStore("BMA"):
+            self.StoredVariables["BMA"].store( numpy.ravel(Xb - Xa) )
+        if self._toStore("OMA"):
+            self.StoredVariables["OMA"].store( numpy.ravel(Y - HXa) )
+        if self._toStore("OMB"):
+            self.StoredVariables["OMB"].store( numpy.ravel(d) )
+        if self._toStore("SimulatedObservationAtBackground"):
+            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
+        if self._toStore("SimulatedObservationAtOptimum"):
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
+        self._post_run(HO)
         return 0

 # ==============================================================================
 if __name__ == "__main__":
-    print '\n AUTODIAGNOSTIC \n'
+    print('\n AUTODIAGNOSTIC\n')
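The criterion behind QuantileRegression is the quantile ("pinball", or check) loss rather than a quadratic misfit, which is why the patched CostFunction above keeps Jb and Jo at zero and simply returns H(X): the quantile weighting of the residuals is applied inside the minimizer itself. A minimal NumPy sketch of that loss, assuming a vector of residuals Y - H(X); the helper name pinball_loss is purely illustrative and does not exist in ADAO:

import numpy

def pinball_loss(residuals, quantile=0.5):
    # rho_tau(u) = u * (tau - 1_{u<0}): residuals above the fit are weighted by tau,
    # residuals below it by (1 - tau), so the minimum tracks the tau-quantile of Y - H(X).
    u = numpy.ravel(residuals)
    return float(numpy.sum(u * (quantile - (u < 0.0))))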
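The "MMQR" minimizer selected above, moved by this patch from the standalone mmqr module into NumericObjects, follows a Majorize-Minimize strategy for that criterion. The loop below is only a generic sketch of such a Majorize-Minimize / iteratively reweighted least-squares scheme for a linear observation operator given as a plain matrix; it is not the NumericObjects.mmqr implementation and deliberately ignores the bounds, maxfun and toler interface seen in the diff:

import numpy

def mm_quantile_regression(H, y, x0, quantile=0.5, maxiter=100, tol=1.e-6, eps=1.e-8):
    # Each pass majorizes the pinball loss by a weighted quadratic built around the
    # current residuals, so the update reduces to one weighted least-squares solve.
    x = numpy.ravel(numpy.asarray(x0, dtype=float))
    for _ in range(maxiter):
        r = y - H @ x                               # current residuals
        w = 1.0 / numpy.maximum(numpy.abs(r), eps)  # majorant weights, guarded near zero
        A = H.T @ (w[:, numpy.newaxis] * H)
        b = H.T @ (w * y) + (2.0 * quantile - 1.0) * H.sum(axis=0)
        x_new = numpy.linalg.solve(A, b)
        if numpy.max(numpy.abs(x_new - x)) < tol:   # stop when the iterates stagnate
            return x_new
        x = x_new
    return x

With quantile=0.5 this reduces to the classical IRLS scheme for median (L1) regression; the tilt term (2*quantile - 1) * H.sum(axis=0) is what moves the fit toward an arbitrary quantile.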
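On the user side, the two parameters introduced here, "Bounds" and "StoreSupplementaryCalculations", are driven through the algorithm parameters of an ADAO case. A minimal sketch assuming the adaoBuilder textual interface of the same ADAO generation, with purely illustrative data, operator and bounds:

from numpy import array
from adao import adaoBuilder

case = adaoBuilder.New()
case.set( 'AlgorithmParameters',
    Algorithm  = 'QuantileRegression',
    Parameters = {
        'Quantile'                       : 0.75,
        'MaximumNumberOfSteps'           : 100,
        'Bounds'                         : [[0., None], [0., None]],  # one [min, max] pair per state component
        'StoreSupplementaryCalculations' : ['Innovation', 'SimulatedObservationAtOptimum'],
        },
    )
case.set( 'Background',          Vector = array([1., 1.]) )
case.set( 'Observation',         Vector = array([0.5, 1.5, 2.5]) )
case.set( 'ObservationOperator', Matrix = array([[1., 0.], [0., 1.], [1., 1.]]) )
case.execute()
print( case.get('Analysis')[-1] )
print( case.get('SimulatedObservationAtOptimum')[-1] )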