X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FDerivativeFreeOptimization.py;h=221d4efe659465ee7181b385939197dd9c90dd23;hb=c4d26891b4d5909257317d959d19ce7068030171;hp=2ae938dfd4d60f8651554d2b3f5ee218cfd5058d;hpb=690c0682f92ee1deb7a4ed5fa0a9aa8c596ce2ad;p=modules%2Fadao.git

diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
index 2ae938d..221d4ef 100644
--- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
+++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
@@ -1,6 +1,6 @@
 #-*-coding:iso-8859-1-*-
 #
-# Copyright (C) 2008-2016 EDF R&D
+# Copyright (C) 2008-2017 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -21,7 +21,7 @@
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

 import logging
-from daCore import BasicObjects
+from daCore import BasicObjects, PlatformInfo
 import numpy, scipy.optimize

 # ==============================================================================
@@ -30,10 +30,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
         self.defineRequiredParameter(
             name = "Minimizer",
-            default = "POWELL",
+            default = "BOBYQA",
             typecast = str,
             message = "Minimiseur utilisé",
-            listval = ["POWELL", "SIMPLEX", "COBYLA"],
+            listval = ["BOBYQA", "COBYLA", "NEWUOA", "POWELL", "SIMPLEX", "SUBPLEX"],
             )
         self.defineRequiredParameter(
             name = "MaximumNumberOfSteps",
@@ -46,7 +46,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             name = "MaximumNumberOfFunctionEvaluations",
             default = 15000,
             typecast = int,
-            message = "Nombre maximal de d'évaluations de la fonction",
+            message = "Nombre maximal d'évaluations de la fonction",
             minval = -1,
             )
         self.defineRequiredParameter(
@@ -91,21 +91,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__disp = 1
-        else:
-            self.__disp = 0
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
+            self._parameters["Minimizer"] = "SIMPLEX"
         #
         # Opérateurs
         # ----------
@@ -175,6 +164,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
         Xini = numpy.ravel(Xb)
+        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
+            raise ValueError("The minimizer %s can not be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
@@ -190,21 +181,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 xtol = self._parameters["StateVariationTolerance"],
                 ftol = self._parameters["CostDecrementTolerance"],
                 full_output = True,
-                disp = self.__disp,
-                )
-        elif self._parameters["Minimizer"] == "SIMPLEX":
-            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
-                func = CostFunction,
-                x0 = Xini,
-                args = (self._parameters["QualityCriterion"],),
-                maxiter = self._parameters["MaximumNumberOfSteps"]-1,
-                maxfun = self._parameters["MaximumNumberOfFunctionEvaluations"],
-                xtol = self._parameters["StateVariationTolerance"],
-                ftol = self._parameters["CostDecrementTolerance"],
-                full_output = True,
-                disp = self.__disp,
+                disp = self._parameters["optdisp"],
                 )
-        elif self._parameters["Minimizer"] == "COBYLA":
+        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
             def make_constraints(bounds):
                 constraints = []
                 for (i,(a,b)) in enumerate(bounds):
@@ -212,20 +191,152 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     upper = lambda x: b - x[i]
                     constraints = constraints + [lower] + [upper]
                 return constraints
-            if Bounds is None:
+            if self._parameters["Bounds"] is None:
                 raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
             Minimum = scipy.optimize.fmin_cobyla(
                 func = CostFunction,
                 x0 = Xini,
-                cons = make_constraints( Bounds ),
+                cons = make_constraints( self._parameters["Bounds"] ),
                 args = (self._parameters["QualityCriterion"],),
                 consargs = (), # To avoid extra-args
                 maxfun = self._parameters["MaximumNumberOfFunctionEvaluations"],
                 rhobeg = 1.0,
                 rhoend = self._parameters["StateVariationTolerance"],
                 catol = 2.*self._parameters["StateVariationTolerance"],
-                disp = self.__disp,
+                disp = self._parameters["optdisp"],
+                )
+        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
+            import nlopt
+            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
+            def _f(_Xx, Grad):
+                # DFO, so no gradient
+                return CostFunction(_Xx, self._parameters["QualityCriterion"])
+            opt.set_min_objective(_f)
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
+                lb = lub[:,0]
+                ub = lub[:,1]
+                if self._parameters["optdisp"]:
+                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
+                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+                opt.set_upper_bounds(ub)
+                opt.set_lower_bounds(lb)
+            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
+            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
+            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
+            Minimum = opt.optimize( Xini )
+            if self._parameters["optdisp"]:
+                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
+                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
+                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
+            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
+                func = CostFunction,
+                x0 = Xini,
+                args = (self._parameters["QualityCriterion"],),
+                maxiter = self._parameters["MaximumNumberOfSteps"]-1,
+                maxfun = self._parameters["MaximumNumberOfFunctionEvaluations"],
+                xtol = self._parameters["StateVariationTolerance"],
+                ftol = self._parameters["CostDecrementTolerance"],
+                full_output = True,
+                disp = self._parameters["optdisp"],
self._parameters["optdisp"], ) + elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt: + import nlopt + opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size) + def _f(_Xx, Grad): + # DFO, so no gradient + return CostFunction(_Xx, self._parameters["QualityCriterion"]) + opt.set_min_objective(_f) + if self._parameters["Bounds"] is not None: + lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2)) + lb = lub[:,0] + ub = lub[:,1] + if self._parameters["optdisp"]: + print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) + print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + opt.set_upper_bounds(ub) + opt.set_lower_bounds(lb) + opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) + opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"]) + opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) + Minimum = opt.optimize( Xini ) + if self._parameters["optdisp"]: + print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) + print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) + print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt: + import nlopt + opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size) + def _f(_Xx, Grad): + # DFO, so no gradient + return CostFunction(_Xx, self._parameters["QualityCriterion"]) + opt.set_min_objective(_f) + if self._parameters["Bounds"] is not None: + lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2)) + lb = lub[:,0] + ub = lub[:,1] + if self._parameters["optdisp"]: + print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) + print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + opt.set_upper_bounds(ub) + opt.set_lower_bounds(lb) + opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) + opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"]) + opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) + Minimum = opt.optimize( Xini ) + if self._parameters["optdisp"]: + print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) + print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) + print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt: + import nlopt + opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size) + def _f(_Xx, Grad): + # DFO, so no gradient + return CostFunction(_Xx, self._parameters["QualityCriterion"]) + opt.set_min_objective(_f) + if self._parameters["Bounds"] is not None: + lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2)) + lb = lub[:,0] + ub = lub[:,1] + if self._parameters["optdisp"]: + print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) + print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + opt.set_upper_bounds(ub) + opt.set_lower_bounds(lb) + opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) + opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"]) + opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) + Minimum = opt.optimize( Xini ) + if self._parameters["optdisp"]: + print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) + print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) + print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt: + import nlopt 
+            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
+            def _f(_Xx, Grad):
+                # DFO, so no gradient
+                return CostFunction(_Xx, self._parameters["QualityCriterion"])
+            opt.set_min_objective(_f)
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
+                lb = lub[:,0]
+                ub = lub[:,1]
+                if self._parameters["optdisp"]:
+                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
+                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+                opt.set_upper_bounds(ub)
+                opt.set_lower_bounds(lb)
+            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
+            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
+            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
+            Minimum = opt.optimize( Xini )
+            if self._parameters["optdisp"]:
+                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
+                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
+                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
         else:
             raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
         #
@@ -266,4 +377,4 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
 # ==============================================================================
 if __name__ == "__main__":
-    print '\n AUTODIAGNOSTIC \n'
+    print('\n AUTODIAGNOSTIC \n')
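
Note on the added code: the five nlopt branches (COBYLA, SIMPLEX, BOBYQA, NEWUOA, SUBPLEX) are identical except for the algorithm constant passed to nlopt.opt(). A minimal sketch of how they could be collapsed into a single helper, using only the nlopt calls already present in the diff; the names NLOPT_ALGOS and _nlopt_minimize are illustrative assumptions, not part of the commit, and the optdisp diagnostics are omitted for brevity:

    import numpy
    import nlopt

    # Map the ADAO "Minimizer" keyword to the local, derivative-free
    # nlopt algorithm used in the corresponding branch above.
    NLOPT_ALGOS = {
        "COBYLA" : nlopt.LN_COBYLA,
        "SIMPLEX": nlopt.LN_NELDERMEAD,
        "BOBYQA" : nlopt.LN_BOBYQA,
        "NEWUOA" : nlopt.LN_NEWUOA,
        "SUBPLEX": nlopt.LN_SBPLX,
    }

    def _nlopt_minimize(minimizer, cost, Xini, parameters):
        # Hypothetical helper: one body shared by all five nlopt branches.
        opt = nlopt.opt(NLOPT_ALGOS[minimizer], Xini.size)
        def _f(_Xx, Grad):
            # Derivative-free optimisation: the gradient argument is unused.
            return cost(_Xx, parameters["QualityCriterion"])
        opt.set_min_objective(_f)
        if parameters["Bounds"] is not None:
            # Bounds are given as (lower, upper) pairs, one per state component.
            lub = numpy.array(parameters["Bounds"]).reshape((Xini.size, 2))
            opt.set_lower_bounds(lub[:, 0])
            opt.set_upper_bounds(lub[:, 1])
        opt.set_ftol_rel(parameters["CostDecrementTolerance"])
        opt.set_xtol_rel(2. * parameters["StateVariationTolerance"])
        opt.set_maxeval(parameters["MaximumNumberOfFunctionEvaluations"])
        return opt.optimize(Xini)

With such a table, the dispatch in run() would reduce to a membership test on NLOPT_ALGOS plus the two scipy fallbacks (fmin_powell and fmin), instead of five near-identical elif blocks.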