From ca04ebfa18e547eb61480179e25654e6c1c6a276 Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Tue, 31 Jan 2017 21:37:29 +0100
Subject: [PATCH] Improvement of internal pre run

---
 src/daComposant/daAlgorithms/3DVAR.py         | 32 +++------
 src/daComposant/daAlgorithms/4DVAR.py         | 38 ++++-------
 src/daComposant/daAlgorithms/AdjointTest.py   |  4 +-
 src/daComposant/daAlgorithms/Blue.py          |  8 +--
 .../DerivativeFreeOptimization.py             | 65 ++++++-----------
 src/daComposant/daAlgorithms/EnsembleBlue.py  |  6 +-
 src/daComposant/daAlgorithms/ExtendedBlue.py  |  8 +--
 .../daAlgorithms/ExtendedKalmanFilter.py      | 17 ++---
 src/daComposant/daAlgorithms/FunctionTest.py  |  4 +-
 src/daComposant/daAlgorithms/GradientTest.py  |  6 +-
 src/daComposant/daAlgorithms/KalmanFilter.py  |  6 +-
 .../daAlgorithms/LinearLeastSquares.py        |  8 +--
 src/daComposant/daAlgorithms/LinearityTest.py |  6 +-
 .../daAlgorithms/NonLinearLeastSquares.py     | 32 +++------
 src/daComposant/daAlgorithms/ObserverTest.py  |  6 +-
 .../daAlgorithms/ParticleSwarmOptimization.py |  6 +-
 .../daAlgorithms/QuantileRegression.py        | 16 +----
 src/daComposant/daAlgorithms/SamplingTest.py  |  4 +-
 src/daComposant/daAlgorithms/TangentTest.py   |  8 +--
 .../daAlgorithms/UnscentedKalmanFilter.py     | 43 +++++------
 src/daComposant/daCore/BasicObjects.py        | 29 ++++++++-
 21 files changed, 116 insertions(+), 236 deletions(-)

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index 50193f7..d9a1187 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -107,23 +107,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -215,11 +199,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -229,11 +213,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -243,7 +227,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -254,7 +238,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -265,7 +249,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         else:
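A caveat in the 3DVAR hunks above, on a context line the patch keeps as-is: the test
"self._parameters.has_key("Minimizer") == "TNC"" compares the boolean returned by
has_key() with a string, so the condition is always false and the TNC workaround it
guards is dead code. The same line recurs in 4DVAR.py and NonLinearLeastSquares.py
below. The intended test was presumably on the parameter's value, along these lines
(sketch, not part of the patch):

    # Always False as written: has_key() returns True/False, and a
    # boolean never equals the string "TNC".
    if self._parameters.has_key("Minimizer") == "TNC":
        pass  # dead code, never reached
    # Presumably intended:
    if self._parameters["Minimizer"] == "TNC":
        pass  # the TNC return-value workaround would go here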
diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py
index b676839..9c49f91 100644
--- a/src/daComposant/daAlgorithms/4DVAR.py
+++ b/src/daComposant/daAlgorithms/4DVAR.py
@@ -94,23 +94,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -192,9 +176,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             elif self._parameters["EstimationOf"] == "Parameters":
                 pass
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             # Etape de différence aux observations
             if self._parameters["EstimationOf"] == "State":
@@ -260,11 +244,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -274,11 +258,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -288,7 +272,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -299,7 +283,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -310,7 +294,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         else:
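The "EstimateProjection" branch in the 4DVAR hunk above clamps each component of the
state into its [lower, upper] interval taken from the rows of Bounds; the hstack/max
then hstack/min pair is an elementwise clip written with matrix primitives. A minimal
self-contained illustration of the equivalence (values and names here are illustrative
only, not ADAO's):

    import numpy

    Bounds = [(0., 1.), (-2., 2.), (5., 10.)]        # one (min, max) pair per component
    _Xn    = numpy.asmatrix([[1.7], [-3.0], [7.5]])  # column state vector

    # The patch's two-step projection:
    _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,0])),axis=1)
    _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,1])),axis=1)

    # Elementwise-clip equivalent:
    _Xc = numpy.clip(numpy.asmatrix([[1.7], [-3.0], [7.5]]),
                     numpy.asmatrix(Bounds)[:,0], numpy.asmatrix(Bounds)[:,1])
    assert (_Xn == _Xc).all()   # both give [[1.], [-2.], [7.5]]

The same projection idiom reappears in ExtendedKalmanFilter.py and
UnscentedKalmanFilter.py further down.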
self._parameters["MaximumNumberOfSteps"], pgtol = self._parameters["ProjectedGradientTolerance"], ftol = self._parameters["CostDecrementTolerance"], - messages = self.__message, + messages = self._parameters["optmessages"], ) elif self._parameters["Minimizer"] == "CG": Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg( @@ -288,7 +272,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): args = (), maxiter = self._parameters["MaximumNumberOfSteps"], gtol = self._parameters["GradientNormTolerance"], - disp = self.__disp, + disp = self._parameters["optdisp"], full_output = True, ) elif self._parameters["Minimizer"] == "NCG": @@ -299,7 +283,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): args = (), maxiter = self._parameters["MaximumNumberOfSteps"], avextol = self._parameters["CostDecrementTolerance"], - disp = self.__disp, + disp = self._parameters["optdisp"], full_output = True, ) elif self._parameters["Minimizer"] == "BFGS": @@ -310,7 +294,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): args = (), maxiter = self._parameters["MaximumNumberOfSteps"], gtol = self._parameters["GradientNormTolerance"], - disp = self.__disp, + disp = self._parameters["optdisp"], full_output = True, ) else: diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py index 49428a9..bda7162 100644 --- a/src/daComposant/daAlgorithms/AdjointTest.py +++ b/src/daComposant/daAlgorithms/AdjointTest.py @@ -76,9 +76,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() - # - self.setParameters(Parameters) + self._pre_run(Parameters) # Hm = HO["Direct"].appliedTo Ht = HO["Tangent"].appliedInXTo diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index 2f6c2d8..2e9b1af 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -70,14 +70,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() + self._pre_run(Parameters) # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) - # - # Opérateurs - # ---------- Hm = HO["Tangent"].asMatrix(Xb) Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape Ha = HO["Adjoint"].asMatrix(Xb) diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py index ccf94a9..44f9364 100644 --- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py +++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py @@ -91,23 +91,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() - if logging.getLogger().level < logging.WARNING: - self.__disp = 1 - else: - self.__disp = 0 - # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) + self._pre_run(Parameters) # if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]: self._parameters["Minimizer"] = "SIMPLEX" - if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0): - Bounds = self._parameters["Bounds"] - logging.debug("%s 
diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
index ccf94a9..44f9364 100644
--- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
+++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
@@ -91,23 +91,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__disp = 1
-        else:
-            self.__disp = 0
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
             self._parameters["Minimizer"] = "SIMPLEX"
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
         #
         # Opérateurs
         # ----------
@@ -194,7 +181,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 xtol        = self._parameters["StateVariationTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
                 full_output = True,
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
             def make_constraints(bounds):
@@ -204,19 +191,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     upper = lambda x: b - x[i]
                     constraints = constraints + [lower] + [upper]
                 return constraints
-            if Bounds is None:
+            if self._parameters["Bounds"] is None:
                 raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
             Minimum = scipy.optimize.fmin_cobyla(
                 func        = CostFunction,
                 x0          = Xini,
-                cons        = make_constraints( Bounds ),
+                cons        = make_constraints( self._parameters["Bounds"] ),
                 args        = (self._parameters["QualityCriterion"],),
                 consargs    = (), # To avoid extra-args
                 maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                 rhobeg      = 1.0,
                 rhoend      = self._parameters["StateVariationTolerance"],
                 catol       = 2.*self._parameters["StateVariationTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
             import nlopt
@@ -225,11 +212,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -238,7 +225,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -252,7 +239,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 xtol        = self._parameters["StateVariationTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
                 full_output = True,
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
             import nlopt
@@ -261,11 +248,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -274,7 +261,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -285,11 +272,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -298,7 +285,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -309,11 +296,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -322,7 +309,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -333,11 +320,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -346,7 +333,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
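Visible above for COBYLA and SIMPLEX, and repeated for the remaining nlopt-backed
minimizers in the full file, the nlopt branches differ only in the algorithm constant
passed to nlopt.opt(); the objective wrapper, bounds handling, tolerance, evaluation
budget and reporting are identical. A possible factoring, sketched with a hypothetical
helper name (_nlopt_minimize is not part of the patch):

    import numpy
    import nlopt  # available only when PlatformInfo.has_nlopt is True

    def _nlopt_minimize(algorithm, CostFunction, Xini, parameters):
        # Hypothetical refactoring sketch: the body shared by the
        # repeated nlopt branches, parameterized by the algorithm enum.
        opt = nlopt.opt(algorithm, Xini.size)
        def _f(_Xx, Grad):
            # DFO, so no gradient
            return CostFunction(_Xx, parameters["QualityCriterion"])
        opt.set_min_objective(_f)
        if parameters["Bounds"] is not None:
            lub = numpy.array(parameters["Bounds"]).reshape((Xini.size,2))
            opt.set_lower_bounds(lub[:,0])
            opt.set_upper_bounds(lub[:,1])
        opt.set_xtol_rel(2.*parameters["StateVariationTolerance"])
        opt.set_maxeval(parameters["MaximumNumberOfFunctionEvaluations"])
        return opt.optimize(Xini)

    # Each branch would then reduce to one call, for example:
    #     Minimum = _nlopt_minimize(nlopt.LN_COBYLA, CostFunction, Xini, self._parameters)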
self._parameters["optdisp"]: print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum) print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()) print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()) diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py index 54ce130..f07c53c 100644 --- a/src/daComposant/daAlgorithms/EnsembleBlue.py +++ b/src/daComposant/daAlgorithms/EnsembleBlue.py @@ -48,11 +48,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() - # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) + self._pre_run(Parameters) # # Précalcul des inversions de B et R # ---------------------------------- diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index 4ecda94..eb6ff58 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -70,14 +70,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() + self._pre_run(Parameters) # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) - # - # Opérateur d'observation - # ----------------------- Hm = HO["Tangent"].asMatrix(Xb) Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape Ha = HO["Adjoint"].asMatrix(Xb) diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py index a4d5c95..b38239e 100644 --- a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py +++ b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py @@ -61,17 +61,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): ) def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): - self._pre_run() + self._pre_run(Parameters) # - # Paramètres de pilotage - # ---------------------- - self.setParameters(Parameters) - # - if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0): - Bounds = self._parameters["Bounds"] - logging.debug("%s Prise en compte des bornes effectuee"%(self._name,)) - else: - Bounds = None if self._parameters["EstimationOf"] == "Parameters": self._parameters["StoreInternalVariables"] = True # @@ -155,9 +146,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Xn_predicted = Xn Pn_predicted = Pn # - if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection": - Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,0])),axis=1) - Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,1])),axis=1) + if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection": + Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1) + Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1) # if self._parameters["EstimationOf"] == "State": d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T diff --git a/src/daComposant/daAlgorithms/FunctionTest.py 
diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py
index 828930e..4d194a2 100644
--- a/src/daComposant/daAlgorithms/FunctionTest.py
+++ b/src/daComposant/daAlgorithms/FunctionTest.py
@@ -65,9 +65,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py
index cc49cb7..842604b 100644
--- a/src/daComposant/daAlgorithms/GradientTest.py
+++ b/src/daComposant/daAlgorithms/GradientTest.py
@@ -102,9 +102,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
@@ -223,7 +221,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Boucle sur les perturbations
         # ----------------------------
-        Normalisation= -1
         NormesdX     = []
         NormesFXdX   = []
         NormesdFX    = []
@@ -269,7 +266,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Residu = NormedFXGdX / (amplitude*amplitude)
             elif self._parameters["ResiduFormula"] == "Norm":
                 Residu = NormedFXsAm
-            if Normalisation < 0 : Normalisation = Residu
             #
             msg = " %2i %5.0e %9.3e %9.3e %9.3e %9.3e %9.3e | %9.3e | %9.3e %4.0f"%(i,amplitude,NormeX,NormeFX,NormeFXdX,NormedX,NormedFX,NormedFXsdX,Residu,math.log10(max(1.e-99,Residu)))
             msgs += "\n" + __marge + msg
diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py
index 20ac177..caca1e7 100644
--- a/src/daComposant/daAlgorithms/KalmanFilter.py
+++ b/src/daComposant/daAlgorithms/KalmanFilter.py
@@ -50,11 +50,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py
index d335dec..491ea0a 100644
--- a/src/daComposant/daAlgorithms/LinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py
@@ -43,14 +43,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Tangent"].asMatrix(None)
         Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(None)
diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py
index 22baa9b..c09fcf6 100644
--- a/src/daComposant/daAlgorithms/LinearityTest.py
+++ b/src/daComposant/daAlgorithms/LinearityTest.py
@@ -84,11 +84,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         def RMS(V1, V2):
             import math
diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index 08c5705..b568207 100644
--- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
@@ -80,23 +80,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -197,11 +181,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -211,11 +195,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -225,7 +209,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -236,7 +220,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -247,7 +231,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "LM":
diff --git a/src/daComposant/daAlgorithms/ObserverTest.py b/src/daComposant/daAlgorithms/ObserverTest.py
index b62d7ea..07b20df 100644
--- a/src/daComposant/daAlgorithms/ObserverTest.py
+++ b/src/daComposant/daAlgorithms/ObserverTest.py
@@ -30,15 +30,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         BasicObjects.Algorithm.__init__(self, "OBSERVERTEST")

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         print "Results of observer check on all potential variables or commands,"
         print "    only activated on selected ones by explicit association."
         print
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
         __Xa = 1.+numpy.arange(3.)
         __Xb = numpy.zeros(3)
         __YY = 1.+numpy.arange(5.)
diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
index 52e8e63..68dba9f 100644
--- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
+++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
@@ -99,11 +99,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if self._parameters.has_key("BoxBounds") and (type(self._parameters["BoxBounds"]) is type([]) or type(self._parameters["BoxBounds"]) is type(())) and (len(self._parameters["BoxBounds"]) > 0):
             BoxBounds = self._parameters["BoxBounds"]
diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py
index 2942db4..671bfa5 100644
--- a/src/daComposant/daAlgorithms/QuantileRegression.py
+++ b/src/daComposant/daAlgorithms/QuantileRegression.py
@@ -75,20 +75,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
-        #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Direct"].appliedTo
         #
         # Utilisation éventuelle d'un vecteur H(Xb) précalculé
@@ -145,7 +133,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 func        = CostFunction,
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 quantile    = self._parameters["Quantile"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 toler       = self._parameters["CostDecrementTolerance"],
diff --git a/src/daComposant/daAlgorithms/SamplingTest.py b/src/daComposant/daAlgorithms/SamplingTest.py
index acf9d69..cb71257 100644
--- a/src/daComposant/daAlgorithms/SamplingTest.py
+++ b/src/daComposant/daAlgorithms/SamplingTest.py
@@ -83,9 +83,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py
index 1039bdf..1ed6440 100644
--- a/src/daComposant/daAlgorithms/TangentTest.py
+++ b/src/daComposant/daAlgorithms/TangentTest.py
@@ -84,14 +84,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
         #
diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
index acd220d..3f1245c 100644
--- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
@@ -90,17 +90,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
         #
@@ -192,10 +183,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
             nbSpts = 2*Xn.size+1
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                 for point in range(nbSpts):
-                    Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(Bounds)[:,0])),axis=1)
-                    Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                    Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             XEtnnp = []
             for point in range(nbSpts):
@@ -204,9 +195,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                         Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
                         XEtnnpi = XEtnnpi + Cm * Un
-                    if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                        XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                        XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                        XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                        XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
                 elif self._parameters["EstimationOf"] == "Parameters":
                     # --- > Par principe, M = Id, Q = 0
                     XEtnnpi = Xnp[:,point]
@@ -215,26 +206,26 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            #
            Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
            #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
            #
            if self._parameters["EstimationOf"] == "State":        Pnm = Q
            elif self._parameters["EstimationOf"] == "Parameters": Pnm = 0.
            for point in range(nbSpts):
                Pnm += Wc[i] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
            #
-            if self._parameters["EstimationOf"] == "Parameters" and Bounds is not None:
+            if self._parameters["EstimationOf"] == "Parameters" and self._parameters["Bounds"] is not None:
                Pnmdemi = self._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
            else:
                Pnmdemi = numpy.linalg.cholesky(Pnm)
            #
            Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
            #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                for point in range(nbSpts):
-                    Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(Bounds)[:,0])),axis=1)
-                    Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                    Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
            #
            Ynnp = []
            for point in range(nbSpts):
@@ -262,9 +253,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Xn = Xncm + Kn * d
                 Pn = Pnm - Kn * Pyyn * Kn.T
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             self.StoredVariables["Analysis"].store( Xn.A1 )
             if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
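One line in the long UnscentedKalmanFilter hunk above deserves a flag, although the
patch leaves it untouched: inside "for point in range(nbSpts):", the predicted
covariance is accumulated as "Pnm += Wc[i] * ...", so the weight index i does not
follow the loop variable. In the standard unscented transform every sigma-point
deviation carries its own covariance weight, i.e. Wc[point]; this looks like a
pre-existing slip rather than something introduced by this commit. For comparison,
a minimal self-contained version of that accumulation (illustrative names and data,
not ADAO's):

    import numpy

    nbSpts = 7                                           # 2*n+1 sigma points, n = 3
    Wc     = numpy.full(nbSpts, 1./nbSpts)               # covariance weights
    XEtnnp = numpy.matrix(numpy.random.rand(3, nbSpts))  # propagated sigma points
    Xncm   = XEtnnp * numpy.matrix(Wc).T                 # weighted predicted mean, 3x1

    Pnm = numpy.matrix(numpy.zeros((3,3)))
    for point in range(nbSpts):
        d = XEtnnp[:,point] - Xncm
        Pnm += Wc[point] * d * d.T                       # weight follows the loop index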
diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py
index 2de542b..300e316 100644
--- a/src/daComposant/daCore/BasicObjects.py
+++ b/src/daComposant/daCore/BasicObjects.py
@@ -356,10 +356,35 @@ class Algorithm(object):
         self.StoredVariables["SimulationQuantiles"] = Persistence.OneMatrix(name = "SimulationQuantiles")
         self.StoredVariables["Residu"] = Persistence.OneScalar(name = "Residu")

-    def _pre_run(self):
+    def _pre_run(self, Parameters ):
         "Pré-calcul"
         logging.debug("%s Lancement", self._name)
         logging.debug("%s Taille mémoire utilisée de %.1f Mio", self._name, self._m.getUsedMemory("Mio"))
+        #
+        # Mise a jour de self._parameters avec Parameters
+        self.__setParameters(Parameters)
+        #
+        # Corrections et complements
+        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
+            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
+        else:
+            self._parameters["Bounds"] = None
+        #
+        if logging.getLogger().level < logging.WARNING:
+            self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
+            if PlatformInfo.has_scipy:
+                import scipy.optimize
+                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
+            else:
+                self._parameters["optmessages"] = 15
+        else:
+            self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
+            if PlatformInfo.has_scipy:
+                import scipy.optimize
+                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
+            else:
+                self._parameters["optmessages"] = 15
+        #
         return 0

     def _post_run(self,_oH=None):
@@ -472,7 +497,7 @@ class Algorithm(object):
                 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
         return __val

-    def setParameters(self, fromDico={}):
+    def __setParameters(self, fromDico={}):
         """
         Permet de stocker les paramètres reçus dans le dictionnaire interne.
         """
-- 
2.39.2
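Net effect of the commit: every algorithm's run() now opens with a single
self._pre_run(Parameters), and the per-algorithm copies of parameter storage, Bounds
screening and solver-verbosity setup move into Algorithm._pre_run() in
daCore/BasicObjects.py; setParameters() becomes the private __setParameters(), since
callers no longer invoke it directly. Two consequences are worth spelling out. First,
run() methods may now rely on self._parameters["Bounds"] always existing, possibly as
None, instead of guarding with has_key(). Second, when scipy is absent the fallback
sets optmessages = 15 in the quiet branch as well as the verbose one; 15 is the value
of scipy.optimize.tnc.MSG_ALL while MSG_NONE is 0, which is harmless in practice
(without scipy no TNC call ever consumes the value) but reads oddly. A condensed,
self-contained sketch of the centralized pattern (simplified, not the full ADAO class):

    import logging

    class Algorithm(object):
        # Condensed sketch of the behaviour the patch installs in
        # daCore/BasicObjects.py (simplified, not the full ADAO class).
        def __init__(self, name):
            self._name = name
            self._parameters = {}

        def _pre_run(self, Parameters):
            logging.debug("%s Lancement", self._name)
            # 1. Store the user-supplied parameters once, centrally.
            self.__setParameters(Parameters)
            # 2. Normalize "Bounds": anything but a non-empty list or
            #    tuple becomes None, so run() can test it directly.
            bounds = self._parameters.get("Bounds")
            if isinstance(bounds, (list, tuple)) and len(bounds) > 0:
                logging.debug("%s Prise en compte des bornes effectuee", self._name)
            else:
                self._parameters["Bounds"] = None
            # 3. Derive solver verbosity from the logging level, replacing
            #    the per-algorithm __iprint/__disp/__message attributes.
            verbose = logging.getLogger().level < logging.WARNING
            self._parameters["optiprint"]   = 1 if verbose else -1
            self._parameters["optdisp"]     = 1 if verbose else 0
            # 15 == scipy.optimize.tnc.MSG_ALL, 0 == MSG_NONE
            self._parameters["optmessages"] = 15 if verbose else 0
            return 0

        def __setParameters(self, fromDico=None):
            self._parameters.update(fromDico or {})

    # Typical use inside an algorithm, mirroring the hunks above:
    #     def run(self, ..., Parameters=None):
    #         self._pre_run(Parameters)
    #         if self._parameters["Bounds"] is not None:
    #             ...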