SALOME platform Git repositories - modules/adao.git/commitdiff
Salome HOME
Improvement of internal pre run
author: Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Tue, 31 Jan 2017 20:37:29 +0000 (21:37 +0100)
committer: Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Tue, 31 Jan 2017 20:37:29 +0000 (21:37 +0100)
21 files changed:
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daAlgorithms/4DVAR.py
src/daComposant/daAlgorithms/AdjointTest.py
src/daComposant/daAlgorithms/Blue.py
src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
src/daComposant/daAlgorithms/EnsembleBlue.py
src/daComposant/daAlgorithms/ExtendedBlue.py
src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
src/daComposant/daAlgorithms/FunctionTest.py
src/daComposant/daAlgorithms/GradientTest.py
src/daComposant/daAlgorithms/KalmanFilter.py
src/daComposant/daAlgorithms/LinearLeastSquares.py
src/daComposant/daAlgorithms/LinearityTest.py
src/daComposant/daAlgorithms/NonLinearLeastSquares.py
src/daComposant/daAlgorithms/ObserverTest.py
src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
src/daComposant/daAlgorithms/QuantileRegression.py
src/daComposant/daAlgorithms/SamplingTest.py
src/daComposant/daAlgorithms/TangentTest.py
src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
src/daComposant/daCore/BasicObjects.py

index 50193f79a0689761fbcb9a7b361d80e12e0e115d..d9a1187535dcec15b907cf1078f71947a2f0c34c 100644 (file)
@@ -107,23 +107,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -215,11 +199,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -229,11 +213,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -243,7 +227,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -254,7 +238,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -265,7 +249,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         else:
index b67683968139c6eec0b4992a9ed2273c0306adf5..9c49f91f5c8f2ead086ee3fa46812c197abc47de 100644 (file)
@@ -94,23 +94,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -192,9 +176,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 elif self._parameters["EstimationOf"] == "Parameters":
                     pass
                 #
-                if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                    _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                    _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(Bounds)[:,1])),axis=1)
+                if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                    _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                    _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
                 #
                 # Etape de différence aux observations
                 if self._parameters["EstimationOf"] == "State":
@@ -260,11 +244,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -274,11 +258,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -288,7 +272,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -299,7 +283,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -310,7 +294,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         else:
index 49428a9fd6b0126d3183df720df04638f9fd0bf9..bda716223d50b1eec0d10d13d38dd248b32f7023 100644 (file)
@@ -76,9 +76,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
index 2f6c2d87eebee8afc820a7c5f0255e0193f22e0e..2e9b1afa68554fe09394ac6bd74dbb3e8208f360 100644 (file)
@@ -70,14 +70,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateurs
-        # ----------
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(Xb)
index ccf94a9c24cadf416c900d058b350bd45efb9d5f..44f93647dab488c7099c457ff0631e7ad0c2c3d3 100644 (file)
@@ -91,23 +91,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__disp = 1
-        else:
-            self.__disp = 0
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
             self._parameters["Minimizer"] = "SIMPLEX"
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
         #
         # Opérateurs
         # ----------
@@ -194,7 +181,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 xtol        = self._parameters["StateVariationTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
                 full_output = True,
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
             def make_constraints(bounds):
@@ -204,19 +191,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     upper = lambda x: b - x[i]
                     constraints = constraints + [lower] + [upper]
                 return constraints
-            if Bounds is None:
+            if self._parameters["Bounds"] is None:
                 raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
             Minimum = scipy.optimize.fmin_cobyla(
                 func        = CostFunction,
                 x0          = Xini,
-                cons        = make_constraints( Bounds ),
+                cons        = make_constraints( self._parameters["Bounds"] ),
                 args        = (self._parameters["QualityCriterion"],),
                 consargs    = (), # To avoid extra-args
                 maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                 rhobeg      = 1.0,
                 rhoend      = self._parameters["StateVariationTolerance"],
                 catol       = 2.*self._parameters["StateVariationTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
             import nlopt
@@ -225,11 +212,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -238,7 +225,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -252,7 +239,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 xtol        = self._parameters["StateVariationTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
                 full_output = True,
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 )
         elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
             import nlopt
@@ -261,11 +248,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -274,7 +261,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -285,11 +272,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -298,7 +285,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -309,11 +296,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -322,7 +309,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
@@ -333,11 +320,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # DFO, so no gradient
                 return CostFunction(_Xx, self._parameters["QualityCriterion"])
             opt.set_min_objective(_f)
-            if Bounds is not None:
-                lub = numpy.array(Bounds).reshape((Xini.size,2))
+            if self._parameters["Bounds"] is not None:
+                lub = numpy.array(self._parameters["Bounds"]).reshape((Xini.size,2))
                 lb = lub[:,0]
                 ub = lub[:,1]
-                if self.__disp:
+                if self._parameters["optdisp"]:
                     print "%s: upper bounds %s"%(opt.get_algorithm_name(),ub)
                     print "%s: lower bounds %s"%(opt.get_algorithm_name(),lb)
                 opt.set_upper_bounds(ub)
@@ -346,7 +333,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
             opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
             Minimum = opt.optimize( Xini )
-            if self.__disp:
+            if self._parameters["optdisp"]:
                 print "%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)
                 print "%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())
                 print "%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())
index 54ce1304af4778099a7b2643ab671f78063d712e..f07c53cdc20506f0eac6d63616e5f992e84b6068 100644 (file)
@@ -48,11 +48,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         # Précalcul des inversions de B et R
         # ----------------------------------
index 4ecda94f7ad2ace49106b7976d37e8fbe9ca33fd..eb6ff58617a1e9a882798407f3fefd7543a17398 100644 (file)
@@ -70,14 +70,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(Xb)
index a4d5c9534c7d1f9c176f9697c5f812c63320fa19..b38239ed4fad56d5cae4c7e20da929f5a5e1181f 100644 (file)
@@ -61,17 +61,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
         #
@@ -155,9 +146,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Xn_predicted = Xn
                 Pn_predicted = Pn
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             if self._parameters["EstimationOf"] == "State":
                 d  = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
index 828930ebba863adf45042ffc15d499d3648b75f7..4d194a2f846d77cd5b2aa7409c50a4fea62a6b02 100644 (file)
@@ -65,9 +65,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         #
index cc49cb73e6811afa262dfbe90e5fca06285b8541..842604bc1ae0fe81332772ffde518870316f0fa3 100644 (file)
@@ -102,9 +102,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
@@ -223,7 +221,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Boucle sur les perturbations
         # ----------------------------
-        Normalisation= -1
         NormesdX     = []
         NormesFXdX   = []
         NormesdFX    = []
@@ -269,7 +266,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Residu = NormedFXGdX / (amplitude*amplitude)
             elif self._parameters["ResiduFormula"] == "Norm":
                 Residu = NormedFXsAm
-            if Normalisation < 0 : Normalisation = Residu
             #
             msg = "  %2i  %5.0e   %9.3e   %9.3e   %9.3e   %9.3e   %9.3e      |      %9.3e          |   %9.3e   %4.0f"%(i,amplitude,NormeX,NormeFX,NormeFXdX,NormedX,NormedFX,NormedFXsdX,Residu,math.log10(max(1.e-99,Residu)))
             msgs += "\n" + __marge + msg
index 20ac1774c08b654bb06752dbdf597cccfd69d418..caca1e7525219dc6b4b7250b861b5bbf69c77897 100644 (file)
@@ -50,11 +50,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
index d335deccd546fc9d77de63dc3f61f45967544a7d..491ea0a5ec89cb3eb07d5eae4dd95ae15cbd5e83 100644 (file)
@@ -43,14 +43,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Tangent"].asMatrix(None)
         Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(None)
index 22baa9b8a97f8f4caafe9958aec9dcb3658f4f07..c09fcf6be00144207dfe8bed4eb5069516717469 100644 (file)
@@ -84,11 +84,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         def RMS(V1, V2):
             import math
index 08c57058190bd59b09d75604e040884c29f6e2a5..b568207b0a44d6e141c96aa41948aff9a6c03fa5 100644 (file)
@@ -80,23 +80,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        if logging.getLogger().level < logging.WARNING:
-            self.__iprint, self.__disp = 1, 1
-            self.__message = scipy.optimize.tnc.MSG_ALL
-        else:
-            self.__iprint, self.__disp = -1, 0
-            self.__message = scipy.optimize.tnc.MSG_NONE
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
+        self._pre_run(Parameters)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if self._parameters.has_key("Minimizer") == "TNC":
@@ -197,11 +181,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = self.__iprint,
+                iprint      = self._parameters["optiprint"],
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
@@ -211,11 +195,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
                 args        = (),
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = self.__message,
+                messages    = self._parameters["optmessages"],
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
@@ -225,7 +209,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -236,7 +220,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -247,7 +231,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = self.__disp,
+                disp        = self._parameters["optdisp"],
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "LM":
index b62d7ea3345e361fa42f33e79329ca99be4d7c0c..07b20df078fda1789f665f71219f7a770f476557 100644 (file)
@@ -30,15 +30,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         BasicObjects.Algorithm.__init__(self, "OBSERVERTEST")
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         print "Results of observer check on all potential variables or commands,"
         print "         only activated on selected ones by explicit association."
         print
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
         __Xa = 1.+numpy.arange(3.)
         __Xb = numpy.zeros(3)
         __YY = 1.+numpy.arange(5.)
index 52e8e635e536c0d818aece24fc7750e2cdb8a363..68dba9f6c3537056943847960b941bb3a2a599e1 100644 (file)
@@ -99,11 +99,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         if self._parameters.has_key("BoxBounds") and (type(self._parameters["BoxBounds"]) is type([]) or type(self._parameters["BoxBounds"]) is type(())) and (len(self._parameters["BoxBounds"]) > 0):
             BoxBounds = self._parameters["BoxBounds"]
index 2942db4e947a32f52354421e0c4b31dcf9d535d9..671bfa5ee2e5f69de7a7b67f098e634706a7a787 100644 (file)
@@ -75,20 +75,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
-        #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Direct"].appliedTo
         #
         # Utilisation éventuelle d'un vecteur H(Xb) précalculé
@@ -145,7 +133,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 func        = CostFunction,
                 x0          = Xini,
                 fprime      = GradientOfCostFunction,
-                bounds      = Bounds,
+                bounds      = self._parameters["Bounds"],
                 quantile    = self._parameters["Quantile"],
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 toler       = self._parameters["CostDecrementTolerance"],
index acf9d699109d52f7c6a49f63c4b4472cdeb3440d..cb712577fa2bbd2e03f35501c1e045f6fd9f5dc7 100644 (file)
@@ -83,9 +83,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
-        #
-        self.setParameters(Parameters)
+        self._pre_run(Parameters)
         #
         Hm = HO["Direct"].appliedTo
         #
index 1039bdf758ea15ee4b57cd09683d2459b58549fe..1ed6440cf9921ab4315b8921929699a144a5efd4 100644 (file)
@@ -84,14 +84,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
         #
index acd220d87434e37c9b2025d06b2b1f3c4a8c2bc8..3f1245c918b3eadc6be0ad18cd9e1a634bf09a3c 100644 (file)
@@ -90,17 +90,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run()
+        self._pre_run(Parameters)
         #
-        # Paramètres de pilotage
-        # ----------------------
-        self.setParameters(Parameters)
-        #
-        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
-            Bounds = self._parameters["Bounds"]
-            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
-        else:
-            Bounds = None
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
         #
@@ -192,10 +183,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
             nbSpts = 2*Xn.size+1
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                 for point in range(nbSpts):
-                    Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(Bounds)[:,0])),axis=1)
-                    Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                    Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             XEtnnp = []
             for point in range(nbSpts):
@@ -204,9 +195,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                         Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
                         XEtnnpi = XEtnnpi + Cm * Un
-                    if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                        XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                        XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                        XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                        XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
                 elif self._parameters["EstimationOf"] == "Parameters":
                     # --- > Par principe, M = Id, Q = 0
                     XEtnnpi = Xnp[:,point]
@@ -215,26 +206,26 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
             Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             if self._parameters["EstimationOf"] == "State":        Pnm = Q
             elif self._parameters["EstimationOf"] == "Parameters": Pnm = 0.
             for point in range(nbSpts):
                 Pnm += Wc[i] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
             #
-            if self._parameters["EstimationOf"] == "Parameters" and Bounds is not None:
+            if self._parameters["EstimationOf"] == "Parameters" and self._parameters["Bounds"] is not None:
                 Pnmdemi = self._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
             else:
                 Pnmdemi = numpy.linalg.cholesky(Pnm)
             #
             Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                 for point in range(nbSpts):
-                    Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(Bounds)[:,0])),axis=1)
-                    Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(Bounds)[:,1])),axis=1)
+                    Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                    Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             Ynnp = []
             for point in range(nbSpts):
@@ -262,9 +253,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Xn = Xncm + Kn * d
             Pn = Pnm - Kn * Pyyn * Kn.T
             #
-            if Bounds is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
-                Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(Bounds)[:,0])),axis=1)
-                Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(Bounds)[:,1])),axis=1)
+            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
+                Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
+                Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
             #
             self.StoredVariables["Analysis"].store( Xn.A1 )
             if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
index 2de542b3444173c99ffda0fddf220e785a1b3c16..300e31617925e29e874edea2748d1a947c477fb5 100644 (file)
@@ -356,10 +356,35 @@ class Algorithm(object):
         self.StoredVariables["SimulationQuantiles"]                  = Persistence.OneMatrix(name = "SimulationQuantiles")
         self.StoredVariables["Residu"]                               = Persistence.OneScalar(name = "Residu")
 
-    def _pre_run(self):
+    def _pre_run(self, Parameters ):
         "Pré-calcul"
         logging.debug("%s Lancement", self._name)
         logging.debug("%s Taille mémoire utilisée de %.1f Mio", self._name, self._m.getUsedMemory("Mio"))
+        #
+        # Mise a jour de self._parameters avec Parameters
+        self.__setParameters(Parameters)
+        #
+        # Corrections et complements
+        if self._parameters.has_key("Bounds") and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
+            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
+        else:
+            self._parameters["Bounds"] = None
+        #
+        if logging.getLogger().level < logging.WARNING:
+            self._parameters["optiprint"], self._parameters["optdisp"] = 1, 1
+            if PlatformInfo.has_scipy:
+                import scipy.optimize
+                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_ALL
+            else:
+                self._parameters["optmessages"] = 15
+        else:
+            self._parameters["optiprint"], self._parameters["optdisp"] = -1, 0
+            if PlatformInfo.has_scipy:
+                import scipy.optimize
+                self._parameters["optmessages"] = scipy.optimize.tnc.MSG_NONE
+            else:
+                self._parameters["optmessages"] = 15
+        #
         return 0
 
     def _post_run(self,_oH=None):
@@ -472,7 +497,7 @@ class Algorithm(object):
                 raise ValueError("The value \"%s\" of the parameter named \"%s\" is not allowed, it has to be in the list %s."%( __val, name,listval))
         return __val
 
-    def setParameters(self, fromDico={}):
+    def __setParameters(self, fromDico={}):
         """
         Permet de stocker les paramètres reçus dans le dictionnaire interne.
         """