From 5ae2f269277f702b0ab031f22477ebabdd6f74d2 Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Fri, 15 Feb 2013 16:59:29 +0100
Subject: [PATCH] Improving documentation and naming of estimation

---
 doc/reference.rst                            | 80 +++++++++++--------
 .../daAlgorithms/ExtendedKalmanFilter.py     | 18 ++---
 src/daComposant/daAlgorithms/KalmanFilter.py | 16 ++--
 src/daComposant/daCore/AssimilationStudy.py  |  2 +-
 4 files changed, 65 insertions(+), 51 deletions(-)

diff --git a/doc/reference.rst b/doc/reference.rst
index c23a6bf..d59e96c 100644
--- a/doc/reference.rst
+++ b/doc/reference.rst
@@ -146,8 +146,9 @@ following:
  noted :math:`M`, which describes a step of evolution. It is defined as a
  "*Function*" type object, that is, given as a script. Different functional
  forms can be used, as described in the following subsection `Requirements
-  for functions describing an operator`_. If there is some control :math:`U`,
-  the operator has to be applied to a pair :math:`(X,U)`.
+  for functions describing an operator`_. If there is some control :math:`U`
+  included in the evolution model, the operator has to be applied to a pair
+  :math:`(X,U)`.

**InputVariables**
  *Optional command*. This command allows to indicates the name and size of
@@ -172,7 +173,8 @@ following:
  :math:`\mathbf{y}^o`. It is defined as a "*Function*" type object, that is,
  given as a script. Different functional forms can be used, as described in
  the following subsection `Requirements for functions describing an
-  operator`_.
+  operator`_. If there is some control :math:`U` included in the observation,
+  the operator has to be applied to a pair :math:`(X,U)`.

**Observers**
  *Optional command*. This command allows to set internal observers, that are
@@ -314,8 +316,8 @@
This section describes the available options algorithm by algorithm. If an
option is specified for an algorithm that doesn't support it, the option is
simply left unused. The meaning of the acronyms or particular names can be
found in the :ref:`genindex` or the :ref:`section_glossary`. In addition, for each
-algorithm, the required commands are given, being described in `List of commands
-and keywords for an ADAO calculation case`_.
+algorithm, the required commands/keywords are given, being described in `List of
+commands and keywords for an ADAO calculation case`_.

**"Blue"**
@@ -485,19 +487,20 @@ and keywords for an ADAO calculation case`_.
    *"Background", "BackgroundError",
    "Observation", "ObservationError",
    "ObservationOperator",
-    "EvolutionModel", "EvolutionError"*
+    "EvolutionModel", "EvolutionError",
+    "ControlInput"*

-  EstimationType
+  EstimationOf
    This key allows to choose the type of estimation to be performed. It can
    be either state-estimation, named "State", or parameter-estimation, named
-    "Parameters". The default choice is "Parameters".
+    "Parameters". The default choice is "State".

  StoreSupplementaryCalculations
    This list indicates the names of the supplementary variables that can be
    available at the end of the algorithm. It involves potentially costly
    calculations. The default is a void list, none of these variables being
    calculated and stored by default. The possible names are in the following
-    list: ["APosterioriCovariance", "CostFunctionJ", "Innovation"].
+    list: ["APosterioriCovariance", "BMA", "Innovation"].

**"ExtendedKalmanFilter"**

  *Required commands*
@@ -508,17 +511,28 @@
"EvolutionModel", "EvolutionError", "ControlInput"* - EstimationType + Bounds + This key allows to define upper and lower bounds for every control variable + being optimized. Bounds can be given by a list of list of pairs of + lower/upper bounds for each variable, with extreme values every time there + is no bound. The bounds can always be specified, but they are taken into + account only by the constrained minimizers. + + ConstrainedBy + This key allows to define the method to take bounds into account. The + possible methods are in the following list: ["EstimateProjection"]. + + EstimationOf This key allows to choose the type of estimation to be performed. It can be either state-estimation, named "State", or parameter-estimation, named - "Parameters". The default choice is "Parameters". + "Parameters". The default choice is "State". StoreSupplementaryCalculations This list indicates the names of the supplementary variables that can be available at the end of the algorithm. It involves potentially costly calculations. The default is a void list, none of these variables being calculated and stored by default. The possible names are in the following - list: ["APosterioriCovariance", "CostFunctionJ", "Innovation"]. + list: ["APosterioriCovariance", "BMA", "Innovation"]. **"ParticleSwarmOptimization"** @@ -629,13 +643,13 @@ procedure. So the mathematical representation of such a process is a function. It was briefly described in the section :ref:`section_theory` and is generalized here by the relation: -.. math:: \mathbf{y} = H( \mathbf{x} ) +.. math:: \mathbf{y} = O( \mathbf{x} ) between the pseudo-observations :math:`\mathbf{y}` and the parameters -:math:`\mathbf{x}` using the observation operator :math:`H`. The same functional -representation can be used for the linear tangent model :math:`\mathbf{H}` of -:math:`H` and its adjoint :math:`\mathbf{H}^*`, also required by some data -assimilation or optimization algorithms. +:math:`\mathbf{x}` using the observation or evolution operator :math:`O`. The +same functional representation can be used for the linear tangent model +:math:`\mathbf{O}` of :math:`O` and its adjoint :math:`\mathbf{O}^*`, also +required by some data assimilation or optimization algorithms. Then, **to describe completely an operator, the user has only to provide a function that fully and only realize the functional operation**. @@ -664,7 +678,7 @@ template:: ... ... ... - return Y=H(X) + return Y=O(X) In this case, the user can also provide a value for the differential increment, using through the GUI the keyword "*DifferentialIncrement*", which has a default @@ -678,7 +692,7 @@ Second functional form: using "*ScriptWithFunctions*" +++++++++++++++++++++++++++++++++++++++++++++++++++++ The second one consist in providing directly the three associated operators -:math:`H`, :math:`\mathbf{H}` and :math:`\mathbf{H}^*`. This is done by using +:math:`O`, :math:`\mathbf{O}` and :math:`\mathbf{O}^*`. This is done by using the keyword "*ScriptWithFunctions*" for the description of the chosen operator in the ADAO GUI. 
The user have to provide three functions in one script, with three mandatory
names "*DirectOperator*", "*TangentOperator*" and
@@ -743,26 +757,26 @@ Here is the switch template::
    logging.info("Found method is \'%s\'"%method)
    #
    logging.info("Loading operator functions")
-    FunctionH = Physical_simulation_functions.DirectOperator
-    TangentH = Physical_simulation_functions.TangentOperator
-    AdjointH = Physical_simulation_functions.AdjointOperator
+    Function = Physical_simulation_functions.DirectOperator
+    Tangent = Physical_simulation_functions.TangentOperator
+    Adjoint = Physical_simulation_functions.AdjointOperator
    #
    logging.info("Executing the possible computations")
    data = []
    if method == "Direct":
        logging.info("Direct computation")
        Xcurrent = computation["inputValues"][0][0][0]
-        data = FunctionH(numpy.matrix( Xcurrent ).T)
+        data = Function(numpy.matrix( Xcurrent ).T)
    if method == "Tangent":
        logging.info("Tangent computation")
        Xcurrent = computation["inputValues"][0][0][0]
        dXcurrent = computation["inputValues"][0][0][1]
-        data = TangentH(numpy.matrix(Xcurrent).T, numpy.matrix(dXcurrent).T)
+        data = Tangent(numpy.matrix(Xcurrent).T, numpy.matrix(dXcurrent).T)
    if method == "Adjoint":
        logging.info("Adjoint computation")
        Xcurrent = computation["inputValues"][0][0][0]
        Ycurrent = computation["inputValues"][0][0][1]
-        data = AdjointH((numpy.matrix(Xcurrent).T, numpy.matrix(Ycurrent).T))
+        data = Adjoint((numpy.matrix(Xcurrent).T, numpy.matrix(Ycurrent).T))
    #
    logging.info("Formatting the output")
    it = numpy.ravel(data)
@@ -781,25 +795,25 @@ All various modifications could be done from this template hypothesis.
Special case of controled evolution operator
++++++++++++++++++++++++++++++++++++++++++++

-In some cases, the evolution operator is required to be controled by an external
-input control, given a priori. In this case, the generic form of the incremental
-evolution model is slightly modified as follows:
+In some cases, the evolution or the observation operators are required to be
+controlled by an external input control, given a priori. In this case, the
+generic form of the incremental evolution model is slightly modified as follows:

-.. math:: \mathbf{y} = H( \mathbf{x}, \mathbf{u})
+.. math:: \mathbf{y} = O( \mathbf{x}, \mathbf{u})

where :math:`\mathbf{u}` is the control over one state increment. In this case,
-the direct evolution operator has to be applied to a pair of variables
-:math:`(X,U)`. Schematically, the evolution operator has to be set as::
+the direct operator has to be applied to a pair of variables :math:`(X,U)`.
+Schematically, the operator has to be set as::

    def DirectOperator( (X, U) ):
        """ Direct non-linear simulation operator """
        ...
        ...
        ...
-        return something like X(n+1)
+        return something like X(n+1) or Y(n+1)

The tangent and adjoint operators have the same signature as previously, noting
-that the derivatives has to be done only against :math:`\mathbf{x}` partially.
+that the derivatives have to be taken only partially, with respect to :math:`\mathbf{x}`.

In such a case with explicit control, only the second functional form (using
"*ScriptWithFunctions*") and third functional form (using "*ScriptWithSwitch*")
can be used.
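To make the controlled form documented above concrete, here is a minimal
self-contained sketch, not taken from ADAO itself: the linear toy model
X(n+1) = A X(n) + B U(n), the matrices A and B, and the packing of all
arguments as pairs are assumptions made only to keep the example short; the
exact argument packing has to follow the templates of the chosen functional
form::

    import numpy

    # Hypothetical toy model: X(n+1) = A * X(n) + B * U(n)
    A = numpy.matrix([[1., 0.1], [0., 1.]])
    B = numpy.matrix([[0.], [0.1]])

    def DirectOperator( pair ):
        """ Direct controlled evolution operator, applied to the pair (X, U) """
        X, U = pair
        X = numpy.asmatrix(numpy.ravel( X )).T
        if U is not None:
            return A * X + B * numpy.asmatrix(numpy.ravel( U )).T
        return A * X

    def TangentOperator( pair ):
        """ Tangent linear model: derivative taken only with respect to X """
        X, dX = pair
        return A * numpy.asmatrix(numpy.ravel( dX )).T

    def AdjointOperator( pair ):
        """ Adjoint model: transpose of the tangent, applied to the pair (X, Y) """
        X, Y = pair
        return A.T * numpy.asmatrix(numpy.ravel( Y )).T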
diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
index 93668db..775f7b4 100644
--- a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
@@ -37,7 +37,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            listval = ["APosterioriCovariance", "BMA", "Innovation"]
            )
        self.defineRequiredParameter(
-            name = "EstimationType",
+            name = "EstimationOf",
            default = "State",
            typecast = str,
            message = "Estimation d'etat ou de parametres",
@@ -70,7 +70,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
        else:
            Bounds = None
-        if self._parameters["EstimationType"] == "Parameters":
+        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Opérateurs
@@ -82,7 +82,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
        #
        H = HO["Direct"].appliedControledFormTo
        #
-        if self._parameters["EstimationType"] == "State":
+        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and CM.has_key("Tangent") and U is not None:
@@ -133,7 +133,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
            Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
            #
-            if self._parameters["EstimationType"] == "State":
+            if self._parameters["EstimationOf"] == "State":
                Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
                Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
                Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
@@ -149,12 +149,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            else:
                Un = None
            #
-            if self._parameters["EstimationType"] == "State":
+            if self._parameters["EstimationOf"] == "State":
                Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, Un) ) )).T
                if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                    Xn_predicted = Xn_predicted + Cm * Un
                Pn_predicted = Mt * Pn * Ma + Q
-            elif self._parameters["EstimationType"] == "Parameters":
+            elif self._parameters["EstimationOf"] == "Parameters":
                # --- > Par principe, M = Id, Q = 0
                Xn_predicted = Xn
                Pn_predicted = Pn
@@ -163,9 +163,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,0])),axis=1)
                Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(Bounds)[:,1])),axis=1)
            #
-            if self._parameters["EstimationType"] == "State":
+            if self._parameters["EstimationOf"] == "State":
                d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
-            elif self._parameters["EstimationType"] == "Parameters":
+            elif self._parameters["EstimationOf"] == "Parameters":
                d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, Un) ) )).T
                if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
                    d = d - Cm * Un
@@ -198,7 +198,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
        #
        # Stockage supplementaire de l'optimum en estimation de parametres
        # ----------------------------------------------------------------
-        if self._parameters["EstimationType"] == "Parameters":
+        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( Xa.A1 )
            if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py
index 9d6a0fd..36b6c4c 100644
--- a/src/daComposant/daAlgorithms/KalmanFilter.py
+++ b/src/daComposant/daAlgorithms/KalmanFilter.py
@@ -37,7 +37,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            listval = ["APosterioriCovariance", "BMA", "Innovation"]
            )
        self.defineRequiredParameter(
-            name = "EstimationType",
+            name = "EstimationOf",
            default = "State",
            typecast = str,
            message = "Estimation d'etat ou de parametres",
@@ -58,7 +58,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
        # ----------------------
        self.setParameters(Parameters)
        #
-        if self._parameters["EstimationType"] == "Parameters":
+        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Opérateurs
@@ -71,7 +71,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
        Ht = HO["Tangent"].asMatrix(Xb)
        Ha = HO["Adjoint"].asMatrix(Xb)
        #
-        if self._parameters["EstimationType"] == "State":
+        if self._parameters["EstimationOf"] == "State":
            Mt = EM["Tangent"].asMatrix(Xb)
            Ma = EM["Adjoint"].asMatrix(Xb)
        #
@@ -128,19 +128,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            else:
                Un = None
            #
-            if self._parameters["EstimationType"] == "State":
+            if self._parameters["EstimationOf"] == "State":
                Xn_predicted = Mt * Xn
                if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                    Xn_predicted = Xn_predicted + Cm * Un
                Pn_predicted = Mt * Pn * Ma + Q
-            elif self._parameters["EstimationType"] == "Parameters":
+            elif self._parameters["EstimationOf"] == "Parameters":
                # --- > Par principe, M = Id, Q = 0
                Xn_predicted = Xn
                Pn_predicted = Pn
            #
-            if self._parameters["EstimationType"] == "State":
+            if self._parameters["EstimationOf"] == "State":
                d = Ynpu - Ht * Xn_predicted
-            elif self._parameters["EstimationType"] == "Parameters":
+            elif self._parameters["EstimationOf"] == "Parameters":
                d = Ynpu - Ht * Xn_predicted
                if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
                    d = d - Cm * Un
@@ -173,7 +173,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
        #
        # Stockage supplementaire de l'optimum en estimation de parametres
        # ----------------------------------------------------------------
-        if self._parameters["EstimationType"] == "Parameters":
+        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( Xa.A1 )
            if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
diff --git a/src/daComposant/daCore/AssimilationStudy.py b/src/daComposant/daCore/AssimilationStudy.py
index 98f3f98..f8c7920 100644
--- a/src/daComposant/daCore/AssimilationStudy.py
+++ b/src/daComposant/daCore/AssimilationStudy.py
@@ -32,7 +32,7 @@ __author__ = "Jean-Philippe ARGAUD"
import os, sys
import numpy
import Logging ; Logging.Logging() # A importer en premier
-import scipy.optimize # Import preventif car son abscence a de l'effet
+import scipy.optimize
import Persistence
from BasicObjects import Operator
from PlatformInfo import uniq
--
2.39.2
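As an aside, to make the behaviour switched by the renamed "EstimationOf"
keyword easier to follow, here is a minimal standalone sketch of the forecast
step mirroring the logic of the two algorithms patched above; the function
name, its arguments and the numerical values are illustrative assumptions, not
part of the ADAO API::

    import numpy

    def forecast_step(Xn, Pn, Mt, Q, EstimationOf="State", Cm=None, Un=None):
        """ Illustrative sketch (not ADAO code) of the prediction step """
        if EstimationOf == "State":
            # State estimation: propagate the state and its covariance
            Xn_predicted = Mt * Xn
            if Cm is not None and Un is not None:
                Xn_predicted = Xn_predicted + Cm * Un
            Pn_predicted = Mt * Pn * Mt.T + Q
        elif EstimationOf == "Parameters":
            # Parameter estimation: by principle M = Id and Q = 0, so the
            # forecast is the previous analysis, unchanged
            Xn_predicted = Xn
            Pn_predicted = Pn
        else:
            raise ValueError("EstimationOf must be 'State' or 'Parameters'")
        return Xn_predicted, Pn_predicted

    # Hypothetical one-dimensional example
    Xf, Pf = forecast_step(
        Xn=numpy.matrix([[1.0]]), Pn=numpy.matrix([[0.5]]),
        Mt=numpy.matrix([[1.1]]), Q=numpy.matrix([[0.01]]),
        EstimationOf="State")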