default = 15000,
typecast = int,
message = "Nombre maximal de pas d'optimisation",
- minval = -1
+ minval = 1,
)
self.defineRequiredParameter(
name = "CostDecrementTolerance",
typecast = float,
message = "Maximum de variation de la fonction d'estimation lors de l'arrêt",
)
+ self.defineRequiredParameter(
+ name = "StoreInternalVariables",
+ default = False,
+ typecast = bool,
+ message = "Stockage des variables internes ou intermédiaires du calcul",
+ )
def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
"""
logging.debug("%s CostFunction Jb = %s"%(self._name, Jb))
logging.debug("%s CostFunction Jo = %s"%(self._name, Jo))
logging.debug("%s CostFunction J = %s"%(self._name, J))
- self.StoredVariables["CurrentState"].store( _X.A1 )
+ if self._parameters["StoreInternalVariables"]:
+ self.StoredVariables["CurrentState"].store( _X.A1 )
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
#
def GradientOfCostFunction(x):
_X = numpy.asmatrix(x).flatten().T
- logging.debug("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+ logging.debug("%s GradientOfCostFunction X = %s"%(self._name, _X.A1))
Hg = H["Tangent"].asMatrix( _X )
return Hg
#
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.asmatrix(Minimum).T
+ Xa = numpy.asmatrix(Minimum).flatten().T
logging.debug("%s Analyse Xa = %s"%(self._name, Xa))
#
self.StoredVariables["Analysis"].store( Xa.A1 )