From: Jean-Philippe ARGAUD
Date: Thu, 29 Mar 2012 13:28:35 +0000 (+0200)
Subject: Correcting logging levels
X-Git-Tag: V6_5_0~20^2~2
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=cee976c01b92436ef9531779b2c76f3c20f57549;p=modules%2Fadao.git

Correcting logging levels
---

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index 8be3e3a..cee5c22 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -89,7 +89,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ------------------------------
         def CostFunction(x):
             _X = numpy.asmatrix(x).flatten().T
-            logging.info("%s CostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s CostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
             _HX = Hm( _X )
             _HX = numpy.asmatrix(_HX).flatten().T
             Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
@@ -106,7 +106,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         def GradientOfCostFunction(x):
             _X = numpy.asmatrix(x).flatten().T
-            logging.info("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
             _HX = Hm( _X )
             _HX = numpy.asmatrix(_HX).flatten().T
             GradJb = BI * (_X - Xb)
diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index a01ad3c..7dc703d 100644
--- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
@@ -90,7 +90,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ------------------------------
         def CostFunction(x):
             _X = numpy.asmatrix(x).flatten().T
-            logging.info("%s CostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s CostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
             _HX = Hm( _X )
             _HX = numpy.asmatrix(_HX).flatten().T
             Jb = 0.
@@ -107,7 +107,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         def GradientOfCostFunction(x):
             _X = numpy.asmatrix(x).flatten().T
-            logging.info("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s GradientOfCostFunction X = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
             _HX = Hm( _X )
             _HX = numpy.asmatrix(_HX).flatten().T
             GradJb = 0.
diff --git a/src/daComposant/daCore/version.py b/src/daComposant/daCore/version.py
index 13a7a61..619e91d 100644
--- a/src/daComposant/daCore/version.py
+++ b/src/daComposant/daCore/version.py
@@ -21,5 +21,5 @@
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
 name = "Data Assimilation Package"
-version = "0.4.0-SP640"
-date = "mardi 11 octobre 2011, 11:11:11 (UTC+0200)"
+version = "0.5.0-SP650"
+date = "jeudi 29 mars 2012, 11:11:11 (UTC+0200)"
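
Note on the change: lowering the per-evaluation traces from logging.info to logging.debug means the full state vector X emitted by CostFunction and GradientOfCostFunction is only shown when DEBUG verbosity is explicitly requested. A minimal sketch of this effect using the standard Python logging module (illustrative only, not part of the patch; the message texts and values below are made up):

import logging

# With the logger configured at INFO, DEBUG records are filtered out, so the
# per-evaluation dumps of X stay silent unless DEBUG verbosity is requested.
logging.basicConfig(level=logging.INFO)
logging.debug("CostFunction X = %s", [1.0, 2.0])   # suppressed at INFO level
logging.info("minimization step completed")        # still printed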