Minor modification of debug information
src/daComposant/daAlgorithms/NonLinearLeastSquares.py (modules/adao.git)
index 757cda1e1a4eb956270995c50e10a2c3d3c1bad5..8098f9315770554bc428dbc27d1c36de70359fdc 100644
@@ -1,6 +1,6 @@
 #-*-coding:iso-8859-1-*-
 #
-#  Copyright (C) 2008-2013 EDF R&D
+#  Copyright (C) 2008-2014 EDF R&D
 #
 #  This library is free software; you can redistribute it and/or
 #  modify it under the terms of the GNU Lesser General Public
 #  Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
 import logging
-from daCore import BasicObjects, PlatformInfo
-m = PlatformInfo.SystemUsage()
-
-import numpy
-import scipy.optimize
-
-if logging.getLogger().level < logging.WARNING:
-    iprint  = 1
-    message = scipy.optimize.tnc.MSG_ALL
-    disp    = 1
-else:
-    iprint  = -1
-    message = scipy.optimize.tnc.MSG_NONE
-    disp    = 0
+from daCore import BasicObjects
+import numpy, scipy.optimize
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
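
The verbose/quiet switch deleted above was evaluated once, at import time; the next hunk moves it inside run(), so the logging level is sampled each time the algorithm executes and later logging configuration is honoured. A minimal sketch of the pattern, assuming the scipy.optimize.tnc MSG_* constants of the SciPy versions contemporary to this code; verbosity_flags is a hypothetical name:

```python
import logging
import scipy.optimize

def verbosity_flags():
    # Hypothetical helper: sample the root logger level at call time,
    # not at import time, and derive the three solver verbosity knobs.
    if logging.getLogger().level < logging.WARNING:
        return 1, scipy.optimize.tnc.MSG_ALL, 1     # iprint, message, disp
    return -1, scipy.optimize.tnc.MSG_NONE, 0

logging.getLogger().setLevel(logging.DEBUG)
print(verbosity_flags())    # verbose branch: (1, 15, 1)
logging.getLogger().setLevel(logging.ERROR)
print(verbosity_flags())    # quiet branch:   (-1, 0, 0)
```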
@@ -88,8 +76,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        logging.debug("%s Lancement"%self._name)
-        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
+        self._pre_run()
+        if logging.getLogger().level < logging.WARNING:
+            self.__iprint, self.__disp = 1, 1
+            self.__message = scipy.optimize.tnc.MSG_ALL
+        else:
+            self.__iprint, self.__disp = -1, 0
+            self.__message = scipy.optimize.tnc.MSG_NONE
         #
         # Control parameters
         # ------------------
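
_pre_run() and _post_run() come from daCore.BasicObjects.Algorithm and centralize the start/stop debug bookkeeping that each run() previously did by hand. Their bodies are not part of this diff; a hedged reconstruction from the removed logging calls, with the French messages translated, could look like:

```python
import logging
from daCore import PlatformInfo    # ADAO-internal module providing SystemUsage

class Algorithm(object):
    # Hypothetical reconstruction; the real hooks live in
    # daCore.BasicObjects.Algorithm and may differ in detail.
    def __init__(self, name):
        self._name = name
        self._m = PlatformInfo.SystemUsage()

    def _pre_run(self):
        logging.debug("%s Launching"%self._name)    # was "Lancement"
        logging.debug("%s Memory used: %.1f MB"%(self._name, self._m.getUsedMemory("M")))

    def _post_run(self, HO=None):
        logging.debug("%s Memory used: %.1f MB"%(self._name, self._m.getUsedMemory("M")))
        logging.debug("%s Finished"%self._name)     # was "Terminé"
```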
@@ -142,7 +135,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
             J   = float( Jb ) + float( Jo )
             if self._parameters["StoreInternalVariables"]:
-                self.StoredVariables["CurrentState"].store( _X.A1 )
+                self.StoredVariables["CurrentState"].store( _X )
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -165,7 +158,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
             J   = float( Jb ) + float( Jo )
             if self._parameters["StoreInternalVariables"]:
-                self.StoredVariables["CurrentState"].store( _X.A1 )
+                self.StoredVariables["CurrentState"].store( _X )
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -202,7 +195,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 maxfun      = self._parameters["MaximumNumberOfSteps"]-1,
                 factr       = self._parameters["CostDecrementTolerance"]*1.e14,
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
-                iprint      = iprint,
+                iprint      = self.__iprint,
                 )
             nfeval = Informations['funcalls']
             rc     = Informations['warnflag']
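
A standalone illustration of the iprint wiring on a toy quadratic: scipy.optimize.fmin_l_bfgs_b returns an information dictionary carrying the 'funcalls' and 'warnflag' keys read just above. The cost function and names here are illustrative only:

```python
import numpy, scipy.optimize

def cost(x):
    return float(numpy.sum((x - 3.0)**2))   # toy quadratic, minimum at x = 3

def grad(x):
    return 2.0*(x - 3.0)

xmin, fmin, infos = scipy.optimize.fmin_l_bfgs_b(
    cost, numpy.zeros(2), fprime=grad,
    iprint=-1,    # -1 silences iteration output, as in the quiet branch
    )
print(xmin, infos['funcalls'], infos['warnflag'])
```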
@@ -216,7 +209,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 maxfun      = self._parameters["MaximumNumberOfSteps"],
                 pgtol       = self._parameters["ProjectedGradientTolerance"],
                 ftol        = self._parameters["CostDecrementTolerance"],
-                messages    = message,
+                messages    = self.__message,
                 )
         elif self._parameters["Minimizer"] == "CG":
             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
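
The TNC hunk above drives verbosity through the messages bit mask rather than iprint, and scipy.optimize.fmin_tnc returns a plain (x, nfeval, rc) tuple. A toy sketch with the same illustrative cost function, assuming the SciPy versions this file targets:

```python
import numpy, scipy.optimize

def cost(x):
    return float(numpy.sum((x - 3.0)**2))

def grad(x):
    return 2.0*(x - 3.0)

xopt, nfeval, rc = scipy.optimize.fmin_tnc(
    cost, numpy.zeros(2), fprime=grad,
    messages=scipy.optimize.tnc.MSG_NONE,   # MSG_ALL in the verbose branch
    )
print(xopt, nfeval, rc)
```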
@@ -226,7 +219,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = disp,
+                disp        = self.__disp,
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "NCG":
@@ -237,7 +230,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 avextol     = self._parameters["CostDecrementTolerance"],
-                disp        = disp,
+                disp        = self.__disp,
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "BFGS":
@@ -248,7 +241,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 args        = (),
                 maxiter     = self._parameters["MaximumNumberOfSteps"],
                 gtol        = self._parameters["GradientNormTolerance"],
-                disp        = disp,
+                disp        = self.__disp,
                 full_output = True,
                 )
         elif self._parameters["Minimizer"] == "LM":
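
The CG, NCG and BFGS hunks above all thread the same disp flag through. With full_output=True, scipy.optimize.fmin_cg returns (xopt, fopt, func_calls, grad_calls, warnflag), matching the unpacking shown; fmin_ncg and fmin_bfgs accept the same disp flag. Illustrative toy problem:

```python
import numpy, scipy.optimize

def cost(x):
    return float(numpy.sum((x - 3.0)**2))

def grad(x):
    return 2.0*(x - 3.0)

xopt, fopt, nfeval, ngeval, rc = scipy.optimize.fmin_cg(
    cost, numpy.zeros(2), fprime=grad,
    gtol=1.e-5,
    disp=0,               # 0 suppresses the convergence banner, 1 prints it
    full_output=True,
    )
print(xopt, nfeval, rc)
```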
@@ -291,9 +284,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["OMB"].store( numpy.ravel(d) )
         #
-        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
-        logging.debug("%s Terminé"%self._name)
-        #
+        self._post_run(HO)
         return 0
 
 # ==============================================================================
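
On the OMB storage in the last hunk: OMB is the standard "Observation Minus Background" diagnostic. Assuming d is the innovation vector computed earlier in run(), outside the hunks shown, the stored quantity is:

```latex
\mathbf{d} = \mathbf{y} - H(\mathbf{x}^{b})
```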