SALOME platform Git repositories - modules/adao.git/commitdiff
Slight corrections to parameter and variable treatment
author     Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Fri, 10 Aug 2012 13:42:21 +0000 (15:42 +0200)
committer  Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Fri, 10 Aug 2012 13:42:21 +0000 (15:42 +0200)
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daAlgorithms/NonLinearLeastSquares.py
src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
src/daComposant/daAlgorithms/QuantileRegression.py

src/daComposant/daAlgorithms/3DVAR.py
index dbb710bb27a6473d0940d78b3fd2d1796551a748..8997f3a1cdfe84799ebcc12de6496c8ec40cfcb1 100644
@@ -267,7 +267,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(Minimum).T
+        Xa = numpy.asmatrix(Minimum).flatten().T
         logging.debug("%s Analyse Xa = %s"%(self._name, Xa))
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )
src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index f77ab0165551f835277928a80bda6b0fb3c81893..cee9082e535cb9d689bf9c33755517b90707aa7f 100644
@@ -262,7 +262,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(Minimum).T
+        Xa = numpy.asmatrix(Minimum).flatten().T
         logging.debug("%s Analyse Xa = %s"%(self._name, Xa))
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )
src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
index 5c43edc7a0c68973168ae706e5a3f9e4c47b300c..0f818577f07d9160d83af86873a53570ec6067cb 100644
@@ -35,7 +35,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = 50,
             typecast = int,
             message  = "Nombre maximal de pas d'optimisation",
-            minval   = -1
+            minval   = 1,
             )
         self.defineRequiredParameter(
             name     = "SetSeed",
@@ -130,7 +130,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ------------------------------
         def CostFunction(x, QualityMeasure="AugmentedPonderatedLeastSquares"):
             _X  = numpy.asmatrix(x).flatten().T
-            logging.debug("%s CostFunction X  = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s CostFunction X  = %s"%(self._name, _X.A1))
             _HX = Hm( _X )
             _HX = numpy.asmatrix(_HX).flatten().T
             #
@@ -164,8 +164,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             logging.debug("%s CostFunction J  = %s"%(self._name, J))
             return J
         #
-        # Paramètres de pilotage
-        # ----------------------
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
         if type(Xb) is type(numpy.matrix([])):
@@ -223,17 +221,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     if quality < qBest :
                         Best  = numpy.asmatrix(insect).flatten().A1
                         qBest = quality
-            logging.debug("%s Iteration %i : qBest = %.5f, Best = %s"%(self._name, n+1,qBest,numpy.asmatrix(Best.flatten()).A1))
+            logging.debug("%s Iteration %i : qBest = %.5f, Best = %s"%(self._name, n+1,qBest,Best))
             #
             if self._parameters["StoreInternalVariables"]:
-                self.StoredVariables["CurrentState"].store( numpy.asmatrix(Best.flatten()).A1 )
+                self.StoredVariables["CurrentState"].store( Best )
             self.StoredVariables["CostFunctionJb"].store( 0. )
             self.StoredVariables["CostFunctionJo"].store( 0. )
             self.StoredVariables["CostFunctionJ" ].store( qBest )
         #
         logging.debug("%s %s Step of min cost  = %s"%(self._name, self._parameters["QualityCriterion"], self._parameters["MaximumNumberOfSteps"]))
         logging.debug("%s %s Minimum cost      = %s"%(self._name, self._parameters["QualityCriterion"], qBest))
-        logging.debug("%s %s Minimum state     = %s"%(self._name, self._parameters["QualityCriterion"], numpy.asmatrix(Best).flatten().T))
+        logging.debug("%s %s Minimum state     = %s"%(self._name, self._parameters["QualityCriterion"], Best))
         logging.debug("%s %s Nb of F           = %s"%(self._name, self._parameters["QualityCriterion"], (self._parameters["MaximumNumberOfSteps"]+1)*self._parameters["NumberOfInsects"]+1))
         logging.debug("%s %s RetCode           = %s"%(self._name, self._parameters["QualityCriterion"], 0))
         #
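
A short sketch, not from the commit, of why the logging and storage calls above can be simplified: once a state has been built with numpy.asmatrix(x).flatten().T it is a column matrix, so its .A1 attribute is already the flat 1-D array, and a value built with .flatten().A1 (such as Best) can be logged and stored directly without re-wrapping it in asmatrix/flatten.

import numpy

x  = [0.5, 1.5, 2.5]                             # an illustrative particle ("insect") position
_X = numpy.asmatrix(x).flatten().T               # column matrix, shape (3, 1)

# The removed wrapping and the new form describe the same values:
print(numpy.asmatrix(_X).flatten())              # matrix([[0.5, 1.5, 2.5]])
print(_X.A1)                                     # [0.5 1.5 2.5]

Best = numpy.asmatrix(x).flatten().A1            # already a flat ndarray
print(Best)                                      # [0.5 1.5 2.5], safe to store and log as is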
src/daComposant/daAlgorithms/QuantileRegression.py
index 8e93044904590db10f7286b1291832cb7c99be9b..616b41ae76c2a75c8e701f26d32edfd43b074a7c 100644
@@ -49,7 +49,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = 15000,
             typecast = int,
             message  = "Nombre maximal de pas d'optimisation",
-            minval   = -1
+            minval   = 1,
             )
         self.defineRequiredParameter(
             name     = "CostDecrementTolerance",
@@ -120,7 +120,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         def GradientOfCostFunction(x):
             _X      = numpy.asmatrix(x).flatten().T
-            logging.debug("%s GradientOfCostFunction X      = %s"%(self._name, numpy.asmatrix( _X ).flatten()))
+            logging.debug("%s GradientOfCostFunction X      = %s"%(self._name, _X.A1))
             Hg = H["Tangent"].asMatrix( _X )
             return Hg
         #
@@ -158,7 +158,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(Minimum).T
+        Xa = numpy.asmatrix(Minimum).flatten().T
         logging.debug("%s Analyse Xa = %s"%(self._name, Xa))
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )