SALOME platform Git repositories - modules/adao.git/commitdiff
User messages and naming convention improvements
author    Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>   Tue, 1 Jul 2014 14:11:28 +0000 (16:11 +0200)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>   Tue, 1 Jul 2014 14:11:28 +0000 (16:11 +0200)
resources/ADAOSchemaCatalog.xml
src/daComposant/daAlgorithms/AdjointTest.py
src/daComposant/daAlgorithms/FunctionTest.py
src/daComposant/daAlgorithms/LinearityTest.py
src/daComposant/daAlgorithms/ParticleSwarmOptimization.py

diff --git a/resources/ADAOSchemaCatalog.xml b/resources/ADAOSchemaCatalog.xml
index bc2095d407f65683dd04435be6b0eef878af0749..1b888c1b58d62a1902c6c328930e18889552c4ca 100644
@@ -489,7 +489,6 @@ ADD.analyze()
 #-*-coding:iso-8859-1-*-
 import logging
 logging.debug("TERMINATE Entering in SimpleUserAnalysis")
-from daYacsIntegration.daStudy import *
 ADD = Study.getAssimilationStudy()
 # User code is below
 
diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py
index cd8288e9f6e7a2e665d515c3e729ac00452d5478..fb2f7c89b2b50bce3396fa133e67d96ac3009eee 100644
@@ -109,7 +109,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
 
               R(Alpha) = | < TangentF_X(dX) , Y > - < dX , AdjointF_X(Y) > |
 
-            qui doit rester constamment egal zero a la precision du calcul.
+            qui doit rester constamment egal a zero a la precision du calcul.
             On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
             Y doit etre dans l'image de F. S'il n'est pas donne, on prend Y = F(X).
             """
diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py
index 319b9ee3d95083dd9117c59669b54e6803c5105b..e656b5c25a4292e98106d93a3fd38449208c0048 100644
@@ -62,7 +62,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         Hm = HO["Direct"].appliedTo
         #
-        Xn = numpy.asmatrix(numpy.ravel( Xb )).T
+        Xn = copy.copy( Xb )
         #
         # ----------
         _p = self._parameters["NumberOfPrintedDigits"]
@@ -99,22 +99,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
             print("===> Launching direct operator evaluation\n")
             #
-            Y = Hm( Xn )
+            Yn = Hm( Xn )
             #
             print("\n===> End of direct operator evaluation\n")
             #
             msg  = ("===> Information after evaluation:\n")
-            msg += ("\n     Characteristics of output vector Y, to compare to other calculations:\n")
-            msg += ("       Type...............: %s\n")%type( Y )
-            msg += ("       Lenght of vector...: %i\n")%max(numpy.matrix( Y ).shape)
-            msg += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Y )
-            msg += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Y )
-            msg += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Y )
-            msg += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Y )
-            msg += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Y )
+            msg += ("\n     Characteristics of simulated output vector Y=H(X), to compare to others:\n")
+            msg += ("       Type...............: %s\n")%type( Yn )
+            msg += ("       Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape)
+            msg += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
+            msg += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
+            msg += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn )
+            msg += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn )
+            msg += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
             print(msg)
             #
-            Ys.append( copy.copy( numpy.ravel(Y) ) )
+            Ys.append( copy.copy( numpy.ravel(
+                Yn
+                ) ) )
         #
         print("     %s\n"%("-"*75,))
         if self._parameters["SetDebug"]:
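
The statistics printed above for the simulated output Y=H(X) (type, length, min, max, mean, standard deviation, L2 norm) can be reproduced on any vector with a few NumPy calls. A small illustrative helper, not part of ADAO:

import numpy

def summarize(Y, digits=8):
    # Print the same kind of summary as the FunctionTest message above
    # (illustrative helper only).
    Y = numpy.ravel(Y)
    fmt = "%." + str(digits) + "e"
    print("Type...............: %s" % type(Y))
    print("Length of vector...: %i" % Y.size)
    print(("Minimum value......: " + fmt) % numpy.min(Y))
    print(("Maximum value......: " + fmt) % numpy.max(Y))
    print(("Mean of vector.....: " + fmt) % numpy.mean(Y))
    print(("Standard error.....: " + fmt) % numpy.std(Y))
    print(("L2 norm of vector..: " + fmt) % numpy.linalg.norm(Y))

summarize([1.0, 2.0, 3.0])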
diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py
index 2d29fa84e3a903b12871a5d65c867a63112d9ab5..e1f922db810a078d49e2ed39f8a70ecb1f4f8245 100644
@@ -27,7 +27,7 @@ import numpy, math
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
-        BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
+        BasicObjects.Algorithm.__init__(self, "LINEARITYTEST")
         self.defineRequiredParameter(
             name     = "ResiduFormula",
             default  = "CenteredDL",
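
For context on the "CenteredDL" default kept above: a centered-difference linearity residual of the form ||F(X+Alpha*dX) + F(X-Alpha*dX) - 2*F(X)|| vanishes identically (up to round-off) when F is linear. A toy sketch under that generic formula, which may differ in normalisation from the exact ADAO residual:

import numpy

A = numpy.random.rand(4, 4)
F = lambda X: A @ X                 # a linear operator, for illustration only

X   = numpy.random.rand(4)
dX0 = numpy.random.normal(0., 1., size=X.shape)

for alpha in [1., 1.e-2, 1.e-4]:
    dX = alpha * dX0
    # Centered-difference residual: exactly zero (up to round-off) when F is linear
    R = numpy.linalg.norm(F(X + dX) + F(X - dX) - 2. * F(X))
    print("Alpha = %.0e  R(Alpha) = %.3e" % (alpha, R))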
diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
index 862c350d2a9226a4b1725ee54ac8d5e8657d70bd..ba52587eb13ba1301970879f227a4db60f722419 100644
@@ -128,26 +128,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                 Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
-                J   = float( Jb ) + float( Jo )
             elif QualityMeasure in ["WeightedLeastSquares","WLS","PonderatedLeastSquares","PLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.
                 Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
-                J   = float( Jb ) + float( Jo )
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb  = 0.
                 Jo  = 0.5 * (Y - _HX).T * (Y - _HX)
-                J   = float( Jb ) + float( Jo )
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb  = 0.
                 Jo  = numpy.sum( numpy.abs(Y - _HX) )
-                J   = float( Jb ) + float( Jo )
             elif QualityMeasure in ["MaximumError","ME"]:
                 Jb  = 0.
                 Jo  = numpy.max( numpy.abs(Y - _HX) )
-                J   = float( Jb ) + float( Jo )
             #
+            J   = float( Jb ) + float( Jo )
+            #
+            if self._parameters["StoreInternalVariables"]:
+                self.StoredVariables["CurrentState"].store( _X )
+            self.StoredVariables["CostFunctionJb"].store( Jb )
+            self.StoredVariables["CostFunctionJo"].store( Jo )
+            self.StoredVariables["CostFunctionJ" ].store( J )
             return J
         #
         # Point de démarrage de l'optimisation : Xini = Xb
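
The refactoring above factors the common J = Jb + Jo out of the QualityMeasure branches and stores Jb, Jo and J at every evaluation. A standalone sketch of the "AugmentedWeightedLeastSquares" case, with BI and RI standing for the inverse background and observation error covariances (illustrative helper, not the ADAO class):

import numpy

def cost_function(X, Xb, Y, HX, BI, RI):
    # Augmented weighted least-squares cost, as in the branch above;
    # BI and RI are the inverse background / observation error covariances.
    dXb = numpy.ravel(X) - numpy.ravel(Xb)
    dY  = numpy.ravel(Y) - numpy.ravel(HX)
    Jb  = 0.5 * float(dXb @ (BI @ dXb))
    Jo  = 0.5 * float(dY  @ (RI @ dY))
    return Jb + Jo, Jb, Jo

# Tiny check with identity covariances (hypothetical values)
J, Jb, Jo = cost_function(X=[1., 2.], Xb=[0., 0.], Y=[1.], HX=[0.5],
                          BI=numpy.eye(2), RI=numpy.eye(1))
print(J, Jb, Jo)   # 2.625 2.5 0.125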
@@ -183,18 +185,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         qBest = CostFunction(Best,self._parameters["QualityCriterion"])
         #
         for i in range(self._parameters["NumberOfInsects"]):
-            insect  = numpy.array(PosInsect[:,i].A1)
+            insect  = numpy.ravel(PosInsect[:,i])
             quality = CostFunction(insect,self._parameters["QualityCriterion"])
             qBestPosInsect.append(quality)
             if quality < qBest:
-                Best  = insect
-                qBest = quality
+                Best  = copy.copy( insect )
+                qBest = copy.copy( quality )
+        logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest)))
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
         for n in range(self._parameters["MaximumNumberOfSteps"]):
             for i in range(self._parameters["NumberOfInsects"]) :
-                insect  = PosInsect[:,i]
+                insect  = numpy.ravel(PosInsect[:,i])
                 rp = numpy.random.uniform(size=nbparam)
                 rg = numpy.random.uniform(size=nbparam)
                 for j in range(nbparam) :
@@ -202,10 +205,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     PosInsect[j,i] = PosInsect[j,i]+VelocityInsect[j,i]
                 quality = CostFunction(insect,self._parameters["QualityCriterion"])
                 if quality < qBestPosInsect[i]:
-                    BestPosInsect[:,i] = numpy.ravel( insect )
+                    BestPosInsect[:,i] = copy.copy( insect )
+                    qBestPosInsect[i]  = copy.copy( quality )
                     if quality < qBest :
-                        Best  = numpy.ravel( insect )
-                        qBest = quality
+                        Best  = copy.copy( insect )
+                        qBest = copy.copy( quality )
+            logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest)))
             #
             if self._parameters["StoreInternalVariables"]:
                 self.StoredVariables["CurrentState"].store( Best )