SALOME platform Git repositories - modules/adao.git/commitdiff
Minor documentation and code review corrections (27)
author Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Fri, 4 Mar 2022 07:21:55 +0000 (08:21 +0100)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Fri, 4 Mar 2022 07:21:55 +0000 (08:21 +0100)
Safely remove some deprecated numpy.matrix usages and review internal
variable storage

16 files changed:
src/daComposant/daAlgorithms/Atoms/cekf.py
src/daComposant/daAlgorithms/Atoms/ecwnlls.py
src/daComposant/daAlgorithms/Atoms/exkf.py
src/daComposant/daAlgorithms/Atoms/incr3dvar.py
src/daComposant/daAlgorithms/Atoms/mmqr.py
src/daComposant/daAlgorithms/Atoms/psas3dvar.py
src/daComposant/daAlgorithms/Atoms/std3dvar.py
src/daComposant/daAlgorithms/Atoms/std4dvar.py
src/daComposant/daAlgorithms/Atoms/van3dvar.py
src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
src/daComposant/daAlgorithms/DifferentialEvolution.py
src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
src/daComposant/daAlgorithms/QuantileRegression.py
src/daComposant/daAlgorithms/TabuSearch.py
src/daComposant/daCore/BasicObjects.py
src/daComposant/daCore/PlatformInfo.py
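
The recurring pattern in the hunks below replaces deprecated numpy.matrix
idioms with plain ndarray equivalents: '*' products become '@', .getI()
becomes numpy.linalg.inv, and numpy.asmatrix(numpy.ravel(x)).T becomes
numpy.ravel(x).reshape((-1,1)). A minimal standalone sketch of the pattern
(the array values are illustrative, not taken from the sources):

    # Hedged sketch of the numpy.matrix removal applied in this commit;
    # B and x are illustrative stand-ins, not variables from the sources.
    import numpy

    B = numpy.array([[2., 0.], [0., 3.]])      # covariance as plain ndarray
    x = numpy.ravel([1., 2.]).reshape((-1,1))  # column vector, replaces asmatrix(...).T

    # Old matrix style:  BI = numpy.matrix(B).getI();  J = 0.5 * x.T * BI * x
    # New ndarray style, with explicit matrix products via '@':
    BI = numpy.linalg.inv(B)
    J  = float(0.5 * x.T @ (BI @ x))           # 0.9166...

With ndarrays, '@' keeps matrix semantics explicit, so a leftover '*' can no
longer be silently reinterpreted as a matrix product.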

index c668b5c37fe4e6619f69587b8245e8349d300708..908ea2ec7feeb5686559a9f415b904fbb6d53f48 100644 (file)
@@ -47,18 +47,21 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     else:
         duration = 2
         __p = numpy.array(Y).size
+    __n = Xb.size
     #
     # Précalcul des inversions de B et R
-    if selfA._parameters["StoreInternalVariables"] \
-        or selfA._toStore("CostFunctionJ") \
-        or selfA._toStore("CostFunctionJb") \
-        or selfA._toStore("CostFunctionJo") \
-        or selfA._toStore("CurrentOptimum") \
-        or selfA._toStore("APosterioriCovariance"):
-        BI = B.getI()
+    if selfA._parameters["StoreInternalVariables"] or \
+        selfA._toStore("CostFunctionJ")  or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+        selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+        selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+        selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \
+        (__p > __n):
+        if isinstance(B,numpy.ndarray):
+            BI = numpy.linalg.inv(B)
+        else:
+            BI = B.getI()
         RI = R.getI()
     #
-    __n = Xb.size
     nbPreviousSteps  = len(selfA.StoredVariables["Analysis"])
     #
     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
@@ -75,6 +78,8 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     elif selfA._parameters["nextStep"]:
         Xn = selfA._getInternalState("Xn")
         Pn = selfA._getInternalState("Pn")
+    if hasattr(Pn,"asfullmatrix"):
+        Pn = Pn.asfullmatrix(Xn.size)
     #
     if selfA._parameters["EstimationOf"] == "Parameters":
         XaMin            = Xn
@@ -106,7 +111,7 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 Cm = CM["Tangent"].asMatrix(Xn_predicted)
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm @ Un
-            Pn_predicted = Q + Mt * (Pn * Ma)
+            Pn_predicted = Q + Mt @ (Pn @ Ma)
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
             # --- > Par principe, M = Id, Q = 0
             Xn_predicted = Xn
@@ -137,9 +142,22 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 _Innovation = _Innovation - Cm @ Un
         #
-        Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
-        Xn = Xn_predicted + Kn * _Innovation
-        Pn = Pn_predicted - Kn * Ht * Pn_predicted
+        if Ynpu.size <= Xn.size:
+            _HNHt = numpy.dot(Ht, Pn_predicted @ Ha)
+            _A = R + _HNHt
+            _u = numpy.linalg.solve( _A , _Innovation )
+            Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1))
+            Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A))
+        else:
+            _HtRH = numpy.dot(Ha, RI @ Ht)
+            _A = numpy.linalg.inv(Pn_predicted) + _HtRH
+            _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) )
+            Xn = Xn_predicted + _u.reshape((-1,1))
+            Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size))
+        #
+        Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted)
+        Pn = (Pn + Pn.T) * 0.5 # Symétrie
+        Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité
         #
         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
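
The rewritten analysis step above (repeated identically in exkf.py below)
chooses between two algebraically equivalent forms of the Kalman update: the
covariance form when the observation dimension p = Ynpu.size is at most the
state dimension n = Xn.size (one p x p solve), and the information form
otherwise (one n x n solve); the last two lines then enforce symmetry and
positivity of Pn. A minimal sketch of the equivalence, with illustrative
shapes and values:

    # Hedged sketch: both branches above yield the same state increment
    # (P, H, R, d are illustrative stand-ins for Pn_predicted, Ht, R, _Innovation).
    import numpy

    n, p = 3, 2
    rng = numpy.random.default_rng(0)
    P = numpy.eye(n)                    # forecast covariance, n x n
    H = rng.standard_normal((p, n))     # observation operator
    R = numpy.eye(p)                    # observation error covariance
    d = rng.standard_normal((p, 1))     # innovation

    # Covariance form: one p x p solve, cheaper when p <= n
    dx1 = P @ H.T @ numpy.linalg.solve(R + H @ P @ H.T, d)

    # Information form: one n x n solve, cheaper when p > n
    RI  = numpy.linalg.inv(R)
    dx2 = numpy.linalg.solve(numpy.linalg.inv(P) + H.T @ RI @ H, H.T @ RI @ d)

    assert numpy.allclose(dx1, dx2)     # Sherman-Morrison-Woodbury identity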
index ec3c4cb11612ae438a15ee302dba1bc6d8bc2f4b..1d9ca016792dbbcdf711a1deddcabc0f89ef3e89 100644 (file)
@@ -207,7 +207,7 @@ def ecwnlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
             )
         # nfeval = infodict['nfev']
     else:
-        raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
     #
     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
     #
index 6936fc98d5d3cffb344028fb59c1b987c67ddac4..2361cb31cbfc06c3c5605fbb49ac4a6c855da323 100644 (file)
@@ -45,18 +45,21 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     else:
         duration = 2
         __p = numpy.array(Y).size
+    __n = Xb.size
     #
     # Précalcul des inversions de B et R
-    if selfA._parameters["StoreInternalVariables"] \
-        or selfA._toStore("CostFunctionJ") \
-        or selfA._toStore("CostFunctionJb") \
-        or selfA._toStore("CostFunctionJo") \
-        or selfA._toStore("CurrentOptimum") \
-        or selfA._toStore("APosterioriCovariance"):
-        BI = B.getI()
+    if selfA._parameters["StoreInternalVariables"] or \
+        selfA._toStore("CostFunctionJ")  or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+        selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+        selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+        selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \
+        (__p > __n):
+        if isinstance(B,numpy.ndarray):
+            BI = numpy.linalg.inv(B)
+        else:
+            BI = B.getI()
         RI = R.getI()
     #
-    __n = Xb.size
     nbPreviousSteps  = len(selfA.StoredVariables["Analysis"])
     #
     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
@@ -73,6 +76,8 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     elif selfA._parameters["nextStep"]:
         Xn = selfA._getInternalState("Xn")
         Pn = selfA._getInternalState("Pn")
+    if hasattr(Pn,"asfullmatrix"):
+        Pn = Pn.asfullmatrix(Xn.size)
     #
     if selfA._parameters["EstimationOf"] == "Parameters":
         XaMin            = Xn
@@ -101,7 +106,7 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 Cm = CM["Tangent"].asMatrix(Xn_predicted)
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm @ Un
-            Pn_predicted = Q + Mt * (Pn * Ma)
+            Pn_predicted = Q + Mt @ (Pn @ Ma)
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
             # --- > Par principe, M = Id, Q = 0
             Xn_predicted = Xn
@@ -129,9 +134,22 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 _Innovation = _Innovation - Cm @ Un
         #
-        Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
-        Xn = Xn_predicted + Kn * _Innovation
-        Pn = Pn_predicted - Kn * Ht * Pn_predicted
+        if Ynpu.size <= Xn.size:
+            _HNHt = numpy.dot(Ht, Pn_predicted @ Ha)
+            _A = R + _HNHt
+            _u = numpy.linalg.solve( _A , _Innovation )
+            Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1))
+            Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A))
+        else:
+            _HtRH = numpy.dot(Ha, RI @ Ht)
+            _A = numpy.linalg.inv(Pn_predicted) + _HtRH
+            _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) )
+            Xn = Xn_predicted + _u.reshape((-1,1))
+            Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size))
+        #
+        Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted)
+        Pn = (Pn + Pn.T) * 0.5 # Symétrie
+        Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité
         #
         Xa = Xn # Pointeurs
         #--------------------------
index 10b903c846984f20a680e15fb9fd744a8f122cd1..f55354300807c03af10385e50fcebf992648194e 100644 (file)
@@ -197,7 +197,7 @@ def incr3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
                 full_output = True,
                 )
         else:
-            raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+            raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
         #
         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
index b1a58a0ae2138d862b3f76647311366ac259d47a..48bf73558c35e17e0950745e31d31b04cf22b8b7 100644 (file)
@@ -78,14 +78,14 @@ def mmqr(
         Derivees  = numpy.array(fprime(variables))
         Derivees  = Derivees.reshape(n,p) # ADAO & check shape
         DeriveesT = Derivees.transpose()
-        M         =   numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
+        M         =   numpy.dot( DeriveesT , (numpy.array(p*[poids,]).T * Derivees) )
         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
         #
         variables = variables + step
         if bounds is not None:
             # Attention : boucle infinie à éviter si un intervalle est trop petit
-            while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
+            while( (variables < numpy.ravel(numpy.asarray(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asarray(bounds)[:,1])).any() ):
                 step      = step/2.
                 variables = variables - step
         residus   = mesures - numpy.ravel( func(variables) )
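
The weighting rewrite above drops the numpy.matrix column replication in
favour of a plain array; a hedged check that the committed expression matches
ordinary broadcasting (sizes and values are illustrative):

    # Hedged sketch: the new ndarray weighting equals a simple broadcast.
    import numpy

    n, p = 4, 3
    rng = numpy.random.default_rng(1)
    Derivees = rng.standard_normal((n, p))
    poids = rng.uniform(0.1, 1.0, size=n)

    W_new = numpy.array(p*[poids,]).T * Derivees   # as committed
    W_bc  = Derivees * poids[:, None]              # equivalent broadcast

    assert numpy.allclose(W_new, W_bc)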
index 8f69ece22824982627be65c15ddeffa81749f390..dad9e595c36923c89d59a248e72b171694188baf 100644 (file)
@@ -175,7 +175,7 @@ def psas3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
             full_output = True,
             )
     else:
-        raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
     #
     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
index 5ee297617736947f2122501fa4eb33916e2d1ca7..bd70689b18ebff036d4f9ce3b2e71c586d9c0c6f 100644 (file)
@@ -180,7 +180,7 @@ def std3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
             full_output = True,
             )
     else:
-        raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
     #
     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
index 1eca8267820d348ac040c06675b265268978152e..cd582a81d688e1a093d753689e3889a5d3d6111c 100644 (file)
@@ -241,7 +241,7 @@ def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
             full_output = True,
             )
     else:
-        raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
     #
     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
     #
index 0e96130d16646112ace22399ed40bd5af6bddac9..5ae73fa73da1d4537d5fbc585f89e6ecdf2d0a4d 100644 (file)
@@ -189,7 +189,7 @@ def van3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
             full_output = True,
             )
     else:
-        raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+        raise ValueError("Error in minimizer name: %s is unknown"%selfA._parameters["Minimizer"])
     #
     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
index 299a1b4f6b0a230127345038f933c3317f300829..b191c8f2b60586597722b766bf2774cebee5c848 100644 (file)
@@ -20,9 +20,8 @@
 #
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
-import logging
+import numpy, logging, scipy.optimize
 from daCore import BasicObjects, PlatformInfo
-import numpy, scipy.optimize
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
@@ -74,11 +73,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = str,
             message  = "Critère de qualité utilisé",
             listval  = [
-                "AugmentedWeightedLeastSquares","AWLS","DA",
-                "WeightedLeastSquares","WLS",
-                "LeastSquares","LS","L2",
-                "AbsoluteValue","L1",
-                "MaximumError","ME",
+                "AugmentedWeightedLeastSquares", "AWLS", "DA",
+                "WeightedLeastSquares", "WLS",
+                "LeastSquares", "LS", "L2",
+                "AbsoluteValue", "L1",
+                "MaximumError", "ME",
                 ],
             )
         self.defineRequiredParameter(
@@ -120,7 +119,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             message  = "Liste des valeurs de bornes",
             )
         self.requireInputArguments(
-            mandatory= ("Xb", "Y", "HO", "R", "B" ),
+            mandatory= ("Xb", "Y", "HO", "R", "B"),
             )
         self.setAttributes(tags=(
             "Optimization",
@@ -135,23 +134,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA, POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
             self._parameters["Minimizer"] = "SIMPLEX"
         #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         #
-        # Précalcul des inversions de B et R
-        # ----------------------------------
         BI = B.getI()
         RI = R.getI()
         #
-        # Définition de la fonction-coût
-        # ------------------------------
         def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
-            _X  = numpy.asmatrix(numpy.ravel( x )).T
-            self.StoredVariables["CurrentState"].store( _X )
-            _HX = Hm( _X )
-            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+            _X  = numpy.ravel( x ).reshape((-1,1))
+            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
             _Innovation = Y - _HX
+            self.StoredVariables["CurrentState"].store( _X )
             if self._toStore("SimulatedObservationAtCurrentState") or \
                 self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
@@ -160,17 +152,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
             if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                 if BI is None or RI is None:
-                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
-                Jb  = 0.5 * (_X - Xb).T * (BI * (_X - Xb))
-                Jo  = 0.5 * _Innovation.T * (RI * _Innovation)
+                    raise ValueError("Background and Observation error covariance matrices has to be properly defined!")
+                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
             elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.
-                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb  = 0.
-                Jo  = 0.5 * (_Innovation).T * (_Innovation)
+                Jo  = 0.5 * _Innovation.T @ _Innovation
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb  = 0.
                 Jo  = numpy.sum( numpy.abs(_Innovation) )
@@ -205,8 +197,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
             return J
         #
-        # Point de démarrage de l'optimisation : Xini = Xb
-        # ------------------------------------
         Xini = numpy.ravel(Xb)
         if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
             raise ValueError("The minimizer %s can not be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
@@ -382,7 +372,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                 print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
         else:
-            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
+            raise ValueError("Error in minimizer name: %s is unknown"%self._parameters["Minimizer"])
         #
         IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
         MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
@@ -390,7 +380,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.ravel( Minimum )
+        Xa = Minimum
         #
         self.StoredVariables["Analysis"].store( Xa )
         #
@@ -404,11 +394,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
             else:
                 HXa = Hm(Xa)
+            HXa = HXa.reshape((-1,1))
         if self._toStore("Innovation") or \
             self._toStore("OMB") or \
             self._toStore("SimulatedObservationAtBackground"):
-            HXb = Hm(Xb)
-            Innovation  = Y - HXb
+            HXb = Hm(Xb).reshape((-1,1))
+            Innovation = Y - HXb
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( Innovation )
         if self._toStore("OMB"):
@@ -416,13 +407,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
         if self._toStore("OMA"):
-            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
+            self.StoredVariables["OMA"].store( Y - HXa )
         if self._toStore("SimulatedObservationAtBackground"):
             self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
         #
-        self._post_run()
+        self._post_run(HO)
         return 0
 
 # ==============================================================================
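
For the AWLS criterion, the refactored CostFunction above (and its twin in
DifferentialEvolution.py below) evaluates
J(x) = 0.5 (x - Xb)^T B^{-1} (x - Xb) + 0.5 (Y - H(x))^T R^{-1} (Y - H(x)).
A minimal standalone sketch, with an illustrative identity observation
operator and toy data:

    # Hedged sketch of the AugmentedWeightedLeastSquares cost; Xb, Y, BI, RI
    # and H are illustrative stand-ins, not objects from the sources.
    import numpy

    Xb = numpy.zeros((2, 1))
    Y  = numpy.array([[1.], [2.]])
    BI = numpy.linalg.inv(numpy.eye(2))
    RI = numpy.linalg.inv(numpy.eye(2))
    H  = lambda x: x                     # identity observation operator

    def J(x):
        _X = numpy.ravel(x).reshape((-1, 1))
        _Innovation = Y - H(_X)
        Jb = 0.5 * float((_X - Xb).T @ (BI @ (_X - Xb)))
        Jo = 0.5 * float(_Innovation.T @ (RI @ _Innovation))
        return Jb + Jo

    print(J([0.5, 1.0]))                 # 1.25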
index 11fdbba925a72de77389caa7321fa9d3a9c19a9f..0a624a484467871a1abbe7226f2c0845c274f6fb 100644 (file)
@@ -20,7 +20,7 @@
 #
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
-import logging, numpy, scipy.optimize
+import numpy, logging, scipy.optimize
 from daCore import BasicObjects
 
 # ==============================================================================
@@ -63,6 +63,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             message  = "Nombre maximal d'évaluations de la fonction",
             minval   = -1,
             )
+        self.defineRequiredParameter(
+            name     = "SetSeed",
+            typecast = numpy.random.seed,
+            message  = "Graine fixée pour le générateur aléatoire",
+            )
         self.defineRequiredParameter(
             name     = "PopulationSize",
             default  = 100,
@@ -92,11 +97,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = str,
             message  = "Critère de qualité utilisé",
             listval  = [
-                "AugmentedWeightedLeastSquares","AWLS","DA",
-                "WeightedLeastSquares","WLS",
-                "LeastSquares","LS","L2",
-                "AbsoluteValue","L1",
-                "MaximumError","ME",
+                "AugmentedWeightedLeastSquares", "AWLS", "DA",
+                "WeightedLeastSquares", "WLS",
+                "LeastSquares", "LS", "L2",
+                "AbsoluteValue", "L1",
+                "MaximumError", "ME",
                 ],
             )
         self.defineRequiredParameter(
@@ -133,17 +138,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "SimulatedObservationAtOptimum",
                 ]
             )
-        self.defineRequiredParameter(
-            name     = "SetSeed",
-            typecast = numpy.random.seed,
-            message  = "Graine fixée pour le générateur aléatoire",
-            )
         self.defineRequiredParameter( # Pas de type
             name     = "Bounds",
             message  = "Liste des valeurs de bornes",
             )
         self.requireInputArguments(
-            mandatory= ("Xb", "Y", "HO", "R", "B" ),
+            mandatory= ("Xb", "Y", "HO", "R", "B"),
             )
         self.setAttributes(tags=(
             "Optimization",
@@ -160,23 +160,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
         logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
         #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         #
-        # Précalcul des inversions de B et R
-        # ----------------------------------
         BI = B.getI()
         RI = R.getI()
         #
-        # Définition de la fonction-coût
-        # ------------------------------
         def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
-            _X  = numpy.asmatrix(numpy.ravel( x )).T
-            self.StoredVariables["CurrentState"].store( _X )
-            _HX = Hm( _X )
-            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+            _X  = numpy.ravel( x ).reshape((-1,1))
+            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
             _Innovation = Y - _HX
+            self.StoredVariables["CurrentState"].store( _X )
             if self._toStore("SimulatedObservationAtCurrentState") or \
                 self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
@@ -185,17 +178,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
             if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                 if BI is None or RI is None:
-                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
-                Jb  = 0.5 * (_X - Xb).T * (BI * (_X - Xb))
-                Jo  = 0.5 * _Innovation.T * (RI * _Innovation)
+                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
+                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
             elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.
-                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb  = 0.
-                Jo  = 0.5 * (_Innovation).T * (_Innovation)
+                Jo  = 0.5 * _Innovation.T @ _Innovation
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb  = 0.
                 Jo  = numpy.sum( numpy.abs(_Innovation) )
@@ -230,8 +223,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
             return J
         #
-        # Point de démarrage de l'optimisation : Xini = Xb
-        # ------------------------------------
         Xini = numpy.ravel(Xb)
         #
         # Minimisation de la fonctionnelle
@@ -255,7 +246,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.ravel( Minimum )
+        Xa = Minimum
         #
         self.StoredVariables["Analysis"].store( Xa )
         #
@@ -269,11 +260,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
             else:
                 HXa = Hm(Xa)
+            HXa = HXa.reshape((-1,1))
         if self._toStore("Innovation") or \
             self._toStore("OMB") or \
             self._toStore("SimulatedObservationAtBackground"):
-            HXb = Hm(Xb)
-            Innovation  = Y - HXb
+            HXb = Hm(Xb).reshape((-1,1))
+            Innovation = Y - HXb
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( Innovation )
         if self._toStore("OMB"):
@@ -281,13 +273,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
         if self._toStore("OMA"):
-            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
+            self.StoredVariables["OMA"].store( Y - HXa )
         if self._toStore("SimulatedObservationAtBackground"):
             self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
         #
-        self._post_run()
+        self._post_run(HO)
         return 0
 
 # ==============================================================================
index 408a6d515aa22ee06a1d802599aff4b97806ec8f..c7d743d01d2a8f2fc2d2b36f3d449eb871a0132c 100644 (file)
@@ -20,9 +20,8 @@
 #
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
-import logging
+import numpy, logging, copy
 from daCore import BasicObjects
-import numpy, copy
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
@@ -75,17 +74,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = str,
             message  = "Critère de qualité utilisé",
             listval  = [
-                "DA",
-                "AugmentedWeightedLeastSquares", "AWLS",
+                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                 "WeightedLeastSquares", "WLS",
                 "LeastSquares", "LS", "L2",
                 "AbsoluteValue", "L1",
                 "MaximumError", "ME",
                 ],
-            listadv  = [
-                "AugmentedPonderatedLeastSquares","APLS",
-                "PonderatedLeastSquares","PLS",
-                ]
             )
         self.defineRequiredParameter(
             name     = "StoreInternalVariables",
@@ -124,6 +118,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         self.setAttributes(tags=(
             "Optimization",
             "NonLinear",
+            "MetaHeuristic",
             "Population",
             ))
 
@@ -148,32 +143,30 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         BI = B.getI()
         RI = R.getI()
         #
-        # Définition de la fonction-coût
-        # ------------------------------
         def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
-            _X  = numpy.asmatrix(numpy.ravel( x )).T
-            _HX = Hm( _X )
-            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+            _X  = numpy.ravel( x ).reshape((-1,1))
+            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
+            _Innovation = Y - _HX
             #
-            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","AugmentedPonderatedLeastSquares","APLS","DA"]:
+            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                 if BI is None or RI is None:
-                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
-                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
-                Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
-            elif QualityMeasure in ["WeightedLeastSquares","WLS","PonderatedLeastSquares","PLS"]:
+                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
+                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
+            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb  = 0.
-                Jo  = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb  = 0.
-                Jo  = 0.5 * (Y - _HX).T * (Y - _HX)
+                Jo  = 0.5 * _Innovation.T @ _Innovation
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb  = 0.
-                Jo  = numpy.sum( numpy.abs(Y - _HX) )
+                Jo  = numpy.sum( numpy.abs(_Innovation) )
             elif QualityMeasure in ["MaximumError","ME"]:
                 Jb  = 0.
-                Jo  = numpy.max( numpy.abs(Y - _HX) )
+                Jo  = numpy.max( numpy.abs(_Innovation) )
             #
             J   = float( Jb ) + float( Jo )
             #
@@ -198,13 +191,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         for i in range(nbparam) :
             PosInsect.append(numpy.random.uniform(low=SpaceLow[i], high=SpaceUp[i], size=self._parameters["NumberOfInsects"]))
             VelocityInsect.append(numpy.random.uniform(low=-LimitVelocity[i], high=LimitVelocity[i], size=self._parameters["NumberOfInsects"]))
-        VelocityInsect = numpy.matrix(VelocityInsect)
-        PosInsect = numpy.matrix(PosInsect)
+        VelocityInsect = numpy.array(VelocityInsect)
+        PosInsect = numpy.array(PosInsect)
         #
         BestPosInsect = numpy.array(PosInsect)
         qBestPosInsect = []
-        Best = copy.copy(SpaceLow)
-        qBest = CostFunction(Best,self._parameters["QualityCriterion"])
+        _Best = copy.copy(SpaceLow)
+        _qualityBest = CostFunction(_Best,self._parameters["QualityCriterion"])
         NumberOfFunctionEvaluations += 1
         #
         for i in range(self._parameters["NumberOfInsects"]):
@@ -212,17 +205,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             quality = CostFunction(insect,self._parameters["QualityCriterion"])
             NumberOfFunctionEvaluations += 1
             qBestPosInsect.append(quality)
-            if quality < qBest:
-                Best  = copy.copy( insect )
-                qBest = copy.copy( quality )
-        logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest)))
+            if quality < _qualityBest:
+                _Best  = copy.copy( insect )
+                _qualityBest = copy.copy( quality )
+        logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(_Best), str(_qualityBest)))
         #
         self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
-            self.StoredVariables["CurrentState"].store( Best )
+            self.StoredVariables["CurrentState"].store( _Best )
         self.StoredVariables["CostFunctionJb"].store( 0. )
         self.StoredVariables["CostFunctionJo"].store( 0. )
-        self.StoredVariables["CostFunctionJ" ].store( qBest )
+        self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
@@ -232,57 +225,55 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 rp = numpy.random.uniform(size=nbparam)
                 rg = numpy.random.uniform(size=nbparam)
                 for j in range(nbparam) :
-                    VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] +  Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) +  Phig*rg[j]*(Best[j]-PosInsect[j,i])
+                    VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] +  Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) +  Phig*rg[j]*(_Best[j]-PosInsect[j,i])
                     PosInsect[j,i] = PosInsect[j,i]+VelocityInsect[j,i]
                 quality = CostFunction(insect,self._parameters["QualityCriterion"])
                 NumberOfFunctionEvaluations += 1
                 if quality < qBestPosInsect[i]:
                     BestPosInsect[:,i] = copy.copy( insect )
                     qBestPosInsect[i]  = copy.copy( quality )
-                    if quality < qBest :
-                        Best  = copy.copy( insect )
-                        qBest = copy.copy( quality )
-            logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest)))
+                    if quality < _qualityBest :
+                        _Best  = copy.copy( insect )
+                        _qualityBest = copy.copy( quality )
+            logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(_Best), str(_qualityBest)))
             #
             self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
-                self.StoredVariables["CurrentState"].store( Best )
+                self.StoredVariables["CurrentState"].store( _Best )
             if self._toStore("SimulatedObservationAtCurrentState"):
-                _HmX = Hm( numpy.asmatrix(numpy.ravel( Best )).T )
-                _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T
+                _HmX = Hm( _Best )
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX )
             self.StoredVariables["CostFunctionJb"].store( 0. )
             self.StoredVariables["CostFunctionJo"].store( 0. )
-            self.StoredVariables["CostFunctionJ" ].store( qBest )
+            self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
             if NumberOfFunctionEvaluations > self._parameters["MaximumNumberOfFunctionEvaluations"]:
                 logging.debug("%s Stopping search because the number %i of function evaluations is exceeding the maximum %i."%(self._name, NumberOfFunctionEvaluations, self._parameters["MaximumNumberOfFunctionEvaluations"]))
                 break
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(numpy.ravel( Best )).T
+        Xa = _Best
         #
-        self.StoredVariables["Analysis"].store( Xa.A1 )
+        self.StoredVariables["Analysis"].store( Xa )
         #
+        # Calculs et/ou stockages supplémentaires
+        # ---------------------------------------
+        if self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtOptimum"):
+            HXa = Hm(Xa)
         if self._toStore("Innovation") or \
             self._toStore("OMB") or \
             self._toStore("SimulatedObservationAtBackground"):
             HXb = Hm(Xb)
-            d = Y - HXb
-        if self._toStore("OMA") or \
-            self._toStore("SimulatedObservationAtOptimum"):
-            HXa = Hm(Xa)
-        #
-        # Calculs et/ou stockages supplémentaires
-        # ---------------------------------------
+            Innovation = Y - HXb
         if self._toStore("Innovation"):
-            self.StoredVariables["Innovation"].store( d )
+            self.StoredVariables["Innovation"].store( Innovation )
+        if self._toStore("OMB"):
+            self.StoredVariables["OMB"].store( Innovation )
         if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
         if self._toStore("OMA"):
             self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
-        if self._toStore("OMB"):
-            self.StoredVariables["OMB"].store( d )
         if self._toStore("SimulatedObservationAtBackground"):
             self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
         if self._toStore("SimulatedObservationAtOptimum"):
index 390e48f7de51dc6d8e10bf1ccd88244fe958a360..f806736862426d552022f49551aa6f3823e1221d 100644 (file)
@@ -107,28 +107,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         Hm = HO["Direct"].appliedTo
         #
-        # Utilisation éventuelle d'un vecteur H(Xb) précalculé
-        # ----------------------------------------------------
-        if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
-            HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
-        else:
-            HXb = Hm( Xb )
-        HXb = numpy.asmatrix(numpy.ravel( HXb )).T
-        if Y.size != HXb.size:
-            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
-        if max(Y.shape) != max(HXb.shape):
-            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
-        d  = Y - HXb
-        #
-        # Définition de la fonction-coût
-        # ------------------------------
         def CostFunction(x):
-            _X  = numpy.asmatrix(numpy.ravel( x )).T
+            _X = numpy.asarray(x).reshape((-1,1))
             if self._parameters["StoreInternalVariables"] or \
                 self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( _X )
-            _HX = Hm( _X )
-            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+            _HX = numpy.asarray(Hm( _X )).reshape((-1,1))
             if self._toStore("SimulatedObservationAtCurrentState"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
             Jb  = 0.
@@ -142,12 +126,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             return _HX
         #
         def GradientOfCostFunction(x):
-            _X      = numpy.asmatrix(numpy.ravel( x )).T
+            _X = numpy.asarray(x).reshape((-1,1))
             Hg = HO["Tangent"].asMatrix( _X )
             return Hg
         #
-        # Point de démarrage de l'optimisation
-        # ------------------------------------
         Xini = self._parameters["InitializationPoint"]
         #
         # Minimisation de la fonctionnelle
@@ -163,35 +145,37 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 toler       = self._parameters["CostDecrementTolerance"],
                 y           = Y,
                 )
-            nfeval = Informations[2]
-            rc     = Informations[4]
         else:
-            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
+            raise ValueError("Error in minimizer name: %s is unknown"%self._parameters["Minimizer"])
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
-        #
-        self.StoredVariables["Analysis"].store( Xa.A1 )
+        Xa = Minimum
         #
-        if self._toStore("OMA") or \
-            self._toStore("SimulatedObservationAtOptimum"):
-            HXa = Hm( Xa )
+        self.StoredVariables["Analysis"].store( Xa )
         #
         # Calculs et/ou stockages supplémentaires
         # ---------------------------------------
+        if self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtOptimum"):
+            HXa = Hm(Xa).reshape((-1,1))
+        if self._toStore("Innovation") or \
+            self._toStore("OMB") or \
+            self._toStore("SimulatedObservationAtBackground"):
+            HXb = Hm(Xb).reshape((-1,1))
+            Innovation = Y - HXb
         if self._toStore("Innovation"):
-            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+            self.StoredVariables["Innovation"].store( Innovation )
+        if self._toStore("OMB"):
+            self.StoredVariables["OMB"].store( Innovation )
         if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
         if self._toStore("OMA"):
-            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
-        if self._toStore("OMB"):
-            self.StoredVariables["OMB"].store( numpy.ravel(d) )
+            self.StoredVariables["OMA"].store( Y - HXa )
         if self._toStore("SimulatedObservationAtBackground"):
-            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
+            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
         if self._toStore("SimulatedObservationAtOptimum"):
-            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
         #
         self._post_run(HO)
         return 0
index a33ba994e92906cf03f18533d6ff895ef9ca5c09..b6bf80c918e5df27036c7239f0939a164083ca31 100644 (file)
@@ -65,22 +65,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = "AugmentedWeightedLeastSquares",
             typecast = str,
             message  = "Critère de qualité utilisé",
-            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
-                        "WeightedLeastSquares","WLS",
-                        "LeastSquares","LS","L2",
-                        "AbsoluteValue","L1",
-                        "MaximumError","ME"],
+            listval  = [
+                "AugmentedWeightedLeastSquares", "AWLS", "DA",
+                "WeightedLeastSquares", "WLS",
+                "LeastSquares", "LS", "L2",
+                "AbsoluteValue", "L1",
+                "MaximumError", "ME",
+                ],
             )
         self.defineRequiredParameter(
             name     = "NoiseHalfRange",
             default  = [],
-            typecast = numpy.matrix,
+            typecast = numpy.ravel,
             message  = "Demi-amplitude des perturbations uniformes centrées d'état pour chaque composante de l'état",
             )
         self.defineRequiredParameter(
             name     = "StandardDeviation",
             default  = [],
-            typecast = numpy.matrix,
+            typecast = numpy.ravel,
             message  = "Ecart-type des perturbations gaussiennes d'état pour chaque composante de l'état",
             )
         self.defineRequiredParameter(
@@ -135,7 +137,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["NoiseDistribution"] == "Uniform":
-            nrange = numpy.ravel(self._parameters["NoiseHalfRange"]) # Vecteur
+            nrange = self._parameters["NoiseHalfRange"] # Vecteur
             if nrange.size != Xb.size:
                 raise ValueError("Noise generation by Uniform distribution requires range for all variable increments. The actual noise half range vector is:\n%s"%nrange)
         elif self._parameters["NoiseDistribution"] == "Gaussian":
@@ -143,19 +145,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if sigma.size != Xb.size:
                 raise ValueError("Noise generation by Gaussian distribution requires standard deviation for all variable increments. The actual standard deviation vector is:\n%s"%sigma)
         #
-        # Opérateur d'observation
-        # -----------------------
         Hm = HO["Direct"].appliedTo
         #
-        # Précalcul des inversions de B et R
-        # ----------------------------------
         BI = B.getI()
         RI = R.getI()
         #
-        # Définition de la fonction de deplacement
-        # ----------------------------------------
         def Tweak( x, NoiseDistribution, NoiseAddingProbability ):
-            _X  = numpy.matrix(numpy.ravel( x )).T
+            _X  = numpy.array( x, dtype=float, copy=True ).ravel().reshape((-1,1))
             if NoiseDistribution == "Uniform":
                 for i in range(_X.size):
                     if NoiseAddingProbability >= numpy.random.uniform():
@@ -171,32 +167,49 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
             return _X
         #
-        def StateInList( x, TL ):
+        def StateInList( x, _TL ):
             _X  = numpy.ravel( x )
             _xInList = False
-            for state in TL:
+            for state in _TL:
                 if numpy.all(numpy.abs( _X - numpy.ravel(state) ) <= 1e-16*numpy.abs(_X)):
                     _xInList = True
             # if _xInList: import sys ; sys.exit()
             return _xInList
         #
+        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
+            _X  = numpy.ravel( x ).reshape((-1,1))
+            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
+            _Innovation = Y - _HX
+            #
+            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
+                if BI is None or RI is None:
+                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
+                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
+            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
+                if RI is None:
+                    raise ValueError("Observation error covariance matrix has to be properly defined!")
+                Jb  = 0.
+                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
+            elif QualityMeasure in ["LeastSquares","LS","L2"]:
+                Jb  = 0.
+                Jo  = 0.5 * _Innovation.T @ _Innovation
+            elif QualityMeasure in ["AbsoluteValue","L1"]:
+                Jb  = 0.
+                Jo  = numpy.sum( numpy.abs(_Innovation) )
+            elif QualityMeasure in ["MaximumError","ME"]:
+                Jb  = 0.
+                Jo  = numpy.max( numpy.abs(_Innovation) )
+            #
+            J   = float( Jb ) + float( Jo )
+            #
+            return J
+        #
         # Minimisation de la fonctionnelle
         # --------------------------------
         _n = 0
         _S = Xb
-        # _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] )
-        _qualityS = BasicObjects.CostFunction3D(
-                   _S,
-            _Hm  = Hm,
-            _BI  = BI,
-            _RI  = RI,
-            _Xb  = Xb,
-            _Y   = Y,
-            _SSC = self._parameters["StoreSupplementaryCalculations"],
-            _QM  = self._parameters["QualityCriterion"],
-            _SSV = self.StoredVariables,
-            _sSc = False,
-            )
+        _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] )
         _Best, _qualityBest   =   _S, _qualityS
         _TabuList = []
         _TabuList.append( _S )
@@ -205,34 +218,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if len(_TabuList) > self._parameters["LengthOfTabuList"]:
                 _TabuList.pop(0)
             _R = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] )
-            # _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] )
-            _qualityR = BasicObjects.CostFunction3D(
-                       _R,
-                _Hm  = Hm,
-                _BI  = BI,
-                _RI  = RI,
-                _Xb  = Xb,
-                _Y   = Y,
-                _SSC = self._parameters["StoreSupplementaryCalculations"],
-                _QM  = self._parameters["QualityCriterion"],
-                _SSV = self.StoredVariables,
-                _sSc = False,
-                )
+            _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] )
             for nbt in range(self._parameters["NumberOfElementaryPerturbations"]-1):
                 _W = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] )
-                # _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] )
-                _qualityW = BasicObjects.CostFunction3D(
-                           _W,
-                    _Hm  = Hm,
-                    _BI  = BI,
-                    _RI  = RI,
-                    _Xb  = Xb,
-                    _Y   = Y,
-                    _SSC = self._parameters["StoreSupplementaryCalculations"],
-                    _QM  = self._parameters["QualityCriterion"],
-                    _SSV = self.StoredVariables,
-                    _sSc = False,
-                    )
+                _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] )
                 if (not StateInList(_W, _TabuList)) and ( (_qualityW < _qualityR) or StateInList(_R,_TabuList) ):
                     _R, _qualityR   =   _W, _qualityW
             if (not StateInList( _R, _TabuList )) and (_qualityR < _qualityS):
@@ -241,46 +230,44 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if _qualityS < _qualityBest:
                 _Best, _qualityBest   =   _S, _qualityS
             #
+            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( _Best )
             if self._toStore("SimulatedObservationAtCurrentState"):
-                _HmX = Hm( numpy.asmatrix(numpy.ravel( _Best )).T )
-                _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T
+                _HmX = Hm( _Best )
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX )
-            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             self.StoredVariables["CostFunctionJb"].store( 0. )
             self.StoredVariables["CostFunctionJo"].store( 0. )
             self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
         #
         # Obtention de l'analyse
         # ----------------------
-        Xa = numpy.asmatrix(numpy.ravel( _Best )).T
+        Xa = _Best
         #
-        self.StoredVariables["Analysis"].store( Xa.A1 )
+        self.StoredVariables["Analysis"].store( Xa )
         #
+        # Calculs et/ou stockages supplémentaires
+        # ---------------------------------------
+        if self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtOptimum"):
+            HXa = Hm(Xa).reshape((-1,1))
         if self._toStore("Innovation") or \
             self._toStore("OMB") or \
             self._toStore("SimulatedObservationAtBackground"):
-            HXb = Hm(Xb)
-            d = Y - HXb
-        if self._toStore("OMA") or \
-           self._toStore("SimulatedObservationAtOptimum"):
-            HXa = Hm(Xa)
-        #
-        # Calculs et/ou stockages supplémentaires
-        # ---------------------------------------
+            HXb = Hm(Xb).reshape((-1,1))
+            Innovation = Y - HXb
         if self._toStore("Innovation"):
-            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+            self.StoredVariables["Innovation"].store( Innovation )
+        if self._toStore("OMB"):
+            self.StoredVariables["OMB"].store( Innovation )
         if self._toStore("BMA"):
             self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
         if self._toStore("OMA"):
-            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
-        if self._toStore("OMB"):
-            self.StoredVariables["OMB"].store( numpy.ravel(d) )
+            self.StoredVariables["OMA"].store( Y - HXa )
         if self._toStore("SimulatedObservationAtBackground"):
-            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
+            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
         if self._toStore("SimulatedObservationAtOptimum"):
-            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
         #
         self._post_run(HO)
         return 0
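
The Tweak function above perturbs each state component independently with
probability NoiseAddingProbability, drawing from the configured distribution.
A minimal sketch of the uniform branch (ranges and probability illustrative):

    # Hedged sketch of the uniform Tweak perturbation; not ADAO code.
    import numpy

    def tweak(x, half_range, p_add, rng=numpy.random.default_rng(3)):
        _X = numpy.array(x, dtype=float).ravel()
        for i in range(_X.size):
            if p_add >= rng.uniform():
                _X[i] += rng.uniform(-half_range[i], half_range[i])
        return _X.reshape((-1, 1))

    print(tweak([0., 0.], half_range=[1., 1.], p_add=0.5))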
index d4b9a27e279fdf9aa50ac02ae15fed6a941b9983..79db6e7ee88583c41a533b4527eddea56087f7f2 100644 (file)
@@ -568,15 +568,10 @@ class FullOperator(object):
         #
         if __appliedInX is not None:
             self.__FO["AppliedInX"] = {}
-            for key in list(__appliedInX.keys()):
-                if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
-                    # Pour le cas où l'on a une vraie matrice
-                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
-                elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
-                    # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
-                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
-                else:
-                    self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key],    numpy.float ).T
+            for key in __appliedInX:
+                if isinstance(__appliedInX[key], str):
+                    __appliedInX[key] = PlatformInfo.strvect2liststr( __appliedInX[key] )
+                self.__FO["AppliedInX"][key] = numpy.ravel( __appliedInX[key] ).reshape((-1,1))
         else:
             self.__FO["AppliedInX"] = None
 
index 3a01d6a268852e56de7d0328693dde2c6b368313..e0d484d4f5c5de267b51ffff510080d6c1bfd0c5 100644 (file)
@@ -322,7 +322,7 @@ def strmatrix2liststr( __strvect ):
     __strvect = __strvect.replace(","," ") # Blanc
     for s in ("]", ")"):
         __strvect = __strvect.replace(s,";") # "]" et ")" par ";"
-    __strvect = re.sub(';\s*;',';',__strvect)
+    __strvect = re.sub(r';\s*;',r';',__strvect)
     __strvect = __strvect.rstrip(";") # Après ^ et avant v
     __strmat = [l.split() for l in __strvect.split(";")]
     return __strmat
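
The raw-string prefixes added above avoid the invalid escape sequence warning
for '\s' in a plain string (a DeprecationWarning since Python 3.6, promoted
to SyntaxWarning in 3.12), without changing the regex. A quick illustration
of the substitution (input string illustrative):

    # Hedged sketch of the ';'-collapsing substitution above; not ADAO code.
    import re

    s = "1 2; ;3 4;"
    print(re.sub(r';\s*;', r';', s))    # -> "1 2;3 4;"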