else:
duration = 2
__p = numpy.array(Y).size
+ __n = Xb.size
#
# Précalcul des inversions de B et R
- if selfA._parameters["StoreInternalVariables"] \
- or selfA._toStore("CostFunctionJ") \
- or selfA._toStore("CostFunctionJb") \
- or selfA._toStore("CostFunctionJo") \
- or selfA._toStore("CurrentOptimum") \
- or selfA._toStore("APosterioriCovariance"):
- BI = B.getI()
+ if selfA._parameters["StoreInternalVariables"] or \
+ selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+ selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \
+ (__p > __n):
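+ # The added (__p > __n) case makes sure the inverses exist for the
+ # information-form update used below when observations outnumber state
+ # components; B may be a plain ndarray or an ADAO Covariance object
+ # providing getI(), hence the two inversion paths.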
+ if isinstance(B,numpy.ndarray):
+ BI = numpy.linalg.inv(B)
+ else:
+ BI = B.getI()
RI = R.getI()
#
- __n = Xb.size
nbPreviousSteps = len(selfA.StoredVariables["Analysis"])
#
if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
elif selfA._parameters["nextStep"]:
Xn = selfA._getInternalState("Xn")
Pn = selfA._getInternalState("Pn")
+ if hasattr(Pn,"asfullmatrix"):
+ Pn = Pn.asfullmatrix(Xn.size)
#
if selfA._parameters["EstimationOf"] == "Parameters":
XaMin = Xn
Cm = CM["Tangent"].asMatrix(Xn_predicted)
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
Xn_predicted = Xn_predicted + Cm @ Un
- Pn_predicted = Q + Mt * (Pn * Ma)
+ Pn_predicted = Q + Mt @ (Pn @ Ma)
elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
# --- > Par principe, M = Id, Q = 0
Xn_predicted = Xn
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
_Innovation = _Innovation - Cm @ Un
#
- Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
- Xn = Xn_predicted + Kn * _Innovation
- Pn = Pn_predicted - Kn * Ht * Pn_predicted
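+ # Two algebraically equivalent Kalman updates, chosen by dimension so the
+ # dense solve stays as small as possible:
+ #   p <= n : solve the p-by-p system (R + Ht Pf Ha) u = d          (standard form)
+ #   p >  n : solve the n-by-n system (Pf^-1 + Ha RI Ht) u = Ha RI d (information form)
+ # with Ht/Ha the tangent/adjoint observation operators and d the innovation.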
+ if Ynpu.size <= Xn.size:
+ _HNHt = numpy.dot(Ht, Pn_predicted @ Ha)
+ _A = R + _HNHt
+ _u = numpy.linalg.solve( _A , _Innovation )
+ Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1))
+ Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A))
+ else:
+ _HtRH = numpy.dot(Ha, RI @ Ht)
+ _A = numpy.linalg.inv(Pn_predicted) + _HtRH
+ _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) )
+ Xn = Xn_predicted + _u.reshape((-1,1))
+ Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size))
+ #
+ Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted)
+ Pn = (Pn + Pn.T) * 0.5 # Symétrie
+ Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité
#
if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
)
# nfeval = infodict['nfev']
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
#
else:
duration = 2
__p = numpy.array(Y).size
+ __n = Xb.size
#
# Précalcul des inversions de B et R
- if selfA._parameters["StoreInternalVariables"] \
- or selfA._toStore("CostFunctionJ") \
- or selfA._toStore("CostFunctionJb") \
- or selfA._toStore("CostFunctionJo") \
- or selfA._toStore("CurrentOptimum") \
- or selfA._toStore("APosterioriCovariance"):
- BI = B.getI()
+ if selfA._parameters["StoreInternalVariables"] or \
+ selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+ selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \
+ (__p > __n):
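+ # As above, (__p > __n) is included so the inverses are available for the
+ # information-form update below; the ndarray test gives B a direct
+ # numpy.linalg.inv path when it is not an ADAO Covariance object.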
+ if isinstance(B,numpy.ndarray):
+ BI = numpy.linalg.inv(B)
+ else:
+ BI = B.getI()
RI = R.getI()
#
- __n = Xb.size
nbPreviousSteps = len(selfA.StoredVariables["Analysis"])
#
if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
elif selfA._parameters["nextStep"]:
Xn = selfA._getInternalState("Xn")
Pn = selfA._getInternalState("Pn")
+ if hasattr(Pn,"asfullmatrix"):
+ Pn = Pn.asfullmatrix(Xn.size)
#
if selfA._parameters["EstimationOf"] == "Parameters":
XaMin = Xn
Cm = CM["Tangent"].asMatrix(Xn_predicted)
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
Xn_predicted = Xn_predicted + Cm @ Un
- Pn_predicted = Q + Mt * (Pn * Ma)
+ Pn_predicted = Q + Mt @ (Pn @ Ma)
elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
# --- > Par principe, M = Id, Q = 0
Xn_predicted = Xn
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
_Innovation = _Innovation - Cm @ Un
#
- Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
- Xn = Xn_predicted + Kn * _Innovation
- Pn = Pn_predicted - Kn * Ht * Pn_predicted
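+ # Same dimension-driven choice between the two equivalent Kalman updates:
+ #   p <= n : solve (R + Ht Pf Ha) u = d in observation space (standard form)
+ #   p >  n : solve (Pf^-1 + Ha RI Ht) u = Ha RI d in state space (information form)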
+ if Ynpu.size <= Xn.size:
+ _HNHt = numpy.dot(Ht, Pn_predicted @ Ha)
+ _A = R + _HNHt
+ _u = numpy.linalg.solve( _A , _Innovation )
+ Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1))
+ Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A))
+ else:
+ _HtRH = numpy.dot(Ha, RI @ Ht)
+ _A = numpy.linalg.inv(Pn_predicted) + _HtRH
+ _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) )
+ Xn = Xn_predicted + _u.reshape((-1,1))
+ Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size))
+ #
+ Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted)
+ Pn = (Pn + Pn.T) * 0.5 # Symétrie
+ Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité
#
Xa = Xn # Pointeurs
#--------------------------
full_output = True,
)
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin]
Derivees = numpy.array(fprime(variables))
Derivees = Derivees.reshape(n,p) # ADAO & check shape
DeriveesT = Derivees.transpose()
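+ # Weighted least-squares normal equations: M = J^T·diag(poids)·J, and the
+ # Gauss-Newton step solves M·step = -J^T·veps (veps is the residual-based
+ # right-hand side built earlier, outside this hunk).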
- M = numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
+ M = numpy.dot( DeriveesT , (numpy.array(p*[poids,]).T * Derivees) )
SM = numpy.transpose(numpy.dot( DeriveesT , veps ))
step = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
#
variables = variables + step
if bounds is not None:
# Attention : boucle infinie à éviter si un intervalle est trop petit
- while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
+ while( (variables < numpy.ravel(numpy.asarray(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asarray(bounds)[:,1])).any() ):
step = step/2.
variables = variables - step
residus = mesures - numpy.ravel( func(variables) )
full_output = True,
)
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin]
full_output = True,
)
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin]
full_output = True,
)
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
#
full_output = True,
)
else:
- raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"])
#
IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin]
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-import logging
+import numpy, logging, scipy.optimize
from daCore import BasicObjects, PlatformInfo
-import numpy, scipy.optimize
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
typecast = str,
message = "Critère de qualité utilisé",
listval = [
- "AugmentedWeightedLeastSquares","AWLS","DA",
- "WeightedLeastSquares","WLS",
- "LeastSquares","LS","L2",
- "AbsoluteValue","L1",
- "MaximumError","ME",
+ "AugmentedWeightedLeastSquares", "AWLS", "DA",
+ "WeightedLeastSquares", "WLS",
+ "LeastSquares", "LS", "L2",
+ "AbsoluteValue", "L1",
+ "MaximumError", "ME",
],
)
self.defineRequiredParameter(
message = "Liste des valeurs de bornes",
)
self.requireInputArguments(
- mandatory= ("Xb", "Y", "HO", "R", "B" ),
+ mandatory= ("Xb", "Y", "HO", "R", "B"),
)
self.setAttributes(tags=(
"Optimization",
logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA, POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
self._parameters["Minimizer"] = "SIMPLEX"
#
- # Opérateurs
- # ----------
Hm = HO["Direct"].appliedTo
#
- # Précalcul des inversions de B et R
- # ----------------------------------
BI = B.getI()
RI = R.getI()
#
- # Définition de la fonction-coût
- # ------------------------------
def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
- _X = numpy.asmatrix(numpy.ravel( x )).T
- self.StoredVariables["CurrentState"].store( _X )
- _HX = Hm( _X )
- _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+ _X = numpy.ravel( x ).reshape((-1,1))
+ _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
_Innovation = Y - _HX
+ self.StoredVariables["CurrentState"].store( _X )
if self._toStore("SimulatedObservationAtCurrentState") or \
self._toStore("SimulatedObservationAtCurrentOptimum"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
#
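+ # AWLS/DA is the classical 3D-Var functional
+ #   J(x) = 1/2 (x-Xb)^T BI (x-Xb) + 1/2 (Y-HX)^T RI (Y-HX)
+ # the other criteria keep only an observation term, with an L2, L1 or
+ # max-error norm on the innovation.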
if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
if BI is None or RI is None:
- raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
- Jb = 0.5 * (_X - Xb).T * (BI * (_X - Xb))
- Jo = 0.5 * _Innovation.T * (RI * _Innovation)
+ raise ValueError("Background and Observation error covariance matrices has to be properly defined!")
+ Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
if RI is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
Jb = 0.
- Jo = 0.5 * (_Innovation).T * RI * (_Innovation)
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
elif QualityMeasure in ["LeastSquares","LS","L2"]:
Jb = 0.
- Jo = 0.5 * (_Innovation).T * (_Innovation)
+ Jo = 0.5 * _Innovation.T @ _Innovation
elif QualityMeasure in ["AbsoluteValue","L1"]:
Jb = 0.
Jo = numpy.sum( numpy.abs(_Innovation) )
self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
return J
#
- # Point de démarrage de l'optimisation : Xini = Xb
- # ------------------------------------
Xini = numpy.ravel(Xb)
if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
raise ValueError("The minimizer %s can not be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
else:
- raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%self._parameters["Minimizer"])
#
IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
MinJ = self.StoredVariables["CostFunctionJ"][IndexMin]
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.ravel( Minimum )
+ Xa = Minimum
#
self.StoredVariables["Analysis"].store( Xa )
#
HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
else:
HXa = Hm(Xa)
+ HXa = HXa.reshape((-1,1))
if self._toStore("Innovation") or \
self._toStore("OMB") or \
self._toStore("SimulatedObservationAtBackground"):
- HXb = Hm(Xb)
- Innovation = Y - HXb
+ HXb = Hm(Xb).reshape((-1,1))
+ Innovation = Y - HXb
if self._toStore("Innovation"):
self.StoredVariables["Innovation"].store( Innovation )
if self._toStore("OMB"):
if self._toStore("BMA"):
self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
if self._toStore("OMA"):
- self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
+ self.StoredVariables["OMA"].store( Y - HXa )
if self._toStore("SimulatedObservationAtBackground"):
self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
if self._toStore("SimulatedObservationAtOptimum"):
self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
#
- self._post_run()
+ self._post_run(HO)
return 0
# ==============================================================================
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-import logging, numpy, scipy.optimize
+import numpy, logging, scipy.optimize
from daCore import BasicObjects
# ==============================================================================
message = "Nombre maximal d'évaluations de la fonction",
minval = -1,
)
+ self.defineRequiredParameter(
+ name = "SetSeed",
+ typecast = numpy.random.seed,
+ message = "Graine fixée pour le générateur aléatoire",
+ )
self.defineRequiredParameter(
name = "PopulationSize",
default = 100,
typecast = str,
message = "Critère de qualité utilisé",
listval = [
- "AugmentedWeightedLeastSquares","AWLS","DA",
- "WeightedLeastSquares","WLS",
- "LeastSquares","LS","L2",
- "AbsoluteValue","L1",
- "MaximumError","ME",
+ "AugmentedWeightedLeastSquares", "AWLS", "DA",
+ "WeightedLeastSquares", "WLS",
+ "LeastSquares", "LS", "L2",
+ "AbsoluteValue", "L1",
+ "MaximumError", "ME",
],
)
self.defineRequiredParameter(
"SimulatedObservationAtOptimum",
]
)
- self.defineRequiredParameter(
- name = "SetSeed",
- typecast = numpy.random.seed,
- message = "Graine fixée pour le générateur aléatoire",
- )
self.defineRequiredParameter( # Pas de type
name = "Bounds",
message = "Liste des valeurs de bornes",
)
self.requireInputArguments(
- mandatory= ("Xb", "Y", "HO", "R", "B" ),
+ mandatory= ("Xb", "Y", "HO", "R", "B"),
)
self.setAttributes(tags=(
"Optimization",
maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
#
- # Opérateurs
- # ----------
Hm = HO["Direct"].appliedTo
#
- # Précalcul des inversions de B et R
- # ----------------------------------
BI = B.getI()
RI = R.getI()
#
- # Définition de la fonction-coût
- # ------------------------------
def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
- _X = numpy.asmatrix(numpy.ravel( x )).T
- self.StoredVariables["CurrentState"].store( _X )
- _HX = Hm( _X )
- _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+ _X = numpy.ravel( x ).reshape((-1,1))
+ _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
_Innovation = Y - _HX
+ self.StoredVariables["CurrentState"].store( _X )
if self._toStore("SimulatedObservationAtCurrentState") or \
self._toStore("SimulatedObservationAtCurrentOptimum"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
#
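+ # As in the other algorithms, AWLS/DA is the 3D-Var functional
+ #   J(x) = 1/2 (x-Xb)^T BI (x-Xb) + 1/2 (Y-HX)^T RI (Y-HX)
+ # while the remaining criteria use only an innovation term.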
if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
if BI is None or RI is None:
- raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
- Jb = 0.5 * (_X - Xb).T * (BI * (_X - Xb))
- Jo = 0.5 * _Innovation.T * (RI * _Innovation)
+ raise ValueError("Background and Observation error covariance matrices has to be properly defined!")
+ Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
if RI is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
Jb = 0.
- Jo = 0.5 * (_Innovation).T * RI * (_Innovation)
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
elif QualityMeasure in ["LeastSquares","LS","L2"]:
Jb = 0.
- Jo = 0.5 * (_Innovation).T * (_Innovation)
+ Jo = 0.5 * _Innovation.T @ _Innovation
elif QualityMeasure in ["AbsoluteValue","L1"]:
Jb = 0.
Jo = numpy.sum( numpy.abs(_Innovation) )
self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
return J
#
- # Point de démarrage de l'optimisation : Xini = Xb
- # ------------------------------------
Xini = numpy.ravel(Xb)
#
# Minimisation de la fonctionnelle
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.ravel( Minimum )
+ Xa = Minimum
#
self.StoredVariables["Analysis"].store( Xa )
#
HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
else:
HXa = Hm(Xa)
+ HXa = HXa.reshape((-1,1))
if self._toStore("Innovation") or \
self._toStore("OMB") or \
self._toStore("SimulatedObservationAtBackground"):
- HXb = Hm(Xb)
- Innovation = Y - HXb
+ HXb = Hm(Xb).reshape((-1,1))
+ Innovation = Y - HXb
if self._toStore("Innovation"):
self.StoredVariables["Innovation"].store( Innovation )
if self._toStore("OMB"):
if self._toStore("BMA"):
self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
if self._toStore("OMA"):
- self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
+ self.StoredVariables["OMA"].store( Y - HXa )
if self._toStore("SimulatedObservationAtBackground"):
self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
if self._toStore("SimulatedObservationAtOptimum"):
self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
#
- self._post_run()
+ self._post_run(HO)
return 0
# ==============================================================================
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-import logging
+import numpy, logging, copy
from daCore import BasicObjects
-import numpy, copy
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
typecast = str,
message = "Critère de qualité utilisé",
listval = [
- "DA",
- "AugmentedWeightedLeastSquares", "AWLS",
+ "AugmentedWeightedLeastSquares", "AWLS", "DA",
"WeightedLeastSquares", "WLS",
"LeastSquares", "LS", "L2",
"AbsoluteValue", "L1",
"MaximumError", "ME",
],
- listadv = [
- "AugmentedPonderatedLeastSquares","APLS",
- "PonderatedLeastSquares","PLS",
- ]
)
self.defineRequiredParameter(
name = "StoreInternalVariables",
self.setAttributes(tags=(
"Optimization",
"NonLinear",
+ "MetaHeuristic",
"Population",
))
BI = B.getI()
RI = R.getI()
#
- # Définition de la fonction-coût
- # ------------------------------
def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
- _X = numpy.asmatrix(numpy.ravel( x )).T
- _HX = Hm( _X )
- _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+ _X = numpy.ravel( x ).reshape((-1,1))
+ _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
+ _Innovation = Y - _HX
#
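+ # Same family of quality criteria: the AWLS/DA case is the full
+ # background-plus-observation functional, the others reduce to an L2, L1
+ # or max-error norm on the innovation _Innovation = Y - HX.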
- if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","AugmentedPonderatedLeastSquares","APLS","DA"]:
+ if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
if BI is None or RI is None:
- raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
- Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
- Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
- elif QualityMeasure in ["WeightedLeastSquares","WLS","PonderatedLeastSquares","PLS"]:
+ raise ValueError("Background and Observation error covariance matrices has to be properly defined!")
+ Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
+ elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
if RI is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
Jb = 0.
- Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
elif QualityMeasure in ["LeastSquares","LS","L2"]:
Jb = 0.
- Jo = 0.5 * (Y - _HX).T * (Y - _HX)
+ Jo = 0.5 * _Innovation.T @ _Innovation
elif QualityMeasure in ["AbsoluteValue","L1"]:
Jb = 0.
- Jo = numpy.sum( numpy.abs(Y - _HX) )
+ Jo = numpy.sum( numpy.abs(_Innovation) )
elif QualityMeasure in ["MaximumError","ME"]:
Jb = 0.
- Jo = numpy.max( numpy.abs(Y - _HX) )
+ Jo = numpy.max( numpy.abs(_Innovation) )
#
J = float( Jb ) + float( Jo )
#
for i in range(nbparam) :
PosInsect.append(numpy.random.uniform(low=SpaceLow[i], high=SpaceUp[i], size=self._parameters["NumberOfInsects"]))
VelocityInsect.append(numpy.random.uniform(low=-LimitVelocity[i], high=LimitVelocity[i], size=self._parameters["NumberOfInsects"]))
- VelocityInsect = numpy.matrix(VelocityInsect)
- PosInsect = numpy.matrix(PosInsect)
+ VelocityInsect = numpy.array(VelocityInsect)
+ PosInsect = numpy.array(PosInsect)
#
BestPosInsect = numpy.array(PosInsect)
qBestPosInsect = []
- Best = copy.copy(SpaceLow)
- qBest = CostFunction(Best,self._parameters["QualityCriterion"])
+ _Best = copy.copy(SpaceLow)
+ _qualityBest = CostFunction(_Best,self._parameters["QualityCriterion"])
NumberOfFunctionEvaluations += 1
#
for i in range(self._parameters["NumberOfInsects"]):
quality = CostFunction(insect,self._parameters["QualityCriterion"])
NumberOfFunctionEvaluations += 1
qBestPosInsect.append(quality)
- if quality < qBest:
- Best = copy.copy( insect )
- qBest = copy.copy( quality )
- logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest)))
+ if quality < _qualityBest:
+ _Best = copy.copy( insect )
+ _qualityBest = copy.copy( quality )
+ logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(_Best), str(_qualityBest)))
#
self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
- self.StoredVariables["CurrentState"].store( Best )
+ self.StoredVariables["CurrentState"].store( _Best )
self.StoredVariables["CostFunctionJb"].store( 0. )
self.StoredVariables["CostFunctionJo"].store( 0. )
- self.StoredVariables["CostFunctionJ" ].store( qBest )
+ self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
#
# Minimisation de la fonctionnelle
# --------------------------------
rp = numpy.random.uniform(size=nbparam)
rg = numpy.random.uniform(size=nbparam)
for j in range(nbparam) :
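+ # Standard PSO velocity rule: inertia term plus a cognitive pull toward
+ # the insect's own best position and a social pull toward the swarm best,
+ # with uniform random weights rp, rg drawn in [0,1).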
- VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] + Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) + Phig*rg[j]*(Best[j]-PosInsect[j,i])
+ VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] + Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) + Phig*rg[j]*(_Best[j]-PosInsect[j,i])
PosInsect[j,i] = PosInsect[j,i]+VelocityInsect[j,i]
quality = CostFunction(insect,self._parameters["QualityCriterion"])
NumberOfFunctionEvaluations += 1
if quality < qBestPosInsect[i]:
BestPosInsect[:,i] = copy.copy( insect )
qBestPosInsect[i] = copy.copy( quality )
- if quality < qBest :
- Best = copy.copy( insect )
- qBest = copy.copy( quality )
- logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest)))
+ if quality < _qualityBest :
+ _Best = copy.copy( insect )
+ _qualityBest = copy.copy( quality )
+ logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(_Best), str(_qualityBest)))
#
self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
- self.StoredVariables["CurrentState"].store( Best )
+ self.StoredVariables["CurrentState"].store( _Best )
if self._toStore("SimulatedObservationAtCurrentState"):
- _HmX = Hm( numpy.asmatrix(numpy.ravel( Best )).T )
- _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T
+ _HmX = Hm( _Best )
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX )
self.StoredVariables["CostFunctionJb"].store( 0. )
self.StoredVariables["CostFunctionJo"].store( 0. )
- self.StoredVariables["CostFunctionJ" ].store( qBest )
+ self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
if NumberOfFunctionEvaluations > self._parameters["MaximumNumberOfFunctionEvaluations"]:
logging.debug("%s Stopping search because the number %i of function evaluations is exceeding the maximum %i."%(self._name, NumberOfFunctionEvaluations, self._parameters["MaximumNumberOfFunctionEvaluations"]))
break
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.asmatrix(numpy.ravel( Best )).T
+ Xa = _Best
#
- self.StoredVariables["Analysis"].store( Xa.A1 )
+ self.StoredVariables["Analysis"].store( Xa )
#
+ # Calculs et/ou stockages supplémentaires
+ # ---------------------------------------
+ if self._toStore("OMA") or \
+ self._toStore("SimulatedObservationAtOptimum"):
+ HXa = Hm(Xa)
if self._toStore("Innovation") or \
self._toStore("OMB") or \
self._toStore("SimulatedObservationAtBackground"):
HXb = Hm(Xb)
- d = Y - HXb
- if self._toStore("OMA") or \
- self._toStore("SimulatedObservationAtOptimum"):
- HXa = Hm(Xa)
- #
- # Calculs et/ou stockages supplémentaires
- # ---------------------------------------
+ Innovation = Y - HXb
if self._toStore("Innovation"):
- self.StoredVariables["Innovation"].store( d )
+ self.StoredVariables["Innovation"].store( Innovation )
+ if self._toStore("OMB"):
+ self.StoredVariables["OMB"].store( Innovation )
if self._toStore("BMA"):
self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
if self._toStore("OMA"):
self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
- if self._toStore("OMB"):
- self.StoredVariables["OMB"].store( d )
if self._toStore("SimulatedObservationAtBackground"):
self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
if self._toStore("SimulatedObservationAtOptimum"):
#
Hm = HO["Direct"].appliedTo
#
- # Utilisation éventuelle d'un vecteur H(Xb) précalculé
- # ----------------------------------------------------
- if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
- HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
- else:
- HXb = Hm( Xb )
- HXb = numpy.asmatrix(numpy.ravel( HXb )).T
- if Y.size != HXb.size:
- raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
- if max(Y.shape) != max(HXb.shape):
- raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
- d = Y - HXb
- #
- # Définition de la fonction-coût
- # ------------------------------
def CostFunction(x):
- _X = numpy.asmatrix(numpy.ravel( x )).T
+ _X = numpy.asarray(x).reshape((-1,1))
if self._parameters["StoreInternalVariables"] or \
self._toStore("CurrentState"):
self.StoredVariables["CurrentState"].store( _X )
- _HX = Hm( _X )
- _HX = numpy.asmatrix(numpy.ravel( _HX )).T
+ _HX = numpy.asarray(Hm( _X )).reshape((-1,1))
if self._toStore("SimulatedObservationAtCurrentState"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
Jb = 0.
return _HX
#
def GradientOfCostFunction(x):
- _X = numpy.asmatrix(numpy.ravel( x )).T
+ _X = numpy.asarray(x).reshape((-1,1))
Hg = HO["Tangent"].asMatrix( _X )
return Hg
#
- # Point de démarrage de l'optimisation
- # ------------------------------------
Xini = self._parameters["InitializationPoint"]
#
# Minimisation de la fonctionnelle
toler = self._parameters["CostDecrementTolerance"],
y = Y,
)
- nfeval = Informations[2]
- rc = Informations[4]
else:
- raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
+ raise ValueError("Error in minimizer name: %s is unkown"%self._parameters["Minimizer"])
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
- #
- self.StoredVariables["Analysis"].store( Xa.A1 )
+ Xa = Minimum
#
- if self._toStore("OMA") or \
- self._toStore("SimulatedObservationAtOptimum"):
- HXa = Hm( Xa )
+ self.StoredVariables["Analysis"].store( Xa )
#
# Calculs et/ou stockages supplémentaires
# ---------------------------------------
+ if self._toStore("OMA") or \
+ self._toStore("SimulatedObservationAtOptimum"):
+ HXa = Hm(Xa).reshape((-1,1))
+ if self._toStore("Innovation") or \
+ self._toStore("OMB") or \
+ self._toStore("SimulatedObservationAtBackground"):
+ HXb = Hm(Xb).reshape((-1,1))
+ Innovation = Y - HXb
if self._toStore("Innovation"):
- self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+ self.StoredVariables["Innovation"].store( Innovation )
+ if self._toStore("OMB"):
+ self.StoredVariables["OMB"].store( Innovation )
if self._toStore("BMA"):
self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
if self._toStore("OMA"):
- self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
- if self._toStore("OMB"):
- self.StoredVariables["OMB"].store( numpy.ravel(d) )
+ self.StoredVariables["OMA"].store( Y - HXa )
if self._toStore("SimulatedObservationAtBackground"):
- self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
+ self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
if self._toStore("SimulatedObservationAtOptimum"):
- self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
+ self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
#
self._post_run(HO)
return 0
default = "AugmentedWeightedLeastSquares",
typecast = str,
message = "Critère de qualité utilisé",
- listval = ["AugmentedWeightedLeastSquares","AWLS","DA",
- "WeightedLeastSquares","WLS",
- "LeastSquares","LS","L2",
- "AbsoluteValue","L1",
- "MaximumError","ME"],
+ listval = [
+ "AugmentedWeightedLeastSquares", "AWLS", "DA",
+ "WeightedLeastSquares", "WLS",
+ "LeastSquares", "LS", "L2",
+ "AbsoluteValue", "L1",
+ "MaximumError", "ME",
+ ],
)
self.defineRequiredParameter(
name = "NoiseHalfRange",
default = [],
- typecast = numpy.matrix,
+ typecast = numpy.ravel,
message = "Demi-amplitude des perturbations uniformes centrées d'état pour chaque composante de l'état",
)
self.defineRequiredParameter(
name = "StandardDeviation",
default = [],
- typecast = numpy.matrix,
+ typecast = numpy.ravel,
message = "Ecart-type des perturbations gaussiennes d'état pour chaque composante de l'état",
)
self.defineRequiredParameter(
self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
#
if self._parameters["NoiseDistribution"] == "Uniform":
- nrange = numpy.ravel(self._parameters["NoiseHalfRange"]) # Vecteur
+ nrange = self._parameters["NoiseHalfRange"] # Vecteur
if nrange.size != Xb.size:
raise ValueError("Noise generation by Uniform distribution requires range for all variable increments. The actual noise half range vector is:\n%s"%nrange)
elif self._parameters["NoiseDistribution"] == "Gaussian":
if sigma.size != Xb.size:
raise ValueError("Noise generation by Gaussian distribution requires standard deviation for all variable increments. The actual standard deviation vector is:\n%s"%sigma)
#
- # Opérateur d'observation
- # -----------------------
Hm = HO["Direct"].appliedTo
#
- # Précalcul des inversions de B et R
- # ----------------------------------
BI = B.getI()
RI = R.getI()
#
- # Définition de la fonction de deplacement
- # ----------------------------------------
def Tweak( x, NoiseDistribution, NoiseAddingProbability ):
- _X = numpy.matrix(numpy.ravel( x )).T
+ _X = numpy.array( x, dtype=float, copy=True ).ravel().reshape((-1,1))
if NoiseDistribution == "Uniform":
for i in range(_X.size):
if NoiseAddingProbability >= numpy.random.uniform():
#
return _X
#
- def StateInList( x, TL ):
+ def StateInList( x, _TL ):
_X = numpy.ravel( x )
_xInList = False
- for state in TL:
+ for state in _TL:
if numpy.all(numpy.abs( _X - numpy.ravel(state) ) <= 1e-16*numpy.abs(_X)):
_xInList = True
# if _xInList: import sys ; sys.exit()
return _xInList
#
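+ # Local quality functional replacing the former BasicObjects.CostFunction3D
+ # calls below (which were invoked with _sSc = False, i.e. without storing
+ # anything), so the criterion is evaluated with no storage side effects.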
+ def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
+ _X = numpy.ravel( x ).reshape((-1,1))
+ _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
+ _Innovation = Y - _HX
+ #
+ if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
+ if BI is None or RI is None:
+ raise ValueError("Background and Observation error covariance matrices has to be properly defined!")
+ Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
+ elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
+ if RI is None:
+ raise ValueError("Observation error covariance matrix has to be properly defined!")
+ Jb = 0.
+ Jo = 0.5 * _Innovation.T @ (RI @ _Innovation)
+ elif QualityMeasure in ["LeastSquares","LS","L2"]:
+ Jb = 0.
+ Jo = 0.5 * _Innovation.T @ _Innovation
+ elif QualityMeasure in ["AbsoluteValue","L1"]:
+ Jb = 0.
+ Jo = numpy.sum( numpy.abs(_Innovation) )
+ elif QualityMeasure in ["MaximumError","ME"]:
+ Jb = 0.
+ Jo = numpy.max( numpy.abs(_Innovation) )
+ #
+ J = float( Jb ) + float( Jo )
+ #
+ return J
+ #
# Minimisation de la fonctionnelle
# --------------------------------
_n = 0
_S = Xb
- # _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] )
- _qualityS = BasicObjects.CostFunction3D(
- _S,
- _Hm = Hm,
- _BI = BI,
- _RI = RI,
- _Xb = Xb,
- _Y = Y,
- _SSC = self._parameters["StoreSupplementaryCalculations"],
- _QM = self._parameters["QualityCriterion"],
- _SSV = self.StoredVariables,
- _sSc = False,
- )
+ _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] )
_Best, _qualityBest = _S, _qualityS
_TabuList = []
_TabuList.append( _S )
if len(_TabuList) > self._parameters["LengthOfTabuList"]:
_TabuList.pop(0)
_R = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] )
- # _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] )
- _qualityR = BasicObjects.CostFunction3D(
- _R,
- _Hm = Hm,
- _BI = BI,
- _RI = RI,
- _Xb = Xb,
- _Y = Y,
- _SSC = self._parameters["StoreSupplementaryCalculations"],
- _QM = self._parameters["QualityCriterion"],
- _SSV = self.StoredVariables,
- _sSc = False,
- )
+ _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] )
for nbt in range(self._parameters["NumberOfElementaryPerturbations"]-1):
_W = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] )
- # _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] )
- _qualityW = BasicObjects.CostFunction3D(
- _W,
- _Hm = Hm,
- _BI = BI,
- _RI = RI,
- _Xb = Xb,
- _Y = Y,
- _SSC = self._parameters["StoreSupplementaryCalculations"],
- _QM = self._parameters["QualityCriterion"],
- _SSV = self.StoredVariables,
- _sSc = False,
- )
+ _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] )
if (not StateInList(_W, _TabuList)) and ( (_qualityW < _qualityR) or StateInList(_R,_TabuList) ):
_R, _qualityR = _W, _qualityW
if (not StateInList( _R, _TabuList )) and (_qualityR < _qualityS):
if _qualityS < _qualityBest:
_Best, _qualityBest = _S, _qualityS
#
+ self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
self.StoredVariables["CurrentState"].store( _Best )
if self._toStore("SimulatedObservationAtCurrentState"):
- _HmX = Hm( numpy.asmatrix(numpy.ravel( _Best )).T )
- _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T
+ _HmX = Hm( _Best )
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX )
- self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
self.StoredVariables["CostFunctionJb"].store( 0. )
self.StoredVariables["CostFunctionJo"].store( 0. )
self.StoredVariables["CostFunctionJ" ].store( _qualityBest )
#
# Obtention de l'analyse
# ----------------------
- Xa = numpy.asmatrix(numpy.ravel( _Best )).T
+ Xa = _Best
#
- self.StoredVariables["Analysis"].store( Xa.A1 )
+ self.StoredVariables["Analysis"].store( Xa )
#
+ # Calculs et/ou stockages supplémentaires
+ # ---------------------------------------
+ if self._toStore("OMA") or \
+ self._toStore("SimulatedObservationAtOptimum"):
+ HXa = Hm(Xa).reshape((-1,1))
if self._toStore("Innovation") or \
self._toStore("OMB") or \
self._toStore("SimulatedObservationAtBackground"):
- HXb = Hm(Xb)
- d = Y - HXb
- if self._toStore("OMA") or \
- self._toStore("SimulatedObservationAtOptimum"):
- HXa = Hm(Xa)
- #
- # Calculs et/ou stockages supplémentaires
- # ---------------------------------------
+ HXb = Hm(Xb).reshape((-1,1))
+ Innovation = Y - HXb
if self._toStore("Innovation"):
- self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+ self.StoredVariables["Innovation"].store( Innovation )
+ if self._toStore("OMB"):
+ self.StoredVariables["OMB"].store( Innovation )
if self._toStore("BMA"):
self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
if self._toStore("OMA"):
- self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
- if self._toStore("OMB"):
- self.StoredVariables["OMB"].store( numpy.ravel(d) )
+ self.StoredVariables["OMA"].store( Y - HXa )
if self._toStore("SimulatedObservationAtBackground"):
- self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
+ self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
if self._toStore("SimulatedObservationAtOptimum"):
- self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
+ self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
#
self._post_run(HO)
return 0
#
if __appliedInX is not None:
self.__FO["AppliedInX"] = {}
- for key in list(__appliedInX.keys()):
- if type( __appliedInX[key] ) is type( numpy.matrix([]) ):
- # Pour le cas où l'on a une vraie matrice
- self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T
- elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1:
- # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
- self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T
- else:
- self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T
+ for key in __appliedInX:
+ if isinstance(__appliedInX[key], str):
+ __appliedInX[key] = PlatformInfo.strvect2liststr( __appliedInX[key] )
+ self.__FO["AppliedInX"][key] = numpy.ravel( __appliedInX[key] ).reshape((-1,1))
else:
self.__FO["AppliedInX"] = None
__strvect = __strvect.replace(","," ") # Blanc
for s in ("]", ")"):
__strvect = __strvect.replace(s,";") # "]" et ")" par ";"
- __strvect = re.sub(';\s*;',';',__strvect)
+ __strvect = re.sub(r';\s*;',r';',__strvect)
__strvect = __strvect.rstrip(";") # Après ^ et avant v
__strmat = [l.split() for l in __strvect.split(";")]
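+ # e.g. __strvect == "1 2;3 4" at this point yields [['1', '2'], ['3', '4']]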
return __strmat