From 3213a7ad0e568555ac362f55281eabf64a896006 Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Sun, 19 May 2019 17:22:13 +0200 Subject: [PATCH] Extend the range of supported versions for various modules (1) --- src/daComposant/daAlgorithms/3DVAR.py | 9 ++++++--- src/daComposant/daAlgorithms/4DVAR.py | 9 ++++++--- src/daComposant/daAlgorithms/EnsembleBlue.py | 19 +++++++++---------- .../daAlgorithms/EnsembleKalmanFilter.py | 4 ++-- .../daAlgorithms/NonLinearLeastSquares.py | 9 ++++++--- src/daComposant/daCore/Persistence.py | 16 +++++++--------- 6 files changed, 36 insertions(+), 30 deletions(-) diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index deb7367..231d963 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -22,7 +22,7 @@ import logging from daCore import BasicObjects -import numpy, scipy.optimize +import numpy, scipy.optimize, scipy.version # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -239,8 +239,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if self._parameters["Minimizer"] == "LBFGSB": # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b( - import lbfgsbhlt - Minimum, J_optimal, Informations = lbfgsbhlt.fmin_l_bfgs_b( + if "0.19" <= scipy.version.version <= "1.1.0": + import lbfgsbhlt as optimiseur + else: + import scipy.optimize as optimiseur + Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b( func = CostFunction, x0 = Xini, fprime = GradientOfCostFunction, diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py index 5c75872..20f2f86 100644 --- a/src/daComposant/daAlgorithms/4DVAR.py +++ b/src/daComposant/daAlgorithms/4DVAR.py @@ -22,7 +22,7 @@ import logging from daCore import BasicObjects -import numpy, scipy.optimize +import numpy, scipy.optimize, scipy.version # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -262,8 +262,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if self._parameters["Minimizer"] == "LBFGSB": # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b( - import lbfgsbhlt - Minimum, J_optimal, Informations = lbfgsbhlt.fmin_l_bfgs_b( + if "0.19" <= scipy.version.version <= "1.1.0": + import lbfgsbhlt as optimiseur + else: + import scipy.optimize as optimiseur + Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b( func = CostFunction, x0 = Xini, fprime = GradientOfCostFunction, diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py index 174bd51..ecc3acf 100644 --- a/src/daComposant/daAlgorithms/EnsembleBlue.py +++ b/src/daComposant/daAlgorithms/EnsembleBlue.py @@ -77,7 +77,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): for npar in range(DiagonaleR.size): bruit = numpy.random.normal(0,DiagonaleR[npar],nb_ens) EnsembleY[npar,:] = Y[npar] + bruit - EnsembleY = numpy.matrix(EnsembleY) # # Initialisation des opérateurs d'observation et de la matrice gain # ----------------------------------------------------------------- @@ -96,24 +95,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul du BLUE pour chaque membre de l'ensemble # ----------------------------------------------- for iens in range(nb_ens): - HXb = Hm * Xb[iens] + HXb = numpy.ravel(numpy.dot(Hm, Xb[iens])) if self._toStore("SimulatedObservationAtBackground"): - 
self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - d = EnsembleY[:,iens] - HXb + self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) + d = numpy.ravel(EnsembleY[:,iens]) - HXb if self._toStore("Innovation"): - self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - Xa = Xb[iens] + K*d + self.StoredVariables["Innovation"].store( d ) + Xa = numpy.ravel(Xb[iens]) + numpy.dot(K, d) self.StoredVariables["CurrentState"].store( Xa ) if self._toStore("SimulatedObservationAtCurrentState"): - self.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm * Xa ) + self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.dot(Hm, Xa) ) # # Fabrication de l'analyse # ------------------------ Members = self.StoredVariables["CurrentState"][-nb_ens:] - Xa = numpy.matrix( Members ).mean(axis=0) - self.StoredVariables["Analysis"].store( Xa.A1 ) + Xa = numpy.array( Members ).mean(axis=0) + self.StoredVariables["Analysis"].store( Xa ) if self._toStore("SimulatedObservationAtOptimum"): - self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel( Hm * Xa ) ) + self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.dot(Hm, Xa) ) # self._post_run(HO) return 0 diff --git a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py index 3ec8099..13b5f4f 100644 --- a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py +++ b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py @@ -172,7 +172,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if self._parameters["EstimationOf"] == "State": for i in range(__m): - qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn)).T + qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! 
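Note on the EnsembleBlue.py changes above: the matrix-style products (Hm * Xb[iens], K * d) and the .A1 flattening attribute are replaced by numpy.dot on plain arrays together with numpy.ravel, which gives the same numerical results while avoiding the numpy.matrix class, now discouraged by NumPy. A minimal sketch of that equivalence on made-up data (the array names and sizes below are illustrative only, not taken from the patch):

    import numpy

    Hm = numpy.array([[1., 0., 0.], [0., 1., 0.]])   # toy 2x3 observation operator
    xb = numpy.array([1., 2., 3.])                   # toy background state

    # matrix style, similar to the lines removed above (.A1 flattens a matrix to 1-D)
    hxb_old = (numpy.matrix(Hm) * numpy.matrix(xb).T).A1

    # array style, similar to the lines added above
    hxb_new = numpy.ravel(numpy.dot(Hm, xb))

    assert numpy.allclose(hxb_old, hxb_new)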
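Note on the numpy.random.multivariate_normal calls changed in EnsembleKalmanFilter.py (one in the hunk above, one in the next hunk): the added third argument is the size parameter. With a mean vector of length n, the returned array has shape size + (n,), so a size of (1, 1, 1) yields a (1, 1, 1, n) array holding a single draw, whereas omitting it yields a flat (n,) vector. A small shape check, with dimensions chosen arbitrarily for illustration:

    import numpy

    n = 3
    mean, cov = numpy.zeros(n), numpy.eye(n)

    print(numpy.random.multivariate_normal(mean, cov).shape)             # (3,)
    print(numpy.random.multivariate_normal(mean, cov, 5).shape)          # (5, 3)
    print(numpy.random.multivariate_normal(mean, cov, (1, 1, 1)).shape)  # (1, 1, 1, 3)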
@@ -197,7 +197,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): del PfHT, HPfHT # for i in range(__m): - ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn)).T + ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i]) # Xa = Xn.mean(axis=1, dtype=mfp) diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index c753c27..a77b09f 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -22,7 +22,7 @@ import logging from daCore import BasicObjects -import numpy, scipy.optimize +import numpy, scipy.optimize, scipy.version # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -224,8 +224,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if self._parameters["Minimizer"] == "LBFGSB": # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b( - import lbfgsbhlt - Minimum, J_optimal, Informations = lbfgsbhlt.fmin_l_bfgs_b( + if "0.19" <= scipy.version.version <= "1.1.0": + import lbfgsbhlt as optimiseur + else: + import scipy.optimize as optimiseur + Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b( func = CostFunction, x0 = Xini, fprime = GradientOfCostFunction, diff --git a/src/daComposant/daCore/Persistence.py b/src/daComposant/daCore/Persistence.py index ef263d8..ec1f280 100644 --- a/src/daComposant/daCore/Persistence.py +++ b/src/daComposant/daCore/Persistence.py @@ -276,7 +276,7 @@ class Persistence(object): élémentaires numpy. """ try: - return [numpy.matrix(item).mean() for item in self.__values] + return [numpy.array(item).mean() for item in self.__values] except: raise TypeError("Base type is incompatible with numpy") @@ -291,9 +291,9 @@ class Persistence(object): """ try: if numpy.version.version >= '1.1.0': - return [numpy.matrix(item).std(ddof=ddof) for item in self.__values] + return [numpy.array(item).std(ddof=ddof) for item in self.__values] else: - return [numpy.matrix(item).std() for item in self.__values] + return [numpy.array(item).std() for item in self.__values] except: raise TypeError("Base type is incompatible with numpy") @@ -304,7 +304,7 @@ class Persistence(object): numpy. """ try: - return [numpy.matrix(item).sum() for item in self.__values] + return [numpy.array(item).sum() for item in self.__values] except: raise TypeError("Base type is incompatible with numpy") @@ -315,7 +315,7 @@ class Persistence(object): numpy. """ try: - return [numpy.matrix(item).min() for item in self.__values] + return [numpy.array(item).min() for item in self.__values] except: raise TypeError("Base type is incompatible with numpy") @@ -326,7 +326,7 @@ class Persistence(object): numpy. 
""" try: - return [numpy.matrix(item).max() for item in self.__values] + return [numpy.array(item).max() for item in self.__values] except: raise TypeError("Base type is incompatible with numpy") @@ -460,6 +460,7 @@ class Persistence(object): eval(input('Please press return to continue...\n')) # --------------------------------------------------------- + # On pourrait aussi utiliser d'autres attributs d'un "array" comme "tofile" def mean(self): """ Renvoie la moyenne sur toutes les valeurs sans tenir compte de la @@ -535,9 +536,6 @@ class Persistence(object): except: raise TypeError("Base type is incompatible with numpy") - # On pourrait aussi utiliser les autres attributs d'une "matrix", comme - # "tofile", "min"... - def plot(self, steps = None, title = "", -- 2.39.2