From bd9eda86ec988924fa98879466a94787c1bd80fe Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Thu, 1 Oct 2020 17:30:37 +0200
Subject: [PATCH] Improvement of algorithms arguments validation and tests

---
 .../Physical_simulation_functions.py          | 40 +++++++---------
 src/daComposant/daAlgorithms/3DVAR.py         |  2 +-
 src/daComposant/daAlgorithms/4DVAR.py         |  2 +-
 src/daComposant/daAlgorithms/AdjointTest.py   |  2 +-
 src/daComposant/daAlgorithms/Blue.py          |  2 +-
 .../DerivativeFreeOptimization.py             |  2 +-
 .../daAlgorithms/DifferentialEvolution.py     |  2 +-
 src/daComposant/daAlgorithms/EnsembleBlue.py  |  2 +-
 .../daAlgorithms/EnsembleKalmanFilter.py      |  2 +-
 src/daComposant/daAlgorithms/ExtendedBlue.py  |  2 +-
 .../daAlgorithms/ExtendedKalmanFilter.py      |  2 +-
 src/daComposant/daAlgorithms/FunctionTest.py  |  2 +-
 src/daComposant/daAlgorithms/GradientTest.py  |  2 +-
 .../daAlgorithms/InputValuesTest.py           |  2 +-
 src/daComposant/daAlgorithms/KalmanFilter.py  |  2 +-
 .../daAlgorithms/LinearLeastSquares.py        |  6 +--
 src/daComposant/daAlgorithms/LinearityTest.py |  2 +-
 .../daAlgorithms/LocalSensitivityTest.py      |  2 +-
 .../daAlgorithms/NonLinearLeastSquares.py     |  2 +-
 src/daComposant/daAlgorithms/ObserverTest.py  |  2 +-
 .../daAlgorithms/ParallelFunctionTest.py      |  2 +-
 .../daAlgorithms/ParticleSwarmOptimization.py |  8 ++--
 .../daAlgorithms/QuantileRegression.py        |  2 +-
 src/daComposant/daAlgorithms/SamplingTest.py  |  2 +-
 src/daComposant/daAlgorithms/TabuSearch.py    |  2 +-
 src/daComposant/daAlgorithms/TangentTest.py   |  2 +-
 .../daAlgorithms/UnscentedKalmanFilter.py     |  2 +-
 src/daComposant/daCore/BasicObjects.py        | 47 ++++++++++++++-----
 src/daComposant/daCore/NumericObjects.py      |  1 +
 29 files changed, 83 insertions(+), 67 deletions(-)

diff --git a/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py b/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py
index f65ecab..9426fd1 100644
--- a/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py
+++ b/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py
@@ -30,34 +30,28 @@ __doc__ = """
 """
 __author__ = "Jean-Philippe ARGAUD"
 #
-import os, numpy, time
-#
 # ==============================================================================
 #
+import os, numpy, time
+#
 def DirectOperator( XX ):
-    """ Direct non-linear simulation operator """
-    #
-    # ------------------------------------------> EXAMPLE TO BE REMOVED
-    if isinstance(XX, type(numpy.matrix([]))):  # EXAMPLE TO BE REMOVED
-        HX = XX.A1.tolist()                     # EXAMPLE TO BE REMOVED
-    elif isinstance(XX, type(numpy.array([]))): # EXAMPLE TO BE REMOVED
-        HX = numpy.matrix(XX).A1.tolist()       # EXAMPLE TO BE REMOVED
-    else:                                       # EXAMPLE TO BE REMOVED
-        HX = numpy.ravel(XX)                    # EXAMPLE TO BE REMOVED
-    # ------------------------------------------> EXAMPLE TO BE REMOVED
-    #
-    return numpy.array( HX )
-
-# ==============================================================================
-from adao.daCore.NumericObjects import FDApproximation
-FDA = FDApproximation( DirectOperator )
-TangentOperator = FDA.TangentOperator
-AdjointOperator = FDA.AdjointOperator
-
+    # Opérateur identité
+    return numpy.ravel(XX)
+#
+def TangentOperator( paire ):
+    # Opérateur identité
+    (XX, dX) = paire
+    return numpy.ravel(dX)
+#
+def AdjointOperator( paire ):
+    # Opérateur identité
+    (XX, YY) = paire
+    return numpy.ravel(YY)
+#
 # ==============================================================================
 if __name__ == "__main__":
-    print("")
+    print()
     print("AUTODIAGNOSTIC")
     print("==============")
@@ -66,4 +60,4 @@ if __name__ == "__main__":
     FX = DirectOperator( X0 )
     print("FX =", FX)
-    print("")
+    print()
diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index 9517df6..0a0f31f 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -147,7 +147,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py
index d39a5f1..0b191c6 100644
--- a/src/daComposant/daAlgorithms/4DVAR.py
+++ b/src/daComposant/daAlgorithms/4DVAR.py
@@ -118,7 +118,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py
index 31561ba..61ee8f9 100644
--- a/src/daComposant/daAlgorithms/AdjointTest.py
+++ b/src/daComposant/daAlgorithms/AdjointTest.py
@@ -89,7 +89,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index d8d5b86..4ff41be 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -104,7 +104,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
index d6b4cb2..cac169d 100644
--- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
+++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
@@ -119,7 +119,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
             logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA, POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
diff --git a/src/daComposant/daAlgorithms/DifferentialEvolution.py b/src/daComposant/daAlgorithms/DifferentialEvolution.py
index b36407a..4db00f7 100644
--- a/src/daComposant/daAlgorithms/DifferentialEvolution.py
+++ b/src/daComposant/daAlgorithms/DifferentialEvolution.py
@@ -149,7 +149,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         len_X = numpy.asarray(Xb).size
         popsize = round(self._parameters["PopulationSize"]/len_X)
diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py
index 75be71e..9a45f75 100644
--- a/src/daComposant/daAlgorithms/EnsembleBlue.py
+++ b/src/daComposant/daAlgorithms/EnsembleBlue.py
@@ -64,7 +64,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Précalcul des inversions de B et R
         # ----------------------------------
diff --git a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
index 5e68396..8905c21 100644
--- a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
@@ -96,7 +96,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py
index 6a9ebd5..73451e0 100644
--- a/src/daComposant/daAlgorithms/ExtendedBlue.py
+++ b/src/daComposant/daAlgorithms/ExtendedBlue.py
@@ -104,7 +104,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
index 1cd932a..6b831e5 100644
--- a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
@@ -93,7 +93,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py
index 2914c43..18986f8 100644
--- a/src/daComposant/daAlgorithms/FunctionTest.py
+++ b/src/daComposant/daAlgorithms/FunctionTest.py
@@ -76,7 +76,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py
index acde000..81814c3 100644
--- a/src/daComposant/daAlgorithms/GradientTest.py
+++ b/src/daComposant/daAlgorithms/GradientTest.py
@@ -114,7 +114,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         if self._parameters["ResiduFormula"] in ["Taylor", "TaylorOnNorm"]:
diff --git a/src/daComposant/daAlgorithms/InputValuesTest.py b/src/daComposant/daAlgorithms/InputValuesTest.py
index a49957a..7bda3da 100644
--- a/src/daComposant/daAlgorithms/InputValuesTest.py
+++ b/src/daComposant/daAlgorithms/InputValuesTest.py
@@ -71,7 +71,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         _p = self._parameters["NumberOfPrintedDigits"]
         numpy.set_printoptions(precision=_p)
diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py
index 4bc9686..bfa4167 100644
--- a/src/daComposant/daAlgorithms/KalmanFilter.py
+++ b/src/daComposant/daAlgorithms/KalmanFilter.py
@@ -82,7 +82,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py
index f36aa16..d63bbde 100644
--- a/src/daComposant/daAlgorithms/LinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py
@@ -65,11 +65,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
-        Hm = HO["Tangent"].asMatrix(None)
+        Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
-        Ha = HO["Adjoint"].asMatrix(None)
+        Ha = HO["Adjoint"].asMatrix(Xb)
         Ha = Ha.reshape(-1,Y.size) # ADAO & check shape
         #
         RI = R.getI()
diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py
index 02a0963..ca896bd 100644
--- a/src/daComposant/daAlgorithms/LinearityTest.py
+++ b/src/daComposant/daAlgorithms/LinearityTest.py
@@ -96,7 +96,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         def RMS(V1, V2):
             import math
diff --git a/src/daComposant/daAlgorithms/LocalSensitivityTest.py b/src/daComposant/daAlgorithms/LocalSensitivityTest.py
index f8ed934..d14c71c 100644
--- a/src/daComposant/daAlgorithms/LocalSensitivityTest.py
+++ b/src/daComposant/daAlgorithms/LocalSensitivityTest.py
@@ -53,7 +53,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["SetDebug"]:
             CUR_LEVEL = logging.getLogger().getEffectiveLevel()
diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
index 231764c..0d976d9 100644
--- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py
@@ -110,7 +110,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         # Correction pour pallier a un bug de TNC sur le retour du Minimum
         if "Minimizer" in self._parameters and self._parameters["Minimizer"] == "TNC":
diff --git a/src/daComposant/daAlgorithms/ObserverTest.py b/src/daComposant/daAlgorithms/ObserverTest.py
index c13f0aa..82551b8 100644
--- a/src/daComposant/daAlgorithms/ObserverTest.py
+++ b/src/daComposant/daAlgorithms/ObserverTest.py
@@ -33,7 +33,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         print("Results of observer check on all potential variables or commands,")
         print(" only activated on selected ones by explicit association.")
         print("")
diff --git a/src/daComposant/daAlgorithms/ParallelFunctionTest.py b/src/daComposant/daAlgorithms/ParallelFunctionTest.py
index 8fc2465..acb4dd2 100644
--- a/src/daComposant/daAlgorithms/ParallelFunctionTest.py
+++ b/src/daComposant/daAlgorithms/ParallelFunctionTest.py
@@ -76,7 +76,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
index 0b62667..3125052 100644
--- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
+++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
@@ -120,16 +120,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if ("BoxBounds" in self._parameters) and isinstance(self._parameters["BoxBounds"], (list, tuple)) and (len(self._parameters["BoxBounds"]) > 0):
             BoxBounds = self._parameters["BoxBounds"]
-            logging.debug("%s Prise en compte des bornes d'incréments de paramètres effectuee"%(self._name,))
+            logging.debug("%s Prise en compte des bornes d'incréments de paramètres effectuée"%(self._name,))
         else:
-            raise ValueError("Particle Swarm Optimization requires bounds on all variables to be given.")
+            raise ValueError("Particle Swarm Optimization requires bounds on all variables increments to be truly given (BoxBounds).")
         BoxBounds = numpy.array(BoxBounds)
         if numpy.isnan(BoxBounds).any():
-            raise ValueError("Particle Swarm Optimization requires bounds on all variables increments to be truly given, \"None\" is not allowed. The actual increments bounds are:\n%s"%BoxBounds)
+            raise ValueError("Particle Swarm Optimization requires bounds on all variables increments to be truly given (BoxBounds), \"None\" is not allowed. The actual increments bounds are:\n%s"%BoxBounds)
         #
         Phig = float( self._parameters["GroupRecallRate"] )
         Phip = 1. - Phig
diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py
index d50c3a4..c661567 100644
--- a/src/daComposant/daAlgorithms/QuantileRegression.py
+++ b/src/daComposant/daAlgorithms/QuantileRegression.py
@@ -96,7 +96,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/SamplingTest.py b/src/daComposant/daAlgorithms/SamplingTest.py
index d6fc163..f71d9f4 100644
--- a/src/daComposant/daAlgorithms/SamplingTest.py
+++ b/src/daComposant/daAlgorithms/SamplingTest.py
@@ -96,7 +96,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         #
diff --git a/src/daComposant/daAlgorithms/TabuSearch.py b/src/daComposant/daAlgorithms/TabuSearch.py
index 0b7da4e..f239a15 100644
--- a/src/daComposant/daAlgorithms/TabuSearch.py
+++ b/src/daComposant/daAlgorithms/TabuSearch.py
@@ -132,7 +132,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["NoiseDistribution"] == "Uniform":
             nrange = numpy.ravel(self._parameters["NoiseHalfRange"]) # Vecteur
diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py
index 6b507b4..7ad1ce8 100644
--- a/src/daComposant/daAlgorithms/TangentTest.py
+++ b/src/daComposant/daAlgorithms/TangentTest.py
@@ -96,7 +96,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
index 23d160e..496f5d3 100644
--- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
@@ -113,7 +113,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             ))

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py
index c27247f..a389360 100644
--- a/src/daComposant/daCore/BasicObjects.py
+++ b/src/daComposant/daCore/BasicObjects.py
@@ -680,7 +680,7 @@ class Algorithm(object):
         self.__canonical_parameter_name["algorithm"] = "Algorithm"
         self.__canonical_parameter_name["storesupplementarycalculations"] = "StoreSupplementaryCalculations"

-    def _pre_run(self, Parameters, Xb=None, Y=None, R=None, B=None, Q=None ):
+    def _pre_run(self, Parameters, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None ):
         "Pré-calcul"
         logging.debug("%s Lancement", self._name)
         logging.debug("%s Taille mémoire utilisée de %.0f Mio"%(self._name, self._m.getUsedMemory("Mio")))
@@ -691,36 +691,57 @@
         for k, v in self.__variable_names_not_public.items():
             if k not in self._parameters: self.__setParameters( {k:v} )
         #
-        # Corrections et compléments
-        def __test_vvalue(argument, variable, argname):
+        # Corrections et compléments des vecteurs
+        def __test_vvalue(argument, variable, argname, symbol=None):
+            if symbol is None: symbol = variable
             if argument is None:
                 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
-                    raise ValueError("%s %s vector %s has to be properly defined!"%(self._name,argname,variable))
+                    raise ValueError("%s %s vector %s is not set and has to be properly defined!"%(self._name,argname,symbol))
                 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
-                    logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,variable))
+                    logging.debug("%s %s vector %s is not set, but is optional."%(self._name,argname,symbol))
                 else:
-                    logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,variable))
+                    logging.debug("%s %s vector %s is not set, but is not required."%(self._name,argname,symbol))
             else:
-                logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,variable,numpy.array(argument).size))
+                logging.debug("%s %s vector %s is set, and its size is %i."%(self._name,argname,symbol,numpy.array(argument).size))
             return 0
         __test_vvalue( Xb, "Xb", "Background or initial state" )
         __test_vvalue( Y, "Y", "Observation" )
+        __test_vvalue( U, "U", "Control" )
         #
-        def __test_cvalue(argument, variable, argname):
+        # Corrections et compléments des covariances
+        def __test_cvalue(argument, variable, argname, symbol=None):
+            if symbol is None: symbol = variable
             if argument is None:
                 if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
-                    raise ValueError("%s %s error covariance matrix %s has to be properly defined!"%(self._name,argname,variable))
+                    raise ValueError("%s %s error covariance matrix %s is not set and has to be properly defined!"%(self._name,argname,symbol))
                 elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
-                    logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,variable))
+                    logging.debug("%s %s error covariance matrix %s is not set, but is optional."%(self._name,argname,symbol))
                 else:
-                    logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,variable))
+                    logging.debug("%s %s error covariance matrix %s is not set, but is not required."%(self._name,argname,symbol))
             else:
-                logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,variable))
+                logging.debug("%s %s error covariance matrix %s is set."%(self._name,argname,symbol))
             return 0
-        __test_cvalue( R, "R", "Observation" )
         __test_cvalue( B, "B", "Background" )
+        __test_cvalue( R, "R", "Observation" )
         __test_cvalue( Q, "Q", "Evolution" )
         #
+        # Corrections et compléments des opérateurs
+        def __test_ovalue(argument, variable, argname, symbol=None):
+            if symbol is None: symbol = variable
+            if argument is None or (isinstance(argument,dict) and len(argument)==0):
+                if variable in self.__required_inputs["RequiredInputValues"]["mandatory"]:
+                    raise ValueError("%s %s operator %s is not set and has to be properly defined!"%(self._name,argname,symbol))
+                elif variable in self.__required_inputs["RequiredInputValues"]["optional"]:
+                    logging.debug("%s %s operator %s is not set, but is optional."%(self._name,argname,symbol))
+                else:
+                    logging.debug("%s %s operator %s is not set, but is not required."%(self._name,argname,symbol))
+            else:
+                logging.debug("%s %s operator %s is set."%(self._name,argname,symbol))
+            return 0
+        __test_ovalue( HO, "HO", "Observation", "H" )
+        __test_ovalue( EM, "EM", "Evolution", "M" )
+        __test_ovalue( CM, "CM", "Control Model", "C" )
+        #
         if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
             logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
         else:
diff --git a/src/daComposant/daCore/NumericObjects.py b/src/daComposant/daCore/NumericObjects.py
index c09d5b0..97b0e36 100644
--- a/src/daComposant/daCore/NumericObjects.py
+++ b/src/daComposant/daCore/NumericObjects.py
@@ -483,6 +483,7 @@ def mmqr(
         #
         variables = variables + step
         if bounds is not None:
+            # Attention : boucle infinie à éviter si un intervalle est trop petit
             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
                 step      = step/2.
                 variables = variables - step
-- 
2.39.2
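
Note on the validation rule introduced above (a standalone sketch, not part of the patch): BasicObjects.Algorithm._pre_run now receives the operators HO, EM and CM and checks them with the nested helper __test_ovalue, where an operator given as None or as an empty dict counts as "not set"; a mandatory input raises a ValueError, while optional or unneeded ones are only logged. The self-contained Python sketch below reproduces that rule outside of ADAO; the helper name check_operator, the mandatory/optional tuples and the demonstration values are hypothetical and are not part of the ADAO API.

# -*- coding: utf-8 -*-
# Standalone sketch (hypothetical helper, not ADAO API) of the rule added in
# Algorithm._pre_run by __test_ovalue: an operator argument is "not set" when
# it is None or an empty dict; mandatory ones raise, the others are only logged.
import logging

def check_operator(argument, variable, argname, symbol, mandatory=(), optional=()):
    if argument is None or (isinstance(argument, dict) and len(argument) == 0):
        if variable in mandatory:
            raise ValueError("%s operator %s is not set and has to be properly defined!"%(argname, symbol))
        elif variable in optional:
            logging.debug("%s operator %s is not set, but is optional."%(argname, symbol))
        else:
            logging.debug("%s operator %s is not set, but is not required."%(argname, symbol))
    else:
        logging.debug("%s operator %s is set."%(argname, symbol))
    return 0

if __name__ == "__main__":
    # A filled operator dictionary passes, an empty one is rejected like None
    check_operator({"Direct": lambda x: x}, "HO", "Observation", "H", mandatory=("HO",))
    try:
        check_operator({}, "HO", "Observation", "H", mandatory=("HO",))
    except ValueError as e:
        print("Rejected as expected:", e)

Run standalone, the second call is rejected with the same kind of message that the patch raises for a missing mandatory HO, EM or CM.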