From: Jean-Philippe ARGAUD Date: Fri, 21 Feb 2014 07:51:12 +0000 (+0100) Subject: Minor improvement of debug or error information X-Git-Tag: V7_4_0rc1~17^2~5 X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=432dbf39f890416df00609f11cdec2076ca29125;p=modules%2Fadao.git Minor improvement of debug or error information --- diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index 57fa078..e3e119e 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -343,6 +343,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py index 31bb832..3309a0c 100644 --- a/src/daComposant/daAlgorithms/AdjointTest.py +++ b/src/daComposant/daAlgorithms/AdjointTest.py @@ -157,6 +157,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): print "Results of adjoint check by \"%s\" formula:"%self._parameters["ResiduFormula"] print msgs # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index d83be73..796d701 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ 
b/src/daComposant/daAlgorithms/Blue.py @@ -201,6 +201,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py index a3db38e..4d3f382 100644 --- a/src/daComposant/daAlgorithms/EnsembleBlue.py +++ b/src/daComposant/daAlgorithms/EnsembleBlue.py @@ -92,6 +92,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Xa = numpy.matrix( Members ).mean(axis=0) self.StoredVariables["Analysis"].store( Xa.A1 ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) return 0 diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index ee14628..a63c01d 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -148,6 +148,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # --------------------------------- if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \ "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if (Y.size <= Xb.size) and (Y.size > 100): K = B * Ha * (R + Hm * B * Ha).I + elif (Y.size > Xb.size) and (Y.size > 100): K = (BI + Ha * RI * Hm).I * Ha * RI + else: pass # K deja calcule A = B - K * Hm * B 
if min(A.shape) != max(A.shape): raise ValueError("The %s a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape))) @@ -204,6 +207,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py index cf8406d..d8ab6a1 100644 --- a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py +++ b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py @@ -200,6 +200,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "BMA" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py index ac8d86d..180f417 100644 --- a/src/daComposant/daAlgorithms/FunctionTest.py +++ b/src/daComposant/daAlgorithms/FunctionTest.py @@ -29,7 +29,7 @@ import numpy, copy # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): def __init__(self): - 
BasicObjects.Algorithm.__init__(self, "REPEATEDFUNCTIONTEST") + BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST") self.defineRequiredParameter( name = "NumberOfPrintedDigits", default = 5, @@ -151,6 +151,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msg += ("\n %s\n"%("-"*75,)) print(msg) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py index e8edcc5..d92c852 100644 --- a/src/daComposant/daAlgorithms/GradientTest.py +++ b/src/daComposant/daAlgorithms/GradientTest.py @@ -262,6 +262,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): filename = str(self._parameters["ResultFile"])+".ps", ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py index 84a835b..b5b5085 100644 --- a/src/daComposant/daAlgorithms/KalmanFilter.py +++ b/src/daComposant/daAlgorithms/KalmanFilter.py @@ -175,6 +175,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "BMA" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) 
logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py index 5812df0..32c65eb 100644 --- a/src/daComposant/daAlgorithms/LinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py @@ -84,6 +84,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "OMA" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["OMA"].store( numpy.ravel(oma) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py index 768b89a..c93b814 100644 --- a/src/daComposant/daAlgorithms/LinearityTest.py +++ b/src/daComposant/daAlgorithms/LinearityTest.py @@ -287,6 +287,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): print "Results of linearity check by \"%s\" formula:"%self._parameters["ResiduFormula"] print msgs # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index 757cda1..a1bb549 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -291,6 +291,7 @@ class 
ElementaryAlgorithm(BasicObjects.Algorithm): if "OMB" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["OMB"].store( numpy.ravel(d) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py index aa3c236..07a1c9b 100644 --- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py +++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py @@ -236,6 +236,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "OMB" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["OMB"].store( numpy.ravel(d) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py index eebc24e..6022aed 100644 --- a/src/daComposant/daAlgorithms/QuantileRegression.py +++ b/src/daComposant/daAlgorithms/QuantileRegression.py @@ -170,6 +170,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "OMB" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["OMB"].store( numpy.ravel(d) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire 
utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py index f07dc70..8a42368 100644 --- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py +++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py @@ -296,6 +296,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "BMA" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # + logging.debug("%s Nombre d'évaluation(s) de l'opérateur d'observation direct/tangent/adjoint : %i/%i/%i"%(self._name, HO["Direct"].nbcalls()[0],HO["Tangent"].nbcalls()[0],HO["Adjoint"].nbcalls()[0])) logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) logging.debug("%s Terminé"%self._name) # diff --git a/src/daComposant/daCore/AssimilationStudy.py b/src/daComposant/daCore/AssimilationStudy.py index 25f9593..d6e2759 100644 --- a/src/daComposant/daCore/AssimilationStudy.py +++ b/src/daComposant/daCore/AssimilationStudy.py @@ -662,85 +662,85 @@ class AssimilationStudy: elif hasattr(self.__Xb,"shape"): if type(self.__Xb.shape) is tuple: __Xb_shape = self.__Xb.shape else: __Xb_shape = self.__Xb.shape() - else: raise TypeError("Xb has no attribute of shape: problem !") + else: raise TypeError("The background (Xb) has no attribute of shape: problem !") # if self.__Y is None: __Y_shape = (0,) elif hasattr(self.__Y,"size"): __Y_shape = (self.__Y.size,) elif hasattr(self.__Y,"shape"): if type(self.__Y.shape) is tuple: __Y_shape = self.__Y.shape else: __Y_shape = self.__Y.shape() - else: raise TypeError("Y has no attribute of shape: problem !") + else: raise TypeError("The observation (Y) has no attribute of shape: problem !") # if self.__U is None: __U_shape = (0,) elif hasattr(self.__U,"size"): __U_shape = (self.__U.size,) elif hasattr(self.__U,"shape"): if 
type(self.__U.shape) is tuple: __U_shape = self.__U.shape else: __U_shape = self.__U.shape() - else: raise TypeError("U has no attribute of shape: problem !") + else: raise TypeError("The control (U) has no attribute of shape: problem !") # if self.__B is None: __B_shape = (0,0) elif hasattr(self.__B,"shape"): if type(self.__B.shape) is tuple: __B_shape = self.__B.shape else: __B_shape = self.__B.shape() - else: raise TypeError("B has no attribute of shape: problem !") + else: raise TypeError("The a priori errors covariance matrix (B) has no attribute of shape: problem !") # if self.__R is None: __R_shape = (0,0) elif hasattr(self.__R,"shape"): if type(self.__R.shape) is tuple: __R_shape = self.__R.shape else: __R_shape = self.__R.shape() - else: raise TypeError("R has no attribute of shape: problem !") + else: raise TypeError("The observation errors covariance matrix (R) has no attribute of shape: problem !") # if self.__Q is None: __Q_shape = (0,0) elif hasattr(self.__Q,"shape"): if type(self.__Q.shape) is tuple: __Q_shape = self.__Q.shape else: __Q_shape = self.__Q.shape() - else: raise TypeError("Q has no attribute of shape: problem !") + else: raise TypeError("The evolution errors covariance matrix (Q) has no attribute of shape: problem !") # if len(self.__HO) == 0: __HO_shape = (0,0) elif type(self.__HO) is type({}): __HO_shape = (0,0) elif hasattr(self.__HO["Direct"],"shape"): if type(self.__HO["Direct"].shape) is tuple: __HO_shape = self.__HO["Direct"].shape - else: __HO_shape = self.__HO["Direct"].shape() - else: raise TypeError("H has no attribute of shape: problem !") + else: __HO_shape = self.__HO["Direct"].shape() + else: raise TypeError("The observation operator (H) has no attribute of shape: problem !") # if len(self.__EM) == 0: __EM_shape = (0,0) elif type(self.__EM) is type({}): __EM_shape = (0,0) elif hasattr(self.__EM["Direct"],"shape"): if type(self.__EM["Direct"].shape) is tuple: __EM_shape = self.__EM["Direct"].shape - else: __EM_shape = 
self.__EM["Direct"].shape() - else: raise TypeError("EM has no attribute of shape: problem !") + else: __EM_shape = self.__EM["Direct"].shape() + else: raise TypeError("The evolution model (EM) has no attribute of shape: problem !") # if len(self.__CM) == 0: __CM_shape = (0,0) elif type(self.__CM) is type({}): __CM_shape = (0,0) elif hasattr(self.__CM["Direct"],"shape"): if type(self.__CM["Direct"].shape) is tuple: __CM_shape = self.__CM["Direct"].shape - else: __CM_shape = self.__CM["Direct"].shape() - else: raise TypeError("CM has no attribute of shape: problem !") + else: __CM_shape = self.__CM["Direct"].shape() + else: raise TypeError("The control model (CM) has no attribute of shape: problem !") # # Vérification des conditions # --------------------------- if not( len(__Xb_shape) == 1 or min(__Xb_shape) == 1 ): - raise ValueError("Shape characteristic of Xb is incorrect: \"%s\""%(__Xb_shape,)) + raise ValueError("Shape characteristic of background (Xb) is incorrect: \"%s\"."%(__Xb_shape,)) if not( len(__Y_shape) == 1 or min(__Y_shape) == 1 ): - raise ValueError("Shape characteristic of Y is incorrect: \"%s\""%(__Y_shape,)) + raise ValueError("Shape characteristic of observation (Y) is incorrect: \"%s\"."%(__Y_shape,)) # if not( min(__B_shape) == max(__B_shape) ): - raise ValueError("Shape characteristic of B is incorrect: \"%s\""%(__B_shape,)) + raise ValueError("Shape characteristic of a priori errors covariance matrix (B) is incorrect: \"%s\"."%(__B_shape,)) if not( min(__R_shape) == max(__R_shape) ): - raise ValueError("Shape characteristic of R is incorrect: \"%s\""%(__R_shape,)) + raise ValueError("Shape characteristic of observation errors covariance matrix (R) is incorrect: \"%s\"."%(__R_shape,)) if not( min(__Q_shape) == max(__Q_shape) ): - raise ValueError("Shape characteristic of Q is incorrect: \"%s\""%(__Q_shape,)) + raise ValueError("Shape characteristic of evolution errors covariance matrix (Q) is incorrect: \"%s\"."%(__Q_shape,)) if not( 
min(__EM_shape) == max(__EM_shape) ): - raise ValueError("Shape characteristic of EM is incorrect: \"%s\""%(__EM_shape,)) + raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,)) # if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[1] == max(__Xb_shape) ): - raise ValueError("Shape characteristic of H \"%s\" and X \"%s\" are incompatible"%(__HO_shape,__Xb_shape)) + raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape)) if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[0] == max(__Y_shape) ): - raise ValueError("Shape characteristic of H \"%s\" and Y \"%s\" are incompatible"%(__HO_shape,__Y_shape)) + raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape)) if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ): - raise ValueError("Shape characteristic of H \"%s\" and B \"%s\" are incompatible"%(__HO_shape,__B_shape)) + raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape)) if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ): - raise ValueError("Shape characteristic of H \"%s\" and R \"%s\" are incompatible"%(__HO_shape,__R_shape)) + raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape)) # if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ): if self.__StoredInputs["AlgorithmName"] in ["EnsembleBlue",]: @@ -750,16 +750,22 @@ class AssimilationStudy: self.__Xb.store( numpy.matrix( numpy.ravel(member), 
numpy.float ).T ) __Xb_shape = min(__B_shape) else: - raise ValueError("Shape characteristic of B \"%s\" and Xb \"%s\" are incompatible"%(__B_shape,__Xb_shape)) + raise ValueError("Shape characteristic of a priori errors covariance matrix (B) \"%s\" and background (Xb) \"%s\" are incompatible."%(__B_shape,__Xb_shape)) # if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ): - raise ValueError("Shape characteristic of R \"%s\" and Y \"%s\" are incompatible"%(__R_shape,__Y_shape)) + raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape)) # if self.__EM is not None and len(self.__EM) > 0 and not(type(self.__EM) is type({})) and not( __EM_shape[1] == max(__Xb_shape) ): - raise ValueError("Shape characteristic of EM \"%s\" and X \"%s\" are incompatible"%(__EM_shape,__Xb_shape)) + raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape)) # if self.__CM is not None and len(self.__CM) > 0 and not(type(self.__CM) is type({})) and not( __CM_shape[1] == max(__U_shape) ): - raise ValueError("Shape characteristic of CM \"%s\" and U \"%s\" are incompatible"%(__CM_shape,__U_shape)) + raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape)) + # + if self.__StoredInputs.has_key("AlgorithmParameters") \ + and self.__StoredInputs["AlgorithmParameters"].has_key("Bounds") \ + and (type(self.__StoredInputs["AlgorithmParameters"]["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) \ + and (len(self.__StoredInputs["AlgorithmParameters"]["Bounds"]) != max(__Xb_shape)): + raise ValueError("The number \"%s\" of bound pairs for the state (X) components is different of the size \"%s\" of the state itself."%(len(self.__StoredInputs["AlgorithmParameters"]["Bounds"]),max(__Xb_shape))) # 
return 1 diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py index e4b6e90..27591c5 100644 --- a/src/daComposant/daCore/BasicObjects.py +++ b/src/daComposant/daCore/BasicObjects.py @@ -45,6 +45,7 @@ class Operator: - fromMethod : argument de type fonction Python - fromMatrix : argument adapté au constructeur numpy.matrix """ + self.__NbCallsAsMatrix, self.__NbCallsAsMethod = 0, 0 if fromMethod is not None: self.__Method = fromMethod self.__Matrix = None @@ -70,8 +71,10 @@ class Operator: - xValue : argument adapté pour appliquer l'opérateur """ if self.__Matrix is not None: + self.__NbCallsAsMatrix += 1 return self.__Matrix * xValue else: + self.__NbCallsAsMethod += 1 return self.__Method( xValue ) def appliedControledFormTo(self, (xValue, uValue) ): @@ -85,10 +88,13 @@ class Operator: - uValue : argument U adapté pour appliquer l'opérateur """ if self.__Matrix is not None: + self.__NbCallsAsMatrix += 1 return self.__Matrix * xValue elif uValue is not None: + self.__NbCallsAsMethod += 1 return self.__Method( (xValue, uValue) ) else: + self.__NbCallsAsMethod += 1 return self.__Method( xValue ) def appliedInXTo(self, (xNominal, xValue) ): @@ -105,8 +111,10 @@ class Operator: - xValue : argument adapté pour appliquer l'opérateur """ if self.__Matrix is not None: + self.__NbCallsAsMatrix += 1 return self.__Matrix * xValue else: + self.__NbCallsAsMethod += 1 return self.__Method( (xNominal, xValue) ) def asMatrix(self, ValueForMethodForm = "UnknownVoidValue"): @@ -114,8 +122,10 @@ class Operator: Permet de renvoyer l'opérateur sous la forme d'une matrice """ if self.__Matrix is not None: + self.__NbCallsAsMatrix += 1 return self.__Matrix elif ValueForMethodForm is not "UnknownVoidValue": # Ne pas utiliser "None" + self.__NbCallsAsMethod += 1 return numpy.matrix( self.__Method( (ValueForMethodForm, None) ) ) else: raise ValueError("Matrix form of the operator defined as a function/method requires to give an operating point.") @@ 
-130,6 +140,12 @@ class Operator: else: raise ValueError("Matrix form of the operator is not available, nor the shape") + def nbcalls(self): + """ + Renvoie le nombre d'évaluations de l'opérateurs (total, matrice, méthode) + """ + return (self.__NbCallsAsMatrix+self.__NbCallsAsMethod,self.__NbCallsAsMatrix,self.__NbCallsAsMethod) + # ============================================================================== class Algorithm: """