From: Jean-Philippe ARGAUD Date: Tue, 25 Oct 2022 18:12:02 +0000 (+0200) Subject: Minor documentation and code review corrections (32) X-Git-Tag: V9_10_0rc1~1 X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=c93e1d2aa493eafe6b942a1169a01732e4f0a27b;p=modules%2Fadao.git Minor documentation and code review corrections (32) Stability improvements --- diff --git a/bin/module_version.py b/bin/module_version.py index 7a5b255..87b8017 100644 --- a/bin/module_version.py +++ b/bin/module_version.py @@ -31,7 +31,7 @@ __all__ = [] name = "ADAO" version = "9.10.0" year = "2022" -date = "lundi 14 novembre 2022, 12:12:12 (UTC+0100)" +date = "lundi 12 décembre 2022, 12:12:12 (UTC+0100)" longname = name + ", a module for Data Assimilation and Optimization" cata = "V" + version.replace(".","_") diff --git a/doc/en/bibliography.rst b/doc/en/bibliography.rst index 5fa1ec8..ead80eb 100644 --- a/doc/en/bibliography.rst +++ b/doc/en/bibliography.rst @@ -37,6 +37,8 @@ exhaustive bibliography. .. [Asch16] Asch M., Bocquet M., Nodet M., *Data Assimilation - Methods, Algorithms and Applications*, SIAM, 2016 +.. [Barrault04] Barrault M., Maday Y., Nguyen N. C., Patera A. T., *An 'empirical interpolation' method: application to efficient reduced-basis discretization of partial differential equations*, Comptes Rendus Mathématique, 339(9), pp.667–672, 2004 + .. [Bishop01] Bishop C. H., Etherton B. J., Majumdar S. J., *Adaptive sampling with the ensemble transform Kalman filter. Part I: theoretical aspects*, Monthly Weather Review, 129, pp.420–436, 2001 .. [Bocquet04] Bocquet M., *Introduction aux principes et méthodes de l'assimilation de données en géophysique*, Lecture Notes, 2014 @@ -75,6 +77,8 @@ exhaustive bibliography. .. [Glover90] Glover F., *Tabu Search-Part II*, ORSA Journal on Computing, 2(1), pp.4-32, 1990 +.. [Gong18] Gong H., *Data assimilation with reduced basis and noisy measurement: Applications to nuclear reactor cores*, PhD Thesis, Sorbonne Université (France), 2018 + .. [Hamill00] Hamill T. M., Snyder C., *A Hybrid Ensemble Kalman Filter-3D Variational Analysis Scheme*, Monthly Weather Review, 128(8), pp.2905-2919, 2000 .. [Ide97] Ide K., Courtier P., Ghil M., Lorenc A. C., *Unified notation for data assimilation: operational, sequential and variational*, Journal of the Meteorological Society of Japan, 75(1B), pp.181-189, 1997 @@ -117,6 +121,8 @@ exhaustive bibliography. .. [Python] *Python programming language*, http://www.python.org/ +.. [Quarteroni16] Quarteroni A., Manzoni A., Negri F., *Reduced Basis Methods for Partial Differential Equations - An introduction*, Springer, 2016 + .. [R] *The R Project for Statistical Computing*, http://www.r-project.org/ .. [Rowan90] Rowan T., *Functional Stability Analysis of Numerical Algorithms*, Ph.D. thesis, Department of Computer Sciences, University of Texas at Austin, 1990 diff --git a/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst b/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst index 29db43e..80f25eb 100644 --- a/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst +++ b/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst @@ -35,21 +35,24 @@ Task algorithm "*MeasurementsOptimalPositioningTask*" .. warning:: - This algorithm is for now only available in textual user interface (TUI) and not in graphical user interface (GUI). + This algorithm is only available in textual user interface (TUI) and not in + graphical user interface (GUI). .. ------------------------------------ .. .. 
include:: snippets/Header2Algo01.rst This algorithm provides optimal positioning of measurement points by an EIM -(Empirical Interpolation Method) analysis, from a set of state vectors (usually -called "snapshots" in reduced basis methodology). +(Empirical Interpolation Method) analysis, in an iterative greedy way from a set +of state vectors (usually called "snapshots" in reduced basis methodology). Each of these state vectors is usually (but not necessarily) the result :math:`\mathbf{y}` of a simulation :math:`H` for a given set of parameters -:math:`\mathbf{x}=\mu`. +:math:`\mathbf{x}=\mu`. In its simplest use, if the set of state vectors is +pre-existing, it is only necessary to provide it through the algorithm options. -In its simplest use, if the set of state vectors is pre-existing, it is only -necessary to provide it through the algorithm options. +It is also possible to exclude a priori potential locations for optimal +measurement points, using the "*lcEIM*" analysis for a constrained positioning +search. .. ------------------------------------ .. .. include:: snippets/Header2Algo02.rst @@ -90,6 +93,8 @@ StoreSupplementaryCalculations Example : ``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}`` +.. include:: snippets/Variant_MOP.rst + .. ------------------------------------ .. .. include:: snippets/Header2Algo04.rst @@ -106,6 +111,8 @@ StoreSupplementaryCalculations .. ------------------------------------ .. .. _section_ref_algorithm_MeasurementsOptimalPositioningTask_examples: -.. include:: snippets/Header2Algo06.rst +.. include:: snippets/Header2Algo07.rst -- :ref:`section_ref_algorithm_FunctionTest` +- [Barrault04]_ +- [Gong18]_ +- [Quarteroni16]_ diff --git a/doc/en/snippets/Variant_3DVAR.rst b/doc/en/snippets/Variant_3DVAR.rst index 80b826a..cedc29f 100644 --- a/doc/en/snippets/Variant_3DVAR.rst +++ b/doc/en/snippets/Variant_3DVAR.rst @@ -6,9 +6,9 @@ pair: Variant ; 3DVAR-PSAS Variant - *Predifined name*. This key allows to choose one of the possible variants + *Predefined name*. This key allows to choose one of the possible variants for the main algorithm. The default variant is the original "3DVAR", and the - possible ones are + possible choices are "3DVAR" (Classical 3D Variational analysis), "3DVAR-VAN" (3D Variational Analysis with No inversion of B), "3DVAR-Incr" (Incremental 3DVAR), diff --git a/doc/en/snippets/Variant_EKF.rst b/doc/en/snippets/Variant_EKF.rst index 09f3cd8..e835057 100644 --- a/doc/en/snippets/Variant_EKF.rst +++ b/doc/en/snippets/Variant_EKF.rst @@ -4,11 +4,12 @@ pair: Variant ; CEKF Variant - *Predifined name*. This key allows to choose one of the possible variants for + *Predefined name*. This key allows to choose one of the possible variants for the main algorithm. The default variant is the constrained version "CEKF" of the original algorithm "EKF", and the possible choices are "EKF" (Extended Kalman Filter), "CEKF" (Constrained Extended Kalman Filter). + It is highly recommended to keep the default value. Example : ``{"Variant":"CEKF"}`` diff --git a/doc/en/snippets/Variant_EnKF.rst b/doc/en/snippets/Variant_EnKF.rst index 2322dc9..25b09b7 100644 --- a/doc/en/snippets/Variant_EnKF.rst +++ b/doc/en/snippets/Variant_EnKF.rst @@ -9,9 +9,9 @@ pair: Variant ; EnKS Variant - *Predifined name*. This key allows to choose one of the possible variants + *Predefined name*. This key allows to choose one of the possible variants for the main algorithm. 
The default variant is the original "EnKF" - formulation, and the possible ones are + formulation, and the possible choices are "EnKF" (Ensemble Kalman Filter), "ETKF" (Ensemble-Transform Kalman Filter), "ETKF-N" (Ensemble-Transform Kalman Filter), diff --git a/doc/en/snippets/Variant_MOP.rst b/doc/en/snippets/Variant_MOP.rst new file mode 100644 index 0000000..a8def45 --- /dev/null +++ b/doc/en/snippets/Variant_MOP.rst @@ -0,0 +1,15 @@ +.. index:: + single: Variant + pair: Variant ; PositioningByEIM + pair: Variant ; PositioningBylcEIM + +Variant + *Predefined name*. This key allows to choose one of the possible variants + for the optimal positioning search. The default variant is "PositioningBylcEIM", + which is constrained by excluded locations, and the possible choices are + "PositioningByEIM" (using the original EIM algorithm), + "PositioningBylcEIM" (using the EIM constrained by excluded locations, named "Location Constrained EIM"). + It is highly recommended to keep the default value. + + Example : + ``{"Variant":"PositioningBylcEIM"}`` diff --git a/doc/en/snippets/Variant_UKF.rst b/doc/en/snippets/Variant_UKF.rst index 64c6678..a6a2ef4 100644 --- a/doc/en/snippets/Variant_UKF.rst +++ b/doc/en/snippets/Variant_UKF.rst @@ -4,11 +4,12 @@ pair: Variant ; 2UKF Variant - *Predifined name*. This key allows to choose one of the possible variants for + *Predefined name*. This key allows to choose one of the possible variants for the main algorithm. The default variant is the constrained version "2UKF" of the original algorithm "UKF", and the possible choices are "UKF" (Unscented Kalman Filter), "2UKF" (Constrained Unscented Kalman Filter). + It is highly recommended to keep the default value. Example : ``{"Variant":"2UKF"}`` diff --git a/doc/fr/bibliography.rst b/doc/fr/bibliography.rst index c5506d9..fbfd87c 100644 --- a/doc/fr/bibliography.rst +++ b/doc/fr/bibliography.rst @@ -37,6 +37,8 @@ néanmoins d'intention de constituer une bibliographie exhaustive. .. [Asch16] Asch M., Bocquet M., Nodet M., *Data Assimilation - Methods, Algorithms and Applications*, SIAM, 2016 +.. [Barrault04] Barrault M., Maday Y., Nguyen N. C., Patera A. T., *An 'empirical interpolation' method: application to efficient reduced-basis discretization of partial differential equations*, Comptes Rendus Mathématique, 339(9), pp.667–672, 2004 + .. [Bishop01] Bishop C. H., Etherton B. J., Majumdar S. J., *Adaptive sampling with the ensemble transform Kalman filter. Part I: theoretical aspects*, Monthly Weather Review, 129, pp.420–436, 2001 .. [Bocquet04] Bocquet M., *Introduction aux principes et méthodes de l'assimilation de données en géophysique*, Lecture Notes, 2014 @@ -75,6 +77,8 @@ néanmoins d'intention de constituer une bibliographie exhaustive. .. [Glover90] Glover F., *Tabu Search-Part II*, ORSA Journal on Computing, 2(1), pp.4-32, 1990 +.. [Gong18] Gong H., *Data assimilation with reduced basis and noisy measurement: Applications to nuclear reactor cores*, PhD Thesis, Sorbonne Université (France), 2018 + .. [Hamill00] Hamill T. M., Snyder C., *A Hybrid Ensemble Kalman Filter-3D Variational Analysis Scheme*, Monthly Weather Review, 128(8), pp.2905-2919, 2000 .. [Ide97] Ide K., Courtier P., Ghil M., Lorenc A. C., *Unified notation for data assimilation: operational, sequential and variational*, Journal of the Meteorological Society of Japan, 75(1B), pp.181-189, 1997 @@ -117,6 +121,8 @@ néanmoins d'intention de constituer une bibliographie exhaustive. .. 
[Python] *Python programming language*, http://www.python.org/ +.. [Quarteroni16] Quarteroni A., Manzoni A., Negri F., *Reduced Basis Methods for Partial Differential Equations - An introduction*, Springer, 2016 + .. [R] *The R Project for Statistical Computing*, http://www.r-project.org/ .. [Rowan90] Rowan T., *Functional Stability Analysis of Numerical Algorithms*, Ph.D. thesis, Department of Computer Sciences, University of Texas at Austin, 1990 diff --git a/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst b/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst index 9886fe0..c795791 100644 --- a/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst +++ b/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst @@ -35,23 +35,26 @@ Algorithme de tâche "*MeasurementsOptimalPositioningTask*" .. warning:: - Cet algorithme n'est pour l'instant utilisable qu'en interface textuelle - (TUI) et pas en interface graphique (GUI). + Cet algorithme n'est utilisable qu'en interface textuelle (TUI) et pas en + interface graphique (GUI). .. ------------------------------------ .. .. include:: snippets/Header2Algo01.rst Cet algorithme permet d'établir la position de points de mesures optimaux par -une analyse EIM (Empirical Interpolation Method), à partir d'un ensemble de -vecteurs d'état (usuellement appelés "*snapshots*" en méthodologie de bases -réduites). +une analyse EIM (Empirical Interpolation Method), de manière itérative à partir +d'un ensemble de vecteurs d'état (usuellement appelés "*snapshots*" en +méthodologie de bases réduites). Chacun de ces vecteurs d'état est habituellement (mais pas obligatoirement) le résultat :math:`\mathbf{y}` d'une simulation :math:`H` pour un jeu de -paramètres donné :math:`\mathbf{x}=\mu`. +paramètres donné :math:`\mathbf{x}=\mu`. Dans son usage le plus simple, si +l'ensemble des vecteurs d'état est pré-existant, il suffit de le fournir par +les options d'algorithme. -Dans son usage le plus simple, si l'ensemble des vecteurs d'état est -pré-existant, il suffit de le fournir par les options d'algorithme. +Il est aussi possible d'exclure a priori des positions potentielles pour les +points de mesures optimaux, en utilisant l'analyse "*lcEIM*" pour une recherche +de positionnement contraint. .. ------------------------------------ .. .. include:: snippets/Header2Algo02.rst @@ -92,6 +95,8 @@ StoreSupplementaryCalculations Exemple : ``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}`` +.. include:: snippets/Variant_MOP.rst + .. ------------------------------------ .. .. include:: snippets/Header2Algo04.rst @@ -108,6 +113,8 @@ StoreSupplementaryCalculations .. ------------------------------------ .. .. _section_ref_algorithm_MeasurementsOptimalPositioningTask_examples: -.. include:: snippets/Header2Algo06.rst +.. include:: snippets/Header2Algo07.rst -- :ref:`section_ref_algorithm_FunctionTest` +- [Barrault04]_ +- [Gong18]_ +- [Quarteroni16]_ diff --git a/doc/fr/snippets/Variant_3DVAR.rst b/doc/fr/snippets/Variant_3DVAR.rst index 84f9fd6..86284c6 100644 --- a/doc/fr/snippets/Variant_3DVAR.rst +++ b/doc/fr/snippets/Variant_3DVAR.rst @@ -13,7 +13,7 @@ Variant "3DVAR-VAN" (3D Variational Analysis with No inversion of B), "3DVAR-Incr" (Incremental 3DVAR), "3DVAR-PSAS" (Physical-space Statistical Analysis Scheme for 3DVAR), - Il est fortement conseillé de conserver la valeur par défaut. + Il est fortement recommandé de conserver la valeur par défaut. 
Exemple : ``{"Variant":"3DVAR"}`` diff --git a/doc/fr/snippets/Variant_EKF.rst b/doc/fr/snippets/Variant_EKF.rst index 5c97ed8..badcbac 100644 --- a/doc/fr/snippets/Variant_EKF.rst +++ b/doc/fr/snippets/Variant_EKF.rst @@ -9,6 +9,7 @@ Variant "CEKF" de l'algorithme original "EKF", et les choix possibles sont "EKF" (Extended Kalman Filter), "CEKF" (Constrained Extended Kalman Filter). + Il est fortement recommandé de conserver la valeur par défaut. Exemple : ``{"Variant":"CEKF"}`` diff --git a/doc/fr/snippets/Variant_MOP.rst b/doc/fr/snippets/Variant_MOP.rst new file mode 100644 index 0000000..72279fd --- /dev/null +++ b/doc/fr/snippets/Variant_MOP.rst @@ -0,0 +1,16 @@ +.. index:: + single: Variant + pair: Variant ; PositioningByEIM + pair: Variant ; PositioningBylcEIM + +Variant + *Nom prédéfini*. Cette clé permet de choisir l'une des variantes possibles + pour la recherche du positionnement optimal. La variante par défaut est la + version contrainte par des positions exclues "PositioningBylcEIM", et les + choix possibles sont + "PositioningByEIM" (utilisant l'algorithme EIM original), + "PositioningBylcEIM" (utilisant l'algorithme EIM contraint par des positions exclues, nommé "Location Constrained EIM"). + Il est fortement recommandé de conserver la valeur par défaut. + + Exemple : + ``{"Variant":"PositioningBylcEIM"}`` diff --git a/doc/fr/snippets/Variant_UKF.rst b/doc/fr/snippets/Variant_UKF.rst index 069a6ff..4696838 100644 --- a/doc/fr/snippets/Variant_UKF.rst +++ b/doc/fr/snippets/Variant_UKF.rst @@ -9,6 +9,7 @@ Variant "2UKF" de l'algorithme original "UKF", et les choix possibles sont "UKF" (Unscented Kalman Filter), "2UKF" (Constrained Unscented Kalman Filter). + Il est fortement recommandé de conserver la valeur par défaut. Exemple : ``{"Variant":"2UKF"}`` diff --git a/src/daComposant/daAlgorithms/Atoms/ecweim.py b/src/daComposant/daAlgorithms/Atoms/ecweim.py index a783407..39e9f70 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecweim.py +++ b/src/daComposant/daAlgorithms/Atoms/ecweim.py @@ -40,18 +40,22 @@ def EIM_offline(selfA, Verbose = False): elif isinstance(selfA._parameters["EnsembleOfSnapshots"], (list,tuple)): __EOS = numpy.asarray(selfA._parameters["EnsembleOfSnapshots"]).T else: - raise ValueError("EOS has to be an array/matrix (each column is a snapshot vector) or a list/tuple (each element is a snapshot vector).") + raise ValueError("EnsembleOfSnapshots has to be an array/matrix (each column is a snapshot vector) or a list/tuple (each element is a snapshot vector).") # if selfA._parameters["ErrorNorm"] == "L2": MaxNormByColumn = MaxL2NormByColumn else: MaxNormByColumn = MaxLinfNormByColumn # - if "ExcludeLocations" in selfA._parameters: + if selfA._parameters["Variant"] == "PositioningByEIM": + __LcCsts = False + else: + __LcCsts = True + if __LcCsts and "ExcludeLocations" in selfA._parameters: __ExcludedMagicPoints = selfA._parameters["ExcludeLocations"] else: __ExcludedMagicPoints = [] - if len(__ExcludedMagicPoints) > 0: + if __LcCsts and len(__ExcludedMagicPoints) > 0: __ExcludedMagicPoints = numpy.ravel(numpy.asarray(__ExcludedMagicPoints, dtype=int)) __IncludedMagicPoints = numpy.setdiff1d( numpy.arange(__EOS.shape[0]), @@ -85,7 +89,7 @@ def EIM_offline(selfA, Verbose = False): __iM = -1 __rhoM = numpy.empty(__dimS) # - __eM, __muM = MaxNormByColumn(__EOS, __IncludedMagicPoints) + __eM, __muM = MaxNormByColumn(__EOS, __LcCsts, __IncludedMagicPoints) __residuM = __EOS[:,__muM] __errors.append(__eM) # @@ -101,7 +105,7 @@ def EIM_offline(selfA, Verbose = False): 
__iM = numpy.argmax(__abs_residuM) __rhoM = __residuM / __abs_residuM[__iM] # - if __iM in __ExcludedMagicPoints: + if __LcCsts and __iM in __ExcludedMagicPoints: __sIndices = numpy.argsort(__abs_residuM) __rang = -1 assert __iM == __sIndices[__rang] @@ -130,7 +134,7 @@ def EIM_offline(selfA, Verbose = False): __interpolator = numpy.outer(__Q,numpy.outer(__Qi_inv,__restrictedEOSi)) # __dataForNextIter = __EOS - __interpolator - __eM, __muM = MaxNormByColumn(__dataForNextIter, __IncludedMagicPoints) + __eM, __muM = MaxNormByColumn(__dataForNextIter, __LcCsts, __IncludedMagicPoints) __errors.append(__eM) # __residuM = __dataForNextIter[:,__muM] @@ -150,9 +154,9 @@ def EIM_online(selfA, QEIM, mu, iEIM): raise NotImplementedError() # ============================================================================== -def MaxL2NormByColumn(Ensemble, IncludedPoints=[]): +def MaxL2NormByColumn(Ensemble, LcCsts = False, IncludedPoints = []): nmax, imax = -1, -1 - if len(IncludedPoints) > 0: + if LcCsts and len(IncludedPoints) > 0: for indice in range(Ensemble.shape[1]): norme = numpy.linalg.norm( numpy.take(Ensemble[:,indice], IncludedPoints, mode='clip'), @@ -168,9 +172,9 @@ def MaxL2NormByColumn(Ensemble, IncludedPoints=[]): nmax, imax, = norme, indice return nmax, imax -def MaxLinfNormByColumn(Ensemble, IncludedPoints=[]): +def MaxLinfNormByColumn(Ensemble, LcCsts = False, IncludedPoints = []): nmax, imax = -1, -1 - if len(IncludedPoints) > 0: + if LcCsts and len(IncludedPoints) > 0: for indice in range(Ensemble.shape[1]): norme = numpy.linalg.norm( numpy.take(Ensemble[:,indice], IncludedPoints, mode='clip'), diff --git a/src/daComposant/daAlgorithms/MeasurementsOptimalPositioningTask.py b/src/daComposant/daAlgorithms/MeasurementsOptimalPositioningTask.py index bc91b67..346c12e 100644 --- a/src/daComposant/daAlgorithms/MeasurementsOptimalPositioningTask.py +++ b/src/daComposant/daAlgorithms/MeasurementsOptimalPositioningTask.py @@ -30,12 +30,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): BasicObjects.Algorithm.__init__(self, "MEASUREMENTSOPTIMALPOSITIONING") self.defineRequiredParameter( name = "Variant", - default = "Positioning", + default = "PositioningBylcEIM", typecast = str, message = "Variant ou formulation de la méthode", listval = [ - "Positioning", - # "PositioningByEIM", + "PositioningByEIM", + "PositioningBylcEIM", ], ) self.defineRequiredParameter( @@ -95,9 +95,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q) # #-------------------------- - if self._parameters["Variant"] in ["Positioning", "PositioningByEIM"]: + if self._parameters["Variant"] == "PositioningBylcEIM": if len(self._parameters["EnsembleOfSnapshots"]) > 0: ecweim.EIM_offline(self) + else: + raise ValueError("Snapshots have to be given in order to launch the positioning analysis") + # + elif self._parameters["Variant"] == "PositioningByEIM": + if len(self._parameters["EnsembleOfSnapshots"]) > 0: + ecweim.EIM_offline(self) + else: + raise ValueError("Snapshots have to be given in order to launch the positioning analysis") # #-------------------------- else: diff --git a/src/daComposant/daCore/ExtendedLogging.py b/src/daComposant/daCore/ExtendedLogging.py index 2da4d7e..f787869 100644 --- a/src/daComposant/daCore/ExtendedLogging.py +++ b/src/daComposant/daCore/ExtendedLogging.py @@ -107,16 +107,20 @@ class ExtendedLogging(object): logging.info( "--------------------------------------------------" ) logging.info( "Library availability:" ) logging.info( 
"- Python.......: True" ) - logging.info( "- Numpy........: True" ) + logging.info( "- Numpy........: "+str(PlatformInfo.has_numpy) ) logging.info( "- Scipy........: "+str(PlatformInfo.has_scipy) ) logging.info( "- Matplotlib...: "+str(PlatformInfo.has_matplotlib) ) - logging.info( "- Gnuplot......: "+str(PlatformInfo.has_scipy) ) + logging.info( "- Gnuplot......: "+str(PlatformInfo.has_gnuplot) ) logging.info( "- Sphinx.......: "+str(PlatformInfo.has_sphinx) ) logging.info( "- Nlopt........: "+str(PlatformInfo.has_nlopt) ) logging.info( "Library versions:" ) logging.info( "- Python.......: "+p.getPythonVersion() ) logging.info( "- Numpy........: "+p.getNumpyVersion() ) logging.info( "- Scipy........: "+p.getScipyVersion() ) + logging.info( "- Matplotlib...: "+p.getMatplotlibVersion() ) + logging.info( "- Gnuplot......: "+p.getGnuplotVersion() ) + logging.info( "- Sphinx.......: "+p.getSphinxVersion() ) + logging.info( "- Nlopt........: "+p.getNloptVersion() ) logging.info( "" ) def setLogfile(self, filename=LOGFILE, filemode="w", level=logging.NOTSET): diff --git a/src/daComposant/daCore/Interfaces.py b/src/daComposant/daCore/Interfaces.py index 28be7e6..189508d 100644 --- a/src/daComposant/daCore/Interfaces.py +++ b/src/daComposant/daCore/Interfaces.py @@ -42,6 +42,10 @@ class GenericCaseViewer(object): """ Gestion des commandes de création d'une vue de cas """ + __slots__ = ( + "_name", "_objname", "_lineSerie", "_switchoff", "_content", + "_numobservers", "_object", "_missing") + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entete" self._name = str(__name) @@ -72,6 +76,7 @@ class GenericCaseViewer(object): for k,v in __translation.items(): __multilines = __multilines.replace(k,v) return __multilines + # def _finalize(self, __upa=None): "Enregistrement du final" __hasNotExecute = True @@ -82,11 +87,14 @@ class GenericCaseViewer(object): if __upa is not None and len(__upa)>0: __upa = __upa.replace("ADD",str(self._objname)) self._lineSerie.append(__upa) + # def _addLine(self, line=""): "Ajoute un enregistrement individuel" self._lineSerie.append(line) + # def _get_objname(self): return self._objname + # def dump(self, __filename=None, __upa=None): "Restitution normalisée des commandes" self._finalize(__upa) @@ -98,6 +106,7 @@ class GenericCaseViewer(object): __fid.write(__text) __fid.close() return __text + # def load(self, __filename=None, __content=None, __object=None): "Chargement normalisé des commandes" if __filename is not None and os.path.exists(__filename): @@ -116,6 +125,8 @@ class _TUIViewer(GenericCaseViewer): """ Établissement des commandes d'un cas ADAO TUI (Cas<->TUI) """ + __slots__ = () + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entete" GenericCaseViewer.__init__(self, __name, __objname, __content, __object) @@ -127,6 +138,7 @@ class _TUIViewer(GenericCaseViewer): if self._content is not None: for command in self._content: self._append(*command) + # def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False): "Transformation d'une commande individuelle en un enregistrement" if __command is not None and __keys is not None and __local is not None: @@ -160,6 +172,7 @@ class _TUIViewer(GenericCaseViewer): __text = __text.rstrip(", ") __text += " )" self._addLine(__text) + # def _extract(self, __multilines="", __object=None): "Transformation d'enregistrement(s) en commande(s) individuelle(s)" 
__is_case = False @@ -182,6 +195,8 @@ class _COMViewer(GenericCaseViewer): """ Établissement des commandes d'un cas COMM (Eficas Native Format/Cas<-COM) """ + __slots__ = ("_observerIndex", "_objdata") + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entete" GenericCaseViewer.__init__(self, __name, __objname, __content, __object) @@ -194,6 +209,7 @@ class _COMViewer(GenericCaseViewer): if self._content is not None: for command in self._content: self._append(*command) + # def _extract(self, __multilines=None, __object=None): "Transformation d'enregistrement(s) en commande(s) individuelle(s)" __suppparameters = {} @@ -352,6 +368,10 @@ class _SCDViewer(GenericCaseViewer): Remarque : le fichier généré est différent de celui obtenu par EFICAS """ + __slots__ = ( + "__DebugCommandNotSet", "__ObserverCommandNotSet", + "__UserPostAnalysisNotSet", "__hasAlgorithm") + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entête" GenericCaseViewer.__init__(self, __name, __objname, __content, __object) @@ -591,11 +611,14 @@ class _YACSViewer(GenericCaseViewer): """ Etablissement des commandes d'un cas YACS (Cas->SCD->YACS) """ + __slots__ = ("__internalSCD") + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entete" GenericCaseViewer.__init__(self, __name, __objname, __content, __object) self.__internalSCD = _SCDViewer(__name, __objname, __content, __object) self._append = self.__internalSCD._append + # def dump(self, __filename=None, __upa=None): "Restitution normalisée des commandes" # ----- @@ -635,6 +658,8 @@ class _ReportViewer(GenericCaseViewer): """ Partie commune de restitution simple """ + __slots__ = ("_r") + # def __init__(self, __name="", __objname="case", __content=None, __object=None): "Initialisation et enregistrement de l'entete" GenericCaseViewer.__init__(self, __name, __objname, __content, __object) @@ -647,6 +672,7 @@ class _ReportViewer(GenericCaseViewer): if self._content is not None: for command in self._content: self._append(*command) + # def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False): "Transformation d'une commande individuelle en un enregistrement" if __command is not None and __keys is not None and __local is not None: @@ -675,6 +701,7 @@ class _ReportViewer(GenericCaseViewer): __text += " with values:" + __ktext __text = __text.rstrip(", ") self._r.append(__text, "uli") + # def _finalize(self, __upa=None): "Enregistrement du final" raise NotImplementedError() @@ -683,6 +710,8 @@ class _SimpleReportInRstViewer(_ReportViewer): """ Restitution simple en RST """ + __slots__ = () + # def _finalize(self, __upa=None): self._lineSerie.append(Reporting.ReportViewInRst(self._r).__str__()) @@ -690,6 +719,8 @@ class _SimpleReportInHtmlViewer(_ReportViewer): """ Restitution simple en HTML """ + __slots__ = () + # def _finalize(self, __upa=None): self._lineSerie.append(Reporting.ReportViewInHtml(self._r).__str__()) @@ -697,6 +728,8 @@ class _SimpleReportInPlainTxtViewer(_ReportViewer): """ Restitution simple en TXT """ + __slots__ = () + # def _finalize(self, __upa=None): self._lineSerie.append(Reporting.ReportViewInPlainTxt(self._r).__str__()) @@ -706,6 +739,7 @@ class ImportFromScript(object): Obtention d'une variable nommee depuis un fichier script importé """ __slots__ = ("__basename", "__filenspace", "__filestring") + # def 
__init__(self, __filename=None): "Verifie l'existence et importe le script" if __filename is None: @@ -727,6 +761,7 @@ class ImportFromScript(object): self.__filenspace = "" with open(__filename,'r') as fid: self.__filestring = fid.read() + # def getvalue(self, __varname=None, __synonym=None ): "Renvoie la variable demandee par son nom ou son synonyme" if __varname is None: @@ -746,6 +781,7 @@ class ImportFromScript(object): return getattr(self.__filenspace, __synonym) else: return getattr(self.__filenspace, __varname) + # def getstring(self): "Renvoie le script complet" return self.__filestring @@ -755,8 +791,8 @@ class ImportDetector(object): """ Détection des caractéristiques de fichiers ou objets en entrée """ - __slots__ = ( - "__url", "__usr", "__root", "__end") + __slots__ = ("__url", "__usr", "__root", "__end") + # def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): @@ -790,8 +826,10 @@ class ImportDetector(object): return True else: return False + # def is_not_local_file(self): return not self.is_local_file() + # def raise_error_if_not_local_file(self): if self.is_not_local_file(): raise ValueError("The name or the url of the file object doesn't seem to exist. The given name is:\n \"%s\""%str(self.__url)) @@ -805,8 +843,10 @@ class ImportDetector(object): return True else: return False + # def is_not_local_dir(self): return not self.is_local_dir() + # def raise_error_if_not_local_dir(self): if self.is_not_local_dir(): raise ValueError("The name or the url of the directory object doesn't seem to exist. The given name is:\n \"%s\""%str(self.__url)) @@ -818,10 +858,12 @@ class ImportDetector(object): def get_standard_mime(self): (__mtype, __encoding) = mimetypes.guess_type(self.__url, strict=False) return __mtype + # def get_user_mime(self): __fake = "fake."+self.__usr.lower() (__mtype, __encoding) = mimetypes.guess_type(__fake, strict=False) return __mtype + # def get_comprehensive_mime(self): if self.get_standard_mime() is not None: return self.get_standard_mime() @@ -834,8 +876,10 @@ class ImportDetector(object): # ---------------------- def get_user_name(self): return self.__url + # def get_absolute_name(self): return os.path.abspath(os.path.realpath(self.__url)) + # def get_extension(self): return self.__end @@ -856,8 +900,10 @@ class ImportFromFile(object): "_filename", "_colnames", "_colindex", "_varsline", "_format", "_delimiter", "_skiprows", "__url", "__filestring", "__header", "__allowvoid", "__binaryformats", "__supportedformats") + # def __enter__(self): return self + # def __exit__(self, exc_type, exc_val, exc_tb): return False # @@ -910,7 +956,7 @@ class ImportFromFile(object): else: self._colindex = None # self.__allowvoid = bool(AllowVoidNameList) - + # def __getentete(self, __nblines = 3): "Lit l'entête du fichier pour trouver la définition des variables" # La première ligne non vide non commentée est toujours considérée @@ -929,7 +975,7 @@ class ImportFromFile(object): for i in range(max(0,__nblines)): __header.append(fid.readline()) return (__header, __varsline, __skiprows) - + # def __getindices(self, __colnames, __colindex, __delimiter=None ): "Indices de colonnes correspondants à l'index et aux variables" if __delimiter is None: @@ -961,7 +1007,7 @@ class ImportFromFile(object): __useindex = None # return (__usecols, __useindex) - + # def getsupported(self): self.__supportedformats = {} self.__supportedformats["text/plain"] = True @@ -971,7 +1017,7 @@ class ImportFromFile(object): self.__supportedformats["application/numpy.npz"] = 
True self.__supportedformats["application/dymola.sdf"] = PlatformInfo.has_sdf return self.__supportedformats - + # def getvalue(self, ColNames=None, ColIndex=None ): "Renvoie la ou les variables demandées par la liste de leurs noms" # Uniquement si mise à jour @@ -1053,7 +1099,7 @@ class ImportFromFile(object): __index = tuple([toString(v) for v in __index]) # return (self._colnames, __columns, self._colindex, __index) - + # def getstring(self): "Renvoie le fichier texte complet" if self._format in self.__binaryformats: @@ -1061,7 +1107,7 @@ class ImportFromFile(object): else: with open(self._filename,'r') as fid: return fid.read() - + # def getformat(self): return self._format @@ -1075,8 +1121,11 @@ class ImportScalarLinesFromFile(ImportFromFile): Seule la méthode "getvalue" est changée. """ + __slots__ = () + # def __enter__(self): return self + # def __exit__(self, exc_type, exc_val, exc_tb): return False # @@ -1181,6 +1230,8 @@ class EficasGUI(object): """ Lancement autonome de l'interface EFICAS/ADAO """ + __slots__ = ("__msg", "__path_settings_ok") + # def __init__(self, __addpath = None): # Chemin pour l'installation (ordre important) self.__msg = "" @@ -1246,7 +1297,7 @@ class EficasGUI(object): else: print(self.__msg) logging.debug("Errors in path settings have been found") - + # def gui(self): if self.__path_settings_ok: logging.debug("Launching standalone EFICAS/ADAO interface...") diff --git a/src/daComposant/daCore/PlatformInfo.py b/src/daComposant/daCore/PlatformInfo.py index 4c53c98..f576e70 100644 --- a/src/daComposant/daCore/PlatformInfo.py +++ b/src/daComposant/daCore/PlatformInfo.py @@ -56,30 +56,32 @@ class PlatformInfo(object): """ Rassemblement des informations sur le code et la plateforme """ + __slots__ = () + # def __init__(self): "Sans effet" pass - + # def getName(self): "Retourne le nom de l'application" import daCore.version as dav return dav.name - + # def getVersion(self): "Retourne le numéro de la version" import daCore.version as dav return dav.version - + # def getDate(self): "Retourne la date de création de la version" import daCore.version as dav return dav.date - + # def getYear(self): "Retourne l'année de création de la version" import daCore.version as dav return dav.year - + # def getSystemInformation(self, __prefix=""): __msg = "" __msg += "\n%s%30s : %s" %(__prefix,"platform.system",platform.system()) @@ -131,16 +133,16 @@ class PlatformInfo(object): __msg += "\n%s%30s : %s" %(__prefix,"platform.node",platform.node()) __msg += "\n%s%30s : %s" %(__prefix,"os.path.expanduser",os.path.expanduser('~')) return __msg - + # def getPythonVersion(self): "Retourne la version de python disponible" return ".".join([str(x) for x in sys.version_info[0:3]]) # map(str,sys.version_info[0:3])) - + # def getNumpyVersion(self): "Retourne la version de numpy disponible" import numpy.version return numpy.version.version - + # def getScipyVersion(self): "Retourne la version de scipy disponible" if has_scipy: @@ -148,7 +150,7 @@ class PlatformInfo(object): else: __version = "0.0.0" return __version - + # def getMatplotlibVersion(self): "Retourne la version de matplotlib disponible" if has_matplotlib: @@ -156,7 +158,7 @@ class PlatformInfo(object): else: __version = "0.0.0" return __version - + # def getGnuplotVersion(self): "Retourne la version de gnuplotpy disponible" if has_gnuplot: @@ -164,7 +166,7 @@ class PlatformInfo(object): else: __version = "0.0" return __version - + # def getSphinxVersion(self): "Retourne la version de sphinx disponible" if has_sphinx: @@ -172,7 
+174,7 @@ class PlatformInfo(object): else: __version = "0.0.0" return __version - + # def getNloptVersion(self): "Retourne la version de nlopt disponible" if has_nlopt: @@ -184,7 +186,7 @@ class PlatformInfo(object): else: __version = "0.0.0" return __version - + # def getSdfVersion(self): "Retourne la version de sdf disponible" if has_sdf: @@ -192,11 +194,11 @@ class PlatformInfo(object): else: __version = "0.0.0" return __version - + # def getCurrentMemorySize(self): "Retourne la taille mémoire courante utilisée" return 1 - + # def MaximumPrecision(self): "Retourne la precision maximale flottante pour Numpy" import numpy @@ -206,7 +208,7 @@ class PlatformInfo(object): except Exception: mfp = 'float64' return mfp - + # def MachinePrecision(self): # Alternative sans module : # eps = 2.38 @@ -214,12 +216,18 @@ class PlatformInfo(object): # old_eps = eps # eps = (1.0 + eps/2) - 1.0 return sys.float_info.epsilon - + # def __str__(self): import daCore.version as dav return "%s %s (%s)"%(dav.name,dav.version,dav.date) # ============================================================================== +try: + import numpy + has_numpy = True +except ImportError: + raise ImportError("Numpy is not available, despites the fact it is mandatory.") + try: import scipy import scipy.version @@ -374,6 +382,8 @@ class PathManagement(object): """ Mise à jour du path système pour les répertoires d'outils """ + __slots__ = ("__paths") + # def __init__(self): "Déclaration des répertoires statiques" parent = os.path.abspath(os.path.join(os.path.dirname(__file__),"..")) @@ -386,7 +396,7 @@ class PathManagement(object): # Conserve en unique exemplaire chaque chemin sys.path = uniq( sys.path ) del parent - + # def getpaths(self): """ Renvoie le dictionnaire des chemins ajoutés @@ -398,6 +408,7 @@ class SystemUsage(object): """ Permet de récupérer les différentes tailles mémoires du process courant """ + __slots__ = () # # Le module resource renvoie 0 pour les tailles mémoire. 
On utilise donc # plutôt : http://code.activestate.com/recipes/286222/ et Wikipedia diff --git a/src/daComposant/daCore/Reporting.py b/src/daComposant/daCore/Reporting.py index 4f526f7..7cbfdfa 100644 --- a/src/daComposant/daCore/Reporting.py +++ b/src/daComposant/daCore/Reporting.py @@ -35,11 +35,13 @@ class _ReportPartM__(object): """ Store and retrieve the data for C: internal class """ + __slots__ = ("__part", "__styles", "__content") + # def __init__(self, part="default"): self.__part = str(part) self.__styles = [] self.__content = [] - + # def append(self, content, style="p", position=-1): if position == -1: self.__styles.append(style) @@ -48,10 +50,10 @@ class _ReportPartM__(object): self.__styles.insert(position, style) self.__content.insert(position, content) return 0 - + # def get_styles(self): return self.__styles - + # def get_content(self): return self.__content @@ -59,24 +61,26 @@ class _ReportM__(object): """ Store and retrieve the data for C: internal class """ + __slots__ = ("__document") + # def __init__(self, part='default'): self.__document = {} self.__document[part] = _ReportPartM__(part) - + # def append(self, content, style="p", position=-1, part='default'): if part not in self.__document: self.__document[part] = _ReportPartM__(part) self.__document[part].append(content, style, position) return 0 - + # def get_styles(self): op = list(self.__document.keys()) ; op.sort() return [self.__document[k].get_styles() for k in op] - + # def get_content(self): op = list(self.__document.keys()) ; op.sort() return [self.__document[k].get_content() for k in op] - + # def clear(self): self.__init__() @@ -84,16 +88,18 @@ class __ReportC__(object): """ Get user commands, update M and V: user intertace to create the report """ + __slots__ = () + # m = _ReportM__() - + # def append(self, content="", style="p", position=-1, part="default"): return self.m.append(content, style, position, part) - + # def retrieve(self): st = self.m.get_styles() ct = self.m.get_content() return st, ct - + # def clear(self): self.m.clear() @@ -101,12 +107,13 @@ class __ReportV__(object): """ Interact with user and C: template for reports """ - + __slots__ = ("c") + # default_filename="report.txt" - + # def __init__(self, c): self.c = c - + # def save(self, filename=None): if filename is None: filename = self.default_filename @@ -117,13 +124,13 @@ class __ReportV__(object): fid.write(h) fid.close() return filename, _filename - + # def retrieve(self): return self.c.retrieve() - + # def __str__(self): return self.get() - + # def close(self): del self.c return 0 @@ -136,13 +143,14 @@ class ReportViewInHtml(__ReportV__): """ Report in HTML """ - + __slots__ = () + # default_filename="report.html" tags = { "oli":"li", "uli":"li", } - + # def get(self): st, ct = self.retrieve() inuLi, inoLi = False, False @@ -183,7 +191,8 @@ class ReportViewInRst(__ReportV__): """ Report in RST """ - + __slots__ = () + # default_filename="report.rst" tags = { "p":["\n\n",""], @@ -202,7 +211,7 @@ class ReportViewInRst(__ReportV__): "":"**", "":"*", } - + # def get(self): st, ct = self.retrieve() inuLi, inoLi = False, False @@ -242,7 +251,7 @@ class ReportViewInPlainTxt(__ReportV__): """ Report in plain TXT """ - + # default_filename="report.txt" tags = { "p":["\n",""], @@ -261,7 +270,7 @@ class ReportViewInPlainTxt(__ReportV__): "":"", "":"", } - + # def get(self): st, ct = self.retrieve() inuLi, inoLi = False, False @@ -293,7 +302,6 @@ class ReportViewInPlainTxt(__ReportV__): pg += "\n" return pg - # Interface utilisateur de 
stockage des informations ReportStorage = __ReportC__ diff --git a/src/daComposant/daCore/Templates.py b/src/daComposant/daCore/Templates.py index 1d0ce76..21010a9 100644 --- a/src/daComposant/daCore/Templates.py +++ b/src/daComposant/daCore/Templates.py @@ -34,11 +34,13 @@ class TemplateStorage(object): Classe générale de stockage de type dictionnaire étendu (Template) """ + __slots__ = ("__preferedLanguage", "__values", "__order") + # def __init__( self, language = "fr_FR" ): self.__preferedLanguage = language self.__values = {} self.__order = -1 - + # def store( self, name = None, content = None, fr_FR = "", en_EN = "", order = "next" ): "D.store(k, c, fr_FR, en_EN, o) -> Store template k and its main characteristics" if name is None or content is None: @@ -53,29 +55,29 @@ class TemplateStorage(object): 'en_EN' : str(en_EN), 'order' : int(self.__order), } - + # def keys(self): "D.keys() -> list of D's keys" __keys = sorted(self.__values.keys()) return __keys - + # def __contains__(self, name): "D.__contains__(k) -> True if D has a key k, else False" return name in self.__values - + # def __len__(self): "x.__len__() <==> len(x)" return len(self.__values) - + # def __getitem__(self, name=None ): "x.__getitem__(y) <==> x[y]" return self.__values[name]['content'] - + # def getdoc(self, name = None, lang = "fr_FR"): "D.getdoc(k, l) -> Return documentation of key k in language l" if lang not in self.__values[name]: lang = self.__preferedLanguage return self.__values[name][lang] - + # def keys_in_presentation_order(self): "D.keys_in_presentation_order() -> list of D's keys in presentation order" __orders = [] diff --git a/src/daComposant/daCore/version.py b/src/daComposant/daCore/version.py index 578357c..f966e82 100644 --- a/src/daComposant/daCore/version.py +++ b/src/daComposant/daCore/version.py @@ -29,7 +29,7 @@ __all__ = [] name = "ADAO" version = "9.10.0" year = "2022" -date = "lundi 14 novembre 2022, 12:12:12 (UTC+0100)" +date = "lundi 12 décembre 2022, 12:12:12 (UTC+0100)" longname = name + ", a module for Data Assimilation and Optimization" cata = "V" + version.replace(".","_")
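For readers reviewing this change, the greedy EIM point selection described in the documentation hunks above (and handled by ecweim.EIM_offline with the new location-exclusion switch) can be illustrated by the following self-contained NumPy sketch. It shows the principle only, not the module's implementation, under the assumption of a snapshot matrix with one state vector per column; the function name greedy_eim_points and all variable names are illustrative and are not part of ADAO::

    # Illustrative sketch of greedy EIM point selection with excluded locations.
    # This is NOT the module's ecweim.EIM_offline code, only the underlying idea.
    import numpy

    def greedy_eim_points(snapshots, excluded=(), nmax=5, tol=1.e-12):
        "snapshots: 2D array, one state vector per column; returns magic point indices"
        excluded = set(int(k) for k in excluded)
        allowed  = numpy.array([i for i in range(snapshots.shape[0]) if i not in excluded])
        residual = snapshots.copy()
        basis, points = [], []
        for _ in range(min(nmax, snapshots.shape[1])):
            # 1. pick the snapshot worst represented so far (allowed rows only, L2 norm)
            norms = numpy.linalg.norm(residual[allowed, :], axis=0)
            j = int(numpy.argmax(norms))
            if norms[j] < tol:
                break
            r = residual[:, j]
            # 2. the new magic point is the allowed location where the residual is largest
            i = int(allowed[numpy.argmax(numpy.abs(r[allowed]))])
            points.append(i)
            basis.append(r / r[i])
            # 3. re-interpolate every snapshot on the selected points, update the residual
            Q = numpy.column_stack(basis)
            coefficients = numpy.linalg.solve(Q[points, :], snapshots[points, :])
            residual = snapshots - Q @ coefficients
        return points

With the "lcEIM" behaviour introduced by this patch, the indices listed in "ExcludeLocations" simply never become candidates, which is what the __LcCsts flag controls in ecweim.py.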
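A minimal TUI sketch may also help situate the options touched by this patch. It assumes the standard ADAO textual interface entry point ("adaoBuilder") and that "ExcludeLocations" is accepted as an algorithm parameter of MeasurementsOptimalPositioningTask in this version; the snapshot construction and the option values below are purely illustrative, not prescribed by the patch::

    # Hypothetical TUI call exercising the new "Variant" choices together with
    # the "EnsembleOfSnapshots", "ExcludeLocations" and "ErrorNorm" options.
    import numpy
    from adao import adaoBuilder

    # Illustrative ensemble of snapshots: one column per state vector y = H(x=mu)
    grid = numpy.linspace(0., 1., 100)
    snapshots = numpy.array([numpy.exp(-mu * grid) for mu in numpy.linspace(1., 10., 25)]).T

    case = adaoBuilder.New()
    case.setAlgorithmParameters(
        Algorithm  = "MeasurementsOptimalPositioningTask",
        Parameters = {
            "EnsembleOfSnapshots" : snapshots,
            "Variant"             : "PositioningBylcEIM",  # default set by this patch
            "ExcludeLocations"    : [0, 1, 98, 99],        # indices removed from the search
            "ErrorNorm"           : "L2",
            },
        )
    case.execute()
    # Results are then retrieved with case.get(), using the output names
    # documented for this algorithm.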