name = "ADAO"
version = "9.10.0"
year = "2022"
-date = "lundi 14 novembre 2022, 12:12:12 (UTC+0100)"
+date = "lundi 12 décembre 2022, 12:12:12 (UTC+0100)"
longname = name + ", a module for Data Assimilation and Optimization"
cata = "V" + version.replace(".","_")
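+# e.g. for version = "9.10.0", cata evaluates to "V9_10_0"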
.. [Asch16] Asch M., Bocquet M., Nodet M., *Data Assimilation - Methods, Algorithms and Applications*, SIAM, 2016
+.. [Barrault04] Barrault M., Maday Y., Nguyen N. C., Patera A. T., *An 'empirical interpolation' method: application to efficient reduced-basis discretization of partial differential equations*, Comptes Rendus Mathématique, 339(9), pp.667–672, 2004
+
.. [Bishop01] Bishop C. H., Etherton B. J., Majumdar S. J., *Adaptive sampling with the ensemble transform Kalman filter. Part I: theoretical aspects*, Monthly Weather Review, 129, pp.420–436, 2001
.. [Bocquet04] Bocquet M., *Introduction aux principes et méthodes de l'assimilation de données en géophysique*, Lecture Notes, 2014
.. [Glover90] Glover F., *Tabu Search-Part II*, ORSA Journal on Computing, 2(1), pp.4-32, 1990
+.. [Gong18] Gong H., *Data assimilation with reduced basis and noisy measurement: Applications to nuclear reactor cores*, Ph.D. thesis, Sorbonne Université (France), 2018
+
.. [Hamill00] Hamill T. M., Snyder C., *A Hybrid Ensemble Kalman Filter-3D Variational Analysis Scheme*, Monthly Weather Review, 128(8), pp.2905-2919, 2000
.. [Ide97] Ide K., Courtier P., Ghil M., Lorenc A. C., *Unified notation for data assimilation: operational, sequential and variational*, Journal of the Meteorological Society of Japan, 75(1B), pp.181-189, 1997
.. [Python] *Python programming language*, http://www.python.org/
+.. [Quarteroni16] Quarteroni A., Manzoni A., Negri F., *Reduced Basis Methods for Partial Differential Equations - An introduction*, Springer, 2016
+
.. [R] *The R Project for Statistical Computing*, http://www.r-project.org/
.. [Rowan90] Rowan T., *Functional Stability Analysis of Numerical Algorithms*, Ph.D. thesis, Department of Computer Sciences, University of Texas at Austin, 1990
.. warning::
- This algorithm is for now only available in textual user interface (TUI) and not in graphical user interface (GUI).
+ This algorithm is only available in textual user interface (TUI) and not in
+ graphical user interface (GUI).
.. ------------------------------------ ..
.. include:: snippets/Header2Algo01.rst
This algorithm provides optimal positioning of measurement points by an EIM
-(Empirical Interpolation Method) analysis, from a set of state vectors (usually
-called "snapshots" in reduced basis methodology).
+(Empirical Interpolation Method) analysis, in an iterative greedy way from a set
+of state vectors (usually called "snapshots" in reduced basis methodology).
Each of these state vectors is usually (but not necessarily) the result
:math:`\mathbf{y}` of a simulation :math:`H` for a given set of parameters
-:math:`\mathbf{x}=\mu`.
+:math:`\mathbf{x}=\mu`. In its simplest use, if the set of state vectors is
+pre-existing, it is only necessary to provide it through the algorithm options.
-In its simplest use, if the set of state vectors is pre-existing, it is only
-necessary to provide it through the algorithm options.
+It is also possible to exclude a priori potential locations for optimal
+measurement points, using the "*lcEIM*" analysis for a constrained positioning
+search.
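+
+As an illustration only, a minimal sketch of such a search driven from the ADAO
+textual user interface (TUI) could look as follows. The option names "Variant",
+"EnsembleOfSnapshots", "ErrorNorm" and "ExcludeLocations" are the algorithm
+options discussed here, whereas the retrieved variable name "OptimalPoints" is
+an assumption to be checked against the list of results of the algorithm::
+
+    import numpy
+    from adao import adaoBuilder
+
+    # Hypothetical ensemble of snapshots: each column is one state vector
+    snapshots = numpy.random.rand(100, 20)
+
+    case = adaoBuilder.New()
+    case.setAlgorithmParameters(
+        Algorithm = "MeasurementsOptimalPositioningTask",
+        Parameters = {
+            "Variant"             : "PositioningBylcEIM",
+            "EnsembleOfSnapshots" : snapshots,
+            "ErrorNorm"           : "L2",
+            "ExcludeLocations"    : [0, 1, 98, 99],  # positions excluded a priori
+            },
+        )
+    case.execute()
+    print(case.get("OptimalPoints")[-1])  # assumed name of the stored result
+
+With the unconstrained "PositioningByEIM" variant, the "ExcludeLocations" entry
+is simply ignored.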
.. ------------------------------------ ..
.. include:: snippets/Header2Algo02.rst
Example :
``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}``
+.. include:: snippets/Variant_MOP.rst
+
.. ------------------------------------ ..
.. include:: snippets/Header2Algo04.rst
.. ------------------------------------ ..
.. _section_ref_algorithm_MeasurementsOptimalPositioningTask_examples:
-.. include:: snippets/Header2Algo06.rst
+.. include:: snippets/Header2Algo07.rst
-- :ref:`section_ref_algorithm_FunctionTest`
+- [Barrault04]_
+- [Gong18]_
+- [Quarteroni16]_
pair: Variant ; 3DVAR-PSAS
Variant
- *Predifined name*. This key allows to choose one of the possible variants
+  *Predefined name*. This key allows choosing one of the possible variants
for the main algorithm. The default variant is the original "3DVAR", and the
- possible ones are
+ possible choices are
"3DVAR" (Classical 3D Variational analysis),
"3DVAR-VAN" (3D Variational Analysis with No inversion of B),
"3DVAR-Incr" (Incremental 3DVAR),
pair: Variant ; CEKF
Variant
- *Predifined name*. This key allows to choose one of the possible variants for
+  *Predefined name*. This key allows choosing one of the possible variants for
the main algorithm. The default variant is the constrained version "CEKF" of
the original algorithm "EKF", and the possible choices are
"EKF" (Extended Kalman Filter),
"CEKF" (Constrained Extended Kalman Filter).
+ It is highly recommended to keep the default value.
Example :
``{"Variant":"CEKF"}``
pair: Variant ; EnKS
Variant
- *Predifined name*. This key allows to choose one of the possible variants
+  *Predefined name*. This key allows choosing one of the possible variants
for the main algorithm. The default variant is the original "EnKF"
- formulation, and the possible ones are
+ formulation, and the possible choices are
"EnKF" (Ensemble Kalman Filter),
"ETKF" (Ensemble-Transform Kalman Filter),
"ETKF-N" (Ensemble-Transform Kalman Filter),
--- /dev/null
+.. index::
+ single: Variant
+ pair: Variant ; PositioningByEIM
+ pair: Variant ; PositioningBylcEIM
+
+Variant
+  *Predefined name*. This key allows choosing one of the possible variants
+  for the optimal positioning search. The default variant is
+  "PositioningBylcEIM", which is constrained by excluded locations, and the
+  possible choices are
+  "PositioningByEIM" (using the original EIM algorithm),
+  "PositioningBylcEIM" (using the EIM constrained by excluded locations, named "Location Constrained EIM").
+ It is highly recommended to keep the default value.
+
+ Example :
+ ``{"Variant":"PositioningBylcEIM"}``
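+
+  For instance, assuming that locations are designated by their integer
+  indices (as in the module source), the constrained variant can be combined
+  with an exclusion list; the indices below are arbitrary:
+
+  ``{"Variant":"PositioningBylcEIM", "ExcludeLocations":[3, 18]}``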
pair: Variant ; 2UKF
Variant
- *Predifined name*. This key allows to choose one of the possible variants for
+  *Predefined name*. This key allows choosing one of the possible variants for
the main algorithm. The default variant is the constrained version "2UKF" of
the original algorithm "UKF", and the possible choices are
"UKF" (Unscented Kalman Filter),
"2UKF" (Constrained Unscented Kalman Filter).
+ It is highly recommended to keep the default value.
Example :
``{"Variant":"2UKF"}``
.. [Asch16] Asch M., Bocquet M., Nodet M., *Data Assimilation - Methods, Algorithms and Applications*, SIAM, 2016
+.. [Barrault04] Barrault M., Maday Y., Nguyen N. C., Patera A. T., *An 'empirical interpolation' method: application to efficient reduced-basis discretization of partial differential equations*, Comptes Rendus Mathématique, 339(9), pp.667–672, 2004
+
.. [Bishop01] Bishop C. H., Etherton B. J., Majumdar S. J., *Adaptive sampling with the ensemble transform Kalman filter. Part I: theoretical aspects*, Monthly Weather Review, 129, pp.420–436, 2001
.. [Bocquet04] Bocquet M., *Introduction aux principes et méthodes de l'assimilation de données en géophysique*, Lecture Notes, 2014
.. [Glover90] Glover F., *Tabu Search-Part II*, ORSA Journal on Computing, 2(1), pp.4-32, 1990
+.. [Gong18] Gong H., *Data assimilation with reduced basis and noisy measurement: Applications to nuclear reactor cores*, Ph.D. thesis, Sorbonne Université (France), 2018
+
.. [Hamill00] Hamill T. M., Snyder C., *A Hybrid Ensemble Kalman Filter-3D Variational Analysis Scheme*, Monthly Weather Review, 128(8), pp.2905-2919, 2000
.. [Ide97] Ide K., Courtier P., Ghil M., Lorenc A. C., *Unified notation for data assimilation: operational, sequential and variational*, Journal of the Meteorological Society of Japan, 75(1B), pp.181-189, 1997
.. [Python] *Python programming language*, http://www.python.org/
+.. [Quarteroni16] Quarteroni A., Manzoni A., Negri F., *Reduced Basis Methods for Partial Differential Equations - An introduction*, Springer, 2016
+
.. [R] *The R Project for Statistical Computing*, http://www.r-project.org/
.. [Rowan90] Rowan T., *Functional Stability Analysis of Numerical Algorithms*, Ph.D. thesis, Department of Computer Sciences, University of Texas at Austin, 1990
.. warning::
- Cet algorithme n'est pour l'instant utilisable qu'en interface textuelle
- (TUI) et pas en interface graphique (GUI).
+ Cet algorithme n'est utilisable qu'en interface textuelle (TUI) et pas en
+ interface graphique (GUI).
.. ------------------------------------ ..
.. include:: snippets/Header2Algo01.rst
Cet algorithme permet d'établir la position de points de mesures optimaux par
-une analyse EIM (Empirical Interpolation Method), à partir d'un ensemble de
-vecteurs d'état (usuellement appelés "*snapshots*" en méthodologie de bases
-réduites).
+une analyse EIM (Empirical Interpolation Method), de manière itérative à partir
+d'un ensemble de vecteurs d'état (usuellement appelés "*snapshots*" en
+méthodologie de bases réduites).
Chacun de ces vecteurs d'état est habituellement (mais pas obligatoirement) le
résultat :math:`\mathbf{y}` d'une simulation :math:`H` pour un jeu de
-paramètres donné :math:`\mathbf{x}=\mu`.
+paramètres donné :math:`\mathbf{x}=\mu`. Dans son usage le plus simple, si
+l'ensemble des vecteurs d'état est pré-existant, il suffit de le fournir par
+les options d'algorithme.
-Dans son usage le plus simple, si l'ensemble des vecteurs d'état est
-pré-existant, il suffit de le fournir par les options d'algorithme.
+Il est aussi possible d'exclure a priori des positions potentielles pour les
+points de mesures optimaux, en utilisant l'analyse "*lcEIM*" pour une recherche
+de positionnement contraint.
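+
+As an editorial sketch (not the module's own implementation), the greedy
+selection described above can be written in a few lines of NumPy; the function
+name and its arguments are purely illustrative::
+
+    import numpy
+
+    def greedy_eim_points(snapshots, npoints, excluded=()):
+        "Select interpolation points from snapshots (one snapshot per column)"
+        residual = numpy.array(snapshots, dtype=float)
+        allowed = numpy.setdiff1d(numpy.arange(residual.shape[0]), excluded)
+        points, basis = [], []
+        for _ in range(npoints):
+            # Snapshot with the largest residual norm over the allowed positions
+            j = numpy.argmax(numpy.linalg.norm(residual[allowed, :], axis=0))
+            r = residual[:, j]
+            # Best allowed position for this snapshot becomes the new point
+            i = allowed[numpy.argmax(numpy.abs(r[allowed]))]
+            points.append(i)
+            basis.append(r / r[i])
+            # Deflate every snapshot by the rank-one interpolation update
+            residual = residual - numpy.outer(basis[-1], residual[i, :])
+        return points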
.. ------------------------------------ ..
.. include:: snippets/Header2Algo02.rst
Exemple :
``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}``
+.. include:: snippets/Variant_MOP.rst
+
.. ------------------------------------ ..
.. include:: snippets/Header2Algo04.rst
.. ------------------------------------ ..
.. _section_ref_algorithm_MeasurementsOptimalPositioningTask_examples:
-.. include:: snippets/Header2Algo06.rst
+.. include:: snippets/Header2Algo07.rst
-- :ref:`section_ref_algorithm_FunctionTest`
+- [Barrault04]_
+- [Gong18]_
+- [Quarteroni16]_
"3DVAR-VAN" (3D Variational Analysis with No inversion of B),
"3DVAR-Incr" (Incremental 3DVAR),
"3DVAR-PSAS" (Physical-space Statistical Analysis Scheme for 3DVAR),
- Il est fortement conseillé de conserver la valeur par défaut.
+ Il est fortement recommandé de conserver la valeur par défaut.
Exemple :
``{"Variant":"3DVAR"}``
"CEKF" de l'algorithme original "EKF", et les choix possibles sont
"EKF" (Extended Kalman Filter),
"CEKF" (Constrained Extended Kalman Filter).
+ Il est fortement recommandé de conserver la valeur par défaut.
Exemple :
``{"Variant":"CEKF"}``
--- /dev/null
+.. index::
+ single: Variant
+ pair: Variant ; PositioningByEIM
+ pair: Variant ; PositioningBylcEIM
+
+Variant
+ *Nom prédéfini*. Cette clé permet de choisir l'une des variantes possibles
+ pour la recherche du positionnement optimal. La variante par défaut est la
+ version contrainte par des positions exclues "PositioningBylcEIM", et les
+ choix possibles sont
+ "PositioningByEIM" (utilisant l'algorithme EIM original),
+ "PositioningBylcEIM" (utilisant l'algorithme EIM contraint par des positions exclues, nommé "Location Constrained EIM").
+ Il est fortement recommandé de conserver la valeur par défaut.
+
+ Exemple :
+ ``{"Variant":"PositioningBylcEIM"}``
"2UKF" de l'algorithme original "UKF", et les choix possibles sont
"UKF" (Unscented Kalman Filter),
"2UKF" (Constrained Unscented Kalman Filter).
+ Il est fortement recommandé de conserver la valeur par défaut.
Exemple :
``{"Variant":"2UKF"}``
elif isinstance(selfA._parameters["EnsembleOfSnapshots"], (list,tuple)):
__EOS = numpy.asarray(selfA._parameters["EnsembleOfSnapshots"]).T
else:
- raise ValueError("EOS has to be an array/matrix (each column is a snapshot vector) or a list/tuple (each element is a snapshot vector).")
+ raise ValueError("EnsembleOfSnapshots has to be an array/matrix (each column is a snapshot vector) or a list/tuple (each element is a snapshot vector).")
#
if selfA._parameters["ErrorNorm"] == "L2":
MaxNormByColumn = MaxL2NormByColumn
else:
MaxNormByColumn = MaxLinfNormByColumn
#
- if "ExcludeLocations" in selfA._parameters:
+ if selfA._parameters["Variant"] == "PositioningByEIM":
+ __LcCsts = False
+ else:
+ __LcCsts = True
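+    # Location-constrained variant ("lcEIM"): user-excluded positions are removed from the candidate points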
+ if __LcCsts and "ExcludeLocations" in selfA._parameters:
__ExcludedMagicPoints = selfA._parameters["ExcludeLocations"]
else:
__ExcludedMagicPoints = []
- if len(__ExcludedMagicPoints) > 0:
+ if __LcCsts and len(__ExcludedMagicPoints) > 0:
__ExcludedMagicPoints = numpy.ravel(numpy.asarray(__ExcludedMagicPoints, dtype=int))
__IncludedMagicPoints = numpy.setdiff1d(
numpy.arange(__EOS.shape[0]),
__iM = -1
__rhoM = numpy.empty(__dimS)
#
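+    # Greedy start: the snapshot with the largest norm over the allowed points gives the first residual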
- __eM, __muM = MaxNormByColumn(__EOS, __IncludedMagicPoints)
+ __eM, __muM = MaxNormByColumn(__EOS, __LcCsts, __IncludedMagicPoints)
__residuM = __EOS[:,__muM]
__errors.append(__eM)
#
__iM = numpy.argmax(__abs_residuM)
__rhoM = __residuM / __abs_residuM[__iM]
#
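+    # If the best position is an excluded one, fall back to the best allowed position of the sorted residual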
- if __iM in __ExcludedMagicPoints:
+ if __LcCsts and __iM in __ExcludedMagicPoints:
__sIndices = numpy.argsort(__abs_residuM)
__rang = -1
assert __iM == __sIndices[__rang]
__interpolator = numpy.outer(__Q,numpy.outer(__Qi_inv,__restrictedEOSi))
#
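+    # Remove the part already captured by the current interpolant, then look for the worst remaining snapshot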
__dataForNextIter = __EOS - __interpolator
- __eM, __muM = MaxNormByColumn(__dataForNextIter, __IncludedMagicPoints)
+ __eM, __muM = MaxNormByColumn(__dataForNextIter, __LcCsts, __IncludedMagicPoints)
__errors.append(__eM)
#
__residuM = __dataForNextIter[:,__muM]
raise NotImplementedError()
# ==============================================================================
-def MaxL2NormByColumn(Ensemble, IncludedPoints=[]):
+def MaxL2NormByColumn(Ensemble, LcCsts = False, IncludedPoints = []):
nmax, imax = -1, -1
- if len(IncludedPoints) > 0:
+ if LcCsts and len(IncludedPoints) > 0:
for indice in range(Ensemble.shape[1]):
norme = numpy.linalg.norm(
numpy.take(Ensemble[:,indice], IncludedPoints, mode='clip'),
nmax, imax, = norme, indice
return nmax, imax
-def MaxLinfNormByColumn(Ensemble, IncludedPoints=[]):
+def MaxLinfNormByColumn(Ensemble, LcCsts = False, IncludedPoints = []):
nmax, imax = -1, -1
- if len(IncludedPoints) > 0:
+ if LcCsts and len(IncludedPoints) > 0:
for indice in range(Ensemble.shape[1]):
norme = numpy.linalg.norm(
numpy.take(Ensemble[:,indice], IncludedPoints, mode='clip'),
BasicObjects.Algorithm.__init__(self, "MEASUREMENTSOPTIMALPOSITIONING")
self.defineRequiredParameter(
name = "Variant",
- default = "Positioning",
+ default = "PositioningBylcEIM",
typecast = str,
message = "Variant ou formulation de la méthode",
listval = [
- "Positioning",
- # "PositioningByEIM",
+ "PositioningByEIM",
+ "PositioningBylcEIM",
],
)
self.defineRequiredParameter(
self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
#
#--------------------------
- if self._parameters["Variant"] in ["Positioning", "PositioningByEIM"]:
+ if self._parameters["Variant"] == "PositioningBylcEIM":
if len(self._parameters["EnsembleOfSnapshots"]) > 0:
ecweim.EIM_offline(self)
+ else:
+                raise ValueError("Snapshots have to be given in order to launch the positioning analysis")
+ #
+ elif self._parameters["Variant"] == "PositioningByEIM":
+ if len(self._parameters["EnsembleOfSnapshots"]) > 0:
+ ecweim.EIM_offline(self)
+ else:
+                raise ValueError("Snapshots have to be given in order to launch the positioning analysis")
#
#--------------------------
else:
logging.info( "--------------------------------------------------" )
logging.info( "Library availability:" )
logging.info( "- Python.......: True" )
- logging.info( "- Numpy........: True" )
+ logging.info( "- Numpy........: "+str(PlatformInfo.has_numpy) )
logging.info( "- Scipy........: "+str(PlatformInfo.has_scipy) )
logging.info( "- Matplotlib...: "+str(PlatformInfo.has_matplotlib) )
- logging.info( "- Gnuplot......: "+str(PlatformInfo.has_scipy) )
+ logging.info( "- Gnuplot......: "+str(PlatformInfo.has_gnuplot) )
logging.info( "- Sphinx.......: "+str(PlatformInfo.has_sphinx) )
logging.info( "- Nlopt........: "+str(PlatformInfo.has_nlopt) )
logging.info( "Library versions:" )
logging.info( "- Python.......: "+p.getPythonVersion() )
logging.info( "- Numpy........: "+p.getNumpyVersion() )
logging.info( "- Scipy........: "+p.getScipyVersion() )
+ logging.info( "- Matplotlib...: "+p.getMatplotlibVersion() )
+ logging.info( "- Gnuplot......: "+p.getGnuplotVersion() )
+ logging.info( "- Sphinx.......: "+p.getSphinxVersion() )
+ logging.info( "- Nlopt........: "+p.getNloptVersion() )
logging.info( "" )
def setLogfile(self, filename=LOGFILE, filemode="w", level=logging.NOTSET):
"""
Gestion des commandes de création d'une vue de cas
"""
+ __slots__ = (
+ "_name", "_objname", "_lineSerie", "_switchoff", "_content",
+ "_numobservers", "_object", "_missing")
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entete"
self._name = str(__name)
for k,v in __translation.items():
__multilines = __multilines.replace(k,v)
return __multilines
+ #
def _finalize(self, __upa=None):
"Enregistrement du final"
__hasNotExecute = True
if __upa is not None and len(__upa)>0:
__upa = __upa.replace("ADD",str(self._objname))
self._lineSerie.append(__upa)
+ #
def _addLine(self, line=""):
"Ajoute un enregistrement individuel"
self._lineSerie.append(line)
+ #
def _get_objname(self):
return self._objname
+ #
def dump(self, __filename=None, __upa=None):
"Restitution normalisée des commandes"
self._finalize(__upa)
__fid.write(__text)
__fid.close()
return __text
+ #
def load(self, __filename=None, __content=None, __object=None):
"Chargement normalisé des commandes"
if __filename is not None and os.path.exists(__filename):
"""
Établissement des commandes d'un cas ADAO TUI (Cas<->TUI)
"""
+ __slots__ = ()
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entete"
GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
if self._content is not None:
for command in self._content:
self._append(*command)
+ #
def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
"Transformation d'une commande individuelle en un enregistrement"
if __command is not None and __keys is not None and __local is not None:
__text = __text.rstrip(", ")
__text += " )"
self._addLine(__text)
+ #
def _extract(self, __multilines="", __object=None):
"Transformation d'enregistrement(s) en commande(s) individuelle(s)"
__is_case = False
"""
Établissement des commandes d'un cas COMM (Eficas Native Format/Cas<-COM)
"""
+ __slots__ = ("_observerIndex", "_objdata")
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entete"
GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
if self._content is not None:
for command in self._content:
self._append(*command)
+ #
def _extract(self, __multilines=None, __object=None):
"Transformation d'enregistrement(s) en commande(s) individuelle(s)"
__suppparameters = {}
Remarque : le fichier généré est différent de celui obtenu par EFICAS
"""
+ __slots__ = (
+ "__DebugCommandNotSet", "__ObserverCommandNotSet",
+ "__UserPostAnalysisNotSet", "__hasAlgorithm")
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entête"
GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
"""
Etablissement des commandes d'un cas YACS (Cas->SCD->YACS)
"""
+    __slots__ = ("__internalSCD",)
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entete"
GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
self.__internalSCD = _SCDViewer(__name, __objname, __content, __object)
self._append = self.__internalSCD._append
+ #
def dump(self, __filename=None, __upa=None):
"Restitution normalisée des commandes"
# -----
"""
Partie commune de restitution simple
"""
+    __slots__ = ("_r",)
+ #
def __init__(self, __name="", __objname="case", __content=None, __object=None):
"Initialisation et enregistrement de l'entete"
GenericCaseViewer.__init__(self, __name, __objname, __content, __object)
if self._content is not None:
for command in self._content:
self._append(*command)
+ #
def _append(self, __command=None, __keys=None, __local=None, __pre=None, __switchoff=False):
"Transformation d'une commande individuelle en un enregistrement"
if __command is not None and __keys is not None and __local is not None:
__text += " with values:" + __ktext
__text = __text.rstrip(", ")
self._r.append(__text, "uli")
+ #
def _finalize(self, __upa=None):
"Enregistrement du final"
raise NotImplementedError()
"""
Restitution simple en RST
"""
+ __slots__ = ()
+ #
def _finalize(self, __upa=None):
self._lineSerie.append(Reporting.ReportViewInRst(self._r).__str__())
"""
Restitution simple en HTML
"""
+ __slots__ = ()
+ #
def _finalize(self, __upa=None):
self._lineSerie.append(Reporting.ReportViewInHtml(self._r).__str__())
"""
Restitution simple en TXT
"""
+ __slots__ = ()
+ #
def _finalize(self, __upa=None):
self._lineSerie.append(Reporting.ReportViewInPlainTxt(self._r).__str__())
Obtention d'une variable nommee depuis un fichier script importé
"""
__slots__ = ("__basename", "__filenspace", "__filestring")
+ #
def __init__(self, __filename=None):
"Verifie l'existence et importe le script"
if __filename is None:
self.__filenspace = ""
with open(__filename,'r') as fid:
self.__filestring = fid.read()
+ #
def getvalue(self, __varname=None, __synonym=None ):
"Renvoie la variable demandee par son nom ou son synonyme"
if __varname is None:
return getattr(self.__filenspace, __synonym)
else:
return getattr(self.__filenspace, __varname)
+ #
def getstring(self):
"Renvoie le script complet"
return self.__filestring
"""
Détection des caractéristiques de fichiers ou objets en entrée
"""
- __slots__ = (
- "__url", "__usr", "__root", "__end")
+ __slots__ = ("__url", "__usr", "__root", "__end")
+ #
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
return True
else:
return False
+ #
def is_not_local_file(self):
return not self.is_local_file()
+ #
def raise_error_if_not_local_file(self):
if self.is_not_local_file():
raise ValueError("The name or the url of the file object doesn't seem to exist. The given name is:\n \"%s\""%str(self.__url))
return True
else:
return False
+ #
def is_not_local_dir(self):
return not self.is_local_dir()
+ #
def raise_error_if_not_local_dir(self):
if self.is_not_local_dir():
raise ValueError("The name or the url of the directory object doesn't seem to exist. The given name is:\n \"%s\""%str(self.__url))
def get_standard_mime(self):
(__mtype, __encoding) = mimetypes.guess_type(self.__url, strict=False)
return __mtype
+ #
def get_user_mime(self):
__fake = "fake."+self.__usr.lower()
(__mtype, __encoding) = mimetypes.guess_type(__fake, strict=False)
return __mtype
+ #
def get_comprehensive_mime(self):
if self.get_standard_mime() is not None:
return self.get_standard_mime()
# ----------------------
def get_user_name(self):
return self.__url
+ #
def get_absolute_name(self):
return os.path.abspath(os.path.realpath(self.__url))
+ #
def get_extension(self):
return self.__end
"_filename", "_colnames", "_colindex", "_varsline", "_format",
"_delimiter", "_skiprows", "__url", "__filestring", "__header",
"__allowvoid", "__binaryformats", "__supportedformats")
+ #
def __enter__(self):
return self
+ #
def __exit__(self, exc_type, exc_val, exc_tb):
return False
#
else: self._colindex = None
#
self.__allowvoid = bool(AllowVoidNameList)
-
+ #
def __getentete(self, __nblines = 3):
"Lit l'entête du fichier pour trouver la définition des variables"
# La première ligne non vide non commentée est toujours considérée
for i in range(max(0,__nblines)):
__header.append(fid.readline())
return (__header, __varsline, __skiprows)
-
+ #
def __getindices(self, __colnames, __colindex, __delimiter=None ):
"Indices de colonnes correspondants à l'index et aux variables"
if __delimiter is None:
__useindex = None
#
return (__usecols, __useindex)
-
+ #
def getsupported(self):
self.__supportedformats = {}
self.__supportedformats["text/plain"] = True
self.__supportedformats["application/numpy.npz"] = True
self.__supportedformats["application/dymola.sdf"] = PlatformInfo.has_sdf
return self.__supportedformats
-
+ #
def getvalue(self, ColNames=None, ColIndex=None ):
"Renvoie la ou les variables demandées par la liste de leurs noms"
# Uniquement si mise à jour
__index = tuple([toString(v) for v in __index])
#
return (self._colnames, __columns, self._colindex, __index)
-
+ #
def getstring(self):
"Renvoie le fichier texte complet"
if self._format in self.__binaryformats:
else:
with open(self._filename,'r') as fid:
return fid.read()
-
+ #
def getformat(self):
return self._format
Seule la méthode "getvalue" est changée.
"""
+ __slots__ = ()
+ #
def __enter__(self):
return self
+ #
def __exit__(self, exc_type, exc_val, exc_tb):
return False
#
"""
Lancement autonome de l'interface EFICAS/ADAO
"""
+ __slots__ = ("__msg", "__path_settings_ok")
+ #
def __init__(self, __addpath = None):
# Chemin pour l'installation (ordre important)
self.__msg = ""
else:
print(self.__msg)
logging.debug("Errors in path settings have been found")
-
+ #
def gui(self):
if self.__path_settings_ok:
logging.debug("Launching standalone EFICAS/ADAO interface...")
"""
Rassemblement des informations sur le code et la plateforme
"""
+ __slots__ = ()
+ #
def __init__(self):
"Sans effet"
pass
-
+ #
def getName(self):
"Retourne le nom de l'application"
import daCore.version as dav
return dav.name
-
+ #
def getVersion(self):
"Retourne le numéro de la version"
import daCore.version as dav
return dav.version
-
+ #
def getDate(self):
"Retourne la date de création de la version"
import daCore.version as dav
return dav.date
-
+ #
def getYear(self):
"Retourne l'année de création de la version"
import daCore.version as dav
return dav.year
-
+ #
def getSystemInformation(self, __prefix=""):
__msg = ""
__msg += "\n%s%30s : %s" %(__prefix,"platform.system",platform.system())
__msg += "\n%s%30s : %s" %(__prefix,"platform.node",platform.node())
__msg += "\n%s%30s : %s" %(__prefix,"os.path.expanduser",os.path.expanduser('~'))
return __msg
-
+ #
def getPythonVersion(self):
"Retourne la version de python disponible"
return ".".join([str(x) for x in sys.version_info[0:3]]) # map(str,sys.version_info[0:3]))
-
+ #
def getNumpyVersion(self):
"Retourne la version de numpy disponible"
import numpy.version
return numpy.version.version
-
+ #
def getScipyVersion(self):
"Retourne la version de scipy disponible"
if has_scipy:
else:
__version = "0.0.0"
return __version
-
+ #
def getMatplotlibVersion(self):
"Retourne la version de matplotlib disponible"
if has_matplotlib:
else:
__version = "0.0.0"
return __version
-
+ #
def getGnuplotVersion(self):
"Retourne la version de gnuplotpy disponible"
if has_gnuplot:
else:
__version = "0.0"
return __version
-
+ #
def getSphinxVersion(self):
"Retourne la version de sphinx disponible"
if has_sphinx:
else:
__version = "0.0.0"
return __version
-
+ #
def getNloptVersion(self):
"Retourne la version de nlopt disponible"
if has_nlopt:
else:
__version = "0.0.0"
return __version
-
+ #
def getSdfVersion(self):
"Retourne la version de sdf disponible"
if has_sdf:
else:
__version = "0.0.0"
return __version
-
+ #
def getCurrentMemorySize(self):
"Retourne la taille mémoire courante utilisée"
return 1
-
+ #
def MaximumPrecision(self):
"Retourne la precision maximale flottante pour Numpy"
import numpy
except Exception:
mfp = 'float64'
return mfp
-
+ #
def MachinePrecision(self):
# Alternative sans module :
# eps = 2.38
# old_eps = eps
# eps = (1.0 + eps/2) - 1.0
return sys.float_info.epsilon
-
+ #
def __str__(self):
import daCore.version as dav
return "%s %s (%s)"%(dav.name,dav.version,dav.date)
# ==============================================================================
+try:
+ import numpy
+ has_numpy = True
+except ImportError:
+    raise ImportError("Numpy is not available, despite the fact that it is mandatory.")
+
try:
import scipy
import scipy.version
"""
Mise à jour du path système pour les répertoires d'outils
"""
+    __slots__ = ("__paths",)
+ #
def __init__(self):
"Déclaration des répertoires statiques"
parent = os.path.abspath(os.path.join(os.path.dirname(__file__),".."))
# Conserve en unique exemplaire chaque chemin
sys.path = uniq( sys.path )
del parent
-
+ #
def getpaths(self):
"""
Renvoie le dictionnaire des chemins ajoutés
"""
Permet de récupérer les différentes tailles mémoires du process courant
"""
+ __slots__ = ()
#
# Le module resource renvoie 0 pour les tailles mémoire. On utilise donc
# plutôt : http://code.activestate.com/recipes/286222/ et Wikipedia
"""
Store and retrieve the data for C: internal class
"""
+ __slots__ = ("__part", "__styles", "__content")
+ #
def __init__(self, part="default"):
self.__part = str(part)
self.__styles = []
self.__content = []
-
+ #
def append(self, content, style="p", position=-1):
if position == -1:
self.__styles.append(style)
self.__styles.insert(position, style)
self.__content.insert(position, content)
return 0
-
+ #
def get_styles(self):
return self.__styles
-
+ #
def get_content(self):
return self.__content
"""
Store and retrieve the data for C: internal class
"""
+    __slots__ = ("__document",)
+ #
def __init__(self, part='default'):
self.__document = {}
self.__document[part] = _ReportPartM__(part)
-
+ #
def append(self, content, style="p", position=-1, part='default'):
if part not in self.__document:
self.__document[part] = _ReportPartM__(part)
self.__document[part].append(content, style, position)
return 0
-
+ #
def get_styles(self):
op = list(self.__document.keys()) ; op.sort()
return [self.__document[k].get_styles() for k in op]
-
+ #
def get_content(self):
op = list(self.__document.keys()) ; op.sort()
return [self.__document[k].get_content() for k in op]
-
+ #
def clear(self):
self.__init__()
"""
Get user commands, update M and V: user interface to create the report
"""
+ __slots__ = ()
+ #
m = _ReportM__()
-
+ #
def append(self, content="", style="p", position=-1, part="default"):
return self.m.append(content, style, position, part)
-
+ #
def retrieve(self):
st = self.m.get_styles()
ct = self.m.get_content()
return st, ct
-
+ #
def clear(self):
self.m.clear()
"""
Interact with user and C: template for reports
"""
-
+    __slots__ = ("c",)
+ #
default_filename="report.txt"
-
+ #
def __init__(self, c):
self.c = c
-
+ #
def save(self, filename=None):
if filename is None:
filename = self.default_filename
fid.write(h)
fid.close()
return filename, _filename
-
+ #
def retrieve(self):
return self.c.retrieve()
-
+ #
def __str__(self):
return self.get()
-
+ #
def close(self):
del self.c
return 0
"""
Report in HTML
"""
-
+ __slots__ = ()
+ #
default_filename="report.html"
tags = {
"oli":"li",
"uli":"li",
}
-
+ #
def get(self):
st, ct = self.retrieve()
inuLi, inoLi = False, False
"""
Report in RST
"""
-
+ __slots__ = ()
+ #
default_filename="report.rst"
tags = {
"p":["\n\n",""],
"</b>":"**",
"</i>":"*",
}
-
+ #
def get(self):
st, ct = self.retrieve()
inuLi, inoLi = False, False
"""
Report in plain TXT
"""
-
+ #
default_filename="report.txt"
tags = {
"p":["\n",""],
"</b>":"",
"</i>":"",
}
-
+ #
def get(self):
st, ct = self.retrieve()
inuLi, inoLi = False, False
pg += "\n"
return pg
-
# Interface utilisateur de stockage des informations
ReportStorage = __ReportC__
Classe générale de stockage de type dictionnaire étendu
(Template)
"""
+ __slots__ = ("__preferedLanguage", "__values", "__order")
+ #
def __init__( self, language = "fr_FR" ):
self.__preferedLanguage = language
self.__values = {}
self.__order = -1
-
+ #
def store( self, name = None, content = None, fr_FR = "", en_EN = "", order = "next" ):
"D.store(k, c, fr_FR, en_EN, o) -> Store template k and its main characteristics"
if name is None or content is None:
'en_EN' : str(en_EN),
'order' : int(self.__order),
}
-
+ #
def keys(self):
"D.keys() -> list of D's keys"
__keys = sorted(self.__values.keys())
return __keys
-
+ #
def __contains__(self, name):
"D.__contains__(k) -> True if D has a key k, else False"
return name in self.__values
-
+ #
def __len__(self):
"x.__len__() <==> len(x)"
return len(self.__values)
-
+ #
def __getitem__(self, name=None ):
"x.__getitem__(y) <==> x[y]"
return self.__values[name]['content']
-
+ #
def getdoc(self, name = None, lang = "fr_FR"):
"D.getdoc(k, l) -> Return documentation of key k in language l"
if lang not in self.__values[name]: lang = self.__preferedLanguage
return self.__values[name][lang]
-
+ #
def keys_in_presentation_order(self):
"D.keys_in_presentation_order() -> list of D's keys in presentation order"
__orders = []