From: Jean-Philippe ARGAUD
Date: Sat, 22 Apr 2023 05:08:15 +0000 (+0200)
Subject: Documentation review and user simplification for embedded files
X-Git-Tag: V9_11_0b1~6
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=f73bb43bbeafb14a6c47362f9c43c60a0b9323ba;p=modules%2Fadao.git

Documentation review and user simplification for embedded files
---

diff --git a/bin/AdaoYacsSchemaCreator.py b/bin/AdaoYacsSchemaCreator.py
index a2c0de9..84d4e5c 100644
--- a/bin/AdaoYacsSchemaCreator.py
+++ b/bin/AdaoYacsSchemaCreator.py
@@ -49,4 +49,8 @@ my_parser.add_argument('config_file')
 my_parser.add_argument('yacs_schema_filename')
 args = my_parser.parse_args()

+if os.path.dirname(args.config_file) != '':
+    # Ajout dans le sys.path pour permettre l'import des fichiers inclus
+    sys.path.insert(0, os.path.dirname(args.config_file))
+
 run.create_schema_from_file(args.config_file, args.yacs_schema_filename)
diff --git a/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst b/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst
index cbab474..78ca51c 100644
--- a/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst
+++ b/doc/en/ref_algorithm_MeasurementsOptimalPositioningTask.rst
@@ -70,8 +70,8 @@ Beware of the size of the hyper-cube (and then to the number of computations)
 that can be reached, it can grow quickly to be quite large.

 It is possible to exclude a priori potential positions for optimal measurement
-points, using the analysis variant "*lcEIM*" for a constrained positioning
-search.
+points, using the analysis variant "*PositioningBylcEIM*" for a constrained
+positioning search.

 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo02.rst
diff --git a/doc/en/snippets/BoundsWithNone.rst b/doc/en/snippets/BoundsWithNone.rst
index 52904a6..9045c18 100644
--- a/doc/en/snippets/BoundsWithNone.rst
+++ b/doc/en/snippets/BoundsWithNone.rst
@@ -4,8 +4,8 @@ Bounds
   *List of pairs of real values*. This key allows to define pairs of upper and
   lower bounds for every state variable being optimized. Bounds have to be
   given by a list of list of pairs of lower/upper bounds for each variable,
-  with possibly ``None`` every time there is no bound. The bounds can always be
-  specified, but they are taken into account only by the constrained
+  with a value of ``None`` each time there is no bound. The bounds can always
+  be specified, but they are taken into account only by the constrained
   optimizers.

   Example:
diff --git a/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst b/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst
index 5fcdc3f..876e515 100644
--- a/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst
+++ b/doc/fr/ref_algorithm_MeasurementsOptimalPositioningTask.rst
@@ -72,8 +72,8 @@ nombre de calculs) qu'il est possible d'atteindre, elle peut rapidement devenir
 importante.

 Il est possible d'exclure a priori des positions potentielles pour les points
-de mesures optimaux, en utilisant le variant "*lcEIM*" d'analyse pour une
-recherche de positionnement contraint.
+de mesures optimaux, en utilisant le variant "*PositioningBylcEIM*" d'analyse
+pour une recherche de positionnement contraint.

 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo02.rst
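
For reference, the "Bounds" key described in the reworded BoundsWithNone snippet above takes one lower/upper pair per optimized variable, with ``None`` wherever a bound is absent. A minimal hedged sketch of such a setting through the ADAO Python interface (the algorithm choice and the values are illustrative assumptions, not part of this patch):

    # Hedged sketch: one [lower, upper] pair per variable, None where unbounded.
    # Algorithm and numerical values are illustrative only.
    from adao import adaoBuilder
    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters',
        Algorithm  = '3DVAR',
        Parameters = {
            "Bounds": [[0., 10.], [None, 5.], [None, None]],
            },
        )
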
diff --git a/src/daComposant/daCore/Interfaces.py b/src/daComposant/daCore/Interfaces.py
index f4ff030..5c2d66b 100644
--- a/src/daComposant/daCore/Interfaces.py
+++ b/src/daComposant/daCore/Interfaces.py
@@ -289,7 +289,7 @@ class _COMViewer(GenericCaseViewer):
                 __from = r['data']
                 if 'STRING' in __from:
                     __parameters = ", Parameters=%s"%(repr(eval(__from['STRING'])),)
-                elif 'SCRIPT_FILE' in __from and os.path.exists(__from['SCRIPT_FILE']):
+                elif 'SCRIPT_FILE' in __from: # Pas de test d'existence du fichier pour accepter un fichier relatif
                     __parameters = ", Script='%s'"%(__from['SCRIPT_FILE'],)
             else: # if 'Parameters' in r and r['Parameters'] == 'Defaults':
                 __Dict = copy.deepcopy(r)
@@ -748,10 +748,18 @@ class ImportFromScript(object):
         "Verifie l'existence et importe le script"
         if __filename is None:
             raise ValueError("The name of the file, containing the variable to be read, has to be specified.")
-        if not os.path.isfile(__filename):
-            raise ValueError(
-                "The file containing the variable to be imported doesn't seem to"+\
-                " exist. Please check the file. The given file name is:\n \"%s\""%str(__filename))
+        __fullname, __i = __filename, 0
+        while not os.path.exists(__fullname) and __i < len(sys.path):
+            # Correction avec le sys.path si nécessaire
+            __fullname = os.path.join(sys.path[__i], __filename)
+            __i += 1
+        if not os.path.exists(__filename):
+            if os.path.exists(__fullname):
+                __filename = __fullname
+            else:
+                raise ValueError(
+                    "The file containing the variable to be imported doesn't seem to"+\
+                    " exist. Please check the file. The given file name is:\n \"%s\""%str(__filename))
         if os.path.dirname(__filename) != '':
             sys.path.insert(0, os.path.dirname(__filename))
         __basename = os.path.basename(__filename).rstrip(".py")
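
The ImportFromScript change above keeps the user-given file name when it exists and otherwise retries it against every sys.path entry before giving up. A standalone hedged sketch of that lookup pattern (the helper name is illustrative, not part of the module):

    # Hedged sketch of the lookup added above: try the name as given, then
    # prefix it with each sys.path entry until an existing file is found.
    import os, sys

    def resolve_with_sys_path(filename):
        fullname, i = filename, 0
        while not os.path.exists(fullname) and i < len(sys.path):
            fullname = os.path.join(sys.path[i], filename)
            i += 1
        return fullname if os.path.exists(fullname) else filename
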
): "Renvoie les anomalies centrées à partir d'un ensemble" if __OptMean is None: __Em = EnsembleMean( __Ensemble ) @@ -744,7 +744,7 @@ def CovarianceInflation( return __OutputCovOrEns # ============================================================================== -def HessienneEstimation(__selfA, __nb, __HaM, __HtM, __BI, __RI): +def HessienneEstimation( __selfA, __nb, __HaM, __HtM, __BI, __RI ): "Estimation de la Hessienne" # __HessienneI = [] @@ -781,7 +781,7 @@ def HessienneEstimation(__selfA, __nb, __HaM, __HtM, __BI, __RI): return __A # ============================================================================== -def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None): +def QuantilesEstimations( selfA, A, Xa, HXa = None, Hm = None, HtM = None ): "Estimation des quantiles a posteriori à partir de A>0 (selfA est modifié)" nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"] # @@ -857,7 +857,7 @@ def ForceNumericBounds( __Bounds, __infNumbers = True ): return __Bounds # ============================================================================== -def RecentredBounds( __Bounds, __Center, __Scale = None): +def RecentredBounds( __Bounds, __Center, __Scale = None ): "Recentre les bornes autour de 0, sauf si globalement None" # Conserve une valeur par défaut à None s'il n'y a pas de bornes if __Bounds is None: return None @@ -869,7 +869,7 @@ def RecentredBounds( __Bounds, __Center, __Scale = None): return __Scale @ (ForceNumericBounds( __Bounds, False ) - numpy.ravel( __Center ).reshape((-1,1))) # ============================================================================== -def ApplyBounds( __Vector, __Bounds, __newClip = True): +def ApplyBounds( __Vector, __Bounds, __newClip = True ): "Applique des bornes numériques à un point" # Conserve une valeur par défaut s'il n'y a pas de bornes if __Bounds is None: return __Vector @@ -896,7 +896,7 @@ def ApplyBounds( __Vector, __Bounds, __newClip = True): return __Vector # ============================================================================== -def Apply3DVarRecentringOnEnsemble(__EnXn, __EnXf, __Ynpu, __HO, __R, __B, __SuppPars): +def Apply3DVarRecentringOnEnsemble( __EnXn, __EnXf, __Ynpu, __HO, __R, __B, __SuppPars ): "Recentre l'ensemble Xn autour de l'analyse 3DVAR" __Betaf = __SuppPars["HybridCovarianceEquilibrium"] # @@ -923,7 +923,7 @@ def Apply3DVarRecentringOnEnsemble(__EnXn, __EnXf, __Ynpu, __HO, __R, __B, __Sup return Xa + EnsembleOfAnomalies( __EnXn ) # ============================================================================== -def FindIndexesFromNames( __NameOfLocations = None, __ExcludeLocations = None, ForceArray = False): +def FindIndexesFromNames( __NameOfLocations = None, __ExcludeLocations = None, ForceArray = False ): "Exprime les indices des noms exclus, en ignorant les absents" if __ExcludeLocations is None: __ExcludeIndexes = () diff --git a/src/daSalome/daGUI/daGuiImpl/adaoCase.py b/src/daSalome/daGUI/daGuiImpl/adaoCase.py index 7a0fc2b..464a3e6 100644 --- a/src/daSalome/daGUI/daGuiImpl/adaoCase.py +++ b/src/daSalome/daGUI/daGuiImpl/adaoCase.py @@ -22,6 +22,7 @@ # import os +import sys import time import subprocess import traceback @@ -40,7 +41,7 @@ class AdaoCase: self.filename = "not yet defined" # Python filename generated by Eficas self.yacs_filename = "not yet defined" # Yacs schema filename - self.tui_filename = "not yet defined" # Yacs schema filename + self.tui_filename = "not yet defined" # TUI filename #~ self.salome_study_id = -1 # Study of the case self.salome_study_item = 
diff --git a/src/daSalome/daYacsSchemaCreator/help_methods.py b/src/daSalome/daYacsSchemaCreator/help_methods.py
index 5551300..859a0b3 100644
--- a/src/daSalome/daYacsSchemaCreator/help_methods.py
+++ b/src/daSalome/daYacsSchemaCreator/help_methods.py
@@ -106,12 +106,28 @@ def check_study(study_config):
       raise ValueError("\n\nAnalysis found but Data is not defined in the analysis configuration!\n")

     if analysis_config["From"] == "Script":
-      check_file_name = analysis_config["Data"]
-      if check_repertory and not os.path.exists(check_file_name):
-        check_file_name = os.path.join(repertory, os.path.basename(analysis_config["Data"]))
-      if not os.path.exists(check_file_name):
+      # Recherche d'un nom valide :
+      checked_file_name = analysis_config["Data"]
+      if not os.path.exists(checked_file_name):
+        if check_repertory:
+          if os.path.exists( os.path.join(repertory, analysis_config["Data"]) ):
+            checked_file_name = os.path.join(repertory, analysis_config["Data"])
+          if os.path.exists( os.path.join(repertory, os.path.basename(analysis_config["Data"])) ):
+            checked_file_name = os.path.join(repertory, os.path.basename(analysis_config["Data"]))
+        else:
+          __i = 0
+          while not os.path.exists(checked_file_name) and __i < len(sys.path):
+            # Correction avec le sys.path si nécessaire
+            checked_file_name = os.path.join(sys.path[__i], analysis_config["Data"])
+            __i += 1
+          __i = 0
+          while not os.path.exists(checked_file_name) and __i < len(sys.path):
+            # Correction avec le sys.path si nécessaire
+            checked_file_name = os.path.join(sys.path[__i], os.path.basename(analysis_config["Data"]))
+            __i += 1
+      if not os.path.exists(checked_file_name):
         raise ValueError("\n\n The script file cannot be found for UserPostAnalysis,\n please check its availability.\n"+
-                         " The given user file is:\n %s\n" % check_file_name)
+                         " The given user file is:\n %s\n" % checked_file_name)

   # Check observers
   if "Observers" in study_config.keys():
@@ -173,7 +189,7 @@ def check_variables(name, study_config):
     except:
       raise ValueError("\n\n Variable %s value cannot be converted in an integer \n in the %s configuration!\n" % name)

-def check_data(data_name, data_config, repertory_check=False, repertory=""):
+def check_data(data_name, data_config, check_repertory=False, repertory=""):

   logging.debug("[check_data] " + data_name)
   data_name_data = "Data"
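
check_study above and check_data below now apply the same lookup order: the name as given, then the name under the given repertory (full relative name, then its basename), or, when no repertory check is requested, the name retried against every sys.path entry. A standalone hedged sketch of that order (the helper name is illustrative, not part of the module):

    # Hedged sketch of the lookup order used in check_study/check_data;
    # the helper name is illustrative only.
    import os, sys

    def find_user_file(name, check_repertory=False, repertory=""):
        candidates = [name]
        if check_repertory:
            candidates += [os.path.join(repertory, name),
                           os.path.join(repertory, os.path.basename(name))]
        else:
            candidates += [os.path.join(p, name) for p in sys.path]
            candidates += [os.path.join(p, os.path.basename(name)) for p in sys.path]
        for candidate in candidates:
            if os.path.exists(candidate):
                return candidate
        raise ValueError("The script file cannot be found:\n %s" % name)
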
@@ -199,16 +215,47 @@ def check_data(data_name, data_config, check_repertory=False, repertory=""):
   # Check des fichiers
   from_type = data_config["From"]
   if from_type == "Script":
-    check_file_name = data_config["Data"]
-    if repertory_check and not os.path.exists(check_file_name):
-      check_file_name = os.path.join(repertory, os.path.basename(data_config["Data"]))
-    if not os.path.exists(check_file_name):
-      raise ValueError("\n\n The script file cannot be found for the \"%s\" keyword, please \n check its availability. The given user file is:\n %s\n"%(from_type,check_file_name))
+    # Recherche d'un nom valide :
+    checked_file_name = data_config["Data"]
+    if not os.path.exists(checked_file_name):
+      if check_repertory:
+        if os.path.exists( os.path.join(repertory, data_config["Data"]) ):
+          checked_file_name = os.path.join(repertory, data_config["Data"])
+        if os.path.exists( os.path.join(repertory, os.path.basename(data_config["Data"])) ):
+          checked_file_name = os.path.join(repertory, os.path.basename(data_config["Data"]))
+      else:
+        # Correction avec le sys.path si nécessaire
+        __i = 0
+        while not os.path.exists(checked_file_name) and __i < len(sys.path):
+          checked_file_name = os.path.join(sys.path[__i], data_config["Data"])
+          __i += 1
+        __i = 0
+        while not os.path.exists(checked_file_name) and __i < len(sys.path):
+          checked_file_name = os.path.join(sys.path[__i], os.path.basename(data_config["Data"]))
+          __i += 1
+    if not os.path.exists(checked_file_name):
+      raise ValueError("\n\n The script file cannot be found for the \"%s\" keyword,\n please check its availability.\n The given user file is:\n %s\n"%(from_type,data_config["Data"]))
+    #
 elif (from_type == "FunctionDict" or from_type == "ScriptWithSwitch" or from_type == "ScriptWithFunctions" or from_type == "ScriptWithOneFunction"):
   TheData = data_config["Data"]
   for FunctionName in TheData["Function"]:
-    check_file_name = TheData["Script"][FunctionName]
-    if repertory_check and not os.path.exists(check_file_name):
-      check_file_name = os.path.join(repertory, os.path.basename(TheData["Script"][FunctionName]))
-    if not os.path.exists(check_file_name):
-      raise ValueError("\n\n The script file cannot be found for the \"%s\" keyword, please \n check its availability. The given user file is:\n %s\n"%(from_type,check_file_name))
+      # Recherche d'un nom valide :
+      checked_file_name = TheData["Script"][FunctionName]
+      if not os.path.exists(checked_file_name):
+        if check_repertory:
+          if os.path.exists( os.path.join(repertory, TheData["Script"][FunctionName]) ):
+            checked_file_name = os.path.join(repertory, TheData["Script"][FunctionName])
+          if os.path.exists( os.path.join(repertory, os.path.basename(TheData["Script"][FunctionName])) ):
+            checked_file_name = os.path.join(repertory, os.path.basename(TheData["Script"][FunctionName]))
+        else:
+          # Correction avec le sys.path si nécessaire
+          __i = 0
+          while not os.path.exists(checked_file_name) and __i < len(sys.path):
+            checked_file_name = os.path.join(sys.path[__i], TheData["Script"][FunctionName])
+            __i += 1
+          __i = 0
+          while not os.path.exists(checked_file_name) and __i < len(sys.path):
+            checked_file_name = os.path.join(sys.path[__i], os.path.basename(TheData["Script"][FunctionName]))
+            __i += 1
+      if not os.path.exists(checked_file_name):
+        raise ValueError("\n\n The script file cannot be found for the \"%s\" keyword, please \n check its availability. The given user file is:\n %s\n"%(from_type,checked_file_name))
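
Taken together, these changes let a user case or study configuration refer to its companion scripts by a name relative to the case or configuration file, since that directory and the sys.path entries are now searched before the check fails. A hedged end-to-end usage sketch (file names, values and the chosen algorithm are illustrative assumptions, not taken from this patch):

    # Hedged usage sketch: 'my_operator.py' is assumed to sit next to the case
    # definition and to provide the operator expected by ADAO.
    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set( 'Background',          Vector = [0., 0., 0.] )
    case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    case.set( 'Observation',         Vector = [1., 1., 1.] )
    case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    case.set( 'ObservationOperator', Script = 'my_operator.py' )  # relative name
    case.set( 'AlgorithmParameters', Algorithm = '3DVAR' )
    case.execute()
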