--- /dev/null
+# -*- coding: utf-8 -*-
+
+# --------------------------------------------------
+# debut entete
+# --------------------------------------------------
+
+import Accas
+from Accas import *
+
+JdC = JDC_CATA (code = 'DATASSIM',
+ execmodul = None,
+ regles = ( AU_MOINS_UN ('ASSIM_STUDY')),
+ )
+
+def F_VECTOR(statut) : return FACT(statut = statut,
+ FROM = SIMP(statut = "o", typ = "TXM", into=("String",)),
+ DATA = SIMP(statut = "o", typ = "TXM"),
+ )
+
+def F_MATRIX(statut) : return FACT(statut = statut,
+ FROM = SIMP(statut = "o", typ = "TXM", into=("String",)),
+ DATA = SIMP(statut = "o", typ = "TXM"),
+ )
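+
+# Example DATA strings for these keywords (taken from the test cases at the end of
+# this patch): a vector is written as "0,1,2" and a matrix as "1 0 0;0 1 0;0 0 1".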
+
+def F_BACKGROUND(statut) : return FACT(statut=statut,
+ regles = ( UN_PARMI ("VECTOR")),
+ VECTOR = F_VECTOR("o"),
+ )
+
+def F_BACKGROUND_ERROR(statut) : return FACT(statut=statut,
+ regles = ( UN_PARMI ("MATRIX")),
+ MATRIX = F_MATRIX("o"),
+ )
+
+def F_OBSERVATION(statut) : return FACT(statut=statut,
+ regles = ( UN_PARMI ("VECTOR")),
+ VECTOR = F_VECTOR("o"),
+ )
+
+def F_OBSERVATION_ERROR(statut) : return FACT(statut=statut,
+ regles = ( UN_PARMI ("MATRIX")),
+ MATRIX = F_MATRIX("o"),
+ )
+
+def F_OBSERVATION_OPERATOR(statut) : return FACT(statut=statut,
+ regles = ( UN_PARMI ("MATRIX", "FUNCTION")),
+ MATRIX = F_MATRIX("o"),
+ )
+
+def F_ANALYSIS(statut) : return FACT(statut = statut,
+ FROM = SIMP(statut = "o", typ = "TXM", into=("String", "File")),
+ STRING_DATA = BLOC ( condition = " FROM in ( 'String', ) ",
+
+ STRING = SIMP(statut = "o", typ = "TXM"),
+ ),
+ FILE_DATA = BLOC ( condition = " FROM in ( 'File', ) ",
+
+ FILE = SIMP(statut = "o", typ = "Fichier"),
+ ),
+ )
+
+
+ASSIM_STUDY = PROC(nom="ASSIM_STUDY",
+ op=None,
+ repetable = "n",
+ STUDY_NAME = SIMP(statut="o", typ = "TXM"),
+ ALGORITHM = FACT(statut='o',
+ regles = ( UN_PARMI ("Blue", "ENSEMBLEBLUE"),),
+
+ Blue = FACT(regles = ( ENSEMBLE ("Background", "BackgroundError",
+ "Observation", "ObservationError",
+ "ObservationOperator")),
+ Background = F_BACKGROUND("o"),
+ BackgroundError = F_BACKGROUND_ERROR("o"),
+ Observation = F_OBSERVATION("o"),
+ ObservationError = F_OBSERVATION_ERROR("o"),
+ ObservationOperator = F_OBSERVATION_OPERATOR("o"),
+ Analysis = F_ANALYSIS("f"),
+ ),
+ ENSEMBLEBLUE = FACT(BACKGROUND = F_BACKGROUND("o"),
+ ),
+ ),
+ )
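+
+# For illustration, a command file saved by EFICAS with this catalogue looks roughly
+# like the sketch below (assumption: the standard EFICAS Python generator, which
+# writes nested factor keywords with _F; the values are example data only):
+#
+#   ASSIM_STUDY(STUDY_NAME='Test',
+#               ALGORITHM=_F(Blue=_F(Background=_F(VECTOR=_F(FROM='String',
+#                                                            DATA='0,1,2')),
+#                                    ...)),);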
+
--- /dev/null
+# -*- coding: utf-8 -*-
+"""
+ Ce module sert pour charger les paramètres de configuration d'EFICAS
+"""
+# Modules Python
+print "passage dans la surcharge de configuration pour Datassim"
+import os, sys, string, types, re
+import traceback
+from PyQt4.QtGui import *
+
+# Eficas modules
+from Editeur import utils
+
+# Base class used to read, display
+# and save the user files
+class CONFIG:
+
+    def __init__(self, appli, repIni):
+
+        self.rep_user = os.environ["HOME"]
+        self.appli = appli
+        self.code = appli.code
+        self.rep_ini = repIni
+        self.savedir = self.rep_user
+        self.generator_module = "generator_datassim"
+        self.convert_module = "convert_datassim"
+
+        # Catalogue format:
+        # (code, version, catalogue file, formatIn)
+        self.catalogues = []
+        self.catalogues.append(["DATASSIM", "V0", os.path.join(self.rep_ini, 'DATASSIM_Cata_V0.py'), "datassim"])
+
+def make_config(appli, rep):
+
+    return CONFIG(appli, rep)
+
+def make_config_style(appli, rep):
+
+    return None
--- /dev/null
+# -*- coding: utf-8 -*-
+print "import convert_datassim"
+
+import convert.parseur_python
+from convert.convert_python import *
+
+def entryPoint():
+ """
+ Retourne les informations nécessaires pour le chargeur de plugins
+ Ces informations sont retournées dans un dictionnaire
+ """
+ return {
+ # Le nom du plugin
+ 'name' : 'datassim',
+ # La factory pour créer une instance du plugin
+ 'factory' : PythonParser,
+ }
+
+
--- /dev/null
+# -*- coding: utf-8 -*-
+print "import generator_datassim"
+
+from generator.generator_python import PythonGenerator
+
+def entryPoint():
+ """
+ Retourne les informations necessaires pour le chargeur de plugins
+
+ Ces informations sont retournees dans un dictionnaire
+ """
+ return {
+ # Le nom du plugin
+ 'name' : 'datassim',
+ # La factory pour creer une instance du plugin
+ 'factory' : DatassimGenerator,
+ }
+
+class DatassimGenerator(PythonGenerator):
+
+    def __init__(self, cr=None):
+        PythonGenerator.__init__(self, cr)
+        self.dictMCVal = {}
+        self.text_comm = ""
+        self.text_da = ""
+
+    def gener(self, obj, format='brut', config=None):
+        print "DatassimGenerator gener"
+        self.text_comm = PythonGenerator.gener(self, obj, format, config)
+
+        print "Dictionnaire"
+        print self.dictMCVal
+
+        self.generate_da()
+        return self.text_comm
+
+    def writeDefault(self, fn):
+        print "writeDefault"
+        filename = fn[:fn.rfind(".")] + '.py'
+        f = open(str(filename), 'wb')
+        f.write(self.text_da)
+        f.close()
+
+    def generMCSIMP(self, obj):
+        """
+        Convert an MCSIMP object into Python text and record its value in
+        self.dictMCVal, keyed by the keyword's genealogy.
+        """
+        clef = ""
+        for i in obj.get_genealogie():
+            clef = clef + "__" + i
+        self.dictMCVal[clef] = obj.valeur
+
+        s = PythonGenerator.generMCSIMP(self, obj)
+        return s
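+
+    # Example of the keys built above (these exact keys are read back in
+    # generate_da and add_data below, with "Blue" as the algorithm name):
+    #   "__ASSIM_STUDY__STUDY_NAME"
+    #   "__ASSIM_STUDY__ALGORITHM__Blue__Background__VECTOR__FROM"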
+
+    def generMCFACT(self, obj):
+
+        if obj.parent.nom == "ALGORITHM":
+            self.dictMCVal["ALGORITHM_NAME"] = obj.nom
+
+        s = PythonGenerator.generMCFACT(self, obj)
+        return s
+
+    def generate_da(self):
+
+        self.text_da += "#-*-coding:iso-8859-1-*- \n"
+        self.text_da += "study_config = {} \n"
+        self.text_da += "study_config[\"Name\"] = \"" + self.dictMCVal["__ASSIM_STUDY__STUDY_NAME"] + "\"\n"
+        self.text_da += "study_config[\"Algorithm\"] = \"" + self.dictMCVal["ALGORITHM_NAME"] + "\"\n"
+
+        self.add_data("Background")
+        self.add_data("BackgroundError")
+        self.add_data("Observation")
+        self.add_data("ObservationError")
+        self.add_data("ObservationOperator")
+        self.add_analysis()
+
+    def add_data(self, data_name):
+        search_text = "__ASSIM_STUDY__ALGORITHM__" + self.dictMCVal["ALGORITHM_NAME"] + "__"
+        back_search_text = search_text + data_name + "__"
+        # A missing key simply means this data was not given as a vector (resp. a matrix).
+        try:
+            back_from = self.dictMCVal[back_search_text + "VECTOR__FROM"]
+            back_data = self.dictMCVal[back_search_text + "VECTOR__DATA"]
+
+            self.text_da += data_name + "_config = {} \n"
+            self.text_da += data_name + "_config[\"Type\"] = \"Vector\" \n"
+            self.text_da += data_name + "_config[\"From\"] = \"" + back_from + "\" \n"
+            self.text_da += data_name + "_config[\"Data\"] = \"" + back_data + "\" \n"
+            self.text_da += "study_config[\"" + data_name + "\"] = " + data_name + "_config \n"
+        except KeyError:
+            pass
+        try:
+            back_from = self.dictMCVal[back_search_text + "MATRIX__FROM"]
+            back_data = self.dictMCVal[back_search_text + "MATRIX__DATA"]
+
+            self.text_da += data_name + "_config = {} \n"
+            self.text_da += data_name + "_config[\"Type\"] = \"Matrix\" \n"
+            self.text_da += data_name + "_config[\"From\"] = \"" + back_from + "\" \n"
+            self.text_da += data_name + "_config[\"Data\"] = \"" + back_data + "\" \n"
+            self.text_da += "study_config[\"" + data_name + "\"] = " + data_name + "_config \n"
+        except KeyError:
+            pass
+
+    def add_analysis(self):
+        search_text = "__ASSIM_STUDY__ALGORITHM__" + self.dictMCVal["ALGORITHM_NAME"] + "__Analysis__"
+        # The Analysis block is optional: a missing key means no user analysis was defined.
+        try:
+            ana_from = self.dictMCVal[search_text + "FROM"]
+            print ana_from
+            if ana_from == "String":
+                ana_data = self.dictMCVal[search_text + "STRING_DATA__STRING"]
+                self.text_da += "Analysis_config = {} \n"
+                self.text_da += "Analysis_config[\"From\"] = \"String\" \n"
+                self.text_da += "Analysis_config[\"Data\"] = \"\"\"" + ana_data + "\"\"\" \n"
+                self.text_da += "study_config[\"Analysis\"] = Analysis_config \n"
+            if ana_from == "File":
+                ana_data = self.dictMCVal[search_text + "FILE_DATA__FILE"]
+                self.text_da += "Analysis_config = {} \n"
+                self.text_da += "Analysis_config[\"From\"] = \"File\" \n"
+                self.text_da += "Analysis_config[\"Data\"] = \"" + ana_data + "\" \n"
+                self.text_da += "study_config[\"Analysis\"] = Analysis_config \n"
+        except KeyError:
+            pass
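+
+# For reference, the data assimilation script produced by generate_da() has the
+# following shape (a sketch with example values; see also the test cases at the
+# end of this patch):
+#
+#   study_config = {}
+#   study_config["Name"] = "Test"
+#   study_config["Algorithm"] = "Blue"
+#   Background_config = {}
+#   Background_config["Type"] = "Vector"
+#   Background_config["From"] = "String"
+#   Background_config["Data"] = "0,1,2"
+#   study_config["Background"] = Background_config
+#   ...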
+
--- /dev/null
+# Mandatory file that must only contain the variable code...
+code = "DATASSIM"
--- /dev/null
+# -*- coding: utf-8 -*-
+import os, sys
+
+print "import des prefs de Datassim"
+
+# Eficas configuration
+# INSTALLDIR and REPINI are mandatory
+INSTALLDIR = "/home/aribes/Dev/Src/CVS_SALOME/EficasV1"
+sys.path[:0]=[INSTALLDIR]
+# set to REPINI at startup, then updated by the configuration module
+REPINI=os.path.dirname(os.path.abspath(__file__))
+
+# initialdir is used as the initial directory of the QFileDialog windows
+initialdir=os.environ["PWD"]
+# String encoding that accepts accented characters (replaces 'ascii')
+encoding='iso-8859-1'
+# lang selects the language used for the help strings: fr or ang
+lang='fr'
+
+
+# INSTALLDIR locates the Eficas installation
+#INSTALLDIR=os.path.join(REPINI,'..')
+
+
+
+# Access to the documentation
+#rep_cata = INSTALLDIR
+#path_doc = os.path.join(rep_cata,'Doc')
+#exec_acrobat = "/usr/bin/xpdf"
+#savedir = os.environ['HOME']
+
+
+# OpenTURNS Python module
+#OpenTURNS_path="/local00/home/dutka/OpenTURNS/trunk-cmake/build-autotools/install/lib/python2.4/site-packages"
+# Catalogue selection
+#sys.path[:0]=[INSTALLDIR, OpenTURNS_path]
+
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+ Ce module sert à lancer EFICAS pour Datassim
+"""
+# Configuration
+import prefs
+import prefs_DATASSIM
+
+# Eficas modules
+import sys
+from InterfaceQT4 import eficas_go
+eficas_go.lance_eficas(code=prefs.code)
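+
+# Note: prefs.code is "DATASSIM" (see the prefs module above). EFICAS uses this
+# code to select the code-specific configuration defined earlier in this patch,
+# which registers the DATASSIM_Cata_V0.py catalogue and the generator/convert plugins.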
# -- Infos pour le parser --
AnalysisData = {}
-AnalysisFromList = ["string", "file"]
+AnalysisFromList = ["String", "File"]
# -- Infos from daCore --
AssimData = ["Background", "BackgroundError",
AssimType["ObservationOperatorAppliedToX"] = ["List"]
FromNumpyList = {}
-FromNumpyList["Vector"] = ["string"]
-FromNumpyList["Matrix"] = ["string"]
+FromNumpyList["Vector"] = ["String"]
+FromNumpyList["Matrix"] = ["String"]
FromNumpyList["Function"] = ["Dict"]
FromNumpyList["List"] = ["List"]
key_type = key + "Type"
- if data_config["Type"] == "Vector" and data_config["From"] == "string":
+ if data_config["Type"] == "Vector" and data_config["From"] == "String":
# Create node
factory_back_node = catalogAd._nodeMap["CreateNumpyVectorFromString"]
back_node = factory_back_node.cloneNode("Get" + key)
proc.edAddDFLink(back_node.getOutputPort("vector"), CAS_node.getInputPort(key))
proc.edAddDFLink(back_node.getOutputPort("type"), CAS_node.getInputPort(key_type))
- if data_config["Type"] == "Matrix" and data_config["From"] == "string":
+ if data_config["Type"] == "Matrix" and data_config["From"] == "String":
# Create node
factory_back_node = catalogAd._nodeMap["CreateNumpyMatrixFromString"]
back_node = factory_back_node.cloneNode("Get" + key)
# Step 4: create post-processing from user configuration
if "Analysis" in study_config.keys():
analysis_config = study_config["Analysis"]
- if analysis_config["From"] == "string":
+ if analysis_config["From"] == "String":
factory_analysis_node = catalogAd._nodeMap["SimpleUserAnalysis"]
analysis_node = factory_analysis_node.cloneNode("User Analysis")
default_script = analysis_node.getScript()
proc.edAddCFLink(compute_bloc, analysis_node)
proc.edAddDFLink(execute_node.getOutputPort("Study"), analysis_node.getInputPort("Study"))
- elif analysis_config["From"] == "file":
+ elif analysis_config["From"] == "File":
factory_analysis_node = catalogAd._nodeMap["SimpleUserAnalysis"]
analysis_node = factory_analysis_node.cloneNode("User Analysis")
default_script = analysis_node.getScript()
Background_config = {}
Background_config["Data"] = "0,1,2"
Background_config["Type"] = "Vector"
-Background_config["From"] = "string"
+Background_config["From"] = "String"
study_config["Background"] = Background_config
BackgroundError_config = {}
BackgroundError_config["Data"] = "1 0 0;0 1 0;0 0 1"
BackgroundError_config["Type"] = "Matrix"
-BackgroundError_config["From"] = "string"
+BackgroundError_config["From"] = "String"
study_config["BackgroundError"] = BackgroundError_config
Observation_config = {}
Observation_config["Data"] = "0.5,1.5,2.5"
Observation_config["Type"] = "Vector"
-Observation_config["From"] = "string"
+Observation_config["From"] = "String"
study_config["Observation"] = Observation_config
ObservationError_config = {}
ObservationError_config["Data"] = "1 0 0;0 1 0;0 0 1"
ObservationError_config["Type"] = "Matrix"
-ObservationError_config["From"] = "string"
+ObservationError_config["From"] = "String"
study_config["ObservationError"] = ObservationError_config
ObservationOperator_config = {}
ObservationOperator_config["Data"] = "1 0 0;0 1 0;0 0 1"
ObservationOperator_config["Type"] = "Matrix"
-ObservationOperator_config["From"] = "string"
+ObservationOperator_config["From"] = "String"
study_config["ObservationOperator"] = ObservationOperator_config
Analysis_config = {}
print " Test correct, erreur maximale inférieure à %s"%precision
print
"""
-Analysis_config["From"] = "string"
+Analysis_config["From"] = "String"
study_config["Analysis"] = Analysis_config
Background_config = {}
Background_config["Data"] = "0,1,2"
Background_config["Type"] = "Vector"
-Background_config["From"] = "string"
+Background_config["From"] = "String"
study_config["Background"] = Background_config
BackgroundError_config = {}
BackgroundError_config["Data"] = "1 0 0;0 1 0;0 0 1"
BackgroundError_config["Type"] = "Matrix"
-BackgroundError_config["From"] = "string"
+BackgroundError_config["From"] = "String"
study_config["BackgroundError"] = BackgroundError_config
Observation_config = {}
Observation_config["Data"] = "0.5,1.5,2.5"
Observation_config["Type"] = "Vector"
-Observation_config["From"] = "string"
+Observation_config["From"] = "String"
study_config["Observation"] = Observation_config
ObservationError_config = {}
ObservationError_config["Data"] = "1 0 0;0 1 0;0 0 1"
ObservationError_config["Type"] = "Matrix"
-ObservationError_config["From"] = "string"
+ObservationError_config["From"] = "String"
study_config["ObservationError"] = ObservationError_config
ObservationOperator_config = {}
ObservationOperator_config["Data"] = "1 0 0;0 1 0;0 0 1"
ObservationOperator_config["Type"] = "Matrix"
-ObservationOperator_config["From"] = "string"
+ObservationOperator_config["From"] = "String"
study_config["ObservationOperator"] = ObservationOperator_config
Analysis_config = {}
Analysis_config["Data"] = "/home/aribes/Projets/DATASSIM_SRC/src/tests/daSalome/test000_Blue_AnalysisCode.py"
-Analysis_config["From"] = "file"
+Analysis_config["From"] = "File"
study_config["Analysis"] = Analysis_config