2 # Copyright (C) 2008-2009 EDF R&D
4 # This library is free software; you can redistribute it and/or
5 # modify it under the terms of the GNU General Public
6 # License as published by the Free Software Foundation; either
7 # version 2.1 of the License.
9 # This library is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 # Lesser General Public License for more details.
14 # You should have received a copy of the GNU Lesser General Public
15 # License along with this library; if not, write to the Free Software
16 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # Author : André RIBES (EDF R&D)
# Verbose logging for the whole generation run: DEBUG and above go to stderr.
logging.basicConfig(level=logging.DEBUG)
30 #----------- Templates Part ---------------#
31 begin_catalog_file = """
32 # -*- coding: utf-8 -*-
34 # --------------------------------------------------------
35 # generated by AdaoCatalogGenerator at ${date}
36 # --------------------------------------------------------
41 JdC = JDC_CATA (code = 'ADAO',
43 regles = ( AU_MOINS_UN ('ASSIM_STUDY'), AU_PLUS_UN ('ASSIM_STUDY')),
47 String_data_bloc = """
48 STRING_DATA = BLOC ( condition = " FROM in ( 'String', ) ",
50 STRING = SIMP(statut = "o", typ = "TXM"),
54 Script_data_bloc = """
55 SCRIPT_DATA = BLOC ( condition = " FROM in ( 'Script', ) ",
57 SCRIPT_FILE = SIMP(statut = "o", typ = "Fichier"),
62 DICT_DATA = BLOC ( condition = " FROM in ( 'Script', ) ",
64 SCRIPT_FILE = SIMP(statut = "o", typ = "Fichier"),
68 # Pour l'instant on ne gère qu'un seul script pour toutes les functions
69 FunctionDict_data_bloc = """
70 FUNCTIONDICT_DATA = BLOC ( condition = " FROM in ( 'FunctionDict', ) ",
72 FUNCTIONDICT_FILE = SIMP(statut = "o", typ = "Fichier"),
77 def F_${data_name}(statut) : return FACT(statut = statut,
78 FROM = SIMP(statut = "o", typ = "TXM", into=(${data_into})),
84 def F_InitChoice() : return ("Background",
88 "ObservationOperator",
89 "AlgorithmParameters",
92 def F_Init(statut) : return FACT(statut = statut,
93 INIT_FILE = SIMP(statut = "o", typ = "Fichier"),
94 TARGET_LIST = SIMP(statut = "o", typ = "TXM", min=1, max="**", into=F_InitChoice(), validators=(VerifExiste(2))),
97 assim_data_method = """
98 def F_${assim_name}(statut) : return FACT(statut=statut,
99 regles = ( UN_PARMI (${choices})),
104 assim_data_choice = """
105 ${choice_name} = F_${choice_name}("f"),
108 assim_opt_choice = """
109 ${choice_name} = F_${choice_name}("f"),
113 ${name} = FACT(regles = ( ENSEMBLE ("Background", "BackgroundError",
114 "Observation", "ObservationError",
115 "ObservationOperator")),
116 Background = F_Background("o"),
117 BackgroundError = F_BackgroundError("o"),
118 Observation = F_Observation("o"),
119 ObservationError = F_ObservationError("o"),
120 ObservationOperator = F_ObservationOperator("o"),
121 AlgorithmParameters = F_AlgorithmParameters("f"),
127 ASSIM_STUDY = PROC(nom="ASSIM_STUDY",
130 STUDY_NAME = SIMP(statut="o", typ = "TXM"),
131 ALGORITHM_NAME = SIMP(statut="o", typ = "TXM", into=(${algos_names})),
132 STUDY_REPERTORY = SIMP(statut="f", typ
133 ALGORITHM = FACT(statut='o',
134 regles = ( UN_PARMI (${algos}),),
# Compile every raw template text defined above into a string.Template,
# so the generation steps below can call .substitute() on them.
for _tmpl_name in ("begin_catalog_file", "data_method", "assim_data_method",
                   "assim_data_choice", "assim_opt_choice", "assim_algo",
                   "assim_study"):
    globals()[_tmpl_name] = string.Template(globals()[_tmpl_name])

#----------- End of Templates Part ---------------#
152 #----------- Begin generation script -----------#
153 print "-- Starting AdaoCalatogGenerator.py --"
# Import of the ADAO python modules needed by the generator.
# NOTE(review): the `try:` / `except ImportError:` lines that normally wrap
# these imports and route failures to the logging below are not visible in
# this excerpt -- confirm the structure against the full file.
import daYacsSchemaCreator
import daCore.AssimilationStudy
# On import failure: tell the user how to fix his environment, then dump
# the traceback for diagnosis.
logging.fatal("Import of ADAO python modules failed !" +
              "\n add ADAO python installation directory in your PYTHONPATH")
traceback.print_exc()
def check_args(args):
    """Validate the positional command-line arguments.

    The script expects exactly two arguments: catalog_path and catalog_name.
    """
    logging.debug("Arguments are :" + str(args))
    # NOTE(review): the length test guarding this fatal call (presumably
    # `if len(args) != 2:`) is on a line not shown in this excerpt -- as
    # written here the fatal message would always fire; confirm in full file.
    logging.fatal("Bad number of arguments: you have to provide two arguments (%d given)" % (len(args)))
# Parse command line: two positional arguments, catalog_path and catalog_name.
from optparse import OptionParser
usage = "usage: %prog [options] catalog_path catalog_name"
# NOTE(review): `version` is defined on a line not shown in this excerpt.
my_parser = OptionParser(usage=usage, version=version)
(options, args) = my_parser.parse_args()

# Destination directory and file name of the generated catalog.
catalog_path = args[0]
catalog_name = args[1]

# Generates into a string
mem_file = StringIO.StringIO()

# Write the generated-file header, stamped with the generation date.
from time import strftime
mem_file.write(begin_catalog_file.substitute(date=strftime("%Y-%m-%d %H:%M:%S")))
# Step 1: Check basic data input types
# Every basic input type declared by daYacsSchemaCreator must have a
# matching "<type>_data_bloc" template defined in the Templates Part above.
import daYacsSchemaCreator.infos_daComposant as infos
for basic_type in infos.BasicDataInputs:
    logging.debug('A basic data input type is found: ' + basic_type)
    if basic_type + '_data_bloc' not in locals().keys():
        logging.fatal("Basic data input type not found: " + basic_type)

# Step 2: Add data input dict
# For each declared data input, emit a F_<name> factory whose FROM keyword
# offers the basic types listed in infos.DataTypeDict.
for data_input_name in infos.DataTypeDict.keys():
    logging.debug('A data input is found: ' + data_input_name)
    data_name = data_input_name
    # NOTE(review): the resets of data_into/data_bloc (to empty strings)
    # are on lines not shown in this excerpt -- confirm in full file.
    for basic_type in infos.DataTypeDict[data_input_name]:
        # Accumulate the `into=` choice list and the per-type BLOC text.
        data_into += "\"" + basic_type + "\", "
        data_bloc += locals()[basic_type + '_data_bloc'] + "\n"

    mem_file.write(data_method.substitute(data_name = data_name,
                                          data_into = data_into,
                                          data_bloc = data_bloc))
# Step 3: Add assimilation algorithm data input
# Emits one F_<name> factory per assimilation data input, offering a
# UN_PARMI (exactly-one) choice among the declared alternatives.
for assim_data_input_name in infos.AssimDataDict.keys():
    logging.debug("An assimilation algorithm data input is found: " + assim_data_input_name)
    assim_name = assim_data_input_name
    # NOTE(review): the resets of choices/decl_choices are on lines not
    # shown in this excerpt.
    for choice in infos.AssimDataDict[assim_data_input_name]:
        choices += "\"" + choice + "\", "
        decl_choices += assim_data_choice.substitute(choice_name = choice)

    # NOTE(review): the assim_data_method template also expects a ${choices}
    # substitution; the `choices = choices,` argument line appears to be
    # elided from this excerpt -- confirm in full file.
    mem_file.write(assim_data_method.substitute(assim_name = assim_name,
                                                decl_choices = decl_choices))
# Step 4: Add optional nodes
# Same generation scheme as Step 2, applied to the optional inputs; the
# processed names are collected in opt_names for the final study block.
# NOTE(review): the initialisations of opt_names and the per-iteration
# resets of data_name/data_into/data_bloc are on lines not shown in this
# excerpt -- confirm in full file.
for opt_name in infos.OptDict.keys():
    logging.debug("An optional node is found: " + opt_name)
    for choice in infos.OptDict[opt_name]:
        data_into += "\"" + choice + "\", "
        data_bloc += locals()[choice + '_data_bloc'] + "\n"

    mem_file.write(data_method.substitute(data_name = data_name,
                                          data_into = data_into,
                                          data_bloc = data_bloc))

    opt_names.append(opt_name)
# Step 5: Add init node
# init_method is raw catalog text (not a Template), written verbatim.
mem_file.write(init_method)

# Final step: Add algorithm and assim_study
# Build the optional-node declarations shared by every algorithm FACT.
# NOTE(review): decl_opts is initialised on a line not shown in this excerpt.
for opt_name in opt_names:
    decl_opts += assim_opt_choice.substitute(choice_name = opt_name)
# Query the ADAO core for the available assimilation algorithms and declare
# one catalog entry per algorithm.
assim_study_object = daCore.AssimilationStudy.AssimilationStudy()
algos_list = assim_study_object.get_available_algorithms()
for algo_name in algos_list:
    logging.debug("An assimilation algorithm is found: " + algo_name)
    # The user-visible name list keeps the original spelling...
    algos_names += "\"" + algo_name + "\", "
    # ...but "3DVAR" cannot serve as a python identifier in the generated
    # `${name} = FACT(...)` line, so the factory is named ThreeDVAR instead.
    if algo_name == "3DVAR":
        algo_name = "ThreeDVAR"
    algos += "\"" + algo_name + "\", "
    decl_algos += assim_algo.substitute(name = algo_name, decl_opts=decl_opts) + "\n"

# Write the top-level ASSIM_STUDY procedure closing the catalog.
mem_file.write(assim_study.substitute(algos=algos,
                                      algos_names=algos_names,
                                      decl_algos=decl_algos))
# Flush the in-memory catalog to its destination file.
# Fix: "wr" is not a valid stdio open mode (behaviour was libc-dependent);
# plain write mode is what is intended.  Also close both handles so the
# output is reliably flushed to disk and no descriptor is leaked.
final_file = open(catalog_path + "/" + catalog_name, "w")
try:
    final_file.write(mem_file.getvalue())
finally:
    final_file.close()
mem_file.close()