1 // Copyright (C) 2007-2012 CEA/DEN, EDF R&D
3 // This library is free software; you can redistribute it and/or
4 // modify it under the terms of the GNU Lesser General Public
5 // License as published by the Free Software Foundation; either
6 // version 2.1 of the License.
8 // This library is distributed in the hope that it will be useful,
9 // but WITHOUT ANY WARRANTY; without even the implied warranty of
10 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 // Lesser General Public License for more details.
13 // You should have received a copy of the GNU Lesser General Public
14 // License along with this library; if not, write to the Free Software
15 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 // See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
24 mpirun -np 2 medpartitioner_para --input-file=blade.med --output-file=ttmp1_ --ndomains=2 --dump-cpu-memory --verbose=$verb
25 mpirun -np 5 medpartitioner_para --input-file=blade.med --output-file=ttmp1_ --ndomains=2 --dump-cpu-memory --verbose=$verb
26 mpirun -np 4 medpartitioner_para --input-file=ttmp1_.xml --output-file=tttmp1_ --ndomains=4 --verbose=$verb
28 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=tmp_testMesh_20x30x50.med --output-file=ttmp2_ --verbose=$verb
29 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=ttmp2_.xml --output-file=ttmp3_ --verbose=$verb
31 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=tmp_testMeshWithFaces_20x30x50.med --output-file=ttmp2_ --verbose=$verb
32 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=ttmp2_.xml --output-file=ttmp3_ --verbose=$verb
34 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=tmp_testMesh_20x30x50_WithVecFieldOnCells.med --output-file=ttmp2_ --verbose=$verb
35 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=tmp_testMesh_20x30x50_WithVecFieldOnNodes.med --output-file=ttmp2_ --verbose=$verb
36 mpirun -np 2 medpartitioner_para --ndomains=2 --input-file=ttmp2_.xml --output-file=ttmp3_ --verbose=$verb
38 mpirun -np 4 medpartitioner_para --ndomains=4 --input-file=tmp_testMeshHuge_20x30x50_6.xml --output-file=ttmp3_ --verbose=1
42 #include "MEDPARTITIONER_MeshCollection.hxx"
43 #include "MEDPARTITIONER_ParallelTopology.hxx"
44 #include "MEDPARTITIONER_ParaDomainSelector.hxx"
45 #include "MEDPARTITIONER_Utils.hxx"
47 #include "MEDLoader.hxx"
60 using namespace MEDPARTITIONER;
// main() of the medpartitioner_para command-line tool (SALOME MEDPartitioner).
// It parses the command line, reads a (possibly distributed) MED mesh
// collection under MPI, partitions it into --ndomains sub-domains with
// ParMETIS (or PT-Scotch when compiled in), and writes one .med file per
// domain plus an .xml master file.
// NOTE(review): this excerpt is truncated — each line still carries its
// original source line number and many lines (braces, #endif, return paths)
// are elided; the code is therefore annotated as-is, with no token changed.
62 int main(int argc, char** argv)
// Built without any graph-partitioning library: nothing useful can be done
// (the elided lines presumably finalize and exit — TODO confirm in full source).
64 #if !defined(MED_ENABLE_PARMETIS)
65 cout << "Sorry, no one split method is available. Please, compile with ParMETIS."<<endl;
// ---- default option values (overridden below while parsing argv) ----
70 // by parsing the command line
72 bool split_family=false;
73 bool empty_groups=false;
// mesure_memory: set by --dump-cpu-memory; handed to ParaDomainSelector below.
74 bool mesure_memory=false;
// filter_face: when true, filterFaceOnCell() is applied to the new collection.
75 bool filter_face=true;
80 string library="metis"; //default
// ---- MPI start-up: rank and world size are cached in process-wide globals ----
84 MPI_Init(&argc,&argv);
85 MPI_Comm_size(MPI_COMM_WORLD, &MyGlobals::_World_Size);
86 MPI_Comm_rank(MPI_COMM_WORLD, &MyGlobals::_Rank);
88 //cout<<"proc "<<MyGlobals::_Rank<<" of "<<MyGlobals::_World_Size<<endl; //for debug
89 //TestVectorOfStringMpi(); //cvw
// Usage text printed for --help; the --split-method entry is only advertised
// when a choice of partitioning library was compiled in.
92 //Primitive parsing of command-line options
93 string desc ("Available options of medpartitioner_para V1.0.3:\n"
94 "\t--help : produces this help message\n"
95 "\t--verbose : echoes arguments\n"
96 "\t--input-file=<string> : name of the input .med file or .xml master file\n"
97 "\t--output-file=<string> : name of the resulting file (without extension)\n"
98 "\t--ndomains=<number> : number of subdomains in the output file, default is 1\n"
99 #if defined(MED_ENABLE_PARMETIS)
100 // || defined(MED_ENABLE_PTSCOTCH)
101 //user can choose! (not yet)
102 "\t--split-method=<string> : name of the splitting library (metis/scotch), default is metis\n"
104 "\t--creates-boundary-faces : creates boundary faces mesh in the output files\n"
105 "\t--dump-cpu-memory : dumps passed CPU time and maximal increase of used memory\n"
106 //"\t--randomize=<number> : random seed for other partitionning (only on one proc)\n"
107 //"\t--atomize : do the opposite of a good partitionner (only on one proc)\n"
// ---- argv parsing: each option must look like --name or --name=value ----
112 for (int i = 1; i < argc; i++)
// Anything shorter than 3 chars cannot be a valid "--x" option: abort all ranks.
114 if (strlen(argv[i]) < 3)
116 if (MyGlobals::_Rank==0) cerr << "bad argument : "<< argv[i] << endl;
117 MPI_Finalize(); return 1;
// --verbose defaults to level 1 when given without "=<n>".
120 if (TestArg(argv[i],"--verbose",value))
122 MyGlobals::_Verbose=1;
123 if (value!="") MyGlobals::_Verbose = atoi(value.c_str());
125 else if (TestArg(argv[i],"--help",value)) help=1;
126 else if (TestArg(argv[i],"--test",value)) test=1;
127 else if (TestArg(argv[i],"--input-file",value)) input=value;
128 else if (TestArg(argv[i],"--output-file",value)) output=value;
129 else if (TestArg(argv[i],"--split-method",value)) library=value;
130 else if (TestArg(argv[i],"--ndomains",value)) ndomains=atoi(value.c_str());
131 else if (TestArg(argv[i],"--randomize",value)) MyGlobals::_Randomize=atoi(value.c_str());
132 else if (TestArg(argv[i],"--atomize",value)) MyGlobals::_Atomize=atoi(value.c_str());
133 else if (TestArg(argv[i],"--creates-boundary-faces",value)) MyGlobals::_Creates_Boundary_Faces=1;
134 else if (TestArg(argv[i],"--dump-cpu-memory",value)) mesure_memory=true;
// Unrecognised option: report once (rank 0) and terminate every rank.
137 if (MyGlobals::_Rank==0) cerr << "unknown argument : "<< argv[i] << endl;
138 MPI_Finalize(); return 1;
// --randomize is a mono-process debug feature: disabled (after a warning on
// rank 0) when more than one MPI process is running.
143 if (MyGlobals::_Randomize!=0 && MyGlobals::_World_Size!=1)
145 if (MyGlobals::_Rank==0) cerr << "randomize only available on 1 proc (mpirun -np 1)" << endl;
146 MyGlobals::_Randomize=0;
// ---- validate --split-method against the libraries actually compiled in;
// only the both-libraries case needs a runtime check of the string ----
150 #if defined(MED_ENABLE_PARMETIS) && !defined(MED_ENABLE_SCOTCH)
153 #if !defined(MED_ENABLE_PARMETIS) && defined(MED_ENABLE_SCOTCH)
157 #if defined(MED_ENABLE_PARMETIS) && defined(MED_ENABLE_SCOTCH)
158 if ((library!="metis") && (library!="scotch"))
160 if (MyGlobals::_Rank==0) cerr << "split-method only available : metis, scotch" << endl;
161 MPI_Finalize(); return 1;
// --help: print the usage text once (rank 0) and exit successfully.
167 if (MyGlobals::_Rank==0) cout<<desc<<"\n";
168 MPI_Finalize(); return 0;
// _Is0verbose: verbosity effective only on rank 0, to avoid duplicated output.
171 MyGlobals::_Is0verbose=0;
172 if (MyGlobals::_Rank==0) MyGlobals::_Is0verbose=MyGlobals::_Verbose;
173 //MyGlobals::_Is0verbose=((MyGlobals::_Rank==0) && MyGlobals::_Verbose);
// --test: run internal MPI communication self-tests instead of partitioning.
174 if (test==1) //only for debugger
176 if (MyGlobals::_Is0verbose>0) cout<<"tests on "<<MyGlobals::_Atomize<<" "<<ndomains<<endl;
177 //TestPersistantMpi0To1(MyGlobals::_Atomize, ndomains);
178 //TestPersistantMpiRing(MyGlobals::_Atomize, ndomains);
179 TestPersistantMpiRingOnCommSplit(MyGlobals::_Atomize, ndomains);
180 //MPI_Barrier(MPI_COMM_WORLD);
183 TestVectorOfStringMpi();
184 TestMapOfStringIntMpi();
185 TestMapOfStringVectorOfStringMpi();
// Echo the effective settings once on rank 0.
191 if (MyGlobals::_Is0verbose)
193 cout << "medpartitioner_para V1.0 :" << endl;
194 cout << "  input-file = " << input << endl;
195 cout << "  output-file = " << output << endl;
196 cout << "  split-method = " << library << endl;
197 cout << "  ndomains = " << ndomains << endl;
198 cout << "  creates_boundary_faces = " << MyGlobals::_Creates_Boundary_Faces << endl;
199 cout << "  dump-cpu-memory = " << mesure_memory<< endl;
200 cout << "  verbose = " << MyGlobals::_Verbose << endl;
// Fail fast if the output location is not writable: rank 0 creates a small
// probe file next to the requested output and removes it again.
202 //testing whether it is possible to write a file at the specified location
203 if (MyGlobals::_Rank==0)
205 string outputtest = output + ".testioms.";
206 ofstream testfile (outputtest.c_str());
209 cerr << "output-file directory does not exist or is in read-only access" << endl;
210 MPI_Finalize(); return 1;
// delete the probe file; only the side effect of the check matters
213 remove(outputtest.c_str());
216 // Beginning of the computation
// ---- load the input mesh collection (constructed with the ParaDomainSelector,
// which also tracks memory when --dump-cpu-memory was given) ----
218 // Loading the mesh collection
219 if (MyGlobals::_Is0verbose) cout << "Reading input files "<<endl;
223 MEDPARTITIONER::ParaDomainSelector parallelizer(mesure_memory);
224 MEDPARTITIONER::MeshCollection collection(input,parallelizer);
225 MEDPARTITIONER::ParallelTopology* aPT = (MEDPARTITIONER::ParallelTopology*) collection.getTopology();
226 aPT->setGlobalNumerotationDefault(collection.getParaDomainSelector());
227 //int nbfiles=MyGlobals::_fileMedNames->size(); //nb domains
228 //to have unique valid fields names/pointers/descriptions for partitionning
229 collection.prepareFieldDescriptions();
230 //int nbfields=collection.getFieldDescriptions().size(); //on all domains
231 //cout<<ReprVectorOfString(collection.getFieldDescriptions());
// Verbose dump of what was read: file names, fields, families, groups.
233 if (MyGlobals::_Is0verbose)
235 cout<<"fileNames :"<<endl
236 <<ReprVectorOfString(MyGlobals::_File_Names);
237 cout<<"fieldDescriptions :"<<endl
238 <<ReprFieldDescriptions(collection.getFieldDescriptions()," ");
239 cout<<"familyInfo :\n"
240 <<ReprMapOfStringInt(collection.getFamilyInfo())<<endl;
241 cout<<"groupInfo :\n"
242 <<ReprMapOfStringVectorOfString(collection.getGroupInfo())<<endl;
// ---- build the graph and compute the partition into ndomains ----
245 //Creating the graph and partitioning it
246 if (MyGlobals::_Is0verbose) cout << "Computing partition "<< endl;
// NOTE(review): std::auto_ptr is deprecated since C++11 (removed in C++17);
// std::unique_ptr would be the modern equivalent — left unchanged here.
248 auto_ptr< MEDPARTITIONER::Topology > new_topo;
249 if (library == "metis")
250 new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::METIS));
252 new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::SCOTCH));
253 parallelizer.evaluateMemory();
// ---- redistribute: build the new per-domain mesh collection ----
255 //Creating a new mesh collection from the partitioning
256 if (MyGlobals::_Is0verbose) cout << "Creating new meshes"<< endl;
257 MEDPARTITIONER::MeshCollection new_collection(collection,new_topo.get(),split_family,empty_groups);
258 //cout<<"proc "<<MyGlobals::_Rank<<" : new_collection done"<<endl;
259 parallelizer.evaluateMemory();
261 //if (!xml_output_master) new_collection.setDriverType(MEDPARTITIONER::MedAscii);
262 if (filter_face) new_collection.filterFaceOnCell();
// ---- gather the per-rank "general informations" strings so every rank ends
// up with the same de-duplicated mesh/field metadata before writing ----
264 //to get infos on all procs
267 vector<string> finalInformations;
268 vector<string> r1,r2;
269 r1=AllgathervVectorOfString(MyGlobals::_General_Informations);
270 //if (MyGlobals::_Is0verbose>1000) cout << "generalInformations : \n"<<ReprVectorOfString(r1);
// Keep only entries tagged with both the old domain id and a mesh name.
271 r2=SelectTagsInVectorOfString(r1,"ioldDomain=");
272 r2=SelectTagsInVectorOfString(r2,"meshName=");
273 if (r2.size()==(collection.getMesh()).size())
275 for (std::size_t i=0; i<r2.size(); i++)
276 r2[i]=EraseTagSerialized(r2[i],"ioldDomain=");
277 r2=DeleteDuplicatesInVectorOfString(r2);
// presumably exactly one unique mesh name is expected here — the (elided)
// surrounding check is not visible in this excerpt; TODO confirm
280 string finalMesh="finalMeshName="+ExtractFromDescription(r2[0], "meshName=");
281 finalInformations.push_back(SerializeFromString(finalMesh));
// Fallback name when no consistent mesh name could be derived above.
284 if (finalInformations.size()==0)
286 if (MyGlobals::_Rank==0)
287 cerr<<"Problem on final meshName : set at 'Merge'"<<endl;
288 finalInformations.push_back(SerializeFromString("finalMeshName=Merge"));
// Same gathering for field metadata (field name + number of components).
291 //see field info nbComponents & componentInfo (if fields present)
292 r2=SelectTagsInVectorOfString(r1,"fieldName=");
293 r2=SelectTagsInVectorOfString(r2,"nbComponents=");
294 //may be yes? or not?
295 for (std::size_t i=0; i<r2.size(); i++)
296 r2[i]=EraseTagSerialized(r2[i],"ioldFieldDouble=");
297 r2=DeleteDuplicatesInVectorOfString(r2);
298 for (std::size_t i=0; i<r2.size(); i++)
299 finalInformations.push_back(r2[i]);
301 MyGlobals::_General_Informations=finalInformations;
302 if (MyGlobals::_Is0verbose)
303 cout << "generalInformations : \n"<<ReprVectorOfString(finalInformations);
// ---- write the ndomains output .med files and the .xml master file ----
305 //new_collection.setSubdomainBoundaryCreates(creates_boundary_faces);
306 if (MyGlobals::_Is0verbose) cout << "Writing "<<ndomains<<" output files "<<output<<"xx.med"<<" and "<<output<<".xml"<<endl;
307 new_collection.write(output);
// CPU-time / memory report: printed per rank when ranks run on different
// hosts, otherwise only by rank 0.
310 if ( parallelizer.isOnDifferentHosts() || MyGlobals::_Rank==0 )
312 cout << "Elapsed time = " << parallelizer.getPassedTime()
313 << ", max memory usage = " << parallelizer.evaluateMemory() << " KB"
316 if(MyGlobals::_World_Size>1)
317 MPI_Barrier(MPI_COMM_WORLD);
318 if (MyGlobals::_Is0verbose>0) cout<<"OK END"<< endl;
// ---- error reporting: each exception type is reported with the MPI rank;
// the corresponding MPI_Finalize()/return lines are elided in this excerpt.
// NOTE(review): "was occured" in the message below is ungrammatical
// ("occurred"), but message text is runtime behavior and is left unchanged.
322 catch(const char *mess)
324 cerr<<"proc "<<MyGlobals::_Rank<<" : "<<mess<<endl;
329 catch(INTERP_KERNEL::Exception& e)
331 cerr<<"proc "<<MyGlobals::_Rank<<" : INTERP_KERNEL_Exception : "<<e.what()<<endl;
336 catch(std::exception& e)
338 cerr<<"proc "<<MyGlobals::_Rank<<" : std_Exception : "<<e.what()<<endl;
345 cerr<<"proc "<<MyGlobals::_Rank<<" : an unknown type exception error was occured"<<endl;