1 //============================================================================
2 // Name        : Tests of using a subcommunicator for sending and receiving a 3D MEDCoupling field on cells (P0) lying on the same mesh between two groups of two processors
3 // Author : Michael NDJINGA
4 // Date : November 2021
5 // Description : Use of the parallel Data Exchange Channel StructuredCoincidentDEC of MEDCoupling
6 //============================================================================
12 #include "StructuredCoincidentDEC.hxx"
13 #include "MPIProcessorGroup.hxx"
14 #include "MEDCouplingFieldDouble.hxx"
15 #include "MEDCouplingCMesh.hxx"
16 #include "MEDCouplingUMesh.hxx"
17 #include "MEDLoader.hxx"
25 int main(int argc, char *argv[])
// Purpose: on four MPI processes split into two subcommunicators of two ranks
// each, build a cartesian mesh, create a P0 (cell-supported) field on it and
// exchange that field from sub-rank 0 (source) to sub-rank 1 (target) through
// a MEDCoupling StructuredCoincidentDEC.
// NOTE(review): this is a partial view of the function — the opening brace,
// the computation of `color`, the size==4 guard, the dec.synchronize()/
// sendData()/recvData() calls and MPI_Finalize live on elided lines.
27 	/* PETSc initialisation */
28 	MPI_Init(&argc, &argv);
29 	int size; /* size of communicator */
30 	int rank; /* processor rank */
31 	int sub_rank, sub_size;/* rank in subcommunicator */
32 	int color;/* tells if I belong to the sub_communicator */
33 	MPI_Comm sub_comm ;/*subcommunicator will be used in exchanges */
34 	std::set<int> procs_source, procs_target;/* group of procs that will send or receive data */
35 	MEDCoupling::MEDCouplingFieldDouble * field;/*field used to send or receive data */
36 	MPI_Comm_rank(MPI_COMM_WORLD,&rank);
37 	MPI_Comm_size(MPI_COMM_WORLD,&size);
// Abort message for a wrong process count (the guarding `if` is on elided
// lines).  NOTE(review): the runtime string reads "should done" — should be
// "should be done"; a doc-only pass may not edit string literals, so it is
// left unchanged here.
40 		printf("Processor %d : aborting.\n Simulation should done on four processors.\n %d processors given\n",rank,size);
44 	printf("My rank is %d among %d processors, my color is %d\n",rank, size,color);
// Split COMM_WORLD by `color` (computed on an elided line), keeping the world
// rank as the ordering key inside each subcommunicator.
46 	MPI_Comm_split(MPI_COMM_WORLD, color, rank, &sub_comm); /* two groups (0,1) and (2,3) */
47 	MPI_Comm_rank(sub_comm, &sub_rank);
48 	MPI_Comm_size(sub_comm, &sub_size);
50 	printf("WORLD RANK/SIZE: %d/%d \t subcommunicator RANK/SIZE: %d/%d\n", rank, size, sub_rank, sub_size);
// Within each subcommunicator: sub-rank 0 is the sender, sub-rank 1 the
// receiver.  The groups below are defined relative to sub_comm.
53 	procs_source.insert(0);/* sub rank 0 will send data */
54 	procs_target.insert(1);/* sub rank 1 will receive data */
// Build the Data Exchange Channel between the source and target groups.
56 	MEDCoupling::CommInterface interface = MEDCoupling::CommInterface();
57 	MEDCoupling::MPIProcessorGroup source_group = MEDCoupling::MPIProcessorGroup(interface, procs_source,sub_comm);
58 	MEDCoupling::MPIProcessorGroup target_group = MEDCoupling::MPIProcessorGroup(interface, procs_target,sub_comm);
59 	MEDCoupling::StructuredCoincidentDEC dec = MEDCoupling::StructuredCoincidentDEC(source_group, target_group);
// Create a MEDCouplingUMesh from a cartesian mesh.  NOTE(review): setCoords
// below receives three coordinate axes, so the mesh is 3D even though the
// runtime names ("My2D_CMesh", "RegularSquare") suggest 2D — confirm intent.
62 	MEDCoupling::DataArrayDouble * xarr=MEDCoupling::DataArrayDouble::New();
65 	MEDCoupling::MEDCouplingCMesh * cmesh=MEDCoupling::MEDCouplingCMesh::New("My2D_CMesh");
66 	cmesh->setCoords(xarr,xarr,xarr);
67 	MEDCoupling::MEDCouplingUMesh * mesh=cmesh->buildUnstructured();
68 	mesh->setName("RegularSquare");
// Source side (branch selection is on elided lines): fill the cell field with
// a quadratic analytic expression and save it to a per-rank MED file.
72 		field = mesh->fillFromAnalytic(MEDCoupling::ON_CELLS,1,"(x-5.)*(x-5.)+(y-5.)*(y-5.)+(z-5.)*(z-5.)");
73 		field->setName("SourceField");
74 		MEDCoupling::WriteField("source_field"+to_string(rank)+".med", field, true);
75 		printf("Processor with global rank %d has created and saved the source field\n", rank);
// Target side: start from an all-zero field on the same mesh support; it will
// be overwritten by the received data.
79 		field=mesh->fillFromAnalytic(MEDCoupling::ON_CELLS,1,"0");
80 		field->setName("TargetField");
81 		printf("Processor with global rank %d has created the target field\n", rank);
// Both sides attach their local field to the DEC; the synchronize/sendData/
// recvData calls that perform the exchange are on elided lines.
84 	dec.attachLocalField(field);
90 		printf("Processor with global rank %d has sent the source field\n", rank);
95 		printf("Processor with global rank %d has received the source field on the target mesh\n", rank);
96 	/* Solve the bug in StructuredCoincidentDEC then uncomment the lines below to check the result */
// NOTE(review): when re-enabling the check below, the printf uses %d for the
// double `error` — it should be %e or %g to print the relative norm correctly.
97 	//MEDCoupling::MEDCouplingFieldDouble * exact_field=mesh->fillFromAnalytic(MEDCoupling::ON_CELLS,1,"(x-5.)*(x-5.)+(y-5.)*(y-5.)+(z-5.)*(z-5.)");
98 	//exact_field->setName("ExactField");
99 	//double error=((*field)-(*exact_field))->normMax(0)/exact_field->normMax(0);
100 	//printf("Processor with global rank %d received source field that differs from theoretical value by %d (maximum relative norm on cells)\n", rank, error );
101 	//assert( fabs(error)<1.e-6 );
102 	//MEDCoupling::WriteField("target_field"+to_string(rank)+".med", field, true);
103 	//MEDCoupling::WriteField("exact_field"+to_string(rank)+".med", exact_field, true);
// Release the subcommunicator; MPI_Finalize is expected on an elided line.
106 	MPI_Comm_free(&sub_comm);