From 5805e8d90db68130af5e9c4e5612b19ae3a5035f Mon Sep 17 00:00:00 2001
From: michael
Date: Fri, 19 Nov 2021 13:33:50 +0100
Subject: [PATCH] Tested use of sub-communicators

---
 .../Python/MPI4PY/testSendRecvFieldSubComm.py | 39 ++++++++++++-------
 .../Python/MPI4PY/testTwoSimulations.py       | 11 ++++--
 2 files changed, 32 insertions(+), 18 deletions(-)

diff --git a/CoreFlows/examples/Python/MPI4PY/testSendRecvFieldSubComm.py b/CoreFlows/examples/Python/MPI4PY/testSendRecvFieldSubComm.py
index ed8a3a8..79e3765 100644
--- a/CoreFlows/examples/Python/MPI4PY/testSendRecvFieldSubComm.py
+++ b/CoreFlows/examples/Python/MPI4PY/testSendRecvFieldSubComm.py
@@ -2,7 +2,7 @@
 # -*-coding:utf-8 -*
 #===============================================================================================================================
-# Name        : Tests of using a subcommnicator for sending and receiving a 3D MEDCoupling field on cells (P0) lying on the same mesh between two processors
+# Name        : Tests of using a subcommunicator for sending and receiving a 3D MEDCoupling field on cells (P0) lying on the same mesh between two groups of two processors
 # Author      : Michaël Ndjinga
 # Copyright   : CEA Saclay 2021
 # Description : Use of the parallel Data Exchange Channel StructuredCoincidentDEC of MEDCoupling
@@ -11,22 +11,29 @@
 from mpi4py import MPI
 import medcoupling as mc
 
-comm = MPI.COMM_WORLD
-size = comm.Get_size()
-rank = comm.Get_rank()
-if(size!=3):
-    raise ValueError("Processor ", rank, " : aborting.\n Simulation should done on three processors.\n", size, " processors given")
+size = MPI.COMM_WORLD.Get_size()
+rank = MPI.COMM_WORLD.Get_rank()
+
+if(size!=4):
+    raise ValueError("Processor ", rank, " : aborting.\n Simulation should be done on four processors.\n", size, " processors given")
+
+color=rank%2;
 
-print("My rank is ", rank, " among ", size, "processors")
+print("My rank is ", rank, " among ", size, "processors, my color is ", color)
+
+sub_comm = MPI.COMM_WORLD.Split(color, rank);  # two groups (0,2) and (1,3)
+sub_rank = sub_comm.Get_rank();
+sub_size = sub_comm.Get_size();
 
 procs_source = [0]
 procs_target = [1]
-procs_idle = [2]
+
+print("WORLD RANK/SIZE: ",rank,"/",size," \t subcommunicator RANK/SIZE: ",sub_rank,"/",sub_size,"\n")
 
 interface = mc.CommInterface()
-source_group = mc.MPIProcessorGroup(interface, procs_source)
-target_group = mc.MPIProcessorGroup(interface, procs_target)
+source_group = mc.MPIProcessorGroup(interface, procs_source,sub_comm)
+target_group = mc.MPIProcessorGroup(interface, procs_target,sub_comm)
 dec = mc.StructuredCoincidentDEC(source_group, target_group)
 
 # Create a MEDCouplingUMesh from a 3D cartesian mesh
@@ -41,7 +48,7 @@ mesh.setName("RegularSquare")
 if source_group.containsMyRank():
     field=mesh.fillFromAnalytic(mc.ON_CELLS,1,"(x-5.)*(x-5.)+(y-5.)*(y-5.)+(z-5.)*(z-5.)")
     field.setName("SourceField")
-    mc.WriteField("source_field.med", field, True)
+    mc.WriteField("source_field"+str(rank)+".med", field, True)
     print("Processor ", rank, " has created and saved the source field")
 else:
     field=mesh.fillFromAnalytic(mc.ON_CELLS,1,"0")
@@ -59,10 +66,12 @@ elif target_group.containsMyRank():
     print("Processor ", rank, " has received the source field on the target mesh")
     exact_field=mesh.fillFromAnalytic(mc.ON_CELLS,1,"(x-5.)*(x-5.)+(y-5.)*(y-5.)+(z-5.)*(z-5.)")
     exact_field.setName("ExactField")
-    error=(field-exact_field).normL2()[0]
-    print("Processor ", rank, " received source field differs from theoretical value by ", error, " (L2 norm on cells)" )
+    error=(field-exact_field).normMax()[0]/exact_field.normMax()[0]
+    print("Processor ", rank, " received a source field that differs from the theoretical value by ", error, " (maximum relative norm on cell values)" )
     assert abs(error)<1.e-6
-    mc.WriteField("target_field.med", field, True)
-    mc.WriteField("exact_field.med", exact_field, True)
+    mc.WriteField("target_field"+str(rank)+".med", field, True)
+    mc.WriteField("exact_field"+str(rank)+".med", exact_field, True)
 else:
     print("Processor ", rank, " did nothing" )
+
+sub_comm.Free()
diff --git a/CoreFlows/examples/Python/MPI4PY/testTwoSimulations.py b/CoreFlows/examples/Python/MPI4PY/testTwoSimulations.py
index a970217..52bbe9a 100644
--- a/CoreFlows/examples/Python/MPI4PY/testTwoSimulations.py
+++ b/CoreFlows/examples/Python/MPI4PY/testTwoSimulations.py
@@ -14,7 +14,6 @@ import solverlab
 from math import sin, pi
 
 def StationaryDiffusionEquation_2DEF_StructuredTriangles_par(split_direction, rank):
-    spaceDim = 2;
     # Prepare for the mesh
     print("Processor ", rank, " : Building mesh " );
     xinf = 0 ;
@@ -33,8 +32,15 @@ def StationaryDiffusionEquation_2DEF_StructuredTriangles_par(split_direction, ra
     print("Processor ", rank, " : Built a regular triangular 2D mesh from a square mesh with ", nx,"x" ,ny, " cells.")
     print("Processor ", rank, " : Each square was split in two in direction ",split_direction)
 
+
     FEComputation=True
-    myProblem = solverlab.StationaryDiffusionEquation(spaceDim,FEComputation);
+    Lambda=1.#Thermal conductivity
+    spaceDim = 2
+
+    color = rank % 2
+    sub_comm = comm.Split(color)
+
+    myProblem = solverlab.StationaryDiffusionEquation(spaceDim,FEComputation, Lambda, sub_comm);
     myProblem.setMesh(M);
 
     # set the limit value for each boundary
@@ -58,7 +64,6 @@ def StationaryDiffusionEquation_2DEF_StructuredTriangles_par(split_direction, ra
         my_RHSfield[i]=2*pi*pi*sin(pi*x)*sin(pi*y)#mettre la fonction definie au second membre de l'edp
 
     myProblem.setHeatPowerField(my_RHSfield)
-    myProblem.setLinearSolver(solverlab.GMRES,solverlab.ILU);
 
     # name of result file
     fileName = "StationnaryDiffusion_2DEF_StructuredTriangles"+str(rank);
-- 
2.39.2
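
Note (not part of the patch): for readers unfamiliar with MPI sub-communicators, the color/key split exercised by testSendRecvFieldSubComm.py can be reproduced with mpi4py alone. A minimal standalone sketch is given below; the script name is illustrative, and it assumes mpi4py is installed and the program is launched on four processes, e.g. "mpirun -np 4 python3 split_sketch.py".

#!/usr/bin/env python3
# split_sketch.py : minimal sketch of the communicator split tested in this patch.
from mpi4py import MPI

size = MPI.COMM_WORLD.Get_size()
rank = MPI.COMM_WORLD.Get_rank()

color = rank % 2                              # two groups: even ranks (0,2) and odd ranks (1,3)
sub_comm = MPI.COMM_WORLD.Split(color, rank)  # key=rank keeps the original ordering inside each group

print("WORLD RANK/SIZE:", rank, "/", size,
      " subcommunicator RANK/SIZE:", sub_comm.Get_rank(), "/", sub_comm.Get_size())

sub_comm.Free()                               # release the sub-communicator, as done at the end of the patched test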