#!/usr/bin/env python
#  -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2023  CEA, EDF
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#

from medcoupling import *
#from ParaMEDMEMTestTools import WriteInTmpDir
import sys, os
import unittest
import math
from mpi4py import MPI
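
# Scenario: the 5 world ranks are split into three job groups (A on rank 0,
# B on ranks 1-2, C on ranks 3-4) and coupled pairwise through two
# InterpKernelDEC instances, "B<->C" and "A<->C", each built from a single
# ByStringMPIProcessorGroup.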
def ranksByGroup(groupString, jobPerWorldRank):
    """Return the list of world ranks whose job name equals groupString."""
    ranks = []
    for key, value in jobPerWorldRank.items():
        if groupString == value:
            ranks.append(key)
    return ranks
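
# For instance, with jobPerWorldRank = {0:"A",1:"B",2:"B",3:"C",4:"C"} as
# used below, ranksByGroup("C", jobPerWorldRank) returns [3, 4].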

class ParaMEDMEM_IK_DEC_Tests(unittest.TestCase):
    def test_InterpKernelDEC_easy_comm_creation(self):
        """
        [EDF26706] : build both InterpKernelDEC instances from a single
        ByStringMPIProcessorGroup and a "source<->target" coupling string.
        """
        size = MPI.COMM_WORLD.size
        rank = MPI.COMM_WORLD.rank
        if size != 5:
            print("Should be run on 5 procs!")
            return
        jobPerWorldRank = {0:"A",1:"B",2:"B",3:"C",4:"C"}
        interface = CommInterface()
        group = ByStringMPIProcessorGroup(interface, jobPerWorldRank[rank])
        decBC = InterpKernelDEC(group, "B<->C")
        decAC = InterpKernelDEC(group, "A<->C")
        # Each rank runs the scenario function matching its world rank.
        eval("Easy_comm_creation_{}".format(rank))(decBC, decAC)
        #
        MPI.COMM_WORLD.Barrier()

    def test_InterpKernelDEC_easy_comm_creation_2(self):
        """
        [EDF26706] : same scenario, but the InterpKernelDEC instances are
        built from two group-name strings instead of one coupling string.
        """
        size = MPI.COMM_WORLD.size
        rank = MPI.COMM_WORLD.rank
        if size != 5:
            print("Should be run on 5 procs!")
            return
        jobPerWorldRank = {0:"A",1:"B",2:"B",3:"C",4:"C"}
        interface = CommInterface()
        group = ByStringMPIProcessorGroup(interface, jobPerWorldRank[rank])
        decBC = InterpKernelDEC(group, "B", "C")
        decAC = InterpKernelDEC(group, "A", "C")
        eval("Easy_comm_creation_{}".format(rank))(decBC, decAC)
        #
        MPI.COMM_WORLD.Barrier()
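
# One scenario function per world rank: rank 0 acts as group A, ranks 1-2 as
# group B, ranks 3-4 as group C. Senders push their one-cell field; receivers
# attach a target field on both DECs and pull from B first, then from A.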

def Easy_comm_creation_0(decBC, decAC):
    """ Proc 0 of A: send a one-cell field to group C through decAC. """
    m = MEDCouplingCMesh()
    m.setCoords(DataArrayDouble([0, 1]), DataArrayDouble([0, 1]))
    m = m.buildUnstructured()
    field = MEDCouplingFieldDouble(ON_CELLS)
    field.setNature(IntensiveMaximum)
    field.setMesh(m)
    field.setArray(DataArrayDouble([1.2]))
    decAC.attachLocalField(field)
    decAC.synchronize()
    decAC.sendData()

def Easy_comm_creation_1(decBC, decAC):
    """ Proc 0 of B: send a one-cell field to group C through decBC. """
    m = MEDCouplingCMesh()
    m.setCoords(DataArrayDouble([2, 3]), DataArrayDouble([1, 2]))
    m = m.buildUnstructured()
    field = MEDCouplingFieldDouble(ON_CELLS)
    field.setNature(IntensiveMaximum)
    field.setMesh(m)
    field.setArray(DataArrayDouble([2.3]))
    decBC.attachLocalField(field)
    decBC.synchronize()
    decBC.sendData()

def Easy_comm_creation_2(decBC, decAC):
    """ Proc 1 of B: send a one-cell field to group C through decBC. """
    m = MEDCouplingCMesh()
    m.setCoords(DataArrayDouble([3, 4]), DataArrayDouble([1, 2]))
    m = m.buildUnstructured()
    field = MEDCouplingFieldDouble(ON_CELLS)
    field.setNature(IntensiveMaximum)
    field.setMesh(m)
    field.setArray(DataArrayDouble([3.3]))
    decBC.attachLocalField(field)
    decBC.synchronize()
    decBC.sendData()

def Easy_comm_creation_3(decBC, decAC):
    """ Proc 0 of C: receive from group B, then from group A, on one field. """
    m = MEDCouplingCMesh()
    m.setCoords(DataArrayDouble([0.5, 3.5]), DataArrayDouble([0, 1.5]))
    m = m.buildUnstructured()
    field = MEDCouplingFieldDouble(ON_CELLS)
    field.setNature(IntensiveMaximum)
    field.setMesh(m)
    field.setArray(DataArrayDouble([0.]))
    decBC.attachLocalField(field)
    decAC.attachLocalField(field)
    decBC.synchronize()
    decAC.synchronize()
    decBC.recvData()
    print(field.getArray().getValues())
    decAC.recvData()
    print(field.getArray().getValues())

def Easy_comm_creation_4(decBC, decAC):
    """ Proc 1 of C: receive from group B, then from group A, on one field. """
    m = MEDCouplingCMesh()
    m.setCoords(DataArrayDouble([0.7, 3.5]), DataArrayDouble([0, 1.5]))
    m = m.buildUnstructured()
    field = MEDCouplingFieldDouble(ON_CELLS)
    field.setNature(IntensiveMaximum)
    field.setMesh(m)
    field.setArray(DataArrayDouble([0.]))
    decBC.attachLocalField(field)
    decAC.attachLocalField(field)
    decBC.synchronize()
    decAC.synchronize()
    decBC.recvData()
    print(field.getArray().getValues())
    decAC.recvData()
    print(field.getArray().getValues())
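
# Intended to be launched on exactly 5 MPI processes, e.g. (assuming a
# standard MPI launcher is available):
#   mpirun -np 5 python3 test_InterpKernelDEC_easy.py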
if __name__ == "__main__":
    # unittest.main() exits the interpreter by default, which would skip
    # MPI.Finalize(); keep the process alive with exit=False.
    unittest.main(exit=False)
    MPI.Finalize()