#!/usr/bin/env python
#  -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2023  CEA, EDF
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#

from medcoupling import *
from mpi4py import MPI
import unittest
import os

class ParaMEDMEM_DEC_Tests(unittest.TestCase):
    def test_NonCoincidentDEC_py(self):
        size = MPI.COMM_WORLD.size
        rank = MPI.COMM_WORLD.rank

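        # The test requires exactly 5 MPI processes: 3 source ranks and 2 target
        # ranks. A typical (assumed) invocation would be:
        #   mpirun -np 5 python3 test_NonCoincidentDEC.py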
        if size != 5:
            raise RuntimeError("Expect MPI.COMM_WORLD size == 5")

        nproc_source = 3
        procs_source = list(range(nproc_source))
        # Target group takes the remaining ranks, i.e. [3, 4]
        procs_target = list(range(nproc_source, size))

        interface = CommInterface()

        target_group = MPIProcessorGroup(interface, procs_target)
        source_group = MPIProcessorGroup(interface, procs_source)

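        # The DEC (Data Exchange Channel) couples the two process groups;
        # a NonCoincidentDEC interpolates field values between meshes whose
        # nodes and cells do not coincide.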
        dec = NonCoincidentDEC(source_group, target_group)

        # Locate the MED resource files; fall back to MED_RESOURCES_DIR
        # (a ':'-separated path list) if the standard install layout is absent.
        data_dir = os.path.join(os.environ['MEDCOUPLING_ROOT_DIR'], "share", "resources", "med")
        if not os.path.isdir(data_dir):
            data_dir = os.environ.get('MED_RESOURCES_DIR', "::").split(":")[1]
        tmp_dir = os.environ.get('TMP', "")
        if tmp_dir == '':
            tmp_dir = "/tmp"

        filename_xml1 = os.path.join(data_dir, "square1_split")
        filename_xml2 = os.path.join(data_dir, "square2_split")

        MPI.COMM_WORLD.Barrier()

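        # Each rank reads its own piece of a pre-split MED file:
        # square1_split<n>.med for source ranks, square2_split<n>.med for targets.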
        if source_group.containsMyRank():
            filename = filename_xml1 + str(rank+1) + ".med"
            meshname = "Mesh_2_" + str(rank+1)

            mesh = MESH(MED_DRIVER, filename, meshname)
            support = SUPPORT(mesh, "all elements", MED_CELL)
            paramesh = ParaMESH(mesh, source_group, "source mesh")

            parasupport = UnstructuredParaSUPPORT(support, source_group)
            comptopo = ComponentTopology()

            parafield = ParaFIELD(parasupport, comptopo)

            nb_local = support.getNumberOfElements(MED_ALL_ELEMENTS)

            value = [1.0]*nb_local

            parafield.getField().setValue(value)
            icocofield = ICoCoMEDDoubleField(paramesh, parafield)
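            # 'P0' declares a cell-centered field (one value per cell)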
            dec.attachLocalField(icocofield, 'P0')
            pass

        if target_group.containsMyRank():
            filename = filename_xml2 + str(rank - nproc_source + 1) + ".med"
            meshname = "Mesh_3_" + str(rank - nproc_source + 1)

            mesh = MESH(MED_DRIVER, filename, meshname)
            support = SUPPORT(mesh, "all elements", MED_CELL)
            paramesh = ParaMESH(mesh, target_group, "target mesh")

            parasupport = UnstructuredParaSUPPORT(support, target_group)
            comptopo = ComponentTopology()
            parafield = ParaFIELD(parasupport, comptopo)

            nb_local = support.getNumberOfElements(MED_ALL_ELEMENTS)
            value = [0.0]*nb_local

            parafield.getField().setValue(value)
            icocofield = ICoCoMEDDoubleField(paramesh, parafield)

            dec.attachLocalField(icocofield, 'P0')
            pass

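        # Sanity check: the transfer should conserve the volume integral of the
        # field, so the source-side integral (broadcast from rank 0) is compared
        # with the target-side integral (broadcast from the last rank) at the end.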
        field_before_int = [0.0]
        field_after_int = [0.0]

        if source_group.containsMyRank():
            field_before_int = [parafield.getVolumeIntegral(1)]
            # Collective over COMM_WORLD: the matching call is in the target branch
            field_before_int = MPI.COMM_WORLD.bcast(field_before_int, root=0)
            dec.synchronize()
            print("DEC usage")
            dec.setForcedRenormalization(False)
            dec.sendData()
            pass

        if target_group.containsMyRank():
            field_before_int = MPI.COMM_WORLD.bcast(field_before_int, root=0)
            dec.synchronize()
            dec.setForcedRenormalization(False)
            dec.recvData()
            field_after_int = [parafield.getVolumeIntegral(1)]
            pass

        field_before_int = MPI.COMM_WORLD.bcast(field_before_int, root=0)
        field_after_int = MPI.COMM_WORLD.bcast(field_after_int, root=size - 1)

        epsilon = 1e-6
        self.assertAlmostEqual(field_before_int[0], field_after_int[0], delta=epsilon)

        # Some clean-up still needs MPI communication, so it must happen before MPI_Finalize()
        dec.release()
        target_group.release()
        source_group.release()

        MPI.COMM_WORLD.Barrier()
        MPI.Finalize()

if __name__ == "__main__":
    unittest.main()