--- /dev/null
+// Copyright (C) 2007-2023 CEA, EDF
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ByStringMPIProcessorGroup.hxx"
+
+#include "mpi.h"
+
+namespace MEDCoupling
+{
+ /*!
+ \anchor ByStringMPIProcessorGroup-det
+ \class ByStringMPIProcessorGroup
+
+    Specialization of MPIProcessorGroup in which each processor declares the code it runs
+    through a string tag (\a simCodeTag) instead of an explicit set of ranks. It is meant to
+    be used with the InterpKernelDEC constructors that identify the coupled codes by such tags.
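+
+    A minimal usage sketch, mirroring test_InterpKernelDEC_easy.py (\c myCodeTag is a
+    hypothetical placeholder, e.g. "A", "B" or "C" depending on the rank):
+    \code
+    CommInterface interface;
+    ByStringMPIProcessorGroup group(interface, myCodeTag); // group built from the local code tag
+    InterpKernelDEC dec(group, "B", "C");                  // couple the codes tagged "B" and "C"
+    \endcode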
+ */
+
+
+ /*!
+   * Builds the group on top of \a world_comm, \a simCodeTag being the string identifying
+   * the code executed by the local processor.
+ */
+ ByStringMPIProcessorGroup::ByStringMPIProcessorGroup(const CommInterface& interface, const std::string& simCodeTag, const MPI_Comm& world_comm):
+ MPIProcessorGroup(interface), _world_comm(world_comm)
+ {
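+    // the world communicator is only recorded here (no ownership taken over it)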
+
+ }
+
+
+ ByStringMPIProcessorGroup::~ByStringMPIProcessorGroup()
+ {
+ release();
+ }
--- /dev/null
+// Copyright (C) 2007-2023 CEA, EDF
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __BYSTRINGMPIPROCESSORGROUP_HXX__
+#define __BYSTRINGMPIPROCESSORGROUP_HXX__
+
+#include "MPIProcessorGroup.hxx"
+
+namespace MEDCoupling
+{
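+  /*! MPIProcessorGroup whose processors are identified by the string tag of the code
+      they run; see \ref ByStringMPIProcessorGroup-det "the detailed description". */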
+  class ByStringMPIProcessorGroup : public MPIProcessorGroup
+  {
+  public:
+    ByStringMPIProcessorGroup(const CommInterface& interface, const std::string& simCodeTag, const MPI_Comm& world_comm = MPI_COMM_WORLD);
+
+    virtual ~ByStringMPIProcessorGroup();
+  private:
+    //! communicator gathering all the coupled codes (observer only, no ownership)
+    MPI_Comm _world_comm;
+  };
+}
+
+#endif
InterpolationMatrix.cxx
LinearTimeInterpolator.cxx
MPIProcessorGroup.cxx
+ ByStringMPIProcessorGroup.cxx
MxN_Mapping.cxx
OverlapDEC.cxx
OverlapElementLocator.cxx
{
}
+ /*!
+   * Creates an InterpKernelDEC between the two codes gathered in \a group, a group built
+   * as a ByStringMPIProcessorGroup spanning both of them. The coupling is identified by the
+   * single string tag \a simCodeToBeCoupled (e.g. "B<->C" in test_InterpKernelDEC_easy.py).
+   * TODO: not implemented yet
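+   *
+   * Intended call sequence, sketched from test_InterpKernelDEC_easy.py (\c field and
+   * \c iRunCodeB are hypothetical placeholders of the example):
+   * \code
+   * InterpKernelDEC dec(group, "B<->C");
+   * dec.attachLocalField(field); // field defined on the local code's mesh
+   * dec.synchronize();           // computes the interpolation matrix between the two codes
+   * if(iRunCodeB) dec.sendData(); else dec.recvData();
+   * \endcode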
+ */
+ InterpKernelDEC::InterpKernelDEC(ProcessorGroup& group, const std::string& simCodeToBeCoupled):
+ DisjointDEC(),
+ _interpolation_matrix(0)
+ {
+ }
+
+ /*!
+   * Same as above, but the two coupled codes are identified by their own tags:
+   * \a simCodeToBeCoupledSrc on the source side and \a simCodeToBeCoupledTrg on the target side.
+   * TODO: not implemented yet
+ */
+ InterpKernelDEC::InterpKernelDEC(ProcessorGroup& group, const std::string& simCodeToBeCoupledSrc, const std::string& simCodeToBeCoupledTrg):
+ DisjointDEC(),
+ _interpolation_matrix(0)
+ {
+ }
+
InterpKernelDEC::~InterpKernelDEC()
{
release();
InterpKernelDEC();
InterpKernelDEC(ProcessorGroup& source_group, ProcessorGroup& target_group);
InterpKernelDEC(const std::set<int>& src_ids, const std::set<int>& trg_ids, const MPI_Comm& world_comm=MPI_COMM_WORLD);
+
+    // Constructors below use a group created as a ByStringMPIProcessorGroup
+ InterpKernelDEC(ProcessorGroup& group, const std::string& simCodeToBeCoupled);
+ InterpKernelDEC(ProcessorGroup& group, const std::string& simCodeToBeCoupledSrc, const std::string& simCodeToBeCoupledTrg);
+
virtual ~InterpKernelDEC();
void release();
SET_TESTS_PROPERTIES(PyPara_InterpKernelDEC_Proc4 PROPERTIES ENVIRONMENT "${tests_env}")
ADD_TEST(NAME PyPara_InterpKernelDEC_Proc5
COMMAND ${MPIEXEC} -np 5 ${_oversub_opt} ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test_InterpKernelDEC.py)
+  ADD_TEST(NAME PyPara_InterpKernelDEC_easy_Proc5
+    COMMAND ${MPIEXEC} -np 5 ${_oversub_opt} ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test_InterpKernelDEC_easy.py)
+  SET_TESTS_PROPERTIES(PyPara_InterpKernelDEC_easy_Proc5 PROPERTIES ENVIRONMENT "${tests_env}")
SET_TESTS_PROPERTIES(PyPara_InterpKernelDEC_Proc5 PROPERTIES ENVIRONMENT "${tests_env}")
#ADD_TEST(NAME PyPara_NonCoincidentDEC_Proc5
SET(_tst_scripts
test_InterpKernelDEC.py
+ test_InterpKernelDEC_easy.py
test_NonCoincidentDEC.py
test_StructuredCoincidentDEC.py
test_OverlapDEC.py
INSTALL(FILES ${_tst_scripts} DESTINATION ${MEDCOUPLING_INSTALL_SCRIPT_PYTHON})
SALOME_INSTALL_SCRIPTS(${CMAKE_CURRENT_BINARY_DIR}/ParaMEDMEM.py ${MEDCOUPLING_INSTALL_PYTHON} EXTRA_DPYS "${SWIG_MODULE_ParaMEDMEM_REAL_NAME}")
-INSTALL(FILES test_InterpKernelDEC.py test_NonCoincidentDEC.py test_StructuredCoincidentDEC.py DESTINATION ${MEDCOUPLING_INSTALL_SCRIPT_PYTHON})
+INSTALL(FILES test_InterpKernelDEC.py test_InterpKernelDEC_easy.py test_NonCoincidentDEC.py test_StructuredCoincidentDEC.py DESTINATION ${MEDCOUPLING_INSTALL_SCRIPT_PYTHON})
set(TEST_INSTALL_DIRECTORY ${MEDCOUPLING_INSTALL_TESTS}/ParaMEDMEM_Swig)
-install(FILES test_InterpKernelDEC.py test_NonCoincidentDEC.py test_OverlapDEC.py test_StructuredCoincidentDEC.py ParaMEDMEMTestTools.py test_BasicOperation.py DESTINATION ${TEST_INSTALL_DIRECTORY})
+install(FILES test_InterpKernelDEC.py test_InterpKernelDEC_easy.py test_NonCoincidentDEC.py test_OverlapDEC.py test_StructuredCoincidentDEC.py ParaMEDMEMTestTools.py test_BasicOperation.py DESTINATION ${TEST_INSTALL_DIRECTORY})
# export MPIEXEC and _oversub_opt to CTestTestfile.cmake of salome test mechanism
configure_file(CTestTestfileInstall.cmake.in "CTestTestfileST.cmake" @ONLY)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/CTestTestfileST.cmake DESTINATION ${TEST_INSTALL_DIRECTORY} RENAME CTestTestfile.cmake)
add_test(${TEST_NAME} ${MPIEXEC} -np 5 ${_oversub_opt} -path "${PATH_FOR_PYTHON}" python3 test_InterpKernelDEC.py)
set_tests_properties(${TEST_NAME} PROPERTIES LABELS "${COMPONENT_NAME}" TIMEOUT ${TIMEOUT})
+set(tfile PyPara_InterpKernelDEC_easy_Proc5)
+set(TEST_NAME ${COMPONENT_NAME}_${TEST_NAMES}_${tfile})
+add_test(${TEST_NAME} ${MPIEXEC} -np 5 ${_oversub_opt} -path "${PATH_FOR_PYTHON}" python3 test_InterpKernelDEC_easy.py)
+set_tests_properties(${TEST_NAME} PROPERTIES LABELS "${COMPONENT_NAME}" TIMEOUT ${TIMEOUT})
+
set(tfile PyPara_StructuredCoincidentDEC_Proc4)
set(TEST_NAME ${COMPONENT_NAME}_${TEST_NAMES}_${tfile})
add_test(${TEST_NAME} ${MPIEXEC} -np 4 ${_oversub_opt} -path "${PATH_FOR_PYTHON}" python3 test_StructuredCoincidentDEC.py)
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: iso-8859-1 -*-
+# Copyright (C) 2007-2023 CEA, EDF
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+from medcoupling import *
+from ParaMEDMEMTestTools import WriteInTmpDir
+import sys, os
+import unittest
+import math
+from mpi4py import MPI
+
+
+class ParaMEDMEM_IK_DEC_Tests(unittest.TestCase):
+ def test_InterpKernelDEC_easy_comm_creation(self):
+ """
+ [EDF26706] :
+ """
+ size = MPI.COMM_WORLD.size
+ rank = MPI.COMM_WORLD.rank
+ if size != 5:
+ print("Should be run on 5 procs!")
+ return
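+        # rank -> code mapping used by this test: rank 0 runs code "A",
+        # ranks 1 and 2 run code "B", ranks 3 and 4 run code "C"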
+ jobPerWorldRank = {0:"A",1:"B",2:"B",3:"C",4:"C"}
+ interface = CommInterface()
+ group = ByStringMPIProcessorGroup(interface, jobPerWorldRank[rank])
+ decBC = InterpKernelDEC(group,"B<->C")
+ decAC = InterpKernelDEC(group,"A<->C")
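+        # dispatch to the per-rank scenario Easy_comm_creation_<rank> defined below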
+ eval("Easy_comm_creation_{}".format(rank))(decBC,decAC)
+ #
+ MPI.COMM_WORLD.Barrier()
+
+ def test_InterpKernelDEC_easy_comm_creation_2(self):
+ """
+ [EDF26706] :
+ """
+ size = MPI.COMM_WORLD.size
+ rank = MPI.COMM_WORLD.rank
+ if size != 5:
+ print("Should be run on 5 procs!")
+ return
+ jobPerWorldRank = {0:"A",1:"B",2:"B",3:"C",4:"C"}
+ interface = CommInterface()
+ group = ByStringMPIProcessorGroup(interface, jobPerWorldRank[rank])
+ decBC = InterpKernelDEC(group,"B","C")
+ decAC = InterpKernelDEC(group,"A","C")
+ eval("Easy_comm_creation_{}".format(rank))(decBC,decAC)
+ #
+ MPI.COMM_WORLD.Barrier()
+
+def Easy_comm_creation_0(decBC,decAC):
+ """ Proc 0 of A"""
+ m = MEDCouplingCMesh() ; m.setCoords(DataArrayDouble([0,1]),DataArrayDouble([0,1])) ; m = m.buildUnstructured()
+ field = MEDCouplingFieldDouble(ON_CELLS)
+ field.setNature(IntensiveMaximum)
+ field.setMesh( m )
+ field.setArray( DataArrayDouble([1.2]))
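+    # usual DEC sequence on the sending side: attach the local field, synchronize, send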
+ decAC.attachLocalField( field )
+ decAC.synchronize()
+ decAC.sendData()
+ pass
+
+def Easy_comm_creation_1(decBC,decAC):
+ """ Proc 0 of B"""
+ m = MEDCouplingCMesh() ; m.setCoords(DataArrayDouble([2,3]),DataArrayDouble([1,2])) ; m = m.buildUnstructured()
+ field = MEDCouplingFieldDouble(ON_CELLS)
+ field.setNature(IntensiveMaximum)
+ field.setMesh( m )
+ field.setArray( DataArrayDouble([2.3]))
+ decBC.attachLocalField( field )
+ decBC.synchronize()
+ decBC.sendData()
+ pass
+
+def Easy_comm_creation_2(decBC,decAC):
+ """ Proc 1 of B"""
+ m = MEDCouplingCMesh() ; m.setCoords(DataArrayDouble([3,4]),DataArrayDouble([1,2])) ; m = m.buildUnstructured()
+ field = MEDCouplingFieldDouble(ON_CELLS)
+ field.setNature(IntensiveMaximum)
+ field.setMesh( m )
+ field.setArray( DataArrayDouble([3.3]))
+ decBC.attachLocalField( field )
+ decBC.synchronize()
+ decBC.sendData()
+ pass
+
+def Easy_comm_creation_3(decBC,decAC):
+ """ Proc 0 of C"""
+ m = MEDCouplingCMesh() ; m.setCoords(DataArrayDouble([0.5,3.5]),DataArrayDouble([0,1.5])) ; m = m.buildUnstructured()
+ field = MEDCouplingFieldDouble(ON_CELLS)
+ field.setNature(IntensiveMaximum)
+ field.setMesh( m )
+ field.setArray( DataArrayDouble([0.]))
+ decBC.attachLocalField( field )
+ decAC.attachLocalField( field )
+ decBC.synchronize()
+ decAC.synchronize()
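+    # receive the contribution of code B, then the one of code A;
+    # each recvData() updates the attached field in place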
+ decBC.recvData()
+ print(field.getArray().getValues())
+ decAC.recvData()
+ print(field.getArray().getValues())
+ pass
+
+def Easy_comm_creation_4(decBC,decAC):
+ """ Proc 1 of C"""
+ m = MEDCouplingCMesh() ; m.setCoords(DataArrayDouble([0.7,3.5]),DataArrayDouble([0,1.5])) ; m = m.buildUnstructured()
+ field = MEDCouplingFieldDouble(ON_CELLS)
+ field.setNature(IntensiveMaximum)
+ field.setMesh( m )
+ field.setArray( DataArrayDouble([0.]))
+ decBC.attachLocalField( field )
+ decAC.attachLocalField( field )
+ decBC.synchronize()
+ decAC.synchronize()
+ decBC.recvData()
+ print(field.getArray().getValues())
+ decAC.recvData()
+ print(field.getArray().getValues())
+ pass
+
+if __name__ == "__main__":
+    # unittest.main() raises SystemExit, so finalize MPI in a finally block
+    try:
+        unittest.main()
+    finally:
+        MPI.Finalize()
+