SALOME platform Git repositories - tools/medcoupling.git/commitdiff
add mpi part
author    Cédric Aguerre <cedric.aguerre@edf.fr>
Tue, 27 Oct 2015 13:23:34 +0000 (14:23 +0100)
committer Cédric Aguerre <cedric.aguerre@edf.fr>
Thu, 29 Oct 2015 13:22:36 +0000 (14:22 +0100)
190 files changed:
CMakeLists.txt
cmake_files/FindMEDFile.cmake
cmake_files/FindMetis.cmake
cmake_files/FindParMetis.cmake [new file with mode: 0644]
cmake_files/FindScotch.cmake
resources/BDC-714.sauv [new file with mode: 0644]
resources/Box1.med [new file with mode: 0644]
resources/Box1Moderate.med [new file with mode: 0644]
resources/Box2.med [new file with mode: 0644]
resources/Box2Moderate.med [new file with mode: 0644]
resources/Box3.med [new file with mode: 0644]
resources/BoxEvenSmaller1.med [new file with mode: 0644]
resources/BoxHexa1.med [new file with mode: 0644]
resources/BoxHexa2.med [new file with mode: 0644]
resources/BoxModSmall1.med [new file with mode: 0644]
resources/BoxModSmall2.med [new file with mode: 0644]
resources/BoxTetra2.med [new file with mode: 0644]
resources/CMakeLists.txt
resources/ComplexIncludedTetra.med [new file with mode: 0644]
resources/ComplexIncludingTetra.med [new file with mode: 0644]
resources/CornerTetra.med [new file with mode: 0644]
resources/DegenEdgeXY.med [new file with mode: 0644]
resources/DegenFaceXYZ.med [new file with mode: 0644]
resources/DegenTranslatedInPlane.med [new file with mode: 0644]
resources/DividedGenTetra1.med [new file with mode: 0644]
resources/DividedGenTetra2.med [new file with mode: 0644]
resources/DividedUnitTetra.med [new file with mode: 0644]
resources/DividedUnitTetraSimpler.med [new file with mode: 0644]
resources/GenTetra1.med [new file with mode: 0644]
resources/GenTetra2.med [new file with mode: 0644]
resources/GeneralTetra.med [new file with mode: 0644]
resources/HalfstripOnly.med [new file with mode: 0644]
resources/HalfstripOnly2.med [new file with mode: 0644]
resources/MovedHexaBox1.med [new file with mode: 0644]
resources/MovedHexaBox2.med [new file with mode: 0644]
resources/NudgedDividedUnitTetra.med [new file with mode: 0644]
resources/NudgedDividedUnitTetraSimpler.med [new file with mode: 0644]
resources/NudgedSimpler.med [new file with mode: 0644]
resources/NudgedTetra.med [new file with mode: 0644]
resources/SimpleHalfstripOnly.med [new file with mode: 0644]
resources/SimpleIncludedTetra.med [new file with mode: 0644]
resources/SimpleIncludingTetra.med [new file with mode: 0644]
resources/TinyBox.med [new file with mode: 0644]
resources/TrickyTetra1.med [new file with mode: 0644]
resources/UnitTetra.med [new file with mode: 0644]
resources/UnitTetraDegenT.med [new file with mode: 0644]
resources/allPillesTest.sauv [new file with mode: 0644]
resources/portico_3subs.sauv [new file with mode: 0644]
src/INTERP_KERNELTest/CMakeLists.txt
src/INTERP_KERNELTest/Interpolation3DTest.cxx
src/INTERP_KERNELTest/MeshTestToolkit.txx
src/INTERP_KERNELTest/PerfTest.cxx
src/INTERP_KERNELTest/TestInterpKernelUtils.cxx
src/MEDCoupling/Test/CMakeLists.txt
src/MEDCoupling_Swig/CMakeLists.txt
src/MEDLoader/Swig/CMakeLists.txt
src/MEDLoader/Swig/MEDLoaderCouplingTrainingSession.py
src/MEDLoader/Swig/SauvLoaderTest.py
src/MEDLoader/Test/CMakeLists.txt
src/MEDLoader/Test/SauvLoaderTest.cxx
src/MEDPartitioner/CMakeLists.txt
src/MEDPartitioner/MEDPARTITIONER_metis.c
src/MEDPartitioner/Test/CMakeLists.txt
src/MEDPartitioner/Test/MEDPARTITIONERTest.cxx
src/MEDPartitioner/Test/MEDPARTITIONERTest.hxx
src/MEDPartitioner/Test/MEDPARTITIONERTestPara.cxx
src/MEDPartitioner_Swig/CMakeLists.txt
src/MEDPartitioner_Swig/MEDPartitionerTest.py
src/ParaMEDLoader/CMakeLists.txt [new file with mode: 0644]
src/ParaMEDLoader/ParaMEDFileMesh.cxx [new file with mode: 0644]
src/ParaMEDLoader/ParaMEDFileMesh.hxx [new file with mode: 0644]
src/ParaMEDLoader/ParaMEDLoader.cxx [new file with mode: 0644]
src/ParaMEDLoader/ParaMEDLoader.hxx [new file with mode: 0644]
src/ParaMEDMEM/BASICS_JR [new file with mode: 0644]
src/ParaMEDMEM/BlockTopology.cxx [new file with mode: 0644]
src/ParaMEDMEM/BlockTopology.hxx [new file with mode: 0644]
src/ParaMEDMEM/CMakeLists.txt [new file with mode: 0644]
src/ParaMEDMEM/CommInterface.cxx [new file with mode: 0644]
src/ParaMEDMEM/CommInterface.hxx [new file with mode: 0644]
src/ParaMEDMEM/ComponentTopology.cxx [new file with mode: 0644]
src/ParaMEDMEM/ComponentTopology.hxx [new file with mode: 0644]
src/ParaMEDMEM/DEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/DEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/DECOptions.hxx [new file with mode: 0644]
src/ParaMEDMEM/DisjointDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/DisjointDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/ElementLocator.cxx [new file with mode: 0644]
src/ParaMEDMEM/ElementLocator.hxx [new file with mode: 0644]
src/ParaMEDMEM/ExplicitCoincidentDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/ExplicitCoincidentDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/ExplicitMapping.hxx [new file with mode: 0644]
src/ParaMEDMEM/ExplicitTopology.cxx [new file with mode: 0644]
src/ParaMEDMEM/ExplicitTopology.hxx [new file with mode: 0644]
src/ParaMEDMEM/ICoCoField.cxx [new file with mode: 0644]
src/ParaMEDMEM/ICoCoField.hxx [new file with mode: 0644]
src/ParaMEDMEM/ICoCoMEDField.cxx [new file with mode: 0644]
src/ParaMEDMEM/ICoCoMEDField.hxx [new file with mode: 0644]
src/ParaMEDMEM/InterpKernelDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/InterpKernelDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/InterpolationMatrix.cxx [new file with mode: 0644]
src/ParaMEDMEM/InterpolationMatrix.hxx [new file with mode: 0644]
src/ParaMEDMEM/LinearTimeInterpolator.cxx [new file with mode: 0644]
src/ParaMEDMEM/LinearTimeInterpolator.hxx [new file with mode: 0644]
src/ParaMEDMEM/MPIAccess.cxx [new file with mode: 0644]
src/ParaMEDMEM/MPIAccess.hxx [new file with mode: 0644]
src/ParaMEDMEM/MPIAccessDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/MPIAccessDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/MPIProcessorGroup.cxx [new file with mode: 0644]
src/ParaMEDMEM/MPIProcessorGroup.hxx [new file with mode: 0644]
src/ParaMEDMEM/MxN_Mapping.cxx [new file with mode: 0644]
src/ParaMEDMEM/MxN_Mapping.hxx [new file with mode: 0644]
src/ParaMEDMEM/NonCoincidentDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/NonCoincidentDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapElementLocator.cxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapElementLocator.hxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapInterpolationMatrix.cxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapInterpolationMatrix.hxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapMapping.cxx [new file with mode: 0644]
src/ParaMEDMEM/OverlapMapping.hxx [new file with mode: 0644]
src/ParaMEDMEM/ParaFIELD.cxx [new file with mode: 0644]
src/ParaMEDMEM/ParaFIELD.hxx [new file with mode: 0644]
src/ParaMEDMEM/ParaGRID.cxx [new file with mode: 0644]
src/ParaMEDMEM/ParaGRID.hxx [new file with mode: 0644]
src/ParaMEDMEM/ParaMESH.cxx [new file with mode: 0644]
src/ParaMEDMEM/ParaMESH.hxx [new file with mode: 0644]
src/ParaMEDMEM/ProcessorGroup.cxx [new file with mode: 0644]
src/ParaMEDMEM/ProcessorGroup.hxx [new file with mode: 0644]
src/ParaMEDMEM/README_JR [new file with mode: 0644]
src/ParaMEDMEM/StructuredCoincidentDEC.cxx [new file with mode: 0644]
src/ParaMEDMEM/StructuredCoincidentDEC.hxx [new file with mode: 0644]
src/ParaMEDMEM/TODO_JR [new file with mode: 0644]
src/ParaMEDMEM/TimeInterpolator.cxx [new file with mode: 0644]
src/ParaMEDMEM/TimeInterpolator.hxx [new file with mode: 0644]
src/ParaMEDMEM/Topology.cxx [new file with mode: 0644]
src/ParaMEDMEM/Topology.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPI2Connector.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPI2Connector.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPIAccessDECTest.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPIAccessDECTest.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPIAccessTest.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPIAccessTest.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/MPIMainTest.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest.hxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_1.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_2.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_BlockTopology.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_FabienAPI.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_Gauthier1.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_ICoco.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_InterpKernelDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_MEDLoader.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_MPIProcessorGroup.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_NonCoincidentDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_OverlapDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/ParaMEDMEMTest_StructuredCoincidentDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/TestMPIAccess.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/TestMPIAccessDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/TestParaMEDMEM.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_AllToAllDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_AllToAllTimeDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_AllToAllvDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_AllToAllvTimeDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_AllToAllvTimeDoubleDEC.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Cancel.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Cyclic_ISend_IRecv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Cyclic_Send_Recv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_IProbe.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_ISendRecv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_BottleNeck.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length_1.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Probe.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_SendRecv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Send_Recv.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Send_Recv_Length.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Time.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_MPI_Access_Time_0.cxx [new file with mode: 0644]
src/ParaMEDMEMTest/test_perf.cxx [new file with mode: 0644]
src/ParaMEDMEM_Swig/CMakeLists.txt [new file with mode: 0644]
src/ParaMEDMEM_Swig/ParaMEDMEM.i [new file with mode: 0644]
src/ParaMEDMEM_Swig/ParaMEDMEM.typemap [new file with mode: 0644]
src/ParaMEDMEM_Swig/test_InterpKernelDEC.py [new file with mode: 0755]
src/ParaMEDMEM_Swig/test_NonCoincidentDEC.py [new file with mode: 0755]
src/ParaMEDMEM_Swig/test_StructuredCoincidentDEC.py [new file with mode: 0755]
src/RENUMBER/testRenumbering.py
src/RENUMBER_Swig/CMakeLists.txt

index 4bbaae984f209a5e3af1b869b03db0811c3d7756..ba75c06cb03a3ce070c37e2332962cd13928774f 100644 (file)
@@ -86,16 +86,10 @@ IF(NOT SALOME_MED_MICROMED)
     #FIND_PACKAGE(SalomeLibXml2)
     #SALOME_LOG_OPTIONAL_PACKAGE(LibXml2 SALOME_MED_ENABLE_PARTITIONER)
     IF(DEFINED ENV{LIBXML2_ROOT_DIR})
-      SET(CMAKE_PREFIX_PATH "$ENV{LIBXML2_ROOT_DIR}")
+      SET(LIBXML2_ROOT_DIR $ENV{LIBXML2_ROOT_DIR} CACHE PATH "Path to the LibXml2.")
+      LIST(APPEND CMAKE_PREFIX_PATH "${LIBXML2_ROOT_DIR}")
     ENDIF()
     FIND_PACKAGE(LibXml2)
-    MESSAGE("LIBXML2_FOUND: ${LIBXML2_FOUND}")
-    MESSAGE("LIBXML2_INCLUDE_DIR: ${LIBXML2_INCLUDE_DIR}")
-    MESSAGE("LIBXML2_LIBRARIES: ${LIBXML2_LIBRARIES}")
-    MESSAGE("LIBXML2_DEFINITIONS: ${LIBXML2_DEFINITIONS}")
-    MESSAGE("LIBXML2_XMLLINT_EXECUTABLE: ${LIBXML2_XMLLINT_EXECUTABLE}")
-    MESSAGE("LIBXML2_VERSION_STRING: ${LIBXML2_VERSION_STRING}")
-
     IF(SALOME_MED_PARTITIONER_METIS)
       #FIND_PACKAGE(SalomeMetis)
       #SALOME_LOG_OPTIONAL_PACKAGE(Metis SALOME_MED_PARTITIONER_METIS)
@@ -120,11 +114,17 @@ IF(SALOME_BUILD_TESTS)
 ENDIF(SALOME_BUILD_TESTS)
 
 IF(SALOME_USE_MPI)
-  FIND_PACKAGE(SalomeMPI REQUIRED)
+  #FIND_PACKAGE(SalomeMPI REQUIRED)
+  FIND_PACKAGE(MPI REQUIRED)
   ADD_DEFINITIONS("-DHAVE_MPI")
+  SET(MPI_INCLUDE_DIRS ${MPI_C_INCLUDE_PATH} ${MPI_CXX_INCLUDE_PATH})
+  SET(MPI_LIBRARIES ${MPI_C_LIBRARIES} ${MPI_CXX_LIBRARIES})
+  SET(MPI_DEFINITIONS "${MPI_CXX_COMPILE_FLAGS}")
+
   IF(SALOME_MED_PARTITIONER_PARMETIS)
-    FIND_PACKAGE(SalomeParMetis)
-    SALOME_LOG_OPTIONAL_PACKAGE(ParMetis SALOME_MED_PARTITIONER_PARMETIS)
+    #FIND_PACKAGE(SalomeParMetis)
+    FIND_PACKAGE(ParMetis)
+    #SALOME_LOG_OPTIONAL_PACKAGE(ParMetis SALOME_MED_PARTITIONER_PARMETIS)
     ADD_DEFINITIONS("-DMED_ENABLE_PARMETIS")
   ENDIF(SALOME_MED_PARTITIONER_PARMETIS)
 ENDIF(SALOME_USE_MPI)
@@ -133,7 +133,8 @@ IF(SALOME_MED_ENABLE_RENUMBER)
   #FIND_PACKAGE(SalomeBoost)
   #SALOME_LOG_OPTIONAL_PACKAGE(Boost SALOME_MED_ENABLE_RENUMBER)
   IF(DEFINED ENV{BOOST_ROOT_DIR})
-    SET(CMAKE_PREFIX_PATH "$ENV{BOOST_ROOT_DIR}")
+    SET(BOOST_ROOT_DIR $ENV{BOOST_ROOT_DIR} CACHE PATH "Path to the Boost.")
+    LIST(APPEND CMAKE_PREFIX_PATH "${BOOST_ROOT_DIR}")
   ENDIF()
   SET(Boost_USE_STATIC_LIBS        OFF)
   SET(Boost_USE_MULTITHREADED      ON)
@@ -152,7 +153,8 @@ IF(SALOME_BUILD_DOC)
   #SALOME_LOG_OPTIONAL_PACKAGE(Sphinx SALOME_BUILD_DOC)
   FIND_PACKAGE(Doxygen)
   IF(DEFINED ENV{GRAPHVIZ_ROOT_DIR})
-    SET(CMAKE_PREFIX_PATH "$ENV{GRAPHVIZ_ROOT_DIR}")
+    SET(GRAPHVIZ_ROOT_DIR $ENV{GRAPHVIZ_ROOT_DIR} CACHE PATH "Path to the Graphviz.")
+    LIST(APPEND CMAKE_PREFIX_PATH "${GRAPHVIZ_ROOT_DIR}")
   ENDIF()
   FIND_PACKAGE(Graphviz)
   FIND_PACKAGE(Sphinx)
@@ -245,6 +247,7 @@ IF(WIN32)
   ADD_DEFINITIONS("-D_USE_MATH_DEFINES")
 ENDIF(WIN32)
 
+#ADD_DEFINITIONS("-DMEDTOOL_ROOT_DIR=${CMAKE_INSTALL_PREFIX}")
 ADD_SUBDIRECTORY(src)
 #ADD_SUBDIRECTORY(adm_local)
 
index 82b8eee53d6207214a8cecd224e7e34a10821663..546969240cfa63f27cfb3d682d9fa4d6e1e4ca4d 100644 (file)
@@ -50,5 +50,10 @@ ELSE(MEDFILE_F_LIBRARIES)
     SET(MEDFILE_LIBRARIES ${MEDFILE_C_LIBRARIES})
 ENDIF(MEDFILE_F_LIBRARIES)
 
+IF(NOT MEDFILE_INCLUDE_DIRS
+    OR (NOT MEDFILE_C_LIBRARIES AND NOT MEDFILE_F_LIBRARIES))
+  MESSAGE(FATAL_ERROR "MEDFile not found; please set MEDFILE_ROOT_DIR and check target directory.")
+ENDIF()
+
 INCLUDE(FindPackageHandleStandardArgs)
 FIND_PACKAGE_HANDLE_STANDARD_ARGS(MEDFile REQUIRED_VARS MEDFILE_INCLUDE_DIRS MEDFILE_LIBRARIES)
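
The guard added to FindMEDFile.cmake (and the analogous ones in FindMetis.cmake, FindParMetis.cmake and FindScotch.cmake below) turns a missing prerequisite into an immediate FATAL_ERROR that names the *_ROOT_DIR hint to set. A typical configure invocation that satisfies these checks might look as follows; the paths are placeholders and the variable names are taken only from the hints referenced in this commit:

    cmake -DMEDFILE_ROOT_DIR=/path/to/med-file \
          -DMETIS_ROOT_DIR=/path/to/metis \
          -DSCOTCH_ROOT_DIR=/path/to/scotch \
          -DSALOME_USE_MPI=ON ..

(The new ParMetis module shown below reads its hint from the PARMETIS_ROOT_DIR environment variable or the cache.)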
index 5173cba6d5d3f76606033fa89799544faa63ce91..c4f6273620a99e38c8e9860be7626694c3816487 100644 (file)
@@ -32,6 +32,10 @@ ENDIF(METIS_ROOT_DIR)
 FIND_LIBRARY(METIS_LIBRARIES metis)
 FIND_PATH(METIS_INCLUDE_DIRS metis.h)
 
+IF(NOT METIS_LIBRARIES OR NOT METIS_INCLUDE_DIRS)
+  MESSAGE(FATAL_ERROR "Metis not found; please set METIS_ROOT_DIR and check target directory.")
+ENDIF()
+
 INCLUDE(FindPackageHandleStandardArgs)
 FIND_PACKAGE_HANDLE_STANDARD_ARGS(Metis REQUIRED_VARS METIS_INCLUDE_DIRS METIS_LIBRARIES)
 FILE(READ ${METIS_INCLUDE_DIRS}/metis.h metis_h_content)
diff --git a/cmake_files/FindParMetis.cmake b/cmake_files/FindParMetis.cmake
new file mode 100644 (file)
index 0000000..7eb0701
--- /dev/null
@@ -0,0 +1,39 @@
+# Copyright (C) 2007-2015  CEA/DEN, EDF R&D, OPEN CASCADE
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+# ------
+
+MESSAGE(STATUS "Check for parmetis ...")
+
+SET(PARMETIS_ROOT_DIR $ENV{PARMETIS_ROOT_DIR} CACHE PATH "Path to the PARMETIS.")
+IF(PARMETIS_ROOT_DIR)
+  LIST(APPEND CMAKE_PREFIX_PATH "${PARMETIS_ROOT_DIR}")
+ENDIF(PARMETIS_ROOT_DIR)
+
+FIND_LIBRARY(PARMETIS_LIBRARIES parmetis)
+FIND_LIBRARY(PARMETIS_SEQ_LIBRARIES metis)
+SET(PARMETIS_LIBRARIES ${PARMETIS_LIBRARIES} ${PARMETIS_SEQ_LIBRARIES})
+FIND_PATH(PARMETIS_INCLUDE_DIRS parmetis.h)
+
+IF(NOT PARMETIS_LIBRARIES OR NOT PARMETIS_INCLUDE_DIRS)
+  MESSAGE(FATAL_ERROR "Parallel Metis not found; please set PARMETIS_ROOT_DIR and check target directory.")
+ENDIF()
+
+INCLUDE(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(ParMetis REQUIRED_VARS PARMETIS_INCLUDE_DIRS PARMETIS_LIBRARIES)
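
The new module takes its search hint from PARMETIS_ROOT_DIR (environment or cache), locates both parmetis and the sequential metis library it depends on, and exports PARMETIS_INCLUDE_DIRS and PARMETIS_LIBRARIES. A minimal sketch of how a consumer inside the tree might use it, assuming cmake_files/ is already on CMAKE_MODULE_PATH (the target name medpartitioner_para is hypothetical):

    FIND_PACKAGE(ParMetis)                        # dispatches to cmake_files/FindParMetis.cmake
    INCLUDE_DIRECTORIES(${PARMETIS_INCLUDE_DIRS})
    TARGET_LINK_LIBRARIES(medpartitioner_para ${PARMETIS_LIBRARIES} ${MPI_LIBRARIES})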
index b63d30dcb12f05920c54521f9afe4e7c78527ed8..351c3e92e5586ea5455adbc2e787761e3a471fa7 100644 (file)
@@ -31,5 +31,9 @@ FIND_LIBRARY(SCOTCH_ERR_LIBRARIES scotcherr)
 SET(SCOTCH_LIBRARIES ${SCOTCH_LIBRARIES} ${SCOTCH_ERR_LIBRARIES})
 FIND_PATH(SCOTCH_INCLUDE_DIRS scotch.h PATH_SUFFIXES "/scotch")
 
+IF(NOT SCOTCH_LIBRARIES OR NOT SCOTCH_ERR_LIBRARIES OR NOT SCOTCH_INCLUDE_DIRS)
+  MESSAGE(FATAL_ERROR "Scotch not found; please set SCOTCH_ROOT_DIR and check target directory.")
+ENDIF()
+
 INCLUDE(FindPackageHandleStandardArgs)
 FIND_PACKAGE_HANDLE_STANDARD_ARGS(Scotch REQUIRED_VARS SCOTCH_INCLUDE_DIRS SCOTCH_LIBRARIES)
diff --git a/resources/BDC-714.sauv b/resources/BDC-714.sauv
new file mode 100644 (file)
index 0000000..fe80630
Binary files /dev/null and b/resources/BDC-714.sauv differ
diff --git a/resources/Box1.med b/resources/Box1.med
new file mode 100644 (file)
index 0000000..d168775
Binary files /dev/null and b/resources/Box1.med differ
diff --git a/resources/Box1Moderate.med b/resources/Box1Moderate.med
new file mode 100644 (file)
index 0000000..28d53d5
Binary files /dev/null and b/resources/Box1Moderate.med differ
diff --git a/resources/Box2.med b/resources/Box2.med
new file mode 100644 (file)
index 0000000..a7c9e04
Binary files /dev/null and b/resources/Box2.med differ
diff --git a/resources/Box2Moderate.med b/resources/Box2Moderate.med
new file mode 100644 (file)
index 0000000..c68570b
Binary files /dev/null and b/resources/Box2Moderate.med differ
diff --git a/resources/Box3.med b/resources/Box3.med
new file mode 100644 (file)
index 0000000..d9158ff
Binary files /dev/null and b/resources/Box3.med differ
diff --git a/resources/BoxEvenSmaller1.med b/resources/BoxEvenSmaller1.med
new file mode 100644 (file)
index 0000000..79a495a
Binary files /dev/null and b/resources/BoxEvenSmaller1.med differ
diff --git a/resources/BoxHexa1.med b/resources/BoxHexa1.med
new file mode 100644 (file)
index 0000000..da4ba0e
Binary files /dev/null and b/resources/BoxHexa1.med differ
diff --git a/resources/BoxHexa2.med b/resources/BoxHexa2.med
new file mode 100644 (file)
index 0000000..f76852e
Binary files /dev/null and b/resources/BoxHexa2.med differ
diff --git a/resources/BoxModSmall1.med b/resources/BoxModSmall1.med
new file mode 100644 (file)
index 0000000..7b0ebc7
Binary files /dev/null and b/resources/BoxModSmall1.med differ
diff --git a/resources/BoxModSmall2.med b/resources/BoxModSmall2.med
new file mode 100644 (file)
index 0000000..4d2e505
Binary files /dev/null and b/resources/BoxModSmall2.med differ
diff --git a/resources/BoxTetra2.med b/resources/BoxTetra2.med
new file mode 100644 (file)
index 0000000..3d21669
Binary files /dev/null and b/resources/BoxTetra2.med differ
index 6d185dc176c484e556e07de44f07b6ee78f00c6a..e891af65c585b2ba52de4fc7bb699a2af8d01c1a 100644 (file)
@@ -134,51 +134,51 @@ SET(MED_RESOURCES_FILES
   # TimeStamps.med
   # zzzz121b.med
   # zzzz121b_without_tr6.med
-  UnitTetra.med
-  GeneralTetra.med
-  NudgedSimpler.med
-  NudgedTetra.med
-  CornerTetra.med
-  SimpleIncludedTetra.med
-  SimpleIncludingTetra.med
+  UnitTetra.med
+  GeneralTetra.med
+  NudgedSimpler.med
+  NudgedTetra.med
+  CornerTetra.med
+  SimpleIncludedTetra.med
+  SimpleIncludingTetra.med
   Test2D.med
   Test2Dpoly.med
   Test3D.med
   Test3Dpoly.med
-  #UnitTetraDegenT.med
-  DegenEdgeXY.med
-  DegenFaceXYZ.med
-  DegenTranslatedInPlane.med
-  ComplexIncludedTetra.med
-  ComplexIncludingTetra.med
-  HalfstripOnly.med
-  HalfstripOnly2.med
-  #SimpleHalfstripOnly.med
-  #GenTetra1.med
-  #GenTetra2.med
-  #TrickyTetra1.med
+  UnitTetraDegenT.med
+  DegenEdgeXY.med
+  DegenFaceXYZ.med
+  DegenTranslatedInPlane.med
+  ComplexIncludedTetra.med
+  ComplexIncludingTetra.med
+  HalfstripOnly.med
+  HalfstripOnly2.med
+  SimpleHalfstripOnly.med
+  GenTetra1.med
+  GenTetra2.med
+  TrickyTetra1.med
   LargeUnitTetra.med
   # LargeInconsistentTetra.med
-  DividedUnitTetraSimpler.med
-  DividedUnitTetra.med
-  NudgedDividedUnitTetra.med
-  NudgedDividedUnitTetraSimpler.med
-  DividedGenTetra1.med
-  DividedGenTetra2.med
-  Box1.med
-  Box2.med
-  Box3.med
-  Box1Moderate.med
-  Box2Moderate.med
-  BoxModSmall1.med
-  BoxModSmall2.med
-  BoxEvenSmaller1.med
-  TinyBox.med
-  BoxHexa1.med
-  BoxHexa2.med
-  MovedHexaBox1.med
-  MovedHexaBox2.med
-  BoxTetra2.med
+  DividedUnitTetraSimpler.med
+  DividedUnitTetra.med
+  NudgedDividedUnitTetra.med
+  NudgedDividedUnitTetraSimpler.med
+  DividedGenTetra1.med
+  DividedGenTetra2.med
+  Box1.med
+  Box2.med
+  Box3.med
+  Box1Moderate.med
+  Box2Moderate.med
+  BoxModSmall1.med
+  BoxModSmall2.med
+  BoxEvenSmaller1.med
+  TinyBox.med
+  BoxHexa1.med
+  BoxHexa2.med
+  MovedHexaBox1.med
+  MovedHexaBox2.med
+  BoxTetra2.med
   square1.med
   # square1_split
   # square1_split1.med
@@ -203,9 +203,9 @@ SET(MED_RESOURCES_FILES
   # blow5_ascii_pd_displacement
   # blow5_ascii_pd_thickness
   #test_2D.sauve
-  #allPillesTest.sauv
-  #BDC-714.sauv
-  #portico_3subs.sauv
+  allPillesTest.sauv
+  BDC-714.sauv
+  portico_3subs.sauv
   agitateur.med
   )
 
@@ -225,6 +225,10 @@ SET(MED_RESOURCES_FILES
 #     )
 # ENDIF(MED_ENABLE_GUI)
 
+FOREACH(resfile ${MED_RESOURCES_FILES})
+  CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/${resfile}" "${CMAKE_CURRENT_BINARY_DIR}/${resfile}" COPYONLY)
+ENDFOREACH(resfile)
+
 INSTALL(FILES ${MED_RESOURCES_FILES} DESTINATION ${MEDTOOL_INSTALL_RES_DATA})
 
 #MESSAGE(STATUS "Creation of ${CMAKE_CURRENT_BINARY_DIR}/MEDCatalog.xml")
diff --git a/resources/ComplexIncludedTetra.med b/resources/ComplexIncludedTetra.med
new file mode 100644 (file)
index 0000000..09a5927
Binary files /dev/null and b/resources/ComplexIncludedTetra.med differ
diff --git a/resources/ComplexIncludingTetra.med b/resources/ComplexIncludingTetra.med
new file mode 100644 (file)
index 0000000..e6aedf5
Binary files /dev/null and b/resources/ComplexIncludingTetra.med differ
diff --git a/resources/CornerTetra.med b/resources/CornerTetra.med
new file mode 100644 (file)
index 0000000..f006c17
Binary files /dev/null and b/resources/CornerTetra.med differ
diff --git a/resources/DegenEdgeXY.med b/resources/DegenEdgeXY.med
new file mode 100644 (file)
index 0000000..bd42d14
Binary files /dev/null and b/resources/DegenEdgeXY.med differ
diff --git a/resources/DegenFaceXYZ.med b/resources/DegenFaceXYZ.med
new file mode 100644 (file)
index 0000000..f0ecdd6
Binary files /dev/null and b/resources/DegenFaceXYZ.med differ
diff --git a/resources/DegenTranslatedInPlane.med b/resources/DegenTranslatedInPlane.med
new file mode 100644 (file)
index 0000000..d83f96c
Binary files /dev/null and b/resources/DegenTranslatedInPlane.med differ
diff --git a/resources/DividedGenTetra1.med b/resources/DividedGenTetra1.med
new file mode 100644 (file)
index 0000000..71274ea
Binary files /dev/null and b/resources/DividedGenTetra1.med differ
diff --git a/resources/DividedGenTetra2.med b/resources/DividedGenTetra2.med
new file mode 100644 (file)
index 0000000..14f63cd
Binary files /dev/null and b/resources/DividedGenTetra2.med differ
diff --git a/resources/DividedUnitTetra.med b/resources/DividedUnitTetra.med
new file mode 100644 (file)
index 0000000..320bdfa
Binary files /dev/null and b/resources/DividedUnitTetra.med differ
diff --git a/resources/DividedUnitTetraSimpler.med b/resources/DividedUnitTetraSimpler.med
new file mode 100644 (file)
index 0000000..a826cfb
Binary files /dev/null and b/resources/DividedUnitTetraSimpler.med differ
diff --git a/resources/GenTetra1.med b/resources/GenTetra1.med
new file mode 100644 (file)
index 0000000..629b299
Binary files /dev/null and b/resources/GenTetra1.med differ
diff --git a/resources/GenTetra2.med b/resources/GenTetra2.med
new file mode 100644 (file)
index 0000000..32e3bb5
Binary files /dev/null and b/resources/GenTetra2.med differ
diff --git a/resources/GeneralTetra.med b/resources/GeneralTetra.med
new file mode 100644 (file)
index 0000000..11b2e27
Binary files /dev/null and b/resources/GeneralTetra.med differ
diff --git a/resources/HalfstripOnly.med b/resources/HalfstripOnly.med
new file mode 100644 (file)
index 0000000..11dee64
Binary files /dev/null and b/resources/HalfstripOnly.med differ
diff --git a/resources/HalfstripOnly2.med b/resources/HalfstripOnly2.med
new file mode 100644 (file)
index 0000000..67bca23
Binary files /dev/null and b/resources/HalfstripOnly2.med differ
diff --git a/resources/MovedHexaBox1.med b/resources/MovedHexaBox1.med
new file mode 100644 (file)
index 0000000..6d826ae
Binary files /dev/null and b/resources/MovedHexaBox1.med differ
diff --git a/resources/MovedHexaBox2.med b/resources/MovedHexaBox2.med
new file mode 100644 (file)
index 0000000..c0bdb6d
Binary files /dev/null and b/resources/MovedHexaBox2.med differ
diff --git a/resources/NudgedDividedUnitTetra.med b/resources/NudgedDividedUnitTetra.med
new file mode 100644 (file)
index 0000000..67aabde
Binary files /dev/null and b/resources/NudgedDividedUnitTetra.med differ
diff --git a/resources/NudgedDividedUnitTetraSimpler.med b/resources/NudgedDividedUnitTetraSimpler.med
new file mode 100644 (file)
index 0000000..2eb145e
Binary files /dev/null and b/resources/NudgedDividedUnitTetraSimpler.med differ
diff --git a/resources/NudgedSimpler.med b/resources/NudgedSimpler.med
new file mode 100644 (file)
index 0000000..a0fb4ea
Binary files /dev/null and b/resources/NudgedSimpler.med differ
diff --git a/resources/NudgedTetra.med b/resources/NudgedTetra.med
new file mode 100644 (file)
index 0000000..0672d1a
Binary files /dev/null and b/resources/NudgedTetra.med differ
diff --git a/resources/SimpleHalfstripOnly.med b/resources/SimpleHalfstripOnly.med
new file mode 100644 (file)
index 0000000..f15aedb
Binary files /dev/null and b/resources/SimpleHalfstripOnly.med differ
diff --git a/resources/SimpleIncludedTetra.med b/resources/SimpleIncludedTetra.med
new file mode 100644 (file)
index 0000000..9ac90ee
Binary files /dev/null and b/resources/SimpleIncludedTetra.med differ
diff --git a/resources/SimpleIncludingTetra.med b/resources/SimpleIncludingTetra.med
new file mode 100644 (file)
index 0000000..e0d6066
Binary files /dev/null and b/resources/SimpleIncludingTetra.med differ
diff --git a/resources/TinyBox.med b/resources/TinyBox.med
new file mode 100644 (file)
index 0000000..2ae80ef
Binary files /dev/null and b/resources/TinyBox.med differ
diff --git a/resources/TrickyTetra1.med b/resources/TrickyTetra1.med
new file mode 100644 (file)
index 0000000..50c0aa9
Binary files /dev/null and b/resources/TrickyTetra1.med differ
diff --git a/resources/UnitTetra.med b/resources/UnitTetra.med
new file mode 100644 (file)
index 0000000..d4548f2
Binary files /dev/null and b/resources/UnitTetra.med differ
diff --git a/resources/UnitTetraDegenT.med b/resources/UnitTetraDegenT.med
new file mode 100644 (file)
index 0000000..5a08006
Binary files /dev/null and b/resources/UnitTetraDegenT.med differ
diff --git a/resources/allPillesTest.sauv b/resources/allPillesTest.sauv
new file mode 100644 (file)
index 0000000..99efda1
--- /dev/null
@@ -0,0 +1,11246 @@
+ ENREGISTREMENT DE TYPE   4
+ NIVEAU  16 NIVEAU ERREUR   0 DIMENSION   2
+ DENSITE 0.00000E+00
+ ENREGISTREMENT DE TYPE   7
+ NOMBRE INFO CASTEM2000   8
+ IFOUR  -1 NIFOUR   0 IFOMOD  -1 IECHO   0 IIMPI   0 IOSPI   0 ISOTYP   1
+ NSDPGE     0
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO   1NBRE OBJETS NOMMES      17NBRE OBJETS      43
+ BOTTOM   C_3D     C_5F     C_6      C_65     C_6F     C_75     FAM_LEFT
+ FAM_STOT FAM_TOP  GE_1     GE_3     LEFT     RIGHT    TOP      C_5     
+ C_9     
 [... remainder of the 11,246-line allPillesTest.sauv addition omitted: Castem SAUV text records (object name tables, node and element connectivity listings); the dump is truncated in this capture ...]
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+      12     213      13     214      14     215      11     216     217      14
+     214      13     218      15     219      16     220     221      16     219
+      15     222      17     223      18     224     225      18     223      17
+     226      19     227      20     228     229      20     227      19     230
+      21     231      22     232     233      22     231      21     234      23
+     235      24     236     237      24     235      23     238      25     239
+      26     240     241      26     239      25     242      27     243      28
+     244     245      28     243      27     246      29     247      30     248
+     249      30     247      29     250      31     251      32     252     253
+      32     251      31     254      33     255      34     256     257      34
+     255      33     258      35     259      36     260     261      36     259
+      35     262      37     263      38     264     265      38     263      37
+     266      39     267      40     268     269      40     267      39     270
+      41     271      42     272     273      42     271      41     274      43
+     275      44     276     277      44     275      43     278      45     279
+      46     280     281      46     279      45     282      47     283      48
+     284     285      48     283      47     286      49     287      50     288
+     289      50     287      49     290      51     291      52     292     293
+      52     291      51     294      53     295      54     296     297      54
+     295      53     298      55     299      56     300     301      56     299
+      55     302      57     303      58     304     305      58     303      57
+     306      59     307      60     308     309      60     307      59     310
+      61     311      62     312     313      62     311      61     314      63
+     315      64     316     317      64     315      63     318      65     319
+      66     320     321      66     319      65     322      67     323      68
+     324     325      68     323      67     326      69     327      70     328
+     329      70     327      69     330      71     331      72     332     333
+      72     331      71     334      73     335      74     336     337      74
+     335      73     338      75     339      76     340     341      76     339
+      75     342      77     343      78     344     345      78     343      77
+     346      79     347      80     348     349      79     350      81     351
+      82     352      80     347     353      81     354      83     355      84
+     356      82     351     357      83     358      85     359      86     360
+      84     355     361      85     362      87     363      88     364      86
+     359     365      87     366      89     367      90     368      88     363
+     369      89     370      91     371      92     372      90     367     373
+      91     374      93     375      94     376      92     371     377      93
+     378      95     379      96     380      94     375     381      95     382
+      97     383      98     384      96     379     385      97     386      99
+     387     100     388      98     383     389      99     390     101     391
+     102     392     100     387     393     101     394     103     395     104
+     396     102     391     397     103     398     105     399     106     400
+     104     395     401     105     402     107     403     108     404     106
+     399     405     107     406     109     407     110     408     108     403
+     409     109     410     111     411     112     412     110     407     413
+     111     414     113     415     114     416     112     411     417     113
+     418     115     419     116     420     114     415     421     115     422
+     117     423     118     424     116     419     425     117     426     119
+     427     120     428     118     423     429     119     430     121     431
+     122     432     120     427     433     121     434     123     435     124
+     436     122     431     437     123     438     125     439     126     440
+     124     435     441     125     442     127     443     128     444     126
+     439     445     127     446     129     447     130     448     128     443
+     449     129     450     131     451     132     452     130     447     453
+     131     454     133     455     134     456     132     451     457     133
+     458     135     459     136     460     134     455     461     135     462
+     137     463     138     464     136     459     465     137     466     139
+     467     140     468     138     463     469     139     470     141     471
+     142     472     140     467     473     141     474     143     475     144
+     476     142     471     477     143     478     145     479     146     480
+     144     475     481     145     482     147     483     148     484     146
+     479     485     147     486     149     487     150     488     148     483
+     489     149     490     151     491     152     492     150     487     493
+     151     494     153     495     154     496     152     491     497     153
+     498     155     499     156     500     154     495     501     155     502
+     157     503     158     504     156     499     505     157     506     159
+     507     160     508     158     503     509     159     510     161     511
+     162     512     160     507     513     161     514     163     515     164
+     516     162     511     517     163     518     165     519     166     520
+     164     515     521     165     522     167     523     168     524     166
+     519     525     167     526     169     527     170     528     168     523
+     529     169     530     171     531     172     532     170     527     533
+     171     534     173     535     174     536     172     531     537     173
+     538     175     539     176     540     174     535     541     175     542
+     177     543     178     544     176     539     545     177     546     179
+     547     180     548     178     543     549     179     550     181     551
+     182     552     180     547     553     181     554     183     555     184
+     556     182     551     557     183     558     185     559     186     560
+     184     555     561     185     562     187     563     188     564     186
+     559     565     187     566     189     567     190     568     188     563
+     569     189     570     191     571     192     572     190     567     573
+     191     574     193     575     194     576     192     571     577     193
+     578     195     579     196     580     194     575     581     195     582
+     197     583     198     584     196     579     585     197     586     199
+     587     200     588     198     583     589     199     590     201     591
+     202     592     200     587     593     201     594     203     595     204
+     596     202     591     597     203     598     205     599     206     600
+     204     595     601     205     602     207     603     208     604     206
+     599     605     207     606     209     607     210     608     208     603
+     609     209     610     211     611     212     612     210     607     613
+      11       0       0       9     100
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+      12     213      13     214      14     215      11     216     217      14
+     214      13     218      15     219      16     220     221      16     219
+      15     222      17     223      18     224     225      18     223      17
+     226      19     227      20     228     229      20     227      19     230
+      21     231      22     232     233      22     231      21     234      23
+     235      24     236     237      24     235      23     238      25     239
+      26     240     241      26     239      25     242      27     243      28
+     244     245      28     243      27     246      29     247      30     248
+     249      30     247      29     250      31     251      32     252     253
+      32     251      31     254      33     255      34     256     257      34
+     255      33     258      35     259      36     260     261      36     259
+      35     262      37     263      38     264     265      38     263      37
+     266      39     267      40     268     269      40     267      39     270
+      41     271      42     272     273      42     271      41     274      43
+     275      44     276     277      44     275      43     278      45     279
+      46     280     281      46     279      45     282      47     283      48
+     284     285      48     283      47     286      49     287      50     288
+     289      50     287      49     290      51     291      52     292     293
+      52     291      51     294      53     295      54     296     297      54
+     295      53     298      55     299      56     300     301      56     299
+      55     302      57     303      58     304     305      58     303      57
+     306      59     307      60     308     309      60     307      59     310
+      61     311      62     312     313      62     311      61     314      63
+     315      64     316     317      64     315      63     318      65     319
+      66     320     321      66     319      65     322      67     323      68
+     324     325      68     323      67     326      69     327      70     328
+     329      70     327      69     330      71     331      72     332     333
+      72     331      71     334      73     335      74     336     337      74
+     335      73     338      75     339      76     340     341      76     339
+      75     342      77     343      78     344     345      78     343      77
+     346      79     347      80     348     349      79     350      81     351
+      82     352      80     347     353      81     354      83     355      84
+     356      82     351     357      83     358      85     359      86     360
+      84     355     361      85     362      87     363      88     364      86
+     359     365      87     366      89     367      90     368      88     363
+     369      89     370      91     371      92     372      90     367     373
+      91     374      93     375      94     376      92     371     377      93
+     378      95     379      96     380      94     375     381      95     382
+      97     383      98     384      96     379     385      97     386      99
+     387     100     388      98     383     389      99     390     101     391
+     102     392     100     387     393     101     394     103     395     104
+     396     102     391     397     103     398     105     399     106     400
+     104     395     401     105     402     107     403     108     404     106
+     399     405     107     406     109     407     110     408     108     403
+     409     109     410     111     411     112     412     110     407     413
+     111     414     113     415     114     416     112     411     417     113
+     418     115     419     116     420     114     415     421     115     422
+     117     423     118     424     116     419     425     117     426     119
+     427     120     428     118     423     429     119     430     121     431
+     122     432     120     427     433     121     434     123     435     124
+     436     122     431     437     123     438     125     439     126     440
+     124     435     441     125     442     127     443     128     444     126
+     439     445     127     446     129     447     130     448     128     443
+     449     129     450     131     451     132     452     130     447     453
+     131     454     133     455     134     456     132     451     457     133
+     458     135     459     136     460     134     455     461     135     462
+     137     463     138     464     136     459     465     137     466     139
+     467     140     468     138     463     469     139     470     141     471
+     142     472     140     467     473     141     474     143     475     144
+     476     142     471     477     143     478     145     479     146     480
+     144     475     481     145     482     147     483     148     484     146
+     479     485     147     486     149     487     150     488     148     483
+     489     149     490     151     491     152     492     150     487     493
+     151     494     153     495     154     496     152     491     497     153
+     498     155     499     156     500     154     495     501     155     502
+     157     503     158     504     156     499     505     157     506     159
+     507     160     508     158     503     509     159     510     161     511
+     162     512     160     507     513     161     514     163     515     164
+     516     162     511     517     163     518     165     519     166     520
+     164     515     521     165     522     167     523     168     524     166
+     519     525     167     526     169     527     170     528     168     523
+     529     169     530     171     531     172     532     170     527     533
+     171     534     173     535     174     536     172     531     537     173
+     538     175     539     176     540     174     535     541     175     542
+     177     543     178     544     176     539     545     177     546     179
+     547     180     548     178     543     549     179     550     181     551
+     182     552     180     547     553     181     554     183     555     184
+     556     182     551     557     183     558     185     559     186     560
+     184     555     561     185     562     187     563     188     564     186
+     559     565     187     566     189     567     190     568     188     563
+     569     189     570     191     571     192     572     190     567     573
+     191     574     193     575     194     576     192     571     577     193
+     578     195     579     196     580     194     575     581     195     582
+     197     583     198     584     196     579     585     197     586     199
+     587     200     588     198     583     589     199     590     201     591
+     202     592     200     587     593     201     594     203     595     204
+     596     202     591     597     203     598     205     599     206     600
+     204     595     601     205     602     207     603     208     604     206
+     599     605     207     606     209     607     210     608     208     603
+     609     209     610     211     611     212     612     210     607     613
+       8       0       0       4     100
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+     213     214     215     216     214     218     219     220     219     222
+     223     224     223     226     227     228     227     230     231     232
+     231     234     235     236     235     238     239     240     239     242
+     243     244     243     246     247     248     247     250     251     252
+     251     254     255     256     255     258     259     260     259     262
+     263     264     263     266     267     268     267     270     271     272
+     271     274     275     276     275     278     279     280     279     282
+     283     284     283     286     287     288     287     290     291     292
+     291     294     295     296     295     298     299     300     299     302
+     303     304     303     306     307     308     307     310     311     312
+     311     314     315     316     315     318     319     320     319     322
+     323     324     323     326     327     328     327     330     331     332
+     331     334     335     336     335     338     339     340     339     342
+     343     344     343     346     347     348     350     351     352     347
+     354     355     356     351     358     359     360     355     362     363
+     364     359     366     367     368     363     370     371     372     367
+     374     375     376     371     378     379     380     375     382     383
+     384     379     386     387     388     383     390     391     392     387
+     394     395     396     391     398     399     400     395     402     403
+     404     399     406     407     408     403     410     411     412     407
+     414     415     416     411     418     419     420     415     422     423
+     424     419     426     427     428     423     430     431     432     427
+     434     435     436     431     438     439     440     435     442     443
+     444     439     446     447     448     443     450     451     452     447
+     454     455     456     451     458     459     460     455     462     463
+     464     459     466     467     468     463     470     471     472     467
+     474     475     476     471     478     479     480     475     482     483
+     484     479     486     487     488     483     490     491     492     487
+     494     495     496     491     498     499     500     495     502     503
+     504     499     506     507     508     503     510     511     512     507
+     514     515     516     511     518     519     520     515     522     523
+     524     519     526     527     528     523     530     531     532     527
+     534     535     536     531     538     539     540     535     542     543
+     544     539     546     547     548     543     550     551     552     547
+     554     555     556     551     558     559     560     555     562     563
+     564     559     566     567     568     563     570     571     572     567
+     574     575     576     571     578     579     580     575     582     583
+     584     579     586     587     588     583     590     591     592     587
+     594     595     596     591     598     599     600     595     602     603
+     604     599     606     607     608     603     610     611     612     607
+       8       0       0       4     100
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+       1       1       1       1       1       1       1       1       1       1
+     213     214     215     216     214     218     219     220     219     222
+     223     224     223     226     227     228     227     230     231     232
+     231     234     235     236     235     238     239     240     239     242
+     243     244     243     246     247     248     247     250     251     252
+     251     254     255     256     255     258     259     260     259     262
+     263     264     263     266     267     268     267     270     271     272
+     271     274     275     276     275     278     279     280     279     282
+     283     284     283     286     287     288     287     290     291     292
+     291     294     295     296     295     298     299     300     299     302
+     303     304     303     306     307     308     307     310     311     312
+     311     314     315     316     315     318     319     320     319     322
+     323     324     323     326     327     328     327     330     331     332
+     331     334     335     336     335     338     339     340     339     342
+     343     344     343     346     347     348     350     351     352     347
+     354     355     356     351     358     359     360     355     362     363
+     364     359     366     367     368     363     370     371     372     367
+     374     375     376     371     378     379     380     375     382     383
+     384     379     386     387     388     383     390     391     392     387
+     394     395     396     391     398     399     400     395     402     403
+     404     399     406     407     408     403     410     411     412     407
+     414     415     416     411     418     419     420     415     422     423
+     424     419     426     427     428     423     430     431     432     427
+     434     435     436     431     438     439     440     435     442     443
+     444     439     446     447     448     443     450     451     452     447
+     454     455     456     451     458     459     460     455     462     463
+     464     459     466     467     468     463     470     471     472     467
+     474     475     476     471     478     479     480     475     482     483
+     484     479     486     487     488     483     490     491     492     487
+     494     495     496     491     498     499     500     495     502     503
+     504     499     506     507     508     503     510     511     512     507
+     514     515     516     511     518     519     520     515     522     523
+     524     519     526     527     528     523     530     531     532     527
+     534     535     536     531     538     539     540     535     542     543
+     544     539     546     547     548     543     550     551     552     547
+     554     555     556     551     558     559     560     555     562     563
+     564     559     566     567     568     563     570     571     572     567
+     574     575     576     571     578     579     580     575     582     583
+     584     579     586     587     588     583     590     591     592     587
+     594     595     596     591     598     599     600     595     602     603
+     604     599     606     607     608     603     610     611     612     607
+       8       0       0       4     100
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+      12      13      14      11      14      13      15      16      16      15
+      17      18      18      17      19      20      20      19      21      22
+      22      21      23      24      24      23      25      26      26      25
+      27      28      28      27      29      30      30      29      31      32
+      32      31      33      34      34      33      35      36      36      35
+      37      38      38      37      39      40      40      39      41      42
+      42      41      43      44      44      43      45      46      46      45
+      47      48      48      47      49      50      50      49      51      52
+      52      51      53      54      54      53      55      56      56      55
+      57      58      58      57      59      60      60      59      61      62
+      62      61      63      64      64      63      65      66      66      65
+      67      68      68      67      69      70      70      69      71      72
+      72      71      73      74      74      73      75      76      76      75
+      77      78      78      77      79      80      79      81      82      80
+      81      83      84      82      83      85      86      84      85      87
+      88      86      87      89      90      88      89      91      92      90
+      91      93      94      92      93      95      96      94      95      97
+      98      96      97      99     100      98      99     101     102     100
+     101     103     104     102     103     105     106     104     105     107
+     108     106     107     109     110     108     109     111     112     110
+     111     113     114     112     113     115     116     114     115     117
+     118     116     117     119     120     118     119     121     122     120
+     121     123     124     122     123     125     126     124     125     127
+     128     126     127     129     130     128     129     131     132     130
+     131     133     134     132     133     135     136     134     135     137
+     138     136     137     139     140     138     139     141     142     140
+     141     143     144     142     143     145     146     144     145     147
+     148     146     147     149     150     148     149     151     152     150
+     151     153     154     152     153     155     156     154     155     157
+     158     156     157     159     160     158     159     161     162     160
+     161     163     164     162     163     165     166     164     165     167
+     168     166     167     169     170     168     169     171     172     170
+     171     173     174     172     173     175     176     174     175     177
+     178     176     177     179     180     178     179     181     182     180
+     181     183     184     182     183     185     186     184     185     187
+     188     186     187     189     190     188     189     191     192     190
+     191     193     194     192     193     195     196     194     195     197
+     198     196     197     199     200     198     199     201     202     200
+     201     203     204     202     203     205     206     204     205     207
+     208     206     207     209     210     208     209     211     212     210
+       2       0       0       2     301
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0
+      12      13      13      14      14      11      11      12      13      15
+      15      16      16      14      15      17      17      18      18      16
+      17      19      19      20      20      18      19      21      21      22
+      22      20      21      23      23      24      24      22      23      25
+      25      26      26      24      25      27      27      28      28      26
+      27      29      29      30      30      28      29      31      31      32
+      32      30      31      33      33      34      34      32      33      35
+      35      36      36      34      35      37      37      38      38      36
+      37      39      39      40      40      38      39      41      41      42
+      42      40      41      43      43      44      44      42      43      45
+      45      46      46      44      45      47      47      48      48      46
+      47      49      49      50      50      48      49      51      51      52
+      52      50      51      53      53      54      54      52      53      55
+      55      56      56      54      55      57      57      58      58      56
+      57      59      59      60      60      58      59      61      61      62
+      62      60      61      63      63      64      64      62      63      65
+      65      66      66      64      65      67      67      68      68      66
+      67      69      69      70      70      68      69      71      71      72
+      72      70      71      73      73      74      74      72      73      75
+      75      76      76      74      75      77      77      78      78      76
+      77      79      79      80      80      78      79      81      81      82
+      82      80      81      83      83      84      84      82      83      85
+      85      86      86      84      85      87      87      88      88      86
+      87      89      89      90      90      88      89      91      91      92
+      92      90      91      93      93      94      94      92      93      95
+      95      96      96      94      95      97      97      98      98      96
+      97      99      99     100     100      98      99     101     101     102
+     102     100     101     103     103     104     104     102     103     105
+     105     106     106     104     105     107     107     108     108     106
+     107     109     109     110     110     108     109     111     111     112
+     112     110     111     113     113     114     114     112     113     115
+     115     116     116     114     115     117     117     118     118     116
+     117     119     119     120     120     118     119     121     121     122
+     122     120     121     123     123     124     124     122     123     125
+     125     126     126     124     125     127     127     128     128     126
+     127     129     129     130     130     128     129     131     131     132
+     132     130     131     133     133     134     134     132     133     135
+     135     136     136     134     135     137     137     138     138     136
+     137     139     139     140     140     138     139     141     141     142
+     142     140     141     143     143     144     144     142     143     145
+     145     146     146     144     145     147     147     148     148     146
+     147     149     149     150     150     148     149     151     151     152
+     152     150     151     153     153     154     154     152     153     155
+     155     156     156     154     155     157     157     158     158     156
+     157     159     159     160     160     158     159     161     161     162
+     162     160     161     163     163     164     164     162     163     165
+     165     166     166     164     165     167     167     168     168     166
+     167     169     169     170     170     168     169     171     171     172
+     172     170     171     173     173     174     174     172     173     175
+     175     176     176     174     175     177     177     178     178     176
+     177     179     179     180     180     178     179     181     181     182
+     182     180     181     183     183     184     184     182     183     185
+     185     186     186     184     185     187     187     188     188     186
+     187     189     189     190     190     188     189     191     191     192
+     192     190     191     193     193     194     194     192     193     195
+     195     196     196     194     195     197     197     198     198     196
+     197     199     199     200     200     198     199     201     201     202
+     202     200     201     203     203     204     204     202     203     205
+     205     206     206     204     205     207     207     208     208     206
+     207     209     209     210     210     208     209     211     211     212
+     212     210
+       1       0       0       1     100
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+     217     221     225     229     233     237     241     245     249     253
+     257     261     265     269     273     277     281     285     289     293
+     297     301     305     309     313     317     321     325     329     333
+     337     341     345     349     353     357     361     365     369     373
+     377     381     385     389     393     397     401     405     409     413
+     417     421     425     429     433     437     441     445     449     453
+     457     461     465     469     473     477     481     485     489     493
+     497     501     505     509     513     517     521     525     529     533
+     537     541     545     549     553     557     561     565     569     573
+     577     581     585     589     593     597     601     605     609     613
+       1       0       0       1     100
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+     217     221     225     229     233     237     241     245     249     253
+     257     261     265     269     273     277     281     285     289     293
+     297     301     305     309     313     317     321     325     329     333
+     337     341     345     349     353     357     361     365     369     373
+     377     381     385     389     393     397     401     405     409     413
+     417     421     425     429     433     437     441     445     449     453
+     457     461     465     469     473     477     481     485     489     493
+     497     501     505     509     513     517     521     525     529     533
+     537     541     545     549     553     557     561     565     569     573
+     577     581     585     589     593     597     601     605     609     613
+       1       0       0       1     100
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+       6       6       6       6       6       6       6       6       6       6
+     217     221     225     229     233     237     241     245     249     253
+     257     261     265     269     273     277     281     285     289     293
+     297     301     305     309     313     317     321     325     329     333
+     337     341     345     349     353     357     361     365     369     373
+     377     381     385     389     393     397     401     405     409     413
+     417     421     425     429     433     437     441     445     449     453
+     457     461     465     469     473     477     481     485     489     493
+     497     501     505     509     513     517     521     525     529     533
+     537     541     545     549     553     557     561     565     569     573
+     577     581     585     589     593     597     601     605     609     613
+       8       0       0       4     100
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+      12      13      14      11      14      13      15      16      16      15
+      17      18      18      17      19      20      20      19      21      22
+      22      21      23      24      24      23      25      26      26      25
+      27      28      28      27      29      30      30      29      31      32
+      32      31      33      34      34      33      35      36      36      35
+      37      38      38      37      39      40      40      39      41      42
+      42      41      43      44      44      43      45      46      46      45
+      47      48      48      47      49      50      50      49      51      52
+      52      51      53      54      54      53      55      56      56      55
+      57      58      58      57      59      60      60      59      61      62
+      62      61      63      64      64      63      65      66      66      65
+      67      68      68      67      69      70      70      69      71      72
+      72      71      73      74      74      73      75      76      76      75
+      77      78      78      77      79      80      79      81      82      80
+      81      83      84      82      83      85      86      84      85      87
+      88      86      87      89      90      88      89      91      92      90
+      91      93      94      92      93      95      96      94      95      97
+      98      96      97      99     100      98      99     101     102     100
+     101     103     104     102     103     105     106     104     105     107
+     108     106     107     109     110     108     109     111     112     110
+     111     113     114     112     113     115     116     114     115     117
+     118     116     117     119     120     118     119     121     122     120
+     121     123     124     122     123     125     126     124     125     127
+     128     126     127     129     130     128     129     131     132     130
+     131     133     134     132     133     135     136     134     135     137
+     138     136     137     139     140     138     139     141     142     140
+     141     143     144     142     143     145     146     144     145     147
+     148     146     147     149     150     148     149     151     152     150
+     151     153     154     152     153     155     156     154     155     157
+     158     156     157     159     160     158     159     161     162     160
+     161     163     164     162     163     165     166     164     165     167
+     168     166     167     169     170     168     169     171     172     170
+     171     173     174     172     173     175     176     174     175     177
+     178     176     177     179     180     178     179     181     182     180
+     181     183     184     182     183     185     186     184     185     187
+     188     186     187     189     190     188     189     191     192     190
+     191     193     194     192     193     195     196     194     195     197
+     198     196     197     199     200     198     199     201     202     200
+     201     203     204     202     203     205     206     204     205     207
+     208     206     207     209     210     208     209     211     212     210
+       1       0       0       1     301
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4
+     213     214     215     216     218     219     220     222     223     224
+     226     227     228     230     231     232     234     235     236     238
+     239     240     242     243     244     246     247     248     250     251
+     252     254     255     256     258     259     260     262     263     264
+     266     267     268     270     271     272     274     275     276     278
+     279     280     282     283     284     286     287     288     290     291
+     292     294     295     296     298     299     300     302     303     304
+     306     307     308     310     311     312     314     315     316     318
+     319     320     322     323     324     326     327     328     330     331
+     332     334     335     336     338     339     340     342     343     344
+     346     347     348     350     351     352     354     355     356     358
+     359     360     362     363     364     366     367     368     370     371
+     372     374     375     376     378     379     380     382     383     384
+     386     387     388     390     391     392     394     395     396     398
+     399     400     402     403     404     406     407     408     410     411
+     412     414     415     416     418     419     420     422     423     424
+     426     427     428     430     431     432     434     435     436     438
+     439     440     442     443     444     446     447     448     450     451
+     452     454     455     456     458     459     460     462     463     464
+     466     467     468     470     471     472     474     475     476     478
+     479     480     482     483     484     486     487     488     490     491
+     492     494     495     496     498     499     500     502     503     504
+     506     507     508     510     511     512     514     515     516     518
+     519     520     522     523     524     526     527     528     530     531
+     532     534     535     536     538     539     540     542     543     544
+     546     547     548     550     551     552     554     555     556     558
+     559     560     562     563     564     566     567     568     570     571
+     572     574     575     576     578     579     580     582     583     584
+     586     587     588     590     591     592     594     595     596     598
+     599     600     602     603     604     606     607     608     610     611
+     612
+       3       0       0       3     301
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2
+     217     213     217     217     214     221     217     215     217     217
+     216     217     221     218     221     221     219     225     221     220
+     221     225     222     225     225     223     229     225     224     225
+     229     226     229     229     227     233     229     228     229     233
+     230     233     233     231     237     233     232     233     237     234
+     237     237     235     241     237     236     237     241     238     241
+     241     239     245     241     240     241     245     242     245     245
+     243     249     245     244     245     249     246     249     249     247
+     253     249     248     249     253     250     253     253     251     257
+     253     252     253     257     254     257     257     255     261     257
+     256     257     261     258     261     261     259     265     261     260
+     261     265     262     265     265     263     269     265     264     265
+     269     266     269     269     267     273     269     268     269     273
+     270     273     273     271     277     273     272     273     277     274
+     277     277     275     281     277     276     277     281     278     281
+     281     279     285     281     280     281     285     282     285     285
+     283     289     285     284     285     289     286     289     289     287
+     293     289     288     289     293     290     293     293     291     297
+     293     292     293     297     294     297     297     295     301     297
+     296     297     301     298     301     301     299     305     301     300
+     301     305     302     305     305     303     309     305     304     305
+     309     306     309     309     307     313     309     308     309     313
+     310     313     313     311     317     313     312     313     317     314
+     317     317     315     321     317     316     317     321     318     321
+     321     319     325     321     320     321     325     322     325     325
+     323     329     325     324     325     329     326     329     329     327
+     333     329     328     329     333     330     333     333     331     337
+     333     332     333     337     334     337     337     335     341     337
+     336     337     341     338     341     341     339     345     341     340
+     341     345     342     345     345     343     349     345     344     345
+     349     346     349     349     347     353     349     348     349     353
+     350     353     353     351     357     353     352     353     357     354
+     357     357     355     361     357     356     357     361     358     361
+     361     359     365     361     360     361     365     362     365     365
+     363     369     365     364     365     369     366     369     369     367
+     373     369     368     369     373     370     373     373     371     377
+     373     372     373     377     374     377     377     375     381     377
+     376     377     381     378     381     381     379     385     381     380
+     381     385     382     385     385     383     389     385     384     385
+     389     386     389     389     387     393     389     388     389     393
+     390     393     393     391     397     393     392     393     397     394
+     397     397     395     401     397     396     397     401     398     401
+     401     399     405     401     400     401     405     402     405     405
+     403     409     405     404     405     409     406     409     409     407
+     413     409     408     409     413     410     413     413     411     417
+     413     412     413     417     414     417     417     415     421     417
+     416     417     421     418     421     421     419     425     421     420
+     421     425     422     425     425     423     429     425     424     425
+     429     426     429     429     427     433     429     428     429     433
+     430     433     433     431     437     433     432     433     437     434
+     437     437     435     441     437     436     437     441     438     441
+     441     439     445     441     440     441     445     442     445     445
+     443     449     445     444     445     449     446     449     449     447
+     453     449     448     449     453     450     453     453     451     457
+     453     452     453     457     454     457     457     455     461     457
+     456     457     461     458     461     461     459     465     461     460
+     461     465     462     465     465     463     469     465     464     465
+     469     466     469     469     467     473     469     468     469     473
+     470     473     473     471     477     473     472     473     477     474
+     477     477     475     481     477     476     477     481     478     481
+     481     479     485     481     480     481     485     482     485     485
+     483     489     485     484     485     489     486     489     489     487
+     493     489     488     489     493     490     493     493     491     497
+     493     492     493     497     494     497     497     495     501     497
+     496     497     501     498     501     501     499     505     501     500
+     501     505     502     505     505     503     509     505     504     505
+     509     506     509     509     507     513     509     508     509     513
+     510     513     513     511     517     513     512     513     517     514
+     517     517     515     521     517     516     517     521     518     521
+     521     519     525     521     520     521     525     522     525     525
+     523     529     525     524     525     529     526     529     529     527
+     533     529     528     529     533     530     533     533     531     537
+     533     532     533     537     534     537     537     535     541     537
+     536     537     541     538     541     541     539     545     541     540
+     541     545     542     545     545     543     549     545     544     545
+     549     546     549     549     547     553     549     548     549     553
+     550     553     553     551     557     553     552     553     557     554
+     557     557     555     561     557     556     557     561     558     561
+     561     559     565     561     560     561     565     562     565     565
+     563     569     565     564     565     569     566     569     569     567
+     573     569     568     569     573     570     573     573     571     577
+     573     572     573     577     574     577     577     575     581     577
+     576     577     581     578     581     581     579     585     581     580
+     581     585     582     585     585     583     589     585     584     585
+     589     586     589     589     587     593     589     588     589     593
+     590     593     593     591     597     593     592     593     597     594
+     597     597     595     601     597     596     597     601     598     601
+     601     599     605     601     600     601     605     602     605     605
+     603     609     605     604     605     609     606     609     609     607
+     613     609     608     609     613     610     613     613     611     613
+     613     612     613
+       3       0       0       3     301
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5
+      12      13     213      13      14     214      14      11     215      11
+      12     216      13      15     218      15      16     219      16      14
+     220      15      17     222      17      18     223      18      16     224
+      17      19     226      19      20     227      20      18     228      19
+      21     230      21      22     231      22      20     232      21      23
+     234      23      24     235      24      22     236      23      25     238
+      25      26     239      26      24     240      25      27     242      27
+      28     243      28      26     244      27      29     246      29      30
+     247      30      28     248      29      31     250      31      32     251
+      32      30     252      31      33     254      33      34     255      34
+      32     256      33      35     258      35      36     259      36      34
+     260      35      37     262      37      38     263      38      36     264
+      37      39     266      39      40     267      40      38     268      39
+      41     270      41      42     271      42      40     272      41      43
+     274      43      44     275      44      42     276      43      45     278
+      45      46     279      46      44     280      45      47     282      47
+      48     283      48      46     284      47      49     286      49      50
+     287      50      48     288      49      51     290      51      52     291
+      52      50     292      51      53     294      53      54     295      54
+      52     296      53      55     298      55      56     299      56      54
+     300      55      57     302      57      58     303      58      56     304
+      57      59     306      59      60     307      60      58     308      59
+      61     310      61      62     311      62      60     312      61      63
+     314      63      64     315      64      62     316      63      65     318
+      65      66     319      66      64     320      65      67     322      67
+      68     323      68      66     324      67      69     326      69      70
+     327      70      68     328      69      71     330      71      72     331
+      72      70     332      71      73     334      73      74     335      74
+      72     336      73      75     338      75      76     339      76      74
+     340      75      77     342      77      78     343      78      76     344
+      77      79     346      79      80     347      80      78     348      79
+      81     350      81      82     351      82      80     352      81      83
+     354      83      84     355      84      82     356      83      85     358
+      85      86     359      86      84     360      85      87     362      87
+      88     363      88      86     364      87      89     366      89      90
+     367      90      88     368      89      91     370      91      92     371
+      92      90     372      91      93     374      93      94     375      94
+      92     376      93      95     378      95      96     379      96      94
+     380      95      97     382      97      98     383      98      96     384
+      97      99     386      99     100     387     100      98     388      99
+     101     390     101     102     391     102     100     392     101     103
+     394     103     104     395     104     102     396     103     105     398
+     105     106     399     106     104     400     105     107     402     107
+     108     403     108     106     404     107     109     406     109     110
+     407     110     108     408     109     111     410     111     112     411
+     112     110     412     111     113     414     113     114     415     114
+     112     416     113     115     418     115     116     419     116     114
+     420     115     117     422     117     118     423     118     116     424
+     117     119     426     119     120     427     120     118     428     119
+     121     430     121     122     431     122     120     432     121     123
+     434     123     124     435     124     122     436     123     125     438
+     125     126     439     126     124     440     125     127     442     127
+     128     443     128     126     444     127     129     446     129     130
+     447     130     128     448     129     131     450     131     132     451
+     132     130     452     131     133     454     133     134     455     134
+     132     456     133     135     458     135     136     459     136     134
+     460     135     137     462     137     138     463     138     136     464
+     137     139     466     139     140     467     140     138     468     139
+     141     470     141     142     471     142     140     472     141     143
+     474     143     144     475     144     142     476     143     145     478
+     145     146     479     146     144     480     145     147     482     147
+     148     483     148     146     484     147     149     486     149     150
+     487     150     148     488     149     151     490     151     152     491
+     152     150     492     151     153     494     153     154     495     154
+     152     496     153     155     498     155     156     499     156     154
+     500     155     157     502     157     158     503     158     156     504
+     157     159     506     159     160     507     160     158     508     159
+     161     510     161     162     511     162     160     512     161     163
+     514     163     164     515     164     162     516     163     165     518
+     165     166     519     166     164     520     165     167     522     167
+     168     523     168     166     524     167     169     526     169     170
+     527     170     168     528     169     171     530     171     172     531
+     172     170     532     171     173     534     173     174     535     174
+     172     536     173     175     538     175     176     539     176     174
+     540     175     177     542     177     178     543     178     176     544
+     177     179     546     179     180     547     180     178     548     179
+     181     550     181     182     551     182     180     552     181     183
+     554     183     184     555     184     182     556     183     185     558
+     185     186     559     186     184     560     185     187     562     187
+     188     563     188     186     564     187     189     566     189     190
+     567     190     188     568     189     191     570     191     192     571
+     192     190     572     191     193     574     193     194     575     194
+     192     576     193     195     578     195     196     579     196     194
+     580     195     197     582     197     198     583     198     196     584
+     197     199     586     199     200     587     200     198     588     199
+     201     590     201     202     591     202     200     592     201     203
+     594     203     204     595     204     202     596     203     205     598
+     205     206     599     206     204     600     205     207     602     207
+     208     603     208     206     604     207     209     606     209     210
+     607     210     208     608     209     211     610     211     212     611
+     212     210     612
+       0       2       0       0       0
+      40      41
+       3       0       0       3     301
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5
+      12     213      13      13     214      14      14     215      11      11
+     216      12      13     218      15      15     219      16      16     220
+      14      15     222      17      17     223      18      18     224      16
+      17     226      19      19     227      20      20     228      18      19
+     230      21      21     231      22      22     232      20      21     234
+      23      23     235      24      24     236      22      23     238      25
+      25     239      26      26     240      24      25     242      27      27
+     243      28      28     244      26      27     246      29      29     247
+      30      30     248      28      29     250      31      31     251      32
+      32     252      30      31     254      33      33     255      34      34
+     256      32      33     258      35      35     259      36      36     260
+      34      35     262      37      37     263      38      38     264      36
+      37     266      39      39     267      40      40     268      38      39
+     270      41      41     271      42      42     272      40      41     274
+      43      43     275      44      44     276      42      43     278      45
+      45     279      46      46     280      44      45     282      47      47
+     283      48      48     284      46      47     286      49      49     287
+      50      50     288      48      49     290      51      51     291      52
+      52     292      50      51     294      53      53     295      54      54
+     296      52      53     298      55      55     299      56      56     300
+      54      55     302      57      57     303      58      58     304      56
+      57     306      59      59     307      60      60     308      58      59
+     310      61      61     311      62      62     312      60      61     314
+      63      63     315      64      64     316      62      63     318      65
+      65     319      66      66     320      64      65     322      67      67
+     323      68      68     324      66      67     326      69      69     327
+      70      70     328      68      69     330      71      71     331      72
+      72     332      70      71     334      73      73     335      74      74
+     336      72      73     338      75      75     339      76      76     340
+      74      75     342      77      77     343      78      78     344      76
+      77     346      79      79     347      80      80     348      78      79
+     350      81      81     351      82      82     352      80      81     354
+      83      83     355      84      84     356      82      83     358      85
+      85     359      86      86     360      84      85     362      87      87
+     363      88      88     364      86      87     366      89      89     367
+      90      90     368      88      89     370      91      91     371      92
+      92     372      90      91     374      93      93     375      94      94
+     376      92      93     378      95      95     379      96      96     380
+      94      95     382      97      97     383      98      98     384      96
+      97     386      99      99     387     100     100     388      98      99
+     390     101     101     391     102     102     392     100     101     394
+     103     103     395     104     104     396     102     103     398     105
+     105     399     106     106     400     104     105     402     107     107
+     403     108     108     404     106     107     406     109     109     407
+     110     110     408     108     109     410     111     111     411     112
+     112     412     110     111     414     113     113     415     114     114
+     416     112     113     418     115     115     419     116     116     420
+     114     115     422     117     117     423     118     118     424     116
+     117     426     119     119     427     120     120     428     118     119
+     430     121     121     431     122     122     432     120     121     434
+     123     123     435     124     124     436     122     123     438     125
+     125     439     126     126     440     124     125     442     127     127
+     443     128     128     444     126     127     446     129     129     447
+     130     130     448     128     129     450     131     131     451     132
+     132     452     130     131     454     133     133     455     134     134
+     456     132     133     458     135     135     459     136     136     460
+     134     135     462     137     137     463     138     138     464     136
+     137     466     139     139     467     140     140     468     138     139
+     470     141     141     471     142     142     472     140     141     474
+     143     143     475     144     144     476     142     143     478     145
+     145     479     146     146     480     144     145     482     147     147
+     483     148     148     484     146     147     486     149     149     487
+     150     150     488     148     149     490     151     151     491     152
+     152     492     150     151     494     153     153     495     154     154
+     496     152     153     498     155     155     499     156     156     500
+     154     155     502     157     157     503     158     158     504     156
+     157     506     159     159     507     160     160     508     158     159
+     510     161     161     511     162     162     512     160     161     514
+     163     163     515     164     164     516     162     163     518     165
+     165     519     166     166     520     164     165     522     167     167
+     523     168     168     524     166     167     526     169     169     527
+     170     170     528     168     169     530     171     171     531     172
+     172     532     170     171     534     173     173     535     174     174
+     536     172     173     538     175     175     539     176     176     540
+     174     175     542     177     177     543     178     178     544     176
+     177     546     179     179     547     180     180     548     178     179
+     550     181     181     551     182     182     552     180     181     554
+     183     183     555     184     184     556     182     183     558     185
+     185     559     186     186     560     184     185     562     187     187
+     563     188     188     564     186     187     566     189     189     567
+     190     190     568     188     189     570     191     191     571     192
+     192     572     190     191     574     193     193     575     194     194
+     576     192     193     578     195     195     579     196     196     580
+     194     195     582     197     197     583     198     198     584     196
+     197     586     199     199     587     200     200     588     198     199
+     590     201     201     591     202     202     592     200     201     594
+     203     203     595     204     204     596     202     203     598     205
+     205     599     206     206     600     204     205     602     207     207
+     603     208     208     604     206     207     606     209     209     607
+     210     210     608     208     209     610     211     211     611     212
+     212     612     210
+       1       0       0       1     202
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0
+      12      14      16      18      20      22      24      26      28      30
+      32      34      36      38      40      42      44      46      48      50
+      52      54      56      58      60      62      64      66      68      70
+      72      74      76      78      79      81      83      85      87      89
+      91      93      95      97      99     101     103     105     107     109
+     111     113     115     117     119     121     123     125     127     129
+     131     133     135     137     139     141     143     145     147     149
+     151     153     155     157     159     161     163     165     167     169
+     171     173     175     177     179     181     183     185     187     189
+     191     193     195     197     199     201     203     205     207     209
+      13      15      17      19      21      23      25      27      29      31
+      33      35      37      39      41      43      45      47      49      51
+      53      55      57      59      61      63      65      67      69      71
+      73      75      77     211      82      84      86      88      90      92
+      94      96      98     100     102     104     106     108     110     112
+     114     116     118     120     122     124     126     128     130     132
+     134     136     138     140     142     144     146     148     150     152
+     154     156     158     160     162     164     166     168     170     172
+     174     176     178     180     182     184     186     188     190     192
+     194     196     198     200     202     204     206     208     210     212
+      11      80
+       8       0       0       4     100
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+      12      13      14      11      14      13      15      16      16      15
+      17      18      18      17      19      20      20      19      21      22
+      22      21      23      24      24      23      25      26      26      25
+      27      28      28      27      29      30      30      29      31      32
+      32      31      33      34      34      33      35      36      36      35
+      37      38      38      37      39      40      40      39      41      42
+      42      41      43      44      44      43      45      46      46      45
+      47      48      48      47      49      50      50      49      51      52
+      52      51      53      54      54      53      55      56      56      55
+      57      58      58      57      59      60      60      59      61      62
+      62      61      63      64      64      63      65      66      66      65
+      67      68      68      67      69      70      70      69      71      72
+      72      71      73      74      74      73      75      76      76      75
+      77      78      78      77      79      80      79      81      82      80
+      81      83      84      82      83      85      86      84      85      87
+      88      86      87      89      90      88      89      91      92      90
+      91      93      94      92      93      95      96      94      95      97
+      98      96      97      99     100      98      99     101     102     100
+     101     103     104     102     103     105     106     104     105     107
+     108     106     107     109     110     108     109     111     112     110
+     111     113     114     112     113     115     116     114     115     117
+     118     116     117     119     120     118     119     121     122     120
+     121     123     124     122     123     125     126     124     125     127
+     128     126     127     129     130     128     129     131     132     130
+     131     133     134     132     133     135     136     134     135     137
+     138     136     137     139     140     138     139     141     142     140
+     141     143     144     142     143     145     146     144     145     147
+     148     146     147     149     150     148     149     151     152     150
+     151     153     154     152     153     155     156     154     155     157
+     158     156     157     159     160     158     159     161     162     160
+     161     163     164     162     163     165     166     164     165     167
+     168     166     167     169     170     168     169     171     172     170
+     171     173     174     172     173     175     176     174     175     177
+     178     176     177     179     180     178     179     181     182     180
+     181     183     184     182     183     185     186     184     185     187
+     188     186     187     189     190     188     189     191     192     190
+     191     193     194     192     193     195     196     194     195     197
+     198     196     197     199     200     198     199     201     202     200
+     201     203     204     202     203     205     206     204     205     207
+     208     206     207     209     210     208     209     211     212     210
+       1       0       0       1     301
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4       4       4       4       4       4       4       4       4       4
+       4
+     213     214     215     216     218     219     220     222     223     224
+     226     227     228     230     231     232     234     235     236     238
+     239     240     242     243     244     246     247     248     250     251
+     252     254     255     256     258     259     260     262     263     264
+     266     267     268     270     271     272     274     275     276     278
+     279     280     282     283     284     286     287     288     290     291
+     292     294     295     296     298     299     300     302     303     304
+     306     307     308     310     311     312     314     315     316     318
+     319     320     322     323     324     326     327     328     330     331
+     332     334     335     336     338     339     340     342     343     344
+     346     347     348     350     351     352     354     355     356     358
+     359     360     362     363     364     366     367     368     370     371
+     372     374     375     376     378     379     380     382     383     384
+     386     387     388     390     391     392     394     395     396     398
+     399     400     402     403     404     406     407     408     410     411
+     412     414     415     416     418     419     420     422     423     424
+     426     427     428     430     431     432     434     435     436     438
+     439     440     442     443     444     446     447     448     450     451
+     452     454     455     456     458     459     460     462     463     464
+     466     467     468     470     471     472     474     475     476     478
+     479     480     482     483     484     486     487     488     490     491
+     492     494     495     496     498     499     500     502     503     504
+     506     507     508     510     511     512     514     515     516     518
+     519     520     522     523     524     526     527     528     530     531
+     532     534     535     536     538     539     540     542     543     544
+     546     547     548     550     551     552     554     555     556     558
+     559     560     562     563     564     566     567     568     570     571
+     572     574     575     576     578     579     580     582     583     584
+     586     587     588     590     591     592     594     595     596     598
+     599     600     602     603     604     606     607     608     610     611
+     612
+       3       0       0       3     301
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2       2       2       2       2       2       2       2       2       2
+       2
+     217     213     217     217     214     221     217     215     217     217
+     216     217     221     218     221     221     219     225     221     220
+     221     225     222     225     225     223     229     225     224     225
+     229     226     229     229     227     233     229     228     229     233
+     230     233     233     231     237     233     232     233     237     234
+     237     237     235     241     237     236     237     241     238     241
+     241     239     245     241     240     241     245     242     245     245
+     243     249     245     244     245     249     246     249     249     247
+     253     249     248     249     253     250     253     253     251     257
+     253     252     253     257     254     257     257     255     261     257
+     256     257     261     258     261     261     259     265     261     260
+     261     265     262     265     265     263     269     265     264     265
+     269     266     269     269     267     273     269     268     269     273
+     270     273     273     271     277     273     272     273     277     274
+     277     277     275     281     277     276     277     281     278     281
+     281     279     285     281     280     281     285     282     285     285
+     283     289     285     284     285     289     286     289     289     287
+     293     289     288     289     293     290     293     293     291     297
+     293     292     293     297     294     297     297     295     301     297
+     296     297     301     298     301     301     299     305     301     300
+     301     305     302     305     305     303     309     305     304     305
+     309     306     309     309     307     313     309     308     309     313
+     310     313     313     311     317     313     312     313     317     314
+     317     317     315     321     317     316     317     321     318     321
+     321     319     325     321     320     321     325     322     325     325
+     323     329     325     324     325     329     326     329     329     327
+     333     329     328     329     333     330     333     333     331     337
+     333     332     333     337     334     337     337     335     341     337
+     336     337     341     338     341     341     339     345     341     340
+     341     345     342     345     345     343     349     345     344     345
+     349     346     349     349     347     353     349     348     349     353
+     350     353     353     351     357     353     352     353     357     354
+     357     357     355     361     357     356     357     361     358     361
+     361     359     365     361     360     361     365     362     365     365
+     363     369     365     364     365     369     366     369     369     367
+     373     369     368     369     373     370     373     373     371     377
+     373     372     373     377     374     377     377     375     381     377
+     376     377     381     378     381     381     379     385     381     380
+     381     385     382     385     385     383     389     385     384     385
+     389     386     389     389     387     393     389     388     389     393
+     390     393     393     391     397     393     392     393     397     394
+     397     397     395     401     397     396     397     401     398     401
+     401     399     405     401     400     401     405     402     405     405
+     403     409     405     404     405     409     406     409     409     407
+     413     409     408     409     413     410     413     413     411     417
+     413     412     413     417     414     417     417     415     421     417
+     416     417     421     418     421     421     419     425     421     420
+     421     425     422     425     425     423     429     425     424     425
+     429     426     429     429     427     433     429     428     429     433
+     430     433     433     431     437     433     432     433     437     434
+     437     437     435     441     437     436     437     441     438     441
+     441     439     445     441     440     441     445     442     445     445
+     443     449     445     444     445     449     446     449     449     447
+     453     449     448     449     453     450     453     453     451     457
+     453     452     453     457     454     457     457     455     461     457
+     456     457     461     458     461     461     459     465     461     460
+     461     465     462     465     465     463     469     465     464     465
+     469     466     469     469     467     473     469     468     469     473
+     470     473     473     471     477     473     472     473     477     474
+     477     477     475     481     477     476     477     481     478     481
+     481     479     485     481     480     481     485     482     485     485
+     483     489     485     484     485     489     486     489     489     487
+     493     489     488     489     493     490     493     493     491     497
+     493     492     493     497     494     497     497     495     501     497
+     496     497     501     498     501     501     499     505     501     500
+     501     505     502     505     505     503     509     505     504     505
+     509     506     509     509     507     513     509     508     509     513
+     510     513     513     511     517     513     512     513     517     514
+     517     517     515     521     517     516     517     521     518     521
+     521     519     525     521     520     521     525     522     525     525
+     523     529     525     524     525     529     526     529     529     527
+     533     529     528     529     533     530     533     533     531     537
+     533     532     533     537     534     537     537     535     541     537
+     536     537     541     538     541     541     539     545     541     540
+     541     545     542     545     545     543     549     545     544     545
+     549     546     549     549     547     553     549     548     549     553
+     550     553     553     551     557     553     552     553     557     554
+     557     557     555     561     557     556     557     561     558     561
+     561     559     565     561     560     561     565     562     565     565
+     563     569     565     564     565     569     566     569     569     567
+     573     569     568     569     573     570     573     573     571     577
+     573     572     573     577     574     577     577     575     581     577
+     576     577     581     578     581     581     579     585     581     580
+     581     585     582     585     585     583     589     585     584     585
+     589     586     589     589     587     593     589     588     589     593
+     590     593     593     591     597     593     592     593     597     594
+     597     597     595     601     597     596     597     601     598     601
+     601     599     605     601     600     601     605     602     605     605
+     603     609     605     604     605     609     606     609     609     607
+     613     609     608     609     613     610     613     613     611     613
+     613     612     613
+       3       0       0       3     301
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5
+      12      13     213      13      14     214      14      11     215      11
+      12     216      13      15     218      15      16     219      16      14
+     220      15      17     222      17      18     223      18      16     224
+      17      19     226      19      20     227      20      18     228      19
+      21     230      21      22     231      22      20     232      21      23
+     234      23      24     235      24      22     236      23      25     238
+      25      26     239      26      24     240      25      27     242      27
+      28     243      28      26     244      27      29     246      29      30
+     247      30      28     248      29      31     250      31      32     251
+      32      30     252      31      33     254      33      34     255      34
+      32     256      33      35     258      35      36     259      36      34
+     260      35      37     262      37      38     263      38      36     264
+      37      39     266      39      40     267      40      38     268      39
+      41     270      41      42     271      42      40     272      41      43
+     274      43      44     275      44      42     276      43      45     278
+      45      46     279      46      44     280      45      47     282      47
+      48     283      48      46     284      47      49     286      49      50
+     287      50      48     288      49      51     290      51      52     291
+      52      50     292      51      53     294      53      54     295      54
+      52     296      53      55     298      55      56     299      56      54
+     300      55      57     302      57      58     303      58      56     304
+      57      59     306      59      60     307      60      58     308      59
+      61     310      61      62     311      62      60     312      61      63
+     314      63      64     315      64      62     316      63      65     318
+      65      66     319      66      64     320      65      67     322      67
+      68     323      68      66     324      67      69     326      69      70
+     327      70      68     328      69      71     330      71      72     331
+      72      70     332      71      73     334      73      74     335      74
+      72     336      73      75     338      75      76     339      76      74
+     340      75      77     342      77      78     343      78      76     344
+      77      79     346      79      80     347      80      78     348      79
+      81     350      81      82     351      82      80     352      81      83
+     354      83      84     355      84      82     356      83      85     358
+      85      86     359      86      84     360      85      87     362      87
+      88     363      88      86     364      87      89     366      89      90
+     367      90      88     368      89      91     370      91      92     371
+      92      90     372      91      93     374      93      94     375      94
+      92     376      93      95     378      95      96     379      96      94
+     380      95      97     382      97      98     383      98      96     384
+      97      99     386      99     100     387     100      98     388      99
+     101     390     101     102     391     102     100     392     101     103
+     394     103     104     395     104     102     396     103     105     398
+     105     106     399     106     104     400     105     107     402     107
+     108     403     108     106     404     107     109     406     109     110
+     407     110     108     408     109     111     410     111     112     411
+     112     110     412     111     113     414     113     114     415     114
+     112     416     113     115     418     115     116     419     116     114
+     420     115     117     422     117     118     423     118     116     424
+     117     119     426     119     120     427     120     118     428     119
+     121     430     121     122     431     122     120     432     121     123
+     434     123     124     435     124     122     436     123     125     438
+     125     126     439     126     124     440     125     127     442     127
+     128     443     128     126     444     127     129     446     129     130
+     447     130     128     448     129     131     450     131     132     451
+     132     130     452     131     133     454     133     134     455     134
+     132     456     133     135     458     135     136     459     136     134
+     460     135     137     462     137     138     463     138     136     464
+     137     139     466     139     140     467     140     138     468     139
+     141     470     141     142     471     142     140     472     141     143
+     474     143     144     475     144     142     476     143     145     478
+     145     146     479     146     144     480     145     147     482     147
+     148     483     148     146     484     147     149     486     149     150
+     487     150     148     488     149     151     490     151     152     491
+     152     150     492     151     153     494     153     154     495     154
+     152     496     153     155     498     155     156     499     156     154
+     500     155     157     502     157     158     503     158     156     504
+     157     159     506     159     160     507     160     158     508     159
+     161     510     161     162     511     162     160     512     161     163
+     514     163     164     515     164     162     516     163     165     518
+     165     166     519     166     164     520     165     167     522     167
+     168     523     168     166     524     167     169     526     169     170
+     527     170     168     528     169     171     530     171     172     531
+     172     170     532     171     173     534     173     174     535     174
+     172     536     173     175     538     175     176     539     176     174
+     540     175     177     542     177     178     543     178     176     544
+     177     179     546     179     180     547     180     178     548     179
+     181     550     181     182     551     182     180     552     181     183
+     554     183     184     555     184     182     556     183     185     558
+     185     186     559     186     184     560     185     187     562     187
+     188     563     188     186     564     187     189     566     189     190
+     567     190     188     568     189     191     570     191     192     571
+     192     190     572     191     193     574     193     194     575     194
+     192     576     193     195     578     195     196     579     196     194
+     580     195     197     582     197     198     583     198     196     584
+     197     199     586     199     200     587     200     198     588     199
+     201     590     201     202     591     202     200     592     201     203
+     594     203     204     595     204     202     596     203     205     598
+     205     206     599     206     204     600     205     207     602     207
+     208     603     208     206     604     207     209     606     209     210
+     607     210     208     608     209     211     610     211     212     611
+     212     210     612
+       0       2       0       0       0
+      42      43
+       3       0       0       3     301
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5       5       5       5       5       5       5       5       5       5
+       5
+      12     213      13      13     214      14      14     215      11      11
+     216      12      13     218      15      15     219      16      16     220
+      14      15     222      17      17     223      18      18     224      16
+      17     226      19      19     227      20      20     228      18      19
+     230      21      21     231      22      22     232      20      21     234
+      23      23     235      24      24     236      22      23     238      25
+      25     239      26      26     240      24      25     242      27      27
+     243      28      28     244      26      27     246      29      29     247
+      30      30     248      28      29     250      31      31     251      32
+      32     252      30      31     254      33      33     255      34      34
+     256      32      33     258      35      35     259      36      36     260
+      34      35     262      37      37     263      38      38     264      36
+      37     266      39      39     267      40      40     268      38      39
+     270      41      41     271      42      42     272      40      41     274
+      43      43     275      44      44     276      42      43     278      45
+      45     279      46      46     280      44      45     282      47      47
+     283      48      48     284      46      47     286      49      49     287
+      50      50     288      48      49     290      51      51     291      52
+      52     292      50      51     294      53      53     295      54      54
+     296      52      53     298      55      55     299      56      56     300
+      54      55     302      57      57     303      58      58     304      56
+      57     306      59      59     307      60      60     308      58      59
+     310      61      61     311      62      62     312      60      61     314
+      63      63     315      64      64     316      62      63     318      65
+      65     319      66      66     320      64      65     322      67      67
+     323      68      68     324      66      67     326      69      69     327
+      70      70     328      68      69     330      71      71     331      72
+      72     332      70      71     334      73      73     335      74      74
+     336      72      73     338      75      75     339      76      76     340
+      74      75     342      77      77     343      78      78     344      76
+      77     346      79      79     347      80      80     348      78      79
+     350      81      81     351      82      82     352      80      81     354
+      83      83     355      84      84     356      82      83     358      85
+      85     359      86      86     360      84      85     362      87      87
+     363      88      88     364      86      87     366      89      89     367
+      90      90     368      88      89     370      91      91     371      92
+      92     372      90      91     374      93      93     375      94      94
+     376      92      93     378      95      95     379      96      96     380
+      94      95     382      97      97     383      98      98     384      96
+      97     386      99      99     387     100     100     388      98      99
+     390     101     101     391     102     102     392     100     101     394
+     103     103     395     104     104     396     102     103     398     105
+     105     399     106     106     400     104     105     402     107     107
+     403     108     108     404     106     107     406     109     109     407
+     110     110     408     108     109     410     111     111     411     112
+     112     412     110     111     414     113     113     415     114     114
+     416     112     113     418     115     115     419     116     116     420
+     114     115     422     117     117     423     118     118     424     116
+     117     426     119     119     427     120     120     428     118     119
+     430     121     121     431     122     122     432     120     121     434
+     123     123     435     124     124     436     122     123     438     125
+     125     439     126     126     440     124     125     442     127     127
+     443     128     128     444     126     127     446     129     129     447
+     130     130     448     128     129     450     131     131     451     132
+     132     452     130     131     454     133     133     455     134     134
+     456     132     133     458     135     135     459     136     136     460
+     134     135     462     137     137     463     138     138     464     136
+     137     466     139     139     467     140     140     468     138     139
+     470     141     141     471     142     142     472     140     141     474
+     143     143     475     144     144     476     142     143     478     145
+     145     479     146     146     480     144     145     482     147     147
+     483     148     148     484     146     147     486     149     149     487
+     150     150     488     148     149     490     151     151     491     152
+     152     492     150     151     494     153     153     495     154     154
+     496     152     153     498     155     155     499     156     156     500
+     154     155     502     157     157     503     158     158     504     156
+     157     506     159     159     507     160     160     508     158     159
+     510     161     161     511     162     162     512     160     161     514
+     163     163     515     164     164     516     162     163     518     165
+     165     519     166     166     520     164     165     522     167     167
+     523     168     168     524     166     167     526     169     169     527
+     170     170     528     168     169     530     171     171     531     172
+     172     532     170     171     534     173     173     535     174     174
+     536     172     173     538     175     175     539     176     176     540
+     174     175     542     177     177     543     178     178     544     176
+     177     546     179     179     547     180     180     548     178     179
+     550     181     181     551     182     182     552     180     181     554
+     183     183     555     184     184     556     182     183     558     185
+     185     559     186     186     560     184     185     562     187     187
+     563     188     188     564     186     187     566     189     189     567
+     190     190     568     188     189     570     191     191     571     192
+     192     572     190     191     574     193     193     575     194     194
+     576     192     193     578     195     195     579     196     196     580
+     194     195     582     197     197     583     198     198     584     196
+     197     586     199     199     587     200     200     588     198     199
+     590     201     201     591     202     202     592     200     201     594
+     203     203     595     204     204     596     202     203     598     205
+     205     599     206     206     600     204     205     602     207     207
+     603     208     208     604     206     207     606     209     209     607
+     210     210     608     208     209     610     211     211     611     212
+     212     612     210
+       1       0       0       1     202
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0       0       0       0       0       0       0       0       0
+       0       0
+      12      14      16      18      20      22      24      26      28      30
+      32      34      36      38      40      42      44      46      48      50
+      52      54      56      58      60      62      64      66      68      70
+      72      74      76      78      79      81      83      85      87      89
+      91      93      95      97      99     101     103     105     107     109
+     111     113     115     117     119     121     123     125     127     129
+     131     133     135     137     139     141     143     145     147     149
+     151     153     155     157     159     161     163     165     167     169
+     171     173     175     177     179     181     183     185     187     189
+     191     193     195     197     199     201     203     205     207     209
+      13      15      17      19      21      23      25      27      29      31
+      33      35      37      39      41      43      45      47      49      51
+      53      55      57      59      61      63      65      67      69      71
+      73      75      77     211      82      84      86      88      90      92
+      94      96      98     100     102     104     106     108     110     112
+     114     116     118     120     122     124     126     128     130     132
+     134     136     138     140     142     144     146     148     150     152
+     154     156     158     160     162     164     166     168     170     172
+     174     176     178     180     182     184     186     188     190     192
+     194     196     198     200     202     204     206     208     210     212
+      11      80
+       3       0       0       3      99
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3
+     217     214     221     221     219     225     225     223     229     229
+     227     233     233     231     237     237     235     241     241     239
+     245     245     243     249     249     247     253     253     251     257
+     257     255     261     261     259     265     265     263     269     269
+     267     273     273     271     277     277     275     281     281     279
+     285     285     283     289     289     287     293     293     291     297
+     297     295     301     301     299     305     305     303     309     309
+     307     313     313     311     317     317     315     321     321     319
+     325     325     323     329     329     327     333     333     331     337
+     337     335     341     341     339     345     345     343     349     349
+     347     353     353     351     357     357     355     361     361     359
+     365     365     363     369     369     367     373     373     371     377
+     377     375     381     381     379     385     385     383     389     389
+     387     393     393     391     397     397     395     401     401     399
+     405     405     403     409     409     407     413     413     411     417
+     417     415     421     421     419     425     425     423     429     429
+     427     433     433     431     437     437     435     441     441     439
+     445     445     443     449     449     447     453     453     451     457
+     457     455     461     461     459     465     465     463     469     469
+     467     473     473     471     477     477     475     481     481     479
+     485     485     483     489     489     487     493     493     491     497
+     497     495     501     501     499     505     505     503     509     509
+     507     513     513     511     517     517     515     521     521     519
+     525     525     523     529     529     527     533     533     531     537
+     537     535     541     541     539     545     545     543     549     549
+     547     553     553     551     557     557     555     561     561     559
+     565     565     563     569     569     567     573     573     571     577
+     577     575     581     581     579     585     585     583     589     589
+     587     593     593     591     597     597     595     601     601     599
+     605     605     603     609     609     607     613
+       2       0       0       2     202
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3
+     217     213     217     215     217     216     221     218     221     220
+     225     222     225     224     229     226     229     228     233     230
+     233     232     237     234     237     236     241     238     241     240
+     245     242     245     244     249     246     249     248     253     250
+     253     252     257     254     257     256     261     258     261     260
+     265     262     265     264     269     266     269     268     273     270
+     273     272     277     274     277     276     281     278     281     280
+     285     282     285     284     289     286     289     288     293     290
+     293     292     297     294     297     296     301     298     301     300
+     305     302     305     304     309     306     309     308     313     310
+     313     312     317     314     317     316     321     318     321     320
+     325     322     325     324     329     326     329     328     333     330
+     333     332     337     334     337     336     341     338     341     340
+     345     342     345     344     349     346     349     348     353     350
+     353     352     357     354     357     356     361     358     361     360
+     365     362     365     364     369     366     369     368     373     370
+     373     372     377     374     377     376     381     378     381     380
+     385     382     385     384     389     386     389     388     393     390
+     393     392     397     394     397     396     401     398     401     400
+     405     402     405     404     409     406     409     408     413     410
+     413     412     417     414     417     416     421     418     421     420
+     425     422     425     424     429     426     429     428     433     430
+     433     432     437     434     437     436     441     438     441     440
+     445     442     445     444     449     446     449     448     453     450
+     453     452     457     454     457     456     461     458     461     460
+     465     462     465     464     469     466     469     468     473     470
+     473     472     477     474     477     476     481     478     481     480
+     485     482     485     484     489     486     489     488     493     490
+     493     492     497     494     497     496     501     498     501     500
+     505     502     505     504     509     506     509     508     513     510
+     513     512     517     514     517     516     521     518     521     520
+     525     522     525     524     529     526     529     528     533     530
+     533     532     537     534     537     536     541     538     541     540
+     545     542     545     544     549     546     549     548     553     550
+     553     552     557     554     557     556     561     558     561     560
+     565     562     565     564     569     566     569     568     573     570
+     573     572     577     574     577     576     581     578     581     580
+     585     582     585     584     589     586     589     588     593     590
+     593     592     597     594     597     596     601     598     601     600
+     605     602     605     604     609     606     609     608     613     610
+     613     611     613     612
+       3       0       0       3      99
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3
+     217     214     221     221     219     225     225     223     229     229
+     227     233     233     231     237     237     235     241     241     239
+     245     245     243     249     249     247     253     253     251     257
+     257     255     261     261     259     265     265     263     269     269
+     267     273     273     271     277     277     275     281     281     279
+     285     285     283     289     289     287     293     293     291     297
+     297     295     301     301     299     305     305     303     309     309
+     307     313     313     311     317     317     315     321     321     319
+     325     325     323     329     329     327     333     333     331     337
+     337     335     341     341     339     345     345     343     349     349
+     347     353     353     351     357     357     355     361     361     359
+     365     365     363     369     369     367     373     373     371     377
+     377     375     381     381     379     385     385     383     389     389
+     387     393     393     391     397     397     395     401     401     399
+     405     405     403     409     409     407     413     413     411     417
+     417     415     421     421     419     425     425     423     429     429
+     427     433     433     431     437     437     435     441     441     439
+     445     445     443     449     449     447     453     453     451     457
+     457     455     461     461     459     465     465     463     469     469
+     467     473     473     471     477     477     475     481     481     479
+     485     485     483     489     489     487     493     493     491     497
+     497     495     501     501     499     505     505     503     509     509
+     507     513     513     511     517     517     515     521     521     519
+     525     525     523     529     529     527     533     533     531     537
+     537     535     541     541     539     545     545     543     549     549
+     547     553     553     551     557     557     555     561     561     559
+     565     565     563     569     569     567     573     573     571     577
+     577     575     581     581     579     585     585     583     589     589
+     587     593     593     591     597     597     595     601     601     599
+     605     605     603     609     609     607     613
+       2       0       0       2     202
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3       3       3       3       3       3       3       3       3
+       3       3
+     217     213     217     215     217     216     221     218     221     220
+     225     222     225     224     229     226     229     228     233     230
+     233     232     237     234     237     236     241     238     241     240
+     245     242     245     244     249     246     249     248     253     250
+     253     252     257     254     257     256     261     258     261     260
+     265     262     265     264     269     266     269     268     273     270
+     273     272     277     274     277     276     281     278     281     280
+     285     282     285     284     289     286     289     288     293     290
+     293     292     297     294     297     296     301     298     301     300
+     305     302     305     304     309     306     309     308     313     310
+     313     312     317     314     317     316     321     318     321     320
+     325     322     325     324     329     326     329     328     333     330
+     333     332     337     334     337     336     341     338     341     340
+     345     342     345     344     349     346     349     348     353     350
+     353     352     357     354     357     356     361     358     361     360
+     365     362     365     364     369     366     369     368     373     370
+     373     372     377     374     377     376     381     378     381     380
+     385     382     385     384     389     386     389     388     393     390
+     393     392     397     394     397     396     401     398     401     400
+     405     402     405     404     409     406     409     408     413     410
+     413     412     417     414     417     416     421     418     421     420
+     425     422     425     424     429     426     429     428     433     430
+     433     432     437     434     437     436     441     438     441     440
+     445     442     445     444     449     446     449     448     453     450
+     453     452     457     454     457     456     461     458     461     460
+     465     462     465     464     469     466     469     468     473     470
+     473     472     477     474     477     476     481     478     481     480
+     485     482     485     484     489     486     489     488     493     490
+     493     492     497     494     497     496     501     498     501     500
+     505     502     505     504     509     506     509     508     513     510
+     513     512     517     514     517     516     521     518     521     520
+     525     522     525     524     529     526     529     528     533     530
+     533     532     537     534     537     536     541     538     541     540
+     545     542     545     544     549     546     549     548     553     550
+     553     552     557     554     557     556     561     558     561     560
+     565     562     565     564     569     566     569     568     573     570
+     573     572     577     574     577     576     581     578     581     580
+     585     582     585     584     589     586     589     588     593     590
+     593     592     597     594     597     596     601     598     601     600
+     605     602     605     604     609     606     609     608     613     610
+     613     611     613     612
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO   2NBRE OBJETS NOMMES       0NBRE OBJETS       8
+       1       1      -1       1
+     -23     100       1
+ SCAL
+       0
\0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0
\0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0
+       1
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00
+       1       1      -1       2
+     -24     100       1
+ SCAL
+       0
+ CENT RE   GIBI  FEC IT                                                         
+                    
+       2       0
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01
+       1       1      -1       2
+     -24     100       1
+ SCAL
+       0
+ CENT RE   GIBI  FEC IT                                                         
+                    
+       2       0
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02  5.49990000000000E-02  5.49990000000000E-02
+  5.49990000000000E-02
+       1       1      -1       1
+     -25     100       1
+ SCAL
+       0
\0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0
\0\0\0\0 \0\0\0\0 \0\0\0\0 \0\0\0\0
+       1
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00  4.00000000000000E+00  4.00000000000000E+00
+  4.00000000000000E+00
+       1       1      -1       1
+     -27     301       1
+ SCAL
+       0
+ FACE                                                                           
+                    
+       2
+  3.00000000000000E-02  1.00000000000000E+00  2.99999999999980E-02
+  1.00000000000000E+00  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000001E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  3.00000000000009E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000010E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02
+       1       2      -1       1
+     -27     301       2
+ UX   UY  
+       0       0
+ FACE                                                                           
+                    
+       2
+ -1.00000000000000E+00  1.99840144432528E-15  1.00000000000000E+00
+  0.00000000000000E+00 -1.00000000000000E+00  1.89431803576667E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.90125692967058E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00
+       1       1      -1       1
+     -34     301       1
+ SCAL
+       0
+ FACE                                                                           
+                    
+       2
+  3.00000000000000E-02  1.00000000000000E+00  2.99999999999980E-02
+  1.00000000000000E+00  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000001E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  3.00000000000009E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999999E-02  1.00000000000000E+00
+  2.99999999999999E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000010E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  3.00000000000000E-02  1.00000000000000E+00
+  3.00000000000000E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02  3.00000000000002E-02  1.00000000000000E+00
+  3.00000000000002E-02  2.99999999999998E-02  1.00000000000000E+00
+  2.99999999999998E-02
+       1       2      -1       1
+     -34     301       2
+ UX   UY  
+       0       0
+ FACE                                                                           
+                    
+       2
+ -1.00000000000000E+00  1.99840144432528E-15  1.00000000000000E+00
+  0.00000000000000E+00 -1.00000000000000E+00  1.89431803576667E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.90125692967058E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  1.99840144432528E-15
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  9.99200722162641E-16
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  10NBRE OBJETS NOMMES      36NBRE OBJETS      40
+ C_2      C_3      C_4      C_E      C_F      C_10     C_11     C_12    
+ C_13     C_14     C_18     C_1C     C_20     C_24     C_28     C_2C    
+ C_30     C_34     C_38     C_3C     C_3E     C_43     C_45     C_5C    
+ C_5D     C_5E     C_6B     C_6C     C_6D     C_6E     C_8B     C_8C    
+ C_8D     C_8E     C_8F     C_90    
+       1       2       3       4       5       6       7       8       9      10
+      11      12      13      14      15      16      17      18      19      20
+      21      22      23      24      25      26      27      28      29      30
+      31      32      33      34      35      36
+      64
+      27      80      27      81      27      82      27      57      27      83
+      25       8      27      84      25       9      27      85      27      58
+      27      86      25      10      27      87      27      59      27      88
+      27      60      27      89      25      11      27      90      27      61
+      27      91      27      62      27      92      26      11      27      93
+      26      12      27      94      25      12      27      95      18       1
+      27      96      18       2
+       4
+      27      97      10       9
+       0
+      40
+      26       1      10      10      26       2      10      11      26       3
+      10      12      26       4      10      13      26       5      10      14
+      26       6      10      15      26       7      10      16      26       8
+      10      17      26       9      10      18      26      10      10      19
+       0
+       0
+       0
+       0
+      40
+      26       1      32       1      26       2      32       2      26       3
+      32       3      26       4      32       4      26       5      32       5
+      26       6      32       6      26       7      32       7      26       8
+      32       8      26       9      32       9      26      10      32      10
+      12
+      27      97      26       1      27      98      27      44      27      99
+      18       3
+      12
+      27      97      26       2      27      98      27      45      27      99
+      18       4
+      12
+      27      97      26       3      27      98      27      46      27      99
+      18       5
+      12
+      27      97      26       4      27      98      27      47      27      99
+      18       6
+      12
+      27      97      26       5      27      98      27      48      27      99
+      18       7
+      12
+      27      97      26       6      27      98      27      49      27      99
+      18       8
+      12
+      27      97      26       7      27      98      27      50      27      99
+      18       9
+      12
+      27      97      26       8      27      98      27      51      27      99
+      18      10
+      12
+      27      97      26       9      27      98      27      52      27      99
+      18      11
+      12
+      27      97      26      10      27      98      27      53      27      99
+      18      12
+      12
+      27      80      27     100      26       1      10      21      27     101
+      10      37
+      36
+      27     102       1       2      27      80      27      54      27     103
+      25       2      27     104      25       3      27     105      25       4
+      27     106      38       1      27     107      27     108      27     109
+      39      10      27     110       2       1
+      12
+      27      80      27     100      26       1      10      23      27     101
+      10      38
+      48
+      27     102       1       2      27      80      27      56      27     111
+      25       5      27     112      25       6      27     113      25       7
+      27     106      38       2      27     107      27     108      27     114
+       2       2      27     115       2       3      27     116      25      13
+      27     109      39      11      27     110       2       4
+       8
+      27     117      10      25      27     118      10      26
+       8
+      26       1      39       2      26       2      39       3
+       8
+      26       1      22       1      26       2      22       2
+       0
+       8
+      27     117      10      29      27     118      10      30
+       8
+      26       1      39       4      26       2      39       5
+       8
+      26       1      22       3      26       2      22       4
+       0
+       0
+      84
+      26      13      39      12      26       1      39      13      26       2
+      39      14      26       3      39      15      26       4      39      16
+      26       5      39      17      26       6      39      18      26       7
+      39      19      26       8      39      20      26       9      39      21
+      26      10      39      22      26      14      39      23      26      15
+      39      24      26      16      39      25      26      17      39      26
+      26      18      39      27      26      19      39      28      26      20
+      39      29      26      21      39      30      26      22      39      31
+      26      23      39      32
+      84
+      26      13      39      33      26       1      39      34      26       2
+      39      35      26       3      39      36      26       4      39      37
+      26       5      39      38      26       6      39      39      26       7
+      39      40      26       8      39      41      26       9      39      42
+      26      10      39      43      26      14      39      44      26      15
+      39      45      26      16      39      46      26      17      39      47
+      26      18      39      48      26      19      39      49      26      20
+      39      50      26      21      39      51      26      22      39      52
+      26      23      39      53
+      84
+      26      13      39      54      26       1      39      55      26       2
+      39      56      26       3      39      57      26       4      39      58
+      26       5      39      59      26       6      39      60      26       7
+      39      61      26       8      39      62      26       9      39      63
+      26      10      39      64      26      14      39      65      26      15
+      39      66      26      16      39      67      26      17      39      68
+      26      18      39      69      26      19      39      70      26      20
+      39      71      26      21      39      72      26      22      39      73
+      26      23      39      74
+      84
+      26      13      39      75      26       1      39      76      26       2
+      39      77      26       3      39      78      26       4      39      79
+      26       5      39      80      26       6      39      81      26       7
+      39      82      26       8      39      83      26       9      39      84
+      26      10      39      85      26      14      39      86      26      15
+      39      87      26      16      39      88      26      17      39      89
+      26      18      39      90      26      19      39      91      26      20
+      39      92      26      21      39      93      26      22      39      94
+      26      23      39      95
+       4
+      26       1      26       1
+       4
+      26       1      26       1
+      88
+      27      80      27     119      27     120      26       1      27     121
+       1      17      27     122      27     123      27     102       1      26
+      27     124      26       1      27     125       1      27      27     126
+       1      28      27     127       1      29      27     128       1      23
+      27     129       1      19      27     130       1      30      27     131
+       1      31      27     132      25       8      27     133       1      32
+      27     134      26      24      27     135      26      25      27     136
+      26      26      27     137      29       1      27     138      39      96
+      27     139       2       5      27     140       2       6
+      88
+      27      80      27     119      27     120      26       1      27     121
+       1      18      27     122      27     123      27     102       1      33
+      27     124      26       1      27     125       1      34      27     126
+       1      35      27     127       1      36      27     128       1      25
+      27     129       1      20      27     130       1      37      27     131
+       1      38      27     132      25       8      27     133       1      39
+      27     134      26      24      27     135      26      25      27     136
+      26      26      27     137      29       2      27     138      39      97
+      27     139       2       7      27     140       2       8
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  18NBRE OBJETS NOMMES       2NBRE OBJETS      20
+ C_57     C_58    
+       1       2
+      21
+  0.00000000000000E+00  1.80000000000000E+02  3.60000000000000E+02
+  5.40000000000000E+02  7.20000000000000E+02  9.00000000000000E+02
+  1.08000000000000E+03  1.26000000000000E+03  1.44000000000000E+03
+  1.62000000000000E+03  1.80000000000000E+03  1.98000000000000E+03
+  2.16000000000000E+03  2.34000000000000E+03  2.52000000000000E+03
+  2.70000000000000E+03  2.88000000000000E+03  3.06000000000000E+03
+  3.24000000000000E+03  3.42000000000000E+03  3.60000000000000E+03
+      21
+  0.00000000000000E+00  1.80000000000000E+02  3.60000000000000E+02
+  5.40000000000000E+02  7.20000000000000E+02  9.00000000000000E+02
+  1.08000000000000E+03  1.26000000000000E+03  1.44000000000000E+03
+  1.62000000000000E+03  1.80000000000000E+03  1.98000000000000E+03
+  2.16000000000000E+03  2.34000000000000E+03  2.52000000000000E+03
+  2.70000000000000E+03  2.88000000000000E+03  3.06000000000000E+03
+  3.24000000000000E+03  3.42000000000000E+03  3.60000000000000E+03
+      21
+ -1.15000000000000E+00 -2.16540006497621E-01 -8.41397770009844E-02
+ -4.85976677678687E-02 -3.37455570371690E-02 -2.52459832108218E-02
+ -1.97690425436327E-02 -1.59409962757219E-02 -1.31888625377558E-02
+ -1.09119860531204E-02 -9.19290101181534E-03 -7.84294096039159E-03
+ -6.74365940030929E-03 -5.82979435469586E-03 -5.06740412419820E-03
+ -4.41975400584346E-03 -3.86658567110400E-03 -3.39449654285649E-03
+ -2.98504207216772E-03 -2.53776152358996E-03 -2.31476016363927E-03
+      21
+ -1.15000000000000E+00 -9.30397404678052E-01 -3.99006393092861E-01
+ -2.07168724171530E-01 -1.29350727394083E-01 -9.03551366564161E-02
+ -6.76129559558960E-02 -5.28388160049688E-02 -4.26312674227759E-02
+ -3.45377273330808E-02 -2.86788164188325E-02 -2.41927361441432E-02
+ -2.06123089316590E-02 -1.76887851915472E-02 -1.52792695565113E-02
+ -1.32536610116376E-02 -1.15427250669061E-02 -1.00903141297685E-02
+ -8.84056690841387E-03 -7.48544137316366E-03 -6.80674510584511E-03
+      21
+ -1.15000000000000E+00 -1.15060872024149E+00 -1.13499905693040E+00
+ -7.95394822100051E-01 -4.30905323840183E-01 -2.64917049285670E-01
+ -1.81256800501063E-01 -1.32961427219939E-01 -1.02231526111713E-01
+ -8.01552310619662E-02 -6.48410172548005E-02 -5.35915529885089E-02
+ -4.49298823577743E-02 -3.80662136598705E-02 -3.25312588159042E-02
+ -2.79676615199501E-02 -2.41759217163720E-02 -2.09930359371299E-02
+ -1.82881621622230E-02 -1.55047223983316E-02 -1.39381228920544E-02
+      21
+ -1.15000000000000E+00 -1.15003050279579E+00 -1.14945324999156E+00
+ -1.15216622941086E+00 -1.14194776513999E+00 -9.31257334949404E-01
+ -5.67940566609100E-01 -3.60643458507160E-01 -2.50817800828769E-01
+ -1.84496520865302E-01 -1.41808693317095E-01 -1.12656876351951E-01
+ -9.16244319997735E-02 -7.58227570362588E-02 -6.35890561852581E-02
+ -5.38457707225434E-02 -4.59685747877699E-02 -3.94954594598532E-02
+ -3.41018315130958E-02 -2.89695162340258E-02 -2.56219655982434E-02
+      21
+ -1.15000000000000E+00 -1.15000151469562E+00 -1.14997228758946E+00
+ -1.15012078743789E+00 -1.14964997086609E+00 -1.15034594794479E+00
+ -1.15242834219045E+00 -1.10346040120499E+00 -8.26415561949390E-01
+ -5.30787359698162E-01 -3.58863584011360E-01 -2.60114236348006E-01
+ -1.97855922317559E-01 -1.55882626060270E-01 -1.25947669247034E-01
+ -1.03629896223960E-01 -8.64832401557986E-02 -7.29512134106926E-02
+ -6.20519737259673E-02 -5.25659618901367E-02 -4.56082435019248E-02
+      21
+ -1.15000000000000E+00 -1.15000007516150E+00 -1.14999858497049E+00
+ -1.15000648601938E+00 -1.14998091221237E+00 -1.15002201148601E+00
+ -1.15012292571263E+00 -1.14916546120541E+00 -1.15151945793081E+00
+ -1.15140124164690E+00 -1.09441898963168E+00 -8.38270772945895E-01
+ -5.54048026462153E-01 -3.82909893086666E-01 -2.82442837703037E-01
+ -2.17561993419072E-01 -1.72968908719407E-01 -1.40614485535019E-01
+ -1.16224003093473E-01 -9.70000399779595E-02 -8.21272011753830E-02
+      21
+ -1.15000000000000E+00 -1.15000000372952E+00 -1.14999992775752E+00
+ -1.15000034647027E+00 -1.14999895267042E+00 -1.15000134148075E+00
+ -1.15000601799045E+00 -1.14995662642460E+00 -1.15009096148427E+00
+ -1.15006229202357E+00 -1.14914889879884E+00 -1.15134713449563E+00
+ -1.15235921190085E+00 -1.11260787601159E+00 -8.99467334965939E-01
+ -6.13129474211469E-01 -4.25293436586229E-01 -3.14332083383740E-01
+ -2.42570221057817E-01 -1.93671486405551E-01 -1.57261914282897E-01
+      21
+ -1.15000000000000E+00 -1.15000000018511E+00 -1.14999999631362E+00
+ -1.15000001845649E+00 -1.14999994261664E+00 -1.15000008029727E+00
+ -1.15000029118890E+00 -1.14999771852574E+00 -1.15000518273646E+00
+ -1.15000241369967E+00 -1.14995590195655E+00 -1.15008167585997E+00
+ -1.15011240661199E+00 -1.14920280894521E+00 -1.15048413637776E+00
+ -1.15355271999449E+00 -1.13489684822653E+00 -9.88079014600367E-01
+ -7.06575620556291E-01 -4.89547416771393E-01 -3.57755578976403E-01
+      21
+ -1.15000000000000E+00 -1.15000000000919E+00 -1.14999999981198E+00
+ -1.15000000098080E+00 -1.14999999686222E+00 -1.15000000474282E+00
+ -1.15000001391583E+00 -1.14999988021859E+00 -1.15000029238419E+00
+ -1.15000007693173E+00 -1.14999769428415E+00 -1.15000469239171E+00
+ -1.15000502071806E+00 -1.14995808942615E+00 -1.15003535574102E+00
+ -1.15018676578197E+00 -1.14948762713892E+00 -1.14955847860536E+00
+ -1.15335609441927E+00 -1.14777221562769E+00 -1.07288169976893E+00
+      21
+ -1.15000000000000E+00 -1.15000000000046E+00 -1.14999999999041E+00
+ -1.15000000005201E+00 -1.14999999982878E+00 -1.15000000027714E+00
+ -1.15000000065495E+00 -1.14999999372727E+00 -1.15000001638543E+00
+ -1.15000000121241E+00 -1.14999987981461E+00 -1.15000026657349E+00
+ -1.15000021282329E+00 -1.14999777671329E+00 -1.15000230040975E+00
+ -1.15000939152630E+00 -1.14997173675939E+00 -1.14998304877008E+00
+ -1.15018720080803E+00 -1.14987984887642E+00 -1.14909664847054E+00
+       2
+  0.00000000000000E+00  3.60000000000000E+03
+       2
+  1.00000000000000E+00  1.00000000000000E+00
+       2
+  0.00000000000000E+00  3.60000000000000E+03
+       2
+  1.00000000000000E+00  1.00000000000000E+00
+       2
+  0.00000000000000E+00  3.60000000000000E+03
+       2
+  1.00000000000000E+00  1.00000000000000E+00
+       2
+  0.00000000000000E+00  3.60000000000000E+03
+       2
+  1.00000000000000E+00  1.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  22NBRE OBJETS NOMMES       4NBRE OBJETS       4
+ C_62     C_68     C_72     C_78    
+       1       2       3       4
+       1
+ REEL      GIBI  FEC IT                                                         
+                    
+     -13     -14       0
+ PROG _X        PROG _Y        REEL LIST REEL LIST REEL
+ GIBI  FEC IT                                                                   
+          
+       1
+ REEL      GIBI  FEC IT                                                         
+                    
+     -15     -16       0
+ PROG _X        PROG _Y        REEL LIST REEL LIST REEL
+ GIBI  FEC IT                                                                   
+          
+       1
+ REEL      GIBI  FEC IT                                                         
+                    
+     -17     -18       0
+ PROG _X        PROG _Y        REEL LIST REEL LIST REEL
+ GIBI  FEC IT                                                                   
+          
+       1
+ REEL      GIBI  FEC IT                                                         
+                    
+     -19     -20       0
+ PROG _X        PROG _Y        REEL LIST REEL LIST REEL
+ GIBI  FEC IT                                                                   
+          
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  25NBRE OBJETS NOMMES      12NBRE OBJETS      13
+ PI       C_40     C_41     C_42     C_47     C_48     C_49     C_4B    
+ C_4C     C_4E     C_51     C_56    
+       1       2       3       4       5       6       7       8       9      10
+      11      12
+      13
+  3.14159265358979E+00 -1.00000000000000E+00  7.30000000000000E+00
+  1.00000000000000E+00  2.92270000000000E+00  2.03040000000000E+00
+  5.07500000000000E-01  0.00000000000000E+00  3.00000000000000E+00
+  8.00000000000000E-01  1.00000000000000E-14  5.00000000000000E-04
+  3.42149382420365E-05
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  26NBRE OBJETS NOMMES      13NBRE OBJETS      26
+ C_16     C_1A     C_1E     C_22     C_26     C_2A     C_2E     C_32    
+ C_36     C_3A     C_54     C_55     C_8A    
+       1       2       3       4       5       6       7       8       9      10
+      11      12       1
+      26
+          1          2          3          4          5          6          7
+          8          9         10       1000         40          0         11
+         12         13         14         15         16         17         18
+         19         20        202        100        301
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  27NBRE OBJETS NOMMES      79NBRE OBJETS     140
+ #2       #3       #4       D        C        GRAV     *        DENS    
+ MESH0    TROPIC   PARAMET  PERMEA   POROSITY SRESIDU  MCOMPRES SATLAW  
+ PERMLAW  SOURCE   BC_DIRI  BC_NEUMA HINI     MODD     FIELD    MOTC    
+ /        DEUXINDI LOI      PW       SAT      SATUR    H_TFIELD OUTPUTS 
+ OUT_SUP  TABLE    OUBL     OPTI     MODE     PLAN     DEFO     TASS    
+ C_A      MOT      DIME     C_17     C_1B     C_1F     C_23     C_27    
+ C_2B     C_2F     C_33     C_37     C_3B     C_3F     CHAIN    C_46    
+ C_4A     C_4D     C_4F     C_50     C_52     C_53     PROG     PROG_Y  
+ ALL      X        N        INDIC    CHP1     NIV      MODARCY  TRANS1  
+ TRANS2   QELEM    OPTRESOL TYPDI    MASSEFMH MATTR    MATTM   
+       1       2       3       4       5       6       7       8       9      10
+      11      12      13      14      15      16      17      18      19      20
+      21      22      23      24      25      26      27      28      29      30
+      31      32      33      34      35      36      37      38      39      40
+      41      42      43      44      45      46      47      48      49      50
+      51      52      53      54      55      56      57      58      59      60
+      61      62      63      64      65      66      67      68      69      70
+      71      72      73      74      75      76      77      78      79
+     807     140
+ #2#3#4DROICERCGRAV*DENSMESH0TROPICPARAMETPERMEAPOROSITYSRESIDUMCOMPRESS
+ ATLAWPERMLAWSOURCEBC_DIRIBC_NEUMAHINIMODDFIELDMOTC/DEUXINDILOIPWSATSATU
+ RH_TFIELDOUTPUTSOUT_SUPTABLEOUBLOPTIMODEPLANDEFOTASSISOTROPEMOTDIMEOUT2
+ .955OUT2.895OUT2.835OUT2.775OUT2.715OUT2.655OUT2.595OUT2.535OUT2.475OUT
+ 2.415EXPONENTIELLECHAINVAN_GENUCHTENLUYESDECENTRENOEFMHILU0PROGPROG_YAL
+ LXNINDICCHP1NIVMODARCYTRANS1TRANS2QELEMOPTRESOLTYPDIMASSEFMHMATTRMATTMS
+ OUSTYPEDARCYSATURSOLVERPREFZREFMASSLUMPSOUS_RELAXATIONHOMOGENEISATIONLT
+ RACEEPSILONSSCHEMEPRECONDMAXITERITMAXRESIDU_MAXTEMPS_CALCULESTEMPS_SAUV
+ ESCPOINTNAMEVALUEMULTIZONEINDEXMAILLAGECOEF_NALPHACOEF_CMODELENOMZONE1A
+ BMCNFBHETANEXPMEXPPOROTERESIDUPRECCHAMPEVOLDOMAINEPRECONDIQUAFNOMDOMINE
+ FMDFACEFACELFACEPCENTREELTFAFACEL2MAILFACETOLERSOMMETNPTDNELDNBFDOBJINC
+                                              LUXXNORMAEXXSURFACXXNORMAF
+       2       4       6      10      14      18      19      23      28      34
+      41      47      55      62      70      76      83      89      96     104
+     108     112     117     121     122     130     133     135     138     143
+     151     158     165     170     174     178     182     186     190     194
+     202     205     209     217     225     233     241     249     257     265
+     273     281     289     302     307     320     322     325     333     335
+     339     343     347     353     356     357     358     363     367     370
+     377     383     389     394     402     407     415     420     425     433
+     443     449     453     457     465     480     495     501     508     515
+     522     529     534     544     558     570     576     580     585     594
+     599     607     613     618     624     630     637     638     642     644
+     649     653     657     661     669     673     678     682     689     697
+     701     707     707     713     717     722     727     733     738     744
+     752     757     763     767     771     775     783     791     799     807
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  29NBRE OBJETS NOMMES       0NBRE OBJETS       2
+       8      18
+ C_9     C_5     TOP     RIGHT   LEFT    GE_3    GE_1    FAM_TOP FAM_STO
+ TFAM_LEFTC_75    C_6F    C_65    C_6     C_5F    C_3D    BOTTOM  MESH0 
+                                                                        
+       8      18
+ C_9     C_5     TOP     RIGHT   LEFT    GE_3    GE_1    FAM_TOP FAM_STO
+ TFAM_LEFTC_75    C_6F    C_65    C_6     C_5F    C_3D    BOTTOM  MESH0 
+                                                                        
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  32NBRE OBJETS NOMMES      10NBRE OBJETS     613
+ C_15     C_19     C_1D     C_21     C_25     C_29     C_2D     C_31    
+ C_35     C_39    
+       1       2       3       4       5       6       7       8       9      10
+     613
+     604     605     606     607     608     609     610     611     612     613
+     201       1     101       2     102       3     103       4     104       5
+     105       6     106       7     107       8     108       9     109      10
+     110      11     111      12     112      13     113      14     114      15
+     115      16     116      17     117      18     118      19     119      20
+     120      21     121      22     122      23     123      24     124      25
+     125      26     126      27     127      28     128      29     129      30
+     130      31     131      32     132      33     133      34      35     202
+      36     135      37     136      38     137      39     138      40     139
+      41     140      42     141      43     142      44     143      45     144
+      46     145      47     146      48     147      49     148      50     149
+      51     150      52     151      53     152      54     153      55     154
+      56     155      57     156      58     157      59     158      60     159
+      61     160      62     161      63     162      64     163      65     164
+      66     165      67     166      68     167      69     168      70     169
+      71     170      72     171      73     172      74     173      75     174
+      76     175      77     176      78     177      79     178      80     179
+      81     180      82     181      83     182      84     183      85     184
+      86     185      87     186      88     187      89     188      90     189
+      91     190      92     191      93     192      94     193      95     194
+      96     195      97     196      98     197      99     198     100     199
+     134     200     203     204     205     206     207     208     209     210
+     211     212     213     214     215     216     217     218     219     220
+     221     222     223     224     225     226     227     228     229     230
+     231     232     233     234     235     236     237     238     239     240
+     241     242     243     244     245     246     247     248     249     250
+     251     252     253     254     255     256     257     258     259     260
+     261     262     263     264     265     266     267     268     269     270
+     271     272     273     274     275     276     277     278     279     280
+     281     282     283     284     285     286     287     288     289     290
+     291     292     293     294     295     296     297     298     299     300
+     301     302     303     304     305     306     307     308     309     310
+     311     312     313     314     315     316     317     318     319     320
+     321     322     323     324     325     326     327     328     329     330
+     331     332     333     334     335     336     337     338     339     340
+     341     342     343     344     345     346     347     348     349     350
+     351     352     353     354     355     356     357     358     359     360
+     361     362     363     364     365     366     367     368     369     370
+     371     372     373     374     375     376     377     378     379     380
+     381     382     383     384     385     386     387     388     389     390
+     391     392     393     394     395     396     397     398     399     400
+     401     402     403     404     405     406     407     408     409     410
+     411     412     413     414     415     416     417     418     419     420
+     421     422     423     424     425     426     427     428     429     430
+     431     432     433     434     435     436     437     438     439     440
+     441     442     443     444     445     446     447     448     449     450
+     451     452     453     454     455     456     457     458     459     460
+     461     462     463     464     465     466     467     468     469     470
+     471     472     473     474     475     476     477     478     479     480
+     481     482     483     484     485     486     487     488     489     490
+     491     492     493     494     495     496     497     498     499     500
+     501     502     503     504     505     506     507     508     509     510
+     511     512     513     514     515     516     517     518     519     520
+     521     522     523     524     525     526     527     528     529     530
+     531     532     533     534     535     536     537     538     539     540
+     541     542     543     544     545     546     547     548     549     550
+     551     552     553     554     555     556     557     558     559     560
+     561     562     563     564     565     566     567     568     569     570
+     571     572     573     574     575     576     577     578     579     580
+     581     582     583     584     585     586     587     588     589     590
+     591     592     593     594     595     596     597     598     599     600
+     601     602     603
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  33NBRE OBJETS NOMMES       0NBRE OBJETS       1
+    3042
+ -5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.99999999999980E-02  0.00000000000000E+00
+  5.00000000000000E-01  5.99999999999981E-02  0.00000000000000E+00
+  5.00000000000000E-01  8.99999999999981E-02  0.00000000000000E+00
+  5.00000000000000E-01  1.19999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.49999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.79999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.09999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.39999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.69999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.99999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.29999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.59999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.89999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.19999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.49999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.79999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.09999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.39999999999998E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.89999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.19999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.49999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.79999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.09999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.89999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.02000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.05000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.08000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.11000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.14000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.17000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.20000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.23000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.26000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.29000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.32000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.35000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.38000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.41000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.44000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.47000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.50000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.53000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.56000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.59000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.62000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.65000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.68000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.71000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.74000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.77000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.80000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.83000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.86000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.89000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.92000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.95000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.98000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.01000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.04000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.07000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.10000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.13000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.16000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.19000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.22000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.25000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.28000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.31000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.34000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.37000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.40000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.43000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.46000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.49000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.52000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.55000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.58000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.61000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.64000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.67000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.70000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.73000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.76000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.79000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.82000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.85000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.88000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.91000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.94000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.97000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  3.00000000000000E-02  0.00000000000000E+00
+ -5.00000000000000E-01  6.00000000000000E-02  0.00000000000000E+00
+ -5.00000000000000E-01  9.00000000000000E-02  0.00000000000000E+00
+ -5.00000000000000E-01  1.20000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.50000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.80000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.10000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.40000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.70000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.00000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.30000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.60000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.90000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.20000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.50000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.80000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.10000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.40000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.70000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.00000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.30000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.60000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.90000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.20000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.50000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.80000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.10000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.40000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.70000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.00000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.30000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.60000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.90000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.05000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.08000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.11000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.14000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.17000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.20000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.23000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.26000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.29000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.32000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.35000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.38000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.41000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.44000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.47000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.50000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.53000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.56000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.59000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.62000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.65000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.68000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.71000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.74000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.77000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.80000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.83000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.86000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.89000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.92000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.95000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.98000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.01000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.04000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.07000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.10000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.13000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.16000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.19000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.22000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.25000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.28000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.31000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.34000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.37000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.40000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.43000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.46000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.49000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.52000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.55000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.58000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.61000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.64000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.67000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.70000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.73000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.76000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.79000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.82000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.85000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.88000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.91000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.94000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.97000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  3.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.02000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  2.99999999999990E-02  0.00000000000000E+00
+  5.00000000000000E-01  1.49999999999990E-02  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.49999999999995E-02  0.00000000000000E+00
+ -5.00000000000000E-01  4.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  5.99999999999991E-02  0.00000000000000E+00
+  5.00000000000000E-01  4.49999999999981E-02  0.00000000000000E+00
+  0.00000000000000E+00  4.49999999999990E-02  0.00000000000000E+00
+ -5.00000000000000E-01  7.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  8.99999999999991E-02  0.00000000000000E+00
+  5.00000000000000E-01  7.49999999999981E-02  0.00000000000000E+00
+  0.00000000000000E+00  7.49999999999991E-02  0.00000000000000E+00
+ -5.00000000000000E-01  1.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.19999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.04999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.04999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.49999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.34999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.34999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.79999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.64999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.64999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.09999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.94999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.94999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.24999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.24999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.54999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.54999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.84999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.84999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.14999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.14999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.44999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.44999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.89999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.74999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.74999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.19999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.04999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.04999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.49999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.34999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.34999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.79999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.64999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.64999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.09999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.94999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.94999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.24999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.24999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.54999999999998E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.54999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.84999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.84999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.14999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.14999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.44999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.44999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.90000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.74999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.74999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.20000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.04999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.05000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.50000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.34999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.35000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.80000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.64999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.65000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.10000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.94999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.95000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.24999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.25000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.54999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.54999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.84999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.84999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.14999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.14999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.60000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.44999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.45000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.90000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.74999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.75000000000000E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.00500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.02000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.00500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.03500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.05000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.03500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.03500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.06500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.08000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.06500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.06500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.09500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.11000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.09500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.09500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.12500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.14000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.12500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.12500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.15500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.17000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.15500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.15500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.18500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.20000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.18500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.18500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.21500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.23000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.21500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.21500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.24500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.26000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.24500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.24500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.27500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.29000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.27500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.27500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.30500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.32000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.30500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.30500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.33500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.35000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.33500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.33500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.36500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.38000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.36500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.36500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.39500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.41000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.39500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.39500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.42500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.44000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.42500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.42500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.45500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.47000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.45500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.45500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.48500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.50000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.48500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.48500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.51500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.53000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.51500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.51500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.54500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.56000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.54500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.54500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.57500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.59000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.57500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.57500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.60500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.62000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.60500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.60500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.63500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.65000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.63500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.63500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.66500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.68000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.66500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.66500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.69500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.71000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.69500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.69500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.72500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.74000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.72500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.72500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.75500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.77000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.75500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.75500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.78500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.80000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.78500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.78500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.81500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.83000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.81500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.81500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.84500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.86000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.84500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.84500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.87500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.89000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.87500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.87500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.90500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.92000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.90500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.90500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.93500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.95000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.93500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.93500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.96500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.98000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.96500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.96500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.99500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.01000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.99500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.99500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.02500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.04000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.02500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.02500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.05500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.07000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.05500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.05500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.08500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.10000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.08500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.08500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.11500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.13000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.11500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.11500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.14500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.16000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.14500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.14500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.17500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.19000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.17500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.17500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.20500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.22000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.20500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.20500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.23500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.25000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.23500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.23500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.26500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.28000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.26500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.26500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.29500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.31000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.29500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.29500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.32500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.34000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.32500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.32500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.35500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.37000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.35500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.35500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.38500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.40000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.38500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.38500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.41500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.43000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.41500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.41500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.44500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.46000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.44500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.44500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.47500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.49000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.47500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.47500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.50500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.52000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.50500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.53500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.55000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.53500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.53500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.56500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.58000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.56500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.56500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.59500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.61000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.59500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.59500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.62500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.64000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.62500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.62500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.65500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.67000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.65500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.65500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.68500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.70000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.68500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.68500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.71500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.73000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.71500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.71500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.74500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.76000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.74500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.74500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.77500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.79000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.77500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.77500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.80500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.82000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.80500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.80500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.83500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.85000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.83500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.83500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.86500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.88000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.86500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.86500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.89500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.91000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.89500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.89500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.92500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.94000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.92500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.92500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.95500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.97000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.95500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.95500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.98500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  3.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.98500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.98500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.95500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.89500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.83500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.77500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.71500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.65500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.59500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.53500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.47500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.41500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  2.99999999999990E-02  0.00000000000000E+00
+  5.00000000000000E-01  1.49999999999990E-02  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  4.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  5.99999999999991E-02  0.00000000000000E+00
+  5.00000000000000E-01  4.49999999999981E-02  0.00000000000000E+00
+ -5.00000000000000E-01  7.50000000000000E-02  0.00000000000000E+00
+  0.00000000000000E+00  8.99999999999991E-02  0.00000000000000E+00
+  5.00000000000000E-01  7.49999999999981E-02  0.00000000000000E+00
+ -5.00000000000000E-01  1.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.19999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.04999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.49999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.34999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.79999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.64999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.09999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  1.94999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.24999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.54999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  2.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  2.84999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.14999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.44999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  3.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.89999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  3.74999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.19999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.04999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.49999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.34999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.79999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.64999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  4.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.09999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  4.94999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.24999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.54999999999998E-01  0.00000000000000E+00
+ -5.00000000000000E-01  5.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  5.84999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.14999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.59999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.44999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  6.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.90000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  6.74999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.20000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.04999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.50000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.34999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.80000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.64999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  7.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.10000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  7.94999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.39999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.24999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.55000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.69999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.54999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  8.85000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.99999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  8.84999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.15000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.29999999999999E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.14999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.60000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.44999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  9.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.90000000000000E-01  0.00000000000000E+00
+  5.00000000000000E-01  9.74999999999999E-01  0.00000000000000E+00
+ -5.00000000000000E-01  1.00500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.02000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.00500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.03500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.05000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.03500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.06500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.08000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.06500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.09500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.11000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.09500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.12500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.14000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.12500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.15500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.17000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.15500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.18500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.20000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.18500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.21500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.23000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.21500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.24500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.26000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.24500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.27500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.29000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.27500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.30500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.32000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.30500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.33500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.35000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.33500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.36500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.38000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.36500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.39500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.41000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.39500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.42500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.44000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.42500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.45500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.47000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.45500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.48500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.50000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.48500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.51500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.53000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.51500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.54500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.56000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.54500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.57500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.59000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.57500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.60500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.62000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.60500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.63500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.65000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.63500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.66500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.68000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.66500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.69500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.71000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.69500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.72500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.74000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.72500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.75500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.77000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.75500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.78500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.80000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.78500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.81500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.83000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.81500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.84500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.86000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.84500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.87500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.89000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.87500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.90500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.92000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.90500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.93500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.95000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.93500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.96500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.98000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.96500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  1.99500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.01000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  1.99500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.02500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.04000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.02500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.05500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.07000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.05500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.08500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.10000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.08500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.11500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.13000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.11500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.14500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.16000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.14500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.17500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.19000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.17500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.20500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.22000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.20500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.23500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.25000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.23500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.26500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.28000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.26500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.29500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.31000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.29500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.32500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.34000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.32500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.35500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.37000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.35500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.38500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.40000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.38500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.41500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.43000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.41500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.44500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.46000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.44500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.47500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.49000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.47500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.50500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.52000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.50500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.53500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.55000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.53500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.56500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.58000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.56500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.59500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.61000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.59500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.62500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.64000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.62500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.65500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.67000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.65500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.68500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.70000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.68500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.71500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.73000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.71500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.74500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.76000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.74500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.77500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.79000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.77500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.80500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.82000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.80500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.83500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.85000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.83500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.86500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.88000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.86500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.89500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.91000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.89500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.92500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.94000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.92500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.95500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.97000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.95500000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  2.98500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  3.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  2.98500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.49999999999995E-02  0.00000000000000E+00
+  0.00000000000000E+00  4.49999999999990E-02  0.00000000000000E+00
+  0.00000000000000E+00  7.49999999999991E-02  0.00000000000000E+00
+  0.00000000000000E+00  1.04999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.34999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.64999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.94999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.24999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.54999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  2.84999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.14999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.44999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  3.74999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.04999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.34999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.64999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  4.94999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.24999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.54999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  5.84999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.14999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.44999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  6.74999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.05000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.35000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.65000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  7.95000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.25000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.54999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  8.84999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.14999999999999E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.45000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  9.75000000000000E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.00500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.03500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.06500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.09500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.12500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.15500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.18500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.21500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.24500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.27500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.30500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.33500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.36500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.39500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.42500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.45500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.48500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.51500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.54500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.57500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.60500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.63500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.66500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.69500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.72500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.75500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.78500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.81500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.84500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.87500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.90500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.93500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.96500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.99500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.02500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.05500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.08500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.11500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.14500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.17500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.20500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.23500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.26500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.29500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.32500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.35500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.38500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.41500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.44500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.47500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.53500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.56500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.59500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.62500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.65500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.68500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.71500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.74500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.77500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.80500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.83500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.86500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.89500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.92500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.95500000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.98500000000000E+00  0.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  38NBRE OBJETS NOMMES       0NBRE OBJETS       2
+       1       2       2       2       3      11       1       0       0       0
+     -17     100       1       1       2       0       0       1       0       0
+       0
+            318745         
+ DARCY            
+ ISOTROPE         
+       0     -39
+ K       
+       1       2       2       2       3      11       1       0       0       0
+     -18     100       1       1       2       0       0       1       0       0
+       0
+            318877         
+ DARCY            
+ ISOTROPE         
+       0     -40
+ K       
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  39NBRE OBJETS NOMMES       9NBRE OBJETS      97
+ C_5B     C_61     C_67     C_71     C_77     C_7D     C_80     C_83    
+ C_D     
+       1       2       3       4       5       6       7       8       9
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -4  317635       1       0       0       0       0       0       2
+                  
+         
+  317623
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000000E+00 -1.91500000000000E+00
+ -1.88500000000000E+00 -1.85500000000000E+00 -1.82500000000000E+00
+ -1.79500000000000E+00 -1.76500000000000E+00 -1.73500000000000E+00
+ -1.70500000000000E+00 -1.67500000000000E+00 -1.64500000000000E+00
+ -1.61500000000000E+00 -1.58500000000000E+00 -1.55500000000000E+00
+ -1.52500000000000E+00 -1.49500000000000E+00 -1.46500000000000E+00
+ -1.43500000000000E+00 -1.40500000000000E+00 -1.37500000000000E+00
+ -1.34500000000000E+00 -1.31500000000000E+00 -1.28500000000000E+00
+ -1.25500000000000E+00 -1.22500000000000E+00 -1.19500000000000E+00
+ -1.16500000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -3  317653       1       0       0       0       0       0       2
+                  
+         
+  317641
+ TH      
+ REAL*8           
+       1       1       0       0
+ -4.15000000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -5  317671       1       0       0       0       0       0       2
+                  
+         
+  317659
+ TH      
+ REAL*8           
+       1       1       0       0
+  0.00000000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -6  317689       1       0       0       0       0       0       2
+                  
+         
+  317677
+ FLUX    
+ REAL*8           
+       1     100       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -7  317707       1       0       0       0       0       0       2
+                  
+         
+  317695
+ FLUX    
+ REAL*8           
+       1     100       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -4  317725       1       0       0       0       0       0       2
+                  
+         
+  317713
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05  1.62500000000000E-05  1.62500000000000E-05
+  1.62500000000000E-05
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -4  317761       1       0       0       0       0       0       2
+                  
+         
+  317749
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -4  317779       1       0       0       0       0       0       2
+                  
+         
+  317767
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01  3.00000000000000E-01  3.00000000000000E-01
+  3.00000000000000E-01
+       1      -1       6      72
+ Field                                                                  
+                                                                        
+      -4  317797       1       0       0       0       0       0       2
+                  
+         
+  317785
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01  1.83330000000000E-01  1.83330000000000E-01
+  1.83330000000000E-01
+       1      -1       6       0
+     -19  323917       1       0       0       0       0       0       0
+ \0\0\0\0\0\0\0\0 \0\0\0\0\0\0\0\0
+ \0\0\0\0\0\0\0\0
+  323911
+ SCAL    
+ REAL*8           
+       4     100       0       0
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00
+       1      -1       6       0
+     -20  322831       1       0       0       0       0       0       0
+ \0\0\0\0\0\0\0\0 \0\0\0\0\0\0\0\0
+ \0\0\0\0\0\0\0\0
+  322825
+ SCAL    
+ REAL*8           
+       4     100       0       0
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324895       1       0       0       0      -1       0       2
+                  
+ \0\0\0\0\0\0\0\0
+  324913
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000000E+00 -1.91500000000000E+00
+ -1.88500000000000E+00 -1.85500000000000E+00 -1.82500000000000E+00
+ -1.79500000000000E+00 -1.76500000000000E+00 -1.73500000000000E+00
+ -1.70500000000000E+00 -1.67500000000000E+00 -1.64500000000000E+00
+ -1.61500000000000E+00 -1.58500000000000E+00 -1.55500000000000E+00
+ -1.52500000000000E+00 -1.49500000000000E+00 -1.46500000000000E+00
+ -1.43500000000000E+00 -1.40500000000000E+00 -1.37500000000000E+00
+ -1.34500000000000E+00 -1.31500000000000E+00 -1.28500000000000E+00
+ -1.25500000000000E+00 -1.22500000000000E+00 -1.19500000000000E+00
+ -1.16500000000000E+00
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  325177       1       0       0       0      -2       0       2
+                  
+ \0\0\0\0\0\0\0\0
+  325189
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000000E+00 -1.91500000000000E+00
+ -1.88500000000000E+00 -1.85500000000000E+00 -1.82499999999999E+00
+ -1.79500000000002E+00 -1.76499999999990E+00 -1.73500000000046E+00
+ -1.70499999999795E+00 -1.67500000000919E+00 -1.64499999995875E+00
+ -1.61500000018511E+00 -1.58499999916916E+00 -1.55500000372952E+00
+ -1.52499998325759E+00 -1.49500007516150E+00 -1.46499966257587E+00
+ -1.43500151469562E+00 -1.40499319973773E+00 -1.37503050279579E+00
+ -1.34486272161241E+00 -1.31560872024149E+00 -1.28208143964912E+00
+ -1.03539740467805E+00 -5.49145954159549E-01 -2.61540006497621E-01
+ -7.90437383301150E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  326443       1       0       0       0      -3       0       2
+                  
+ \0\0\0\0\0\0\0\0
+  326491
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000000E+00 -1.91500000000000E+00
+ -1.88500000000001E+00 -1.85499999999998E+00 -1.82500000000011E+00
+ -1.79499999999951E+00 -1.76500000000216E+00 -1.73499999999041E+00
+ -1.70500000004245E+00 -1.67499999981198E+00 -1.64500000083258E+00
+ -1.61499999631362E+00 -1.58500001632013E+00 -1.55499992775752E+00
+ -1.52500031974302E+00 -1.49499858497048E+00 -1.46500626037860E+00
+ -1.43497228758946E+00 -1.40512229123405E+00 -1.37445324999156E+00
+ -1.34732398415236E+00 -1.29999905693040E+00 -9.39039023392856E-01
+ -5.04006393092861E-01 -2.68247218769097E-01 -1.29139777000984E-01
+ -3.75078673593804E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324649       1       0       0       0      -4       0       2
+                  
+ \0\0\0\0\0\0\0\0
+  324637
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000000E+00 -1.91500000000001E+00
+ -1.88499999999997E+00 -1.85500000000015E+00 -1.82499999999937E+00
+ -1.79500000000275E+00 -1.76499999998803E+00 -1.73500000005201E+00
+ -1.70499999977409E+00 -1.67500000098080E+00 -1.64499999574411E+00
+ -1.61500001845649E+00 -1.58499992000793E+00 -1.55500034647027E+00
+ -1.52499850024683E+00 -1.49500648601938E+00 -1.46497194543986E+00
+ -1.43512078743789E+00 -1.40447123268857E+00 -1.37716622941086E+00
+ -1.32999410273643E+00 -9.60394822100051E-01 -5.35159666296827E-01
+ -3.12168724171530E-01 -1.81111056228492E-01 -9.35976677678686E-02
+ -2.85937362206644E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324409       1       0       0       0      -5       0       2
+                  
+  324397
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94500000000001E+00 -1.91499999999997E+00
+ -1.88500000000012E+00 -1.85499999999949E+00 -1.82500000000217E+00
+ -1.79499999999068E+00 -1.76500000003996E+00 -1.73499999982878E+00
+ -1.70500000073316E+00 -1.67499999686222E+00 -1.64500001342196E+00
+ -1.61499994261664E+00 -1.58500024520669E+00 -1.55499895267043E+00
+ -1.52500447046912E+00 -1.49498091221237E+00 -1.46508121144364E+00
+ -1.43464997086609E+00 -1.40643115235774E+00 -1.36694776513999E+00
+ -1.04002512536374E+00 -5.95905323840183E-01 -3.67098547585758E-01
+ -2.34350727394083E-01 -1.45267147117089E-01 -7.87455570371689E-02
+ -2.47231504493155E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324169       1       0       0       0      -6       0       2
+                  
+  324133
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94499999999999E+00 -1.91500000000005E+00
+ -1.88499999999978E+00 -1.85500000000092E+00 -1.82499999999615E+00
+ -1.79500000001605E+00 -1.76499999993324E+00 -1.73500000027714E+00
+ -1.70499999885212E+00 -1.67500000474282E+00 -1.64499998045600E+00
+ -1.61500008029727E+00 -1.58499967117228E+00 -1.55500134148075E+00
+ -1.52499454751231E+00 -1.49502201148601E+00 -1.46491087912687E+00
+ -1.43534594794479E+00 -1.40343758225318E+00 -1.15625733494940E+00
+ -6.81591577109316E-01 -4.29917049285670E-01 -2.88797648041618E-01
+ -1.95355136656416E-01 -1.26089220082132E-01 -7.02459832108218E-02
+ -2.23956508279002E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323839       1       0       0       0      -7       0       2
+                  
+  323827
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000000E+00
+ -1.97500000000000E+00 -1.94499999999999E+00 -1.91500000000006E+00
+ -1.88499999999972E+00 -1.85500000000136E+00 -1.82499999999356E+00
+ -1.79500000003023E+00 -1.76499999985890E+00 -1.73500000065495E+00
+ -1.70499999697457E+00 -1.67500001391583E+00 -1.64499993623505E+00
+ -1.61500029118890E+00 -1.58499867429844E+00 -1.55500601799045E+00
+ -1.52497273589662E+00 -1.49512292571263E+00 -1.46444078115727E+00
+ -1.43742834219045E+00 -1.28263638756659E+00 -7.92940566609100E-01
+ -5.01859648381398E-01 -3.46256800501063E-01 -2.45860445229598E-01
+ -1.72612955955896E-01 -1.14269489149970E-01 -6.47690425436326E-02
+ -2.08541229081103E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323587       1       0       0       0      -8       0       2
+                  
+  323575
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000000E+00 -2.00500000000001E+00
+ -1.97499999999995E+00 -1.94500000000020E+00 -1.91499999999911E+00
+ -1.88500000000389E+00 -1.85499999998293E+00 -1.82500000007480E+00
+ -1.79499999967238E+00 -1.76500000143404E+00 -1.73499999372727E+00
+ -1.70500002741978E+00 -1.67499988021859E+00 -1.64500052291084E+00
+ -1.61499771852574E+00 -1.58500994501613E+00 -1.55495662642460E+00
+ -1.52518809456395E+00 -1.49416546120541E+00 -1.46840898382644E+00
+ -1.38846040120499E+00 -9.35682918089455E-01 -5.85643458507160E-01
+ -4.08184793116873E-01 -2.97961427219939E-01 -2.19395918470747E-01
+ -1.57838816004969E-01 -1.06260677206624E-01 -6.09409962757218E-02
+ -1.97564413836509E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323413       1       0       0       0      -9       0       2
+                  
+  323395
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03500000000001E+00 -2.00499999999996E+00
+ -1.97500000000015E+00 -1.94499999999935E+00 -1.91500000000279E+00
+ -1.88499999998811E+00 -1.85500000005060E+00 -1.82499999978494E+00
+ -1.79500000091293E+00 -1.76499999612968E+00 -1.73500001638543E+00
+ -1.70499993073060E+00 -1.67500029238419E+00 -1.64499876778899E+00
+ -1.61500518273646E+00 -1.58497821846118E+00 -1.55509096148427E+00
+ -1.52461371682233E+00 -1.49651945793081E+00 -1.45550522404072E+00
+ -1.11141556194939E+00 -6.85617467062108E-01 -4.75817800828769E-01
+ -3.52493664636066E-01 -2.67231526111714E-01 -2.01735043236646E-01
+ -1.47631267422776E-01 -1.00592612668865E-01 -5.81888625377557E-02
+ -1.89603860496890E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323221       1       0       0       0     -10       0       2
+                  
+  323209
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000000E+00 -2.03499999999999E+00 -2.00500000000002E+00
+ -1.97499999999991E+00 -1.94500000000034E+00 -1.91499999999878E+00
+ -1.88500000000425E+00 -1.85499999998610E+00 -1.82500000004098E+00
+ -1.79499999990457E+00 -1.76500000006362E+00 -1.73500000121241E+00
+ -1.70499998842712E+00 -1.67500007693173E+00 -1.64499955319541E+00
+ -1.61500241369967E+00 -1.58498749940464E+00 -1.55506229202357E+00
+ -1.52468781009987E+00 -1.49640124164690E+00 -1.29828539904695E+00
+ -8.15787359698163E-01 -5.52564260334102E-01 -4.09496520865303E-01
+ -3.15342837430113E-01 -2.45155231061966E-01 -1.88306354219557E-01
+ -1.39537727333081E-01 -9.59717936652752E-02 -5.59119860531203E-02
+ -1.82986686913840E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323059       1       0       0       0     -11       0       2
+                  
+  323047
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000001E+00 -2.03499999999996E+00 -2.00500000000019E+00
+ -1.97499999999915E+00 -1.94500000000375E+00 -1.91499999998343E+00
+ -1.88500000007314E+00 -1.85499999967763E+00 -1.82500000141909E+00
+ -1.79499999376094E+00 -1.76500002739826E+00 -1.73499987981461E+00
+ -1.70500052665675E+00 -1.67499769428415E+00 -1.64501008296745E+00
+ -1.61495590195655E+00 -1.58519189020477E+00 -1.55414889879884E+00
+ -1.52851191983636E+00 -1.43941898963168E+00 -9.81842080685448E-01
+ -6.43863584011360E-01 -4.73139086315934E-01 -3.66808693317095E-01
+ -2.90338102902997E-01 -2.29841017254800E-01 -1.78775092857409E-01
+ -1.33678816418832E-01 -9.25593652683946E-02 -5.41929010118153E-02
+ -1.77895189025899E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322561       1       0       0       0     -12       0       2
+                  
+  322471
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000001E+00
+ -2.06499999999997E+00 -2.03500000000014E+00 -2.00499999999939E+00
+ -1.97500000000260E+00 -1.94499999998894E+00 -1.91500000004693E+00
+ -1.88499999980106E+00 -1.85500000084225E+00 -1.82499999643920E+00
+ -1.79500001503169E+00 -1.76499993664479E+00 -1.73500026657349E+00
+ -1.70499888033726E+00 -1.67500469239171E+00 -1.64498035522495E+00
+ -1.61508167585997E+00 -1.58465446764326E+00 -1.55634713449563E+00
+ -1.51635524464944E+00 -1.18327077294589E+00 -7.55733803743065E-01
+ -5.45114236348007E-01 -4.22306189906336E-01 -3.37656876351951E-01
+ -2.72525392337286E-01 -2.18591552988509E-01 -1.71608240957917E-01
+ -1.29192736144143E-01 -8.99091049467646E-02 -5.28429409603915E-02
+ -1.73868596837357E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322099       1       0       0       0     -13       0       2
+                  
+  322087
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000000E+00 -2.09500000000000E+00
+ -2.06500000000001E+00 -2.03499999999996E+00 -2.00500000000010E+00
+ -1.97499999999980E+00 -1.94499999999978E+00 -1.91500000000559E+00
+ -1.88499999995589E+00 -1.85500000027617E+00 -1.82499999843995E+00
+ -1.79500000831078E+00 -1.76499995737749E+00 -1.73500021282329E+00
+ -1.70499895816906E+00 -1.67500502071806E+00 -1.64497607675388E+00
+ -1.61511240661199E+00 -1.58446815158855E+00 -1.55735921190085E+00
+ -1.38188676739605E+00 -8.99048026462153E-01 -6.28308879412534E-01
+ -4.82855922317560E-01 -3.87568689490143E-01 -3.16624431999774E-01
+ -2.59181507666836E-01 -2.09929882357774E-01 -1.65975829802754E-01
+ -1.25612308931659E-01 -8.77692139374181E-02 -5.17436594003092E-02
+ -1.70572940941870E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321907       1       0       0       0     -14       0       2
+                  
+  321895
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12500000000001E+00 -2.09499999999996E+00
+ -2.06500000000020E+00 -2.03499999999913E+00 -2.00500000000383E+00
+ -1.97499999998318E+00 -1.94500000007378E+00 -1.91499999967679E+00
+ -1.88500000141394E+00 -1.85499999382253E+00 -1.82500002695510E+00
+ -1.79499988252698E+00 -1.76500051133874E+00 -1.73499777671329E+00
+ -1.70500965323825E+00 -1.67495808942615E+00 -1.64518091197089E+00
+ -1.61420280894521E+00 -1.58824697663436E+00 -1.51760787601159E+00
+ -1.08543736306801E+00 -7.27909893086666E-01 -5.50321650130748E-01
+ -4.40882626060270E-01 -3.62575415113305E-01 -3.00822757036259E-01
+ -2.48843364845485E-01 -2.03066213659871E-01 -1.61436270837271E-01
+ -1.22688785191547E-01 -8.60041424118693E-02 -5.08297943546958E-02
+ -1.67819474408829E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321691       1       0       0       0     -15       0       2
+                  
+  321679
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000001E+00 -2.12499999999998E+00 -2.09500000000010E+00
+ -2.06499999999957E+00 -2.03500000000177E+00 -2.00499999999262E+00
+ -1.97500000003061E+00 -1.94499999987343E+00 -1.91500000052155E+00
+ -1.88499999785939E+00 -1.85500000874691E+00 -1.82499996443679E+00
+ -1.79500014376692E+00 -1.76499942260985E+00 -1.73500230040975E+00
+ -1.70499090918539E+00 -1.67503535574102E+00 -1.64486149739304E+00
+ -1.61548413637776E+00 -1.58236356215296E+00 -1.30446733496594E+00
+ -8.53144609758120E-01 -6.27442837703036E-01 -4.98709850686808E-01
+ -4.10947669247034E-01 -3.43861516019437E-01 -2.88589056185258E-01
+ -2.40647818875179E-01 -1.97531258815904E-01 -1.57729398777222E-01
+ -1.20279269556511E-01 -8.45392657922977E-02 -5.00674041241981E-02
+ -1.65514929378359E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321469       1       0       0       0     -16       0       2
+                  
+  321445
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000000E+00
+ -2.15500000000000E+00 -2.12499999999998E+00 -2.09500000000010E+00
+ -2.06499999999951E+00 -2.03500000000233E+00 -2.00499999998904E+00
+ -1.97500000005111E+00 -1.94499999976330E+00 -1.91500000108945E+00
+ -1.88499999501316E+00 -1.85500002271558E+00 -1.82499989698235E+00
+ -1.79500046532613E+00 -1.76499790577032E+00 -1.73500939152630E+00
+ -1.70495798442831E+00 -1.67518676578197E+00 -1.64415923729329E+00
+ -1.61855271999449E+00 -1.49449212288334E+00 -1.01812947421147E+00
+ -7.18323423488807E-01 -5.62561993419072E-01 -4.62394581843326E-01
+ -3.88629896223960E-01 -3.29358866942754E-01 -2.78845770722544E-01
+ -2.33989572297653E-01 -1.92967661519950E-01 -1.54638985801121E-01
+ -1.18253661011638E-01 -8.33001983139831E-02 -4.94197540058434E-02
+ -1.63552445631558E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320965       1       0       0       0     -17       0       2
+                  
+  320425
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18500000000001E+00
+ -2.15499999999996E+00 -2.12500000000017E+00 -2.09499999999925E+00
+ -2.06500000000325E+00 -2.03499999998600E+00 -2.00500000006021E+00
+ -1.97499999974149E+00 -1.94500000110828E+00 -1.91499999525586E+00
+ -1.88500002027688E+00 -1.85499991346874E+00 -1.82500036868679E+00
+ -1.79499843153118E+00 -1.76500666007237E+00 -1.73497173675939E+00
+ -1.70511918852121E+00 -1.67448762713892E+00 -1.64703246136924E+00
+ -1.59989684822653E+00 -1.23068537872209E+00 -8.30293436586229E-01
+ -6.34936848362107E-01 -5.17968908719407E-01 -4.35677384763109E-01
+ -3.71483240155799E-01 -3.17879990570377E-01 -2.70968574787770E-01
+ -2.28522077526631E-01 -1.89175921716372E-01 -1.52047710788681E-01
+ -1.16542725066906E-01 -8.22472559032441E-02 -4.88665856711039E-02
+ -1.61870424730015E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320281       1       0       0       0     -18       0       2
+                  
+  320269
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000000E+00 -2.18499999999999E+00
+ -2.15500000000003E+00 -2.12499999999987E+00 -2.09500000000049E+00
+ -2.06499999999818E+00 -2.03500000000650E+00 -2.00499999997787E+00
+ -1.97500000006975E+00 -1.94499999981103E+00 -1.91500000032912E+00
+ -1.88500000068871E+00 -1.85499998789205E+00 -1.82500009177635E+00
+ -1.79499943295436E+00 -1.76500318344073E+00 -1.73498304877008E+00
+ -1.70508638477966E+00 -1.67455847860536E+00 -1.64702131434164E+00
+ -1.45307901460037E+00 -9.76343132097054E-01 -7.19332083383740E-01
+ -5.78992719586475E-01 -4.85614485535019E-01 -4.15290953068750E-01
+ -3.57951213410693E-01 -3.08605124448837E-01 -2.64495459459853E-01
+ -2.23973592811258E-01 -1.85993035937130E-01 -1.49858247317575E-01
+ -1.15090314129768E-01 -8.13505236277189E-02 -4.83944965428564E-02
+ -1.60433356194304E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320101       1       0       0       0     -19       0       2
+                  
+  320089
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21500000000001E+00 -2.18499999999996E+00
+ -2.15500000000019E+00 -2.12499999999915E+00 -2.09500000000376E+00
+ -2.06499999998335E+00 -2.03500000007360E+00 -2.00499999967542E+00
+ -1.97500000142846E+00 -1.94499999372593E+00 -1.91500002750383E+00
+ -1.88499987965708E+00 -1.85500052558087E+00 -1.82499770865601E+00
+ -1.79500996887489E+00 -1.76495666210619E+00 -1.73518720080803E+00
+ -1.70417535842507E+00 -1.67835609441927E+00 -1.60565784801097E+00
+ -1.17157562055629E+00 -8.21560351878633E-01 -6.47570221057817E-01
+ -5.39323223872264E-01 -4.61224003093473E-01 -3.99308541774042E-01
+ -3.47051973725967E-01 -3.00987993886565E-01 -2.59101831513096E-01
+ -2.20141499462282E-01 -1.83288162162223E-01 -1.47984729132786E-01
+ -1.13840566908414E-01 -8.05754656628490E-02 -4.79850420721677E-02
+ -1.59184235380158E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  319939       1       0       0       0     -20       0       2
+                  
+  319927
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000001E+00 -2.21499999999998E+00 -2.18500000000010E+00
+ -2.15499999999959E+00 -2.12500000000171E+00 -2.09499999999288E+00
+ -2.06500000002950E+00 -2.03499999987831E+00 -2.00500000049984E+00
+ -1.97499999795685E+00 -1.94500000830635E+00 -1.91499996643941E+00
+ -1.88500013462757E+00 -1.85499946442300E+00 -1.82500210889192E+00
+ -1.79499178635361E+00 -1.76503134459040E+00 -1.73487984887641E+00
+ -1.70540159991715E+00 -1.67277221562769E+00 -1.40162620326480E+00
+ -9.54547416771393E-01 -7.28488147764493E-01 -5.98671486405551E-01
+ -5.09923046753073E-01 -4.42000039977959E-01 -3.86043009820908E-01
+ -3.37565961890137E-01 -2.94050371875220E-01 -2.53969516234026E-01
+ -2.16341488218916E-01 -1.80504722398332E-01 -1.45996856626893E-01
+ -1.12485441373164E-01 -7.97263482747270E-02 -4.75377615235899E-02
+ -1.57833121775161E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  319063       1       0       0       0     -21       0       2
+                  
+  319051
+ H       
+ REAL*8           
+       1     100       0       0
+ -4.13500000000000E+00 -4.10500000000000E+00 -4.07500000000000E+00
+ -4.04500000000000E+00 -4.01500000000000E+00 -3.98500000000000E+00
+ -3.95500000000000E+00 -3.92500000000000E+00 -3.89500000000000E+00
+ -3.86500000000000E+00 -3.83500000000000E+00 -3.80500000000000E+00
+ -3.77500000000000E+00 -3.74500000000000E+00 -3.71500000000000E+00
+ -3.68500000000000E+00 -3.65500000000000E+00 -3.62500000000000E+00
+ -3.59500000000000E+00 -3.56500000000000E+00 -3.53500000000000E+00
+ -3.50500000000000E+00 -3.47500000000000E+00 -3.44500000000000E+00
+ -3.41500000000000E+00 -3.38500000000000E+00 -3.35500000000000E+00
+ -3.32500000000000E+00 -3.29500000000000E+00 -3.26500000000000E+00
+ -3.23500000000000E+00 -3.20500000000000E+00 -3.17500000000000E+00
+ -3.14500000000000E+00 -3.11500000000000E+00 -3.08500000000000E+00
+ -3.05500000000000E+00 -3.02500000000000E+00 -2.99500000000000E+00
+ -2.96500000000000E+00 -2.93500000000000E+00 -2.90500000000000E+00
+ -2.87500000000000E+00 -2.84500000000000E+00 -2.81500000000000E+00
+ -2.78500000000000E+00 -2.75500000000000E+00 -2.72500000000000E+00
+ -2.69500000000000E+00 -2.66500000000000E+00 -2.63500000000000E+00
+ -2.60500000000000E+00 -2.57500000000000E+00 -2.54500000000000E+00
+ -2.51500000000000E+00 -2.48500000000000E+00 -2.45500000000000E+00
+ -2.42500000000000E+00 -2.39500000000000E+00 -2.36500000000000E+00
+ -2.33500000000000E+00 -2.30500000000000E+00 -2.27500000000000E+00
+ -2.24500000000000E+00 -2.21499999999998E+00 -2.18500000000012E+00
+ -2.15499999999944E+00 -2.12500000000263E+00 -2.09499999998770E+00
+ -2.06500000005700E+00 -2.03499999973747E+00 -2.00500000120243E+00
+ -1.97499999451969E+00 -1.94500002486672E+00 -1.91499988762354E+00
+ -1.88500050595717E+00 -1.85499772973232E+00 -1.82501015222710E+00
+ -1.79495469916735E+00 -1.76520080678885E+00 -1.73409664847054E+00
+ -1.70879144835943E+00 -1.59788169976893E+00 -1.12953755674489E+00
+ -8.22755578976403E-01 -6.64153880913141E-01 -5.62261914282898E-01
+ -4.87285649346261E-01 -4.27127201175383E-01 -3.75955278447332E-01
+ -3.30608243501925E-01 -2.89221144244088E-01 -2.50621965598243E-01
+ -2.14037478711910E-01 -1.78938122892054E-01 -1.44950743646838E-01
+ -1.11806745105845E-01 -7.93097828201385E-02 -4.73147601636392E-02
+ -1.57136364979199E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324955       1       0       0       0     -22       0       2
+                  
+  324967
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  325309       1       0       0       0     -23       0       2
+                  
+  325381
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000002E+00 -1.14999999999990E+00 -1.15000000000046E+00
+ -1.14999999999795E+00 -1.15000000000919E+00 -1.14999999995875E+00
+ -1.15000000018511E+00 -1.14999999916916E+00 -1.15000000372952E+00
+ -1.14999998325759E+00 -1.15000007516150E+00 -1.14999966257587E+00
+ -1.15000151469562E+00 -1.14999319973773E+00 -1.15003050279579E+00
+ -1.14986272161241E+00 -1.15060872024149E+00 -1.14708143964912E+00
+ -9.30397404678052E-01 -4.74145954159549E-01 -2.16540006497621E-01
+ -6.40437383301153E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  326617       1       0       0       0     -24       0       2
+                  
+  324799
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000001E+00 -1.14999999999998E+00 -1.15000000000011E+00
+ -1.14999999999951E+00 -1.15000000000216E+00 -1.14999999999041E+00
+ -1.15000000004245E+00 -1.14999999981198E+00 -1.15000000083258E+00
+ -1.14999999631362E+00 -1.15000001632013E+00 -1.14999992775752E+00
+ -1.15000031974302E+00 -1.14999858497049E+00 -1.15000626037860E+00
+ -1.14997228758946E+00 -1.15012229123405E+00 -1.14945324999156E+00
+ -1.15232398415236E+00 -1.13499905693040E+00 -8.04039023392856E-01
+ -3.99006393092861E-01 -1.93247218769097E-01 -8.41397770009844E-02
+ -2.25078673593807E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324595       1       0       0       0     -25       0       2
+                  
+  324511
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000001E+00
+ -1.14999999999997E+00 -1.15000000000015E+00 -1.14999999999937E+00
+ -1.15000000000275E+00 -1.14999999998803E+00 -1.15000000005201E+00
+ -1.14999999977409E+00 -1.15000000098080E+00 -1.14999999574411E+00
+ -1.15000001845649E+00 -1.14999992000793E+00 -1.15000034647027E+00
+ -1.14999850024683E+00 -1.15000648601938E+00 -1.14997194543986E+00
+ -1.15012078743789E+00 -1.14947123268857E+00 -1.15216622941086E+00
+ -1.13499410273643E+00 -7.95394822100051E-01 -4.00159666296827E-01
+ -2.07168724171530E-01 -1.06111056228492E-01 -4.85976677678687E-02
+ -1.35937362206647E-02
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324373       1       0       0       0     -26       0       2
+                  
+  324319
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000001E+00 -1.14999999999997E+00
+ -1.15000000000012E+00 -1.14999999999949E+00 -1.15000000000217E+00
+ -1.14999999999068E+00 -1.15000000003996E+00 -1.14999999982878E+00
+ -1.15000000073316E+00 -1.14999999686222E+00 -1.15000001342196E+00
+ -1.14999994261664E+00 -1.15000024520669E+00 -1.14999895267042E+00
+ -1.15000447046912E+00 -1.14998091221237E+00 -1.15008121144364E+00
+ -1.14964997086609E+00 -1.15143115235774E+00 -1.14194776513999E+00
+ -8.45025125363738E-01 -4.30905323840183E-01 -2.32098547585758E-01
+ -1.29350727394083E-01 -7.02671471170892E-02 -3.37455570371690E-02
+ -9.72315044931582E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324109       1       0       0       0     -27       0       2
+                  
\0\0\0\0\0\0\0\0
+  324031
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.14999999999999E+00 -1.15000000000005E+00
+ -1.14999999999978E+00 -1.15000000000092E+00 -1.14999999999615E+00
+ -1.15000000001605E+00 -1.14999999993324E+00 -1.15000000027714E+00
+ -1.14999999885212E+00 -1.15000000474282E+00 -1.14999998045600E+00
+ -1.15000008029727E+00 -1.14999967117228E+00 -1.15000134148075E+00
+ -1.14999454751231E+00 -1.15002201148601E+00 -1.14991087912687E+00
+ -1.15034594794479E+00 -1.14843758225318E+00 -9.31257334949404E-01
+ -4.86591577109316E-01 -2.64917049285670E-01 -1.53797648041618E-01
+ -9.03551366564161E-02 -5.10892200821322E-02 -2.52459832108218E-02
+ -7.39565082790055E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323797       1       0       0       0     -28       0       2
+                  
\0\0\0\0\0\0\0\0
+  323773
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.14999999999999E+00 -1.15000000000006E+00
+ -1.14999999999971E+00 -1.15000000000136E+00 -1.14999999999357E+00
+ -1.15000000003023E+00 -1.14999999985890E+00 -1.15000000065495E+00
+ -1.14999999697457E+00 -1.15000001391583E+00 -1.14999993623505E+00
+ -1.15000029118890E+00 -1.14999867429844E+00 -1.15000601799045E+00
+ -1.14997273589662E+00 -1.15012292571263E+00 -1.14944078115727E+00
+ -1.15242834219045E+00 -1.02763638756659E+00 -5.67940566609100E-01
+ -3.06859648381398E-01 -1.81256800501063E-01 -1.10860445229599E-01
+ -6.76129559558960E-02 -3.92694891499696E-02 -1.97690425436327E-02
+ -5.85412290811059E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323551       1       0       0       0     -29       0       2
+                  
\0\0\0\0\0\0\0\0
+  323539
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000001E+00
+ -1.14999999999995E+00 -1.15000000000020E+00 -1.14999999999911E+00
+ -1.15000000000389E+00 -1.14999999998293E+00 -1.15000000007480E+00
+ -1.14999999967238E+00 -1.15000000143404E+00 -1.14999999372727E+00
+ -1.15000002741979E+00 -1.14999988021859E+00 -1.15000052291084E+00
+ -1.14999771852574E+00 -1.15000994501613E+00 -1.14995662642460E+00
+ -1.15018809456395E+00 -1.14916546120541E+00 -1.15340898382644E+00
+ -1.10346040120499E+00 -6.80682918089455E-01 -3.60643458507160E-01
+ -2.13184793116873E-01 -1.32961427219939E-01 -8.43959184707471E-02
+ -5.28388160049688E-02 -3.12606772066239E-02 -1.59409962757219E-02
+ -4.75644138365122E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323371       1       0       0       0     -30       0       2
+                  
\0\0\0\0\0\0\0\0
+  323359
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000001E+00 -1.14999999999996E+00
+ -1.15000000000015E+00 -1.14999999999935E+00 -1.15000000000279E+00
+ -1.14999999998811E+00 -1.15000000005060E+00 -1.14999999978494E+00
+ -1.15000000091293E+00 -1.14999999612968E+00 -1.15000001638543E+00
+ -1.14999993073060E+00 -1.15000029238419E+00 -1.14999876778899E+00
+ -1.15000518273646E+00 -1.14997821846118E+00 -1.15009096148427E+00
+ -1.14961371682233E+00 -1.15151945793081E+00 -1.14050522404072E+00
+ -8.26415561949390E-01 -4.30617467062108E-01 -2.50817800828769E-01
+ -1.57493664636066E-01 -1.02231526111713E-01 -6.67350432366463E-02
+ -4.26312674227759E-02 -2.55926126688644E-02 -1.31888625377558E-02
+ -3.96038604968927E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323185       1       0       0       0     -31       0       2
+                  
\0\0\0\0\0\0\0\0
+  323173
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.14999999999999E+00 -1.15000000000002E+00
+ -1.14999999999991E+00 -1.15000000000034E+00 -1.14999999999878E+00
+ -1.15000000000425E+00 -1.14999999998610E+00 -1.15000000004098E+00
+ -1.14999999990457E+00 -1.15000000006362E+00 -1.15000000121241E+00
+ -1.14999998842712E+00 -1.15000007693173E+00 -1.14999955319541E+00
+ -1.15000241369967E+00 -1.14998749940464E+00 -1.15006229202357E+00
+ -1.14968781009987E+00 -1.15140124164690E+00 -9.83285399046954E-01
+ -5.30787359698162E-01 -2.97564260334102E-01 -1.84496520865302E-01
+ -1.20342837430113E-01 -8.01552310619662E-02 -5.33063542195572E-02
+ -3.45377273330808E-02 -2.09717936652750E-02 -1.09119860531204E-02
+ -3.29866869138428E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323023       1       0       0       0     -32       0       2
+                  
\0\0\0\0\0\0\0\0
+  323011
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000001E+00 -1.14999999999996E+00 -1.15000000000019E+00
+ -1.14999999999915E+00 -1.15000000000375E+00 -1.14999999998343E+00
+ -1.15000000007314E+00 -1.14999999967763E+00 -1.15000000141909E+00
+ -1.14999999376094E+00 -1.15000002739826E+00 -1.14999987981461E+00
+ -1.15000052665675E+00 -1.14999769428415E+00 -1.15001008296745E+00
+ -1.14995590195655E+00 -1.15019189020477E+00 -1.14914889879884E+00
+ -1.15351191983636E+00 -1.09441898963168E+00 -6.66842080685448E-01
+ -3.58863584011360E-01 -2.18139086315934E-01 -1.41808693317095E-01
+ -9.53381029029966E-02 -6.48410172548005E-02 -4.37750928574093E-02
+ -2.86788164188325E-02 -1.75593652683945E-02 -9.19290101181534E-03
+ -2.78951890259027E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322333       1       0       0       0     -33       0       2
+                  
\0\0\0\0\0\0\0\0
+  322315
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000001E+00
+ -1.14999999999997E+00 -1.15000000000014E+00 -1.14999999999939E+00
+ -1.15000000000260E+00 -1.14999999998894E+00 -1.15000000004693E+00
+ -1.14999999980106E+00 -1.15000000084225E+00 -1.14999999643920E+00
+ -1.15000001503169E+00 -1.14999993664479E+00 -1.15000026657349E+00
+ -1.14999888033726E+00 -1.15000469239171E+00 -1.14998035522495E+00
+ -1.15008167585997E+00 -1.14965446764326E+00 -1.15134713449563E+00
+ -1.14135524464944E+00 -8.38270772945895E-01 -4.40733803743065E-01
+ -2.60114236348006E-01 -1.67306189906336E-01 -1.12656876351951E-01
+ -7.75253923372858E-02 -5.35915529885089E-02 -3.66082409579170E-02
+ -2.41927361441432E-02 -1.49091049467644E-02 -7.84294096039159E-03
+ -2.38685968373603E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322063       1       0       0       0     -34       0       2
+                  
\0\0\0\0\0\0\0\0
+  322051
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000001E+00 -1.14999999999996E+00 -1.15000000000010E+00
+ -1.14999999999980E+00 -1.14999999999978E+00 -1.15000000000559E+00
+ -1.14999999995589E+00 -1.15000000027617E+00 -1.14999999843995E+00
+ -1.15000000831078E+00 -1.14999995737749E+00 -1.15000021282329E+00
+ -1.14999895816906E+00 -1.15000502071806E+00 -1.14997607675388E+00
+ -1.15011240661199E+00 -1.14946815158854E+00 -1.15235921190085E+00
+ -1.00688676739605E+00 -5.54048026462153E-01 -3.13308879412534E-01
+ -1.97855922317559E-01 -1.32568689490143E-01 -9.16244319997735E-02
+ -6.41815076668359E-02 -4.49298823577743E-02 -3.09758298027547E-02
+ -2.06123089316590E-02 -1.27692139374180E-02 -6.74365940030929E-03
+ -2.05729409418735E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321805       1       0       0       0     -35       0       2
+                  
\0\0\0\0\0\0\0\0
+  321793
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000001E+00 -1.14999999999996E+00
+ -1.15000000000020E+00 -1.14999999999913E+00 -1.15000000000383E+00
+ -1.14999999998318E+00 -1.15000000007378E+00 -1.14999999967679E+00
+ -1.15000000141394E+00 -1.14999999382253E+00 -1.15000002695510E+00
+ -1.14999988252698E+00 -1.15000051133873E+00 -1.14999777671329E+00
+ -1.15000965323825E+00 -1.14995808942615E+00 -1.15018091197089E+00
+ -1.14920280894521E+00 -1.15324697663436E+00 -1.11260787601159E+00
+ -7.10437363068010E-01 -3.82909893086666E-01 -2.35321650130748E-01
+ -1.55882626060270E-01 -1.07575415113305E-01 -7.58227570362588E-02
+ -5.38433648454849E-02 -3.80662136598705E-02 -2.64362708372711E-02
+ -1.76887851915472E-02 -1.10041424118691E-02 -5.82979435469586E-03
+ -1.78194744088324E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321655       1       0       0       0     -36       0       2
+                  
\0\0\0\0\0\0\0\0
+  321643
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000001E+00 -1.14999999999998E+00 -1.15000000000010E+00
+ -1.14999999999957E+00 -1.15000000000177E+00 -1.14999999999262E+00
+ -1.15000000003061E+00 -1.14999999987343E+00 -1.15000000052155E+00
+ -1.14999999785939E+00 -1.15000000874691E+00 -1.14999996443679E+00
+ -1.15000014376692E+00 -1.14999942260984E+00 -1.15000230040975E+00
+ -1.14999090918539E+00 -1.15003535574102E+00 -1.14986149739304E+00
+ -1.15048413637776E+00 -1.14736356215296E+00 -8.99467334965939E-01
+ -4.78144609758120E-01 -2.82442837703037E-01 -1.83709850686808E-01
+ -1.25947669247034E-01 -8.88615160194366E-02 -6.35890561852581E-02
+ -4.56478188751788E-02 -3.25312588159042E-02 -2.27293987772218E-02
+ -1.52792695565113E-02 -9.53926579229748E-03 -5.06740412419820E-03
+ -1.55149293783622E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321325       1       0       0       0     -37       0       2
+                  
\0\0\0\0\0\0\0\0
+  321307
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.14999999999998E+00 -1.15000000000010E+00
+ -1.14999999999951E+00 -1.15000000000233E+00 -1.14999999998904E+00
+ -1.15000000005111E+00 -1.14999999976330E+00 -1.15000000108945E+00
+ -1.14999999501315E+00 -1.15000002271558E+00 -1.14999989698235E+00
+ -1.15000046532613E+00 -1.14999790577032E+00 -1.15000939152630E+00
+ -1.14995798442831E+00 -1.15018676578197E+00 -1.14915923729329E+00
+ -1.15355271999449E+00 -1.05949212288334E+00 -6.13129474211469E-01
+ -3.43323423488807E-01 -2.17561993419072E-01 -1.47394581843326E-01
+ -1.03629896223960E-01 -7.43588669427536E-02 -5.38457707225434E-02
+ -3.89895722976527E-02 -2.79676615199501E-02 -1.96389858011214E-02
+ -1.32536610116376E-02 -8.30019831398295E-03 -4.41975400584346E-03
+ -1.35524456315610E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320395       1       0       0       0     -38       0       2
+                  
\0\0\0\0\0\0\0\0
+  320383
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000001E+00
+ -1.14999999999996E+00 -1.15000000000017E+00 -1.14999999999925E+00
+ -1.15000000000325E+00 -1.14999999998600E+00 -1.15000000006021E+00
+ -1.14999999974149E+00 -1.15000000110828E+00 -1.14999999525586E+00
+ -1.15000002027688E+00 -1.14999991346874E+00 -1.15000036868679E+00
+ -1.14999843153118E+00 -1.15000666007237E+00 -1.14997173675939E+00
+ -1.15011918852121E+00 -1.14948762713892E+00 -1.15203246136924E+00
+ -1.13489684822653E+00 -7.95685378722091E-01 -4.25293436586229E-01
+ -2.59936848362107E-01 -1.72968908719407E-01 -1.20677384763109E-01
+ -8.64832401557986E-02 -6.28799905703766E-02 -4.59685747877699E-02
+ -3.35220775266305E-02 -2.41759217163720E-02 -1.70477107886816E-02
+ -1.15427250669061E-02 -7.24725590324395E-03 -3.86658567110400E-03
+ -1.18704247300186E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320245       1       0       0       0     -39       0       2
+                  
\0\0\0\0\0\0\0\0
+  320233
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.14999999999999E+00
+ -1.15000000000003E+00 -1.14999999999987E+00 -1.15000000000049E+00
+ -1.14999999999818E+00 -1.15000000000650E+00 -1.14999999997787E+00
+ -1.15000000006975E+00 -1.14999999981103E+00 -1.15000000032912E+00
+ -1.15000000068871E+00 -1.14999998789205E+00 -1.15000009177635E+00
+ -1.14999943295436E+00 -1.15000318344073E+00 -1.14998304877008E+00
+ -1.15008638477966E+00 -1.14955847860536E+00 -1.15202131434164E+00
+ -9.88079014600367E-01 -5.41343132097054E-01 -3.14332083383740E-01
+ -2.03992719586475E-01 -1.40614485535019E-01 -1.00290953068750E-01
+ -7.29512134106926E-02 -5.36051244488369E-02 -3.94954594598532E-02
+ -2.89735928112576E-02 -2.09930359371299E-02 -1.48582473175757E-02
+ -1.00903141297685E-02 -6.35052362771876E-03 -3.39449654285649E-03
+ -1.04333561943075E-03
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320065       1       0       0       0     -40       0       2
+                  
\0\0\0\0\0\0\0\0
+  320053
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000001E+00 -1.14999999999996E+00
+ -1.15000000000019E+00 -1.14999999999915E+00 -1.15000000000376E+00
+ -1.14999999998335E+00 -1.15000000007360E+00 -1.14999999967542E+00
+ -1.15000000142846E+00 -1.14999999372593E+00 -1.15000002750383E+00
+ -1.14999987965708E+00 -1.15000052558087E+00 -1.14999770865601E+00
+ -1.15000996887489E+00 -1.14995666210619E+00 -1.15018720080803E+00
+ -1.14917535842507E+00 -1.15335609441927E+00 -1.11065784801097E+00
+ -7.06575620556291E-01 -3.86560351878633E-01 -2.42570221057817E-01
+ -1.64323223872264E-01 -1.16224003093473E-01 -8.43085417740423E-02
+ -6.20519737259673E-02 -4.59879938865652E-02 -3.41018315130958E-02
+ -2.51414994622821E-02 -1.82881621622230E-02 -1.29847291327866E-02
+ -8.84056690841387E-03 -5.57546566284878E-03 -2.98504207216772E-03
+ -9.18423538016081E-04
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  319867       1       0       0       0     -41       0       2
+                  
\0\0\0\0\0\0\0\0
+  319177
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000001E+00 -1.14999999999998E+00 -1.15000000000010E+00
+ -1.14999999999959E+00 -1.15000000000171E+00 -1.14999999999288E+00
+ -1.15000000002950E+00 -1.14999999987831E+00 -1.15000000049984E+00
+ -1.14999999795685E+00 -1.15000000830635E+00 -1.14999996643941E+00
+ -1.15000013462757E+00 -1.14999946442300E+00 -1.15000210889192E+00
+ -1.14999178635361E+00 -1.15003134459040E+00 -1.14987984887642E+00
+ -1.15040159991715E+00 -1.14777221562769E+00 -9.06626203264796E-01
+ -4.89547416771393E-01 -2.93488147764493E-01 -1.93671486405551E-01
+ -1.34923046753073E-01 -9.70000399779595E-02 -7.10430098209084E-02
+ -5.25659618901367E-02 -3.90503718752204E-02 -2.89695162340258E-02
+ -2.13414882189154E-02 -1.55047223983316E-02 -1.09968566268930E-02
+ -7.48544137316366E-03 -4.72634827472679E-03 -2.53776152358996E-03
+ -7.83312177516423E-04
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  318895       1       0       0       0     -42       0       2
+                  
\0\0\0\0\0\0\0\0
+  318805
+ SCAL    
+ REAL*8           
+       1     100       0       0
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.15000000000000E+00 -1.15000000000000E+00
+ -1.15000000000000E+00 -1.14999999999998E+00 -1.15000000000012E+00
+ -1.14999999999944E+00 -1.15000000000263E+00 -1.14999999998770E+00
+ -1.15000000005700E+00 -1.14999999973747E+00 -1.15000000120243E+00
+ -1.14999999451969E+00 -1.15000002486672E+00 -1.14999988762354E+00
+ -1.15000050595717E+00 -1.14999772973232E+00 -1.15001015222710E+00
+ -1.14995469916735E+00 -1.15020080678885E+00 -1.14909664847054E+00
+ -1.15379144835943E+00 -1.07288169976893E+00 -6.34537556744892E-01
+ -3.57755578976403E-01 -2.29153880913141E-01 -1.57261914282897E-01
+ -1.12285649346261E-01 -8.21272011753830E-02 -6.09552784473316E-02
+ -4.56082435019248E-02 -3.42211442440880E-02 -2.56219655982434E-02
+ -1.90374787119092E-02 -1.39381228920544E-02 -9.95074364683848E-03
+ -6.80674510584511E-03 -4.30978282013827E-03 -2.31476016363927E-03
+ -7.13636497920257E-04
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324991       1       0       0       0     -43       0       2
+                  
\0\0\0\0\0\0\0\0
+  325003
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  325603       1       0       0       0     -44       0       2
+                  
\0\0\0\0\0\0\0\0
+  325723
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243884E-01
+  2.75076403243878E-01  2.75076403243906E-01  2.75076403243779E-01
+  2.75076403244348E-01  2.75076403241795E-01  2.75076403253250E-01
+  2.75076403201845E-01  2.75076403432567E-01  2.75076402396908E-01
+  2.75076407046083E-01  2.75076386174716E-01  2.75076479872885E-01
+  2.75076059257084E-01  2.75077947590852E-01  2.75069476226816E-01
+  2.75107582481038E-01  2.74938228399967E-01  2.75740716552630E-01
+  3.35088285191157E-01  5.78559124771428E-01  8.44554067695688E-01
+  9.83514651550140E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324757       1       0       0       0     -45       0       2
+                  
\0\0\0\0\0\0\0\0
+  324745
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243888E-01  2.75076403243858E-01
+  2.75076403243994E-01  2.75076403243391E-01  2.75076403246060E-01
+  2.75076403234241E-01  2.75076403286582E-01  2.75076403054805E-01
+  2.75076404081059E-01  2.75076399537584E-01  2.75076419650144E-01
+  2.75076330630297E-01  2.75076724597280E-01  2.75074981519534E-01
+  2.75082696864921E-01  2.75048633561102E-01  2.75200622959918E-01
+  2.74549579549443E-01  2.78523327342428E-01  3.81798147501242E-01
+  6.45886388339689E-01  8.70752849056533E-01  9.71815912108816E-01
+  9.97984223966455E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324481       1       0       0       0     -46       0       2
+                  
\0\0\0\0\0\0\0\0
+  324469
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243881E-01
+  2.75076403243891E-01  2.75076403243850E-01  2.75076403244027E-01
+  2.75076403243258E-01  2.75076403246600E-01  2.75076403232072E-01
+  2.75076403295187E-01  2.75076403021142E-01  2.75076404210396E-01
+  2.75076399052417E-01  2.75076421410073E-01  2.75076324560545E-01
+  2.75076743838056E-01  2.75074930277047E-01  2.75082774570429E-01
+  2.75048975008449E-01  2.75196535671437E-01  2.74585280759558E-01
+  2.78524479152279E-01  3.85423995339201E-01  6.44776844558968E-01
+  8.55175906606314E-01  9.55953899835618E-01  9.90487419249991E-01
+  9.99274516744765E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  324295       1       0       0       0     -47       0       2
+                  
\0\0\0\0\0\0\0\0
+  324277
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243881E-01  2.75076403243889E-01
+  2.75076403243856E-01  2.75076403243998E-01  2.75076403243389E-01
+  2.75076403246000E-01  2.75076403234807E-01  2.75076403282766E-01
+  2.75076403077382E-01  2.75076403956472E-01  2.75076400195756E-01
+  2.75076416275639E-01  2.75076347557482E-01  2.75076641092505E-01
+  2.75075388004380E-01  2.75080738142696E-01  2.75057961299655E-01
+  2.75155916478797E-01  2.74751750291351E-01  2.76916591719028E-01
+  3.65410829911533E-01  6.16074043280492E-01  8.26765266988173E-01
+  9.36182856971901E-01  9.80198909919764E-01  9.95430171969058E-01
+  9.99632404189964E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323995       1       0       0       0     -48       0       2
+                  
\0\0\0\0\0\0\0\0
+  323959
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243886E-01  2.75076403243871E-01
+  2.75076403243933E-01  2.75076403243673E-01  2.75076403244757E-01
+  2.75076403240238E-01  2.75076403259044E-01  2.75076403180945E-01
+  2.75076403504566E-01  2.75076402166788E-01  2.75076407682324E-01
+  2.75076385008383E-01  2.75076477920641E-01  2.75076098594074E-01
+  2.75077641508478E-01  2.75071404523509E-01  2.75096643987876E-01
+  2.74997859649614E-01  2.75431660046904E-01  3.34806381021184E-01
+  5.68367466392874E-01  7.89076430871077E-01  9.12704213645399E-01
+  9.67636599287993E-01  9.89486842677331E-01  9.97457078703781E-01
+  9.99789041049580E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323743       1       0       0       0     -49       0       2
+                  
\0\0\0\0\0\0\0\0
+  323677
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243886E-01  2.75076403243869E-01
+  2.75076403243948E-01  2.75076403243574E-01  2.75076403245344E-01
+  2.75076403237018E-01  2.75076403275927E-01  2.75076403095143E-01
+  2.75076403930959E-01  2.75076400083600E-01  2.75076417724897E-01
+  2.75076337114932E-01  2.75076704310868E-01  2.75075036565330E-01
+  2.75082595050061E-01  2.75048489498580E-01  2.75203457069334E-01
+  2.74525967266966E-01  3.05771180228018E-01  5.08006685133833E-01
+  7.41722364815973E-01  8.83905415084670E-01  9.52145467738794E-01
+  9.81647655265086E-01  9.93798177210084E-01  9.98449995669623E-01
+  9.99868739454027E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323497       1       0       0       0     -50       0       2
+                  
\0\0\0\0\0\0\0\0
+  323485
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243880E-01
+  2.75076403243893E-01  2.75076403243837E-01  2.75076403244084E-01
+  2.75076403242999E-01  2.75076403247758E-01  2.75076403226896E-01
+  2.75076403318286E-01  2.75076402918213E-01  2.75076404668418E-01
+  2.75076397016853E-01  2.75076430446229E-01  2.75076284490938E-01
+  2.75076921367351E-01  2.75074144749317E-01  2.75086253710453E-01
+  2.75033693241166E-01  2.75266050066841E-01  2.74304272012834E-01
+  2.86041854807574E-01  4.39999467546227E-01  6.84122227688821E-01
+  8.48367183610111E-01  9.32871929985208E-01  9.71648712251219E-01
+  9.88755311025829E-01  9.96083688701043E-01  9.98997913961015E-01
+  9.99913888234155E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323323       1       0       0       0     -51       0       2
+                  
\0\0\0\0\0\0\0\0
+  323299
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243881E-01  2.75076403243891E-01
+  2.75076403243848E-01  2.75076403244031E-01  2.75076403243249E-01
+  2.75076403246583E-01  2.75076403232392E-01  2.75076403292723E-01
+  2.75076403036556E-01  2.75076404122832E-01  2.75076399522752E-01
+  2.75076418974956E-01  2.75076336843481E-01  2.75076683079132E-01
+  2.75075226249762E-01  2.75081349912743E-01  2.75055747363164E-01
+  2.75164154487523E-01  2.74731742077390E-01  2.77248709718158E-01
+  3.72691065982872E-01  6.16334916531130E-01  8.05254012427773E-01
+  9.08958218909807E-01  9.58969219685787E-01  9.82115560135372E-01
+  9.92684804027684E-01  9.97385963671390E-01  9.99317672207359E-01
+  9.99940629277427E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  323149       1       0       0       0     -52       0       2
+                  
\0\0\0\0\0\0\0\0
+  323137
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243884E-01  2.75076403243877E-01
+  2.75076403243904E-01  2.75076403243806E-01  2.75076403244161E-01
+  2.75076403242918E-01  2.75076403247040E-01  2.75076403234576E-01
+  2.75076403265555E-01  2.75076403229435E-01  2.75076402968544E-01
+  2.75076405872082E-01  2.75076385772696E-01  2.75076504713190E-01
+  2.75075855093989E-01  2.75079242155298E-01  2.75062257418054E-01
+  2.75147318763092E-01  2.74758528061837E-01  3.18528679762045E-01
+  5.34271958706114E-01  7.52074179273654E-01  8.80380047313657E-01
+  9.44175445405613E-01  9.74359982893834E-01  9.88555807346621E-01
+  9.95211289778203E-01  9.98253030755661E-01  9.99535458491857E-01
+  9.99959038784235E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322951       1       0       0       0     -53       0       2
+                  
\0\0\0\0\0\0\0\0
+  322939
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243881E-01  2.75076403243892E-01  2.75076403243839E-01
+  2.75076403244075E-01  2.75076403243032E-01  2.75076403247646E-01
+  2.75076403227273E-01  2.75076403317094E-01  2.75076402921608E-01
+  2.75076404660773E-01  2.75076397021741E-01  2.75076430537973E-01
+  2.75076283640243E-01  2.75076926872633E-01  2.75074113421077E-01
+  2.75086418248265E-01  2.75032831506396E-01  2.75269816325956E-01
+  2.74281020677927E-01  2.88268002535811E-01  4.47498099617187E-01
+  6.85957263065727E-01  8.42733266340498E-01  9.24518929653974E-01
+  9.64106210414430E-01  9.83105754143116E-01  9.92285401972344E-01
+  9.96709437690328E-01  9.98780931315179E-01  9.99671944858330E-01
+  9.99970856059199E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322261       1       0       0       0     -54       0       2
+                  
\0\0\0\0\0\0\0\0
+  322201
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243881E-01
+  2.75076403243891E-01  2.75076403243850E-01  2.75076403244022E-01
+  2.75076403243292E-01  2.75076403246393E-01  2.75076403233226E-01
+  2.75076403289062E-01  2.75076403052608E-01  2.75076404052540E-01
+  2.75076399830189E-01  2.75076417631844E-01  2.75076342705086E-01
+  2.75076657519386E-01  2.75075337606145E-01  2.75080864638288E-01
+  2.75057855844128E-01  2.75154894708238E-01  2.74770789549403E-01
+  2.77052917690813E-01  3.68023253039494E-01  6.07255829957364E-01
+  7.94579662623554E-01  8.98802294939202E-01  9.50672320596031E-01
+  9.75979733567630E-01  9.88433279849384E-01  9.94615186713931E-01
+  9.97667124480411E-01  9.99125064985235E-01  9.99762337260503E-01
+  9.99978763136121E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  322027       1       0       0       0     -55       0       2
+                  
\0\0\0\0\0\0\0\0
+  322015
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243884E-01
+  2.75076403243880E-01  2.75076403243891E-01  2.75076403243859E-01
+  2.75076403243928E-01  2.75076403243933E-01  2.75076403242613E-01
+  2.75076403253900E-01  2.75076403181165E-01  2.75076403598169E-01
+  2.75076401356505E-01  2.75076412923451E-01  2.75076354911745E-01
+  2.75076639843763E-01  2.75075263043832E-01  2.75081836311531E-01
+  2.75050877947728E-01  2.75197235966756E-01  2.74541608412357E-01
+  3.11619295542107E-01  5.17582681532861E-01  7.34600300746943E-01
+  8.65627865400592E-01  9.33234910320546E-01  9.66752249413485E-01
+  9.83444330892054E-01  9.91871618014800E-01  9.96155393041772E-01
+  9.98313144143876E-01  9.99360989853670E-01  9.99825079299029E-01
+  9.99984293797073E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321769       1       0       0       0     -56       0       2
+                  
\0\0\0\0\0\0\0\0
+  321757
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243881E-01  2.75076403243893E-01
+  2.75076403243838E-01  2.75076403244080E-01  2.75076403243013E-01
+  2.75076403247703E-01  2.75076403227127E-01  2.75076403317284E-01
+  2.75076402922777E-01  2.75076404646786E-01  2.75076397122384E-01
+  2.75076429921994E-01  2.75076287118960E-01  2.75076908152918E-01
+  2.75074211011031E-01  2.75085921440915E-01  2.75035323938940E-01
+  2.75257557619757E-01  2.74340873960896E-01  2.83822219078321E-01
+  4.24593815142912E-01  6.61618294691164E-01  8.23065282704934E-01
+  9.10596678590429E-01  9.54793200102124E-01  9.77002838064811E-01
+  9.88324573827211E-01  9.94174651925386E-01  9.97208765652739E-01
+  9.98762652430626E-01  9.99527463675546E-01  9.99869844436761E-01
+  9.99988267953268E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321619       1       0       0       0     -57       0       2
+                  
\0\0\0\0\0\0\0\0
+  321589
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243881E-01  2.75076403243888E-01  2.75076403243860E-01
+  2.75076403243979E-01  2.75076403243480E-01  2.75076403245558E-01
+  2.75076403236932E-01  2.75076403272627E-01  2.75076403125439E-01
+  2.75076403730014E-01  2.75076401257460E-01  2.75076411320285E-01
+  2.75076370594436E-01  2.75076534369181E-01  2.75075880822022E-01
+  2.75078467781463E-01  2.75068374179981E-01  2.75107860560792E-01
+  2.74966497334450E-01  2.75676368750234E-01  3.45525648600928E-01
+  5.75255619914131E-01  7.69105886950918E-01  8.81238135868439E-01
+  9.39248300942959E-01  9.68663976127688E-01  9.83745732173656E-01
+  9.91609035405005E-01  9.95755801640328E-01  9.97943859954538E-01
+  9.99080455341518E-01  9.99646374660908E-01  9.99902074057537E-01
+  9.99991143603448E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  321277       1       0       0       0     -58       0       2
+                  
\0\0\0\0\0\0\0\0
+  321031
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243888E-01  2.75076403243860E-01
+  2.75076403243994E-01  2.75076403243354E-01  2.75076403246371E-01
+  2.75076403232276E-01  2.75076403297638E-01  2.75076402996470E-01
+  2.75076404376395E-01  2.75076398085178E-01  2.75076426639182E-01
+  2.75076297568409E-01  2.75076878843983E-01  2.75074270444905E-01
+  2.75085945287469E-01  2.75033994919394E-01  2.75267465360987E-01
+  2.74271805707605E-01  2.97181172385417E-01  4.78768164364373E-01
+  7.02197815355443E-01  8.43390621444724E-01  9.19081051771557E-01
+  9.57892452985096E-01  9.77866244245520E-01  9.88323532860446E-01
+  9.93886795585721E-01  9.96872236776175E-01  9.98470583008462E-01
+  9.99310855330837E-01  9.99733369648950E-01  9.99925812145758E-01
+  9.99993270098462E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320359       1       0       0       0     -59       0       2
+                  
\0\0\0\0\0\0\0\0
+  320347
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243881E-01
+  2.75076403243892E-01  2.75076403243843E-01  2.75076403244054E-01
+  2.75076403243145E-01  2.75076403247063E-01  2.75076403230209E-01
+  2.75076403302591E-01  2.75076402992193E-01  2.75076404321277E-01
+  2.75076398639007E-01  2.75076422895124E-01  2.75076319515179E-01
+  2.75076759443410E-01  2.75074890750035E-01  2.75082821963817E-01
+  2.75049338054446E-01  2.75192809475366E-01  2.74615560438652E-01
+  2.78547091851997E-01  3.85301119135236E-01  6.21186648249266E-01
+  7.94783100461535E-01  8.92814917870383E-01  9.43885719287350E-01
+  9.70269922021216E-01  9.84103028781021E-01  9.91490401464924E-01
+  9.95491000100106E-01  9.97670404002984E-01  9.98851851465030E-01
+  9.99479357861243E-01  9.99797544195274E-01  9.99943449308032E-01
+  9.99994857700813E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320191       1       0       0       0     -60       0       2
+                  
\0\0\0\0\0\0\0\0
+  320179
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243882E-01  2.75076403243885E-01
+  2.75076403243875E-01  2.75076403243913E-01  2.75076403243771E-01
+  2.75076403244297E-01  2.75076403242406E-01  2.75076403248910E-01
+  2.75076403228043E-01  2.75076403286797E-01  2.75076403169140E-01
+  2.75076403087476E-01  2.75076405993597E-01  2.75076382401485E-01
+  2.75076532019941E-01  2.75075680286642E-01  2.75080252917071E-01
+  2.75056786588840E-01  2.75176707165086E-01  2.74618083959426E-01
+  3.17102445319390E-01  5.26594655048742E-01  7.33475146296911E-01
+  8.58753498126137E-01  9.25665515370157E-01  9.60444341881162E-01
+  9.78682208281852E-01  9.88427433595326E-01  9.93726178404508E-01
+  9.96640745510548E-01  9.98249445428024E-01  9.99131106382427E-01
+  9.99603688428033E-01  9.99845157931926E-01  9.99956586712335E-01
+  9.99996042964252E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  320023       1       0       0       0     -61       0       2
+                  
\0\0\0\0\0\0\0\0
+  320011
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243881E-01  2.75076403243893E-01
+  2.75076403243840E-01  2.75076403244075E-01  2.75076403243029E-01
+  2.75076403247665E-01  2.75076403227168E-01  2.75076403317595E-01
+  2.75076402919480E-01  2.75076404668724E-01  2.75076396997765E-01
+  2.75076430573749E-01  2.75076283884574E-01  2.75076923608774E-01
+  2.75074139331080E-01  2.75086245606620E-01  2.75033896153769E-01
+  2.75263799504664E-01  2.74316220194198E-01  2.84292674912493E-01
+  4.26540022790520E-01  6.58010510863171E-01  8.14735482565288E-01
+  9.01920389609861E-01  9.47695560123662E-01  9.71705797830911E-01
+  9.84515519404856E-01  9.91483193046957E-01  9.95332360678094E-01
+  9.97478321699428E-01  9.98676207458368E-01  9.99338923161316E-01
+  9.99696958512170E-01  9.99881112303148E-01  9.99966558773075E-01
+  9.99996945607005E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  319147       1       0       0       0     -62       0       2
+                  
\0\0\0\0\0\0\0\0
+  319135
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243888E-01  2.75076403243861E-01
+  2.75076403243976E-01  2.75076403243494E-01  2.75076403245501E-01
+  2.75076403237183E-01  2.75076403271519E-01  2.75076403130370E-01
+  2.75076403707882E-01  2.75076401357512E-01  2.75076410865489E-01
+  2.75076372669981E-01  2.75076524873409E-01  2.75075924315552E-01
+  2.75078268573976E-01  2.75069285064556E-01  2.75103692102007E-01
+  2.74985228442121E-01  2.75583211354218E-01  3.43057286145631E-01
+  5.65985865759580E-01  7.56643364380001E-01  8.70282521264471E-01
+  9.31048736801026E-01  9.62894182038538E-01  9.79765795602946E-01
+  9.88870958864737E-01  9.93867600695666E-01  9.96641700320888E-01
+  9.98190104312052E-01  9.99052735814466E-01  9.99528098252560E-01
+  9.99783809832887E-01  9.99914990677645E-01  9.99975948300227E-01
+  9.99997788900343E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -21  318721       1       0       0       0     -63       0       2
+                  
\0\0\0\0\0\0\0\0
+  318673
+ SCAL    
+ REAL*8           
+       1     100       0       0
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243883E-01  2.75076403243883E-01  2.75076403243883E-01
+  2.75076403243882E-01  2.75076403243888E-01  2.75076403243856E-01
+  2.75076403244010E-01  2.75076403243284E-01  2.75076403246676E-01
+  2.75076403230938E-01  2.75076403303503E-01  2.75076402970810E-01
+  2.75076404488461E-01  2.75076397596653E-01  2.75076428764566E-01
+  2.75076288341111E-01  2.75076918822328E-01  2.75074097692465E-01
+  2.75086691420561E-01  2.75030807169828E-01  2.75281698585845E-01
+  2.74217899115999E-01  2.93703945394787E-01  4.65875386319161E-01
+  6.87102250960072E-01  8.30142339263955E-01  9.09194441577445E-01
+  9.50978184442015E-01  9.73114334153185E-01  9.85053957900761E-01
+  9.91623616241485E-01  9.95299374491461E-01  9.97379896381979E-01
+  9.98563993772111E-01  9.99236754546396E-01  9.99614733108286E-01
+  9.99821741720774E-01  9.99929511687589E-01  9.99980045333358E-01
+  9.99998169949833E-01
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  325111       1       0       0       0     -64       0       2
+                  
\0\0\0\0\0\0\0\0
+  325147
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  326071       1       0       0       0     -65       0       2
+                  
\0\0\0\0\0\0\0\0
+  326209
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  1.07234596012123E-25 -3.67241024039310E-09  1.06830699228650E-25
+  3.67241024039376E-09 -3.08981039356966E-25 -3.67241024039319E-09
+ -4.90330695136414E-25  1.95687991592745E-25 -3.67241024039276E-09
+  2.25576353569759E-25  7.83559759937927E-26 -3.67241024039276E-09
+  1.31064506237040E-25 -5.14968398928277E-26 -3.67241024039320E-09
+ -3.27964188180204E-25 -7.29033694169050E-26 -3.67241024039320E-09
+ -1.47422325967703E-26  1.08850183146016E-25 -3.67241024039287E-09
+  6.66429692730711E-27 -7.89618211690024E-26 -3.67241024039332E-09
+ -3.05547916697444E-25  2.50214057361621E-25 -3.67241024039331E-09
+ -3.42504472385238E-25  2.14671140415984E-25 -3.67241024039266E-09
+  3.72594782753988E-25  3.64920743867998E-25 -3.67241024039329E-09
+  6.11095833394888E-25  1.42575564566025E-25 -3.67241024039342E-09
+ -1.42575564566025E-25 -4.48325429655205E-26 -3.67241024039285E-09
+ -5.35163238101934E-26 -2.73640070803065E-25 -3.67241024039277E-09
+  1.79936017037292E-25 -2.01140598169633E-25 -3.67241024039332E-09
+ -1.93668507675379E-25  1.50047655060278E-25 -3.67241024039335E-09
+ -6.28059498300761E-26  3.95818847803695E-26 -3.67241024039290E-09
+ -2.28403631054071E-25  6.70468660565442E-26 -3.67241024039288E-09
+ -3.60679827641530E-25 -3.23723271953736E-25 -3.67241024039336E-09
+ -4.49335171613888E-25  1.58327539121478E-25 -3.67241024039338E-09
+  6.03825691292371E-26 -2.30827011754910E-25 -3.67241024039330E-09
+ -7.60537643279957E-25  2.62532909257553E-27 -3.67241024039267E-09
+ -2.62532909257553E-27 -6.32098466135492E-26 -3.67241024039364E-09
+ -2.30625063363173E-25  2.96864135852771E-26 -3.67241024039305E-09
+ -2.96864135852771E-26  1.13494996155957E-25 -3.67241024039328E-09
+ -1.13494996155957E-25 -2.98277774594927E-25 -3.67241024039359E-09
+  1.79734068645555E-25  1.83167191305077E-25 -3.67241024039301E-09
+  5.23854128164686E-25 -1.88417849490228E-25 -3.67241024039304E-09
+  2.38904947424373E-25  1.34497628896562E-25 -3.67241024039350E-09
+  1.58933384296688E-25  7.95676663442121E-26 -3.67241024039301E-09
+  2.13661398457301E-25 -7.47209049425342E-26 -3.67241024039329E-09
+  7.47209049425342E-26  1.39344390298240E-26 -3.67241024039291E-09
+ -3.45533698261287E-25 -2.63138754432762E-25 -3.67241024039352E-09
+ -1.59943126255371E-25 -5.13554760186120E-25 -3.67241024039318E-09
+ -4.98004734022404E-25  5.04870979341448E-26 -3.67241024039333E-09
+  1.10465770279909E-25  6.60371240978613E-26 -3.67241024039257E-09
+  2.01948391736579E-25  9.89547119509237E-26 -3.67241024039312E-09
+ -7.67403888599000E-26 -2.06189307963047E-25 -3.67241024039342E-09
+ -2.43751708826051E-25 -2.71216690102226E-25 -3.67241024039295E-09
+ -1.77108739552980E-25  6.82585564069637E-26 -3.67241024039308E-09
+ -6.82585564069637E-26  6.84605047987003E-26 -3.67241024039339E-09
+  7.35092145921148E-26 -5.55358077275592E-26 -3.67241024039344E-09
+ -9.47137957244556E-26  1.11273563846855E-25 -3.67241024039318E-09
+  1.82359397738131E-25 -1.22582673784103E-25 -3.67241024039329E-09
+ -1.70848339409146E-25 -1.31266454628776E-27 -3.67241024039349E-09
+  3.83701944299500E-27  6.01806207375005E-26 -3.67241024039345E-09
+ -6.01806207375005E-26  8.69387826425973E-26 -3.67241024039303E-09
+  5.54348335316909E-26 -1.60548971430580E-26 -3.67241024039354E-09
+  1.56913900379322E-25  1.30458661061830E-25 -3.67241024039286E-09
+ -1.45200893658600E-25 -2.70610844927016E-26 -3.67241024039375E-09
+  2.69601102968333E-26  3.17260923418166E-25 -3.67241024039307E-09
+  4.27221822718733E-25  2.02352288520052E-25 -3.67241024039317E-09
+  4.08945493266573E-25  9.61274344666116E-26 -3.67241024039324E-09
+ -9.61274344666116E-26  2.44357554001261E-26 -3.67241024039336E-09
+  1.07133621816255E-25  1.43484332328839E-25 -3.67241024039331E-09
+  1.99928907819213E-26 -7.38121371797196E-26 -3.67241024039303E-09
+ -5.46270399647446E-26  1.37223932185005E-25 -3.67241024039319E-09
+  1.56510003595849E-25 -4.01877299555792E-26 -3.67241024039309E-09
+  1.65698655419863E-25  2.65562135133601E-26 -3.67241024039328E-09
+ -2.64552393174919E-26  2.32240650497066E-26 -3.67241024039348E-09
+ -2.32240650497066E-26  3.23117426778526E-26 -3.67241024039283E-09
+  8.83524213847533E-26 -6.40176401804956E-26 -3.67241024039304E-09
+  6.41186143763638E-26 -2.63542651216236E-26 -3.67241024039296E-09
+  1.43686280720576E-25 -9.22904150236166E-26 -3.67241024039328E-09
+  3.85721428216866E-25  5.46270399647446E-26 -3.67241024039307E-09
+  5.98776981498957E-26  8.96650859310411E-26 -3.67241024039318E-09
+  3.16857026634692E-25 -1.10263821888172E-25 -3.67241024039353E-09
+ -3.65425614847340E-25 -6.45225111598370E-26 -3.67241024039358E-09
+ -4.53374139448620E-26  2.78688780596479E-26 -3.67241024039309E-09
+  2.65663109329470E-25  7.81540276020561E-26 -3.67241024039303E-09
+  2.85756974307259E-26  1.04003421744338E-26 -3.67241024039319E-09
+ -9.34011311781678E-26  2.15377959787062E-25 -3.67241024039314E-09
+  7.81540276020561E-26  4.73568978622278E-26 -3.67241024039311E-09
+ -4.73568978622278E-26 -1.20462215670869E-25 -3.67241024039352E-09
+ -2.63542651216236E-26  1.48432067926386E-25 -3.67241024039259E-09
+  9.74400990128994E-26 -7.16916790664856E-27 -3.67241024039531E-09
+ -2.37087411898744E-25  1.14403763918772E-25 -3.67241024038472E-09
+  2.75053709545221E-25 -8.27988406119974E-27 -3.67241024043177E-09
+ -4.41257235944425E-26 -2.86766716265942E-26 -3.67241024022065E-09
+ -2.53445231629407E-26 -5.11939173052228E-26 -3.67241024116607E-09
+ -9.55215892914019E-26 -9.59254860748750E-27 -3.67241023692872E-09
+ -8.01735115194219E-26 -1.67617165141361E-26 -3.67241025593145E-09
+  7.54277243136123E-26  1.11071615455118E-27 -3.67241017068603E-09
+  2.32139676301198E-25 -2.47386779877309E-26 -3.67241055317633E-09
+  8.64339116632558E-26 -1.57519745554532E-26 -3.67240883665447E-09
+  9.91566603426603E-26 -7.77501308185829E-27 -3.67241654107518E-09
+ -7.41150597673245E-26 -2.07300024117598E-25 -3.67238195705987E-09
+ -2.33149418259880E-25 -1.17533963990689E-25 -3.67253721004644E-09
+ -1.86903236552204E-25 -2.06997101529993E-26 -3.67184023473504E-09
+ -4.87705366043838E-26  9.00689827145142E-26 -3.67496918095292E-09
+  1.27732357773386E-25  7.77501308185829E-26 -3.66092335343000E-09
+  1.41666796803210E-25 -6.46234853557053E-27 -3.72398243270705E-09
+  6.46234853557053E-27  6.00796465416323E-27 -3.44112923141631E-09
+  6.95207338553173E-26 -7.83559759937927E-26 -4.71430921074279E-09
+ -6.07359788147761E-26 -1.25712873856020E-26  9.27421964195406E-10
+ -6.32098466135492E-26 -8.11832534781048E-26 -2.62092929521969E-08
+ -2.16185753354008E-25 -5.94536065272489E-25 -2.48955785161512E-06
+ -4.51556603922991E-25  5.64809262008864E-24 -1.48867407977123E-05
+ -5.64809262008864E-24 -8.24207932226665E-23 -3.81233169248468E-05
+ -8.74614250804115E-23 -1.17924936077091E-22 -6.70527946976353E-05
+ -1.06060064165784E-22
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  324721       1       0       0       0     -66       0       2
+                  
\0\0\0\0\0\0\0\0
+  324673
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  8.90592407558313E-26 -3.67241024039288E-09 -8.88572923640948E-26
+  3.67241024039353E-09 -7.06819371078027E-27 -3.67241024039253E-09
+  2.19517901817661E-25 -2.01948391736579E-28 -3.67241024039320E-09
+ -3.76633750588720E-25 -3.97838331721061E-26 -3.67241024039298E-09
+ -1.69232752275253E-25  4.03896783473158E-26 -3.67241024039233E-09
+  1.67011319966151E-25  2.10228275797779E-25 -3.67241024039354E-09
+  1.70848339409146E-25 -8.68378084467290E-26 -3.67241024039333E-09
+ -5.89285407087338E-25 -3.75825957021774E-25 -3.67241024039342E-09
+ -4.13994203059987E-25  2.47386779877309E-25 -3.67241024039320E-09
+ -4.60442333159400E-26 -1.74685358852141E-25 -3.67241024039333E-09
+ -2.12449708106881E-25  6.86624531904369E-26 -3.67241024039320E-09
+  2.17296469508559E-25  1.42777512957761E-25 -3.67241024039375E-09
+  5.41221689854032E-26 -4.44286461820474E-26 -3.67241024039307E-09
+ -1.50855448627225E-25 -7.99715631276853E-26 -3.67241024039344E-09
+  7.99715631276853E-26 -2.97268032636244E-25 -3.67241024039290E-09
+  3.63507105125842E-27  1.57923642338005E-25 -3.67241024039320E-09
+  1.35507370855245E-25  3.93799363886329E-26 -3.67241024039333E-09
+  6.52293305309150E-26  1.60750919822317E-25 -3.67241024039306E-09
+ -1.60750919822317E-25 -1.47624274359439E-25 -3.67241024039303E-09
+  3.97838331721061E-26  1.40959977432132E-25 -3.67241024039348E-09
+ -1.40959977432132E-25 -1.13696944547694E-25 -3.67241024039361E-09
+ -6.90663499739100E-26  9.89547119509237E-27 -3.67241024039296E-09
+ -1.91245126974540E-25 -6.30078982218127E-26 -3.67241024039386E-09
+ -2.30221166579700E-25 -1.57115848771058E-25 -3.67241024039359E-09
+ -2.08006843488676E-26 -1.51259345410698E-25 -3.67241024039328E-09
+ -3.18674562160322E-25  8.27988406119974E-26 -3.67241024039290E-09
+ -2.01342546561369E-25 -2.94844651935405E-25 -3.67241024039308E-09
+ -1.72261978151302E-25 -3.10192729707385E-25 -3.67241024039331E-09
+ -2.26788043920178E-25 -3.03932329563551E-25 -3.67241024039308E-09
+ -4.53374139448620E-25 -3.15039491109063E-26 -3.67241024039391E-09
+  2.00332804602686E-25  1.06628750836914E-25 -3.67241024039308E-09
+ -6.58351757061248E-26 -6.86624531904369E-26 -3.67241024039323E-09
+  3.24329117128946E-25 -6.72488144482808E-26 -3.67241024039318E-09
+ -6.22001046548663E-26  5.69494464697153E-26 -3.67241024039301E-09
+  2.68187464226177E-25 -1.45200893658600E-25 -3.67241024039377E-09
+ -2.80910212905581E-25  9.14826214566703E-26 -3.67241024039300E-09
+  3.61487621208476E-25 -7.77501308185829E-26 -3.67241024039317E-09
+  7.77501308185829E-26 -2.06189307963047E-25 -3.67241024039310E-09
+ -2.43751708826051E-25 -3.89760396051598E-26 -3.67241024039290E-09
+  5.51319109440861E-26 -1.27429435185781E-25 -3.67241024039354E-09
+ -1.27227486794045E-26  2.46377037918626E-26 -3.67241024039328E-09
+ -1.66203526399205E-25  1.70040545842200E-25 -3.67241024039333E-09
+  1.23794364134523E-25 -1.08042389579070E-25 -3.67241024039339E-09
+ -3.69565556877940E-26  4.84676140167790E-26 -3.67241024039361E-09
+ -3.42100575601765E-25 -7.65384404681634E-26 -3.67241024039317E-09
+ -6.88644015821734E-26 -2.13055553282091E-25 -3.67241024039331E-09
+ -8.04764341070267E-26  2.11439966148198E-25 -3.67241024039343E-09
+  2.33250392455749E-25  6.62390724895979E-26 -3.67241024039318E-09
+  2.27393889095388E-25 -1.34901525680035E-25 -3.67241024039311E-09
+  1.34901525680035E-25  8.18900728491828E-26 -3.67241024039357E-09
+  1.93668507675379E-25  3.23117426778526E-26 -3.67241024039314E-09
+ -5.34153496143252E-26 -5.43241173771398E-26 -3.67241024039307E-09
+  5.44250915730080E-26  1.62669429543814E-25 -3.67241024039284E-09
+  1.30761583649435E-25 -4.12984461101304E-26 -3.67241024039350E-09
+ -9.02709311062508E-26  2.08612688663886E-25 -3.67241024039307E-09
+ -4.51354655531254E-26  2.39308844207846E-26 -3.67241024039306E-09
+  1.40959977432132E-25 -9.28962601988264E-26 -3.67241024039349E-09
+  9.28962601988264E-26 -5.14968398928277E-27 -3.67241024039352E-09
+  1.30458661061830E-25 -9.73391248170311E-26 -3.67241024039339E-09
+  2.67177722267494E-25  2.32240650497066E-26 -3.67241024039338E-09
+  9.90556861467920E-26 -1.02892705589787E-25 -3.67241024039340E-09
+  1.02993679785655E-25 -1.61962610172736E-25 -3.67241024039337E-09
+ -3.06052787676786E-25  1.10768692867514E-25 -3.67241024039322E-09
+  6.52293305309150E-26 -9.20884666318800E-26 -3.67241024039350E-09
+ -8.54241697045729E-26 -1.41060951628000E-25 -3.67241024039317E-09
+ -3.80672718423451E-26 -2.34260134414432E-26 -3.67241024039306E-09
+  2.03866901458077E-25  4.33179300274962E-26 -3.67241024039341E-09
+ -4.32169558316279E-26 -6.45225111598370E-26 -3.67241024039337E-09
+ -4.53374139448620E-26  8.19910470450511E-26 -3.67241024039320E-09
+ -8.20920212409194E-26  7.81540276020561E-26 -3.67241024039314E-09
+  2.85756974307259E-26 -4.22072138729450E-26 -3.67241024039330E-09
+ -4.39641648810533E-25  2.15377959787062E-25 -3.67241024039292E-09
+  7.81540276020561E-26 -5.04870979341448E-26 -3.67241024039388E-09
+ -5.15978140886959E-26  1.75897049202560E-25 -3.67241024039071E-09
+  3.18876510552058E-25 -4.73568978622278E-26 -3.67241024040324E-09
+ -2.93935884172591E-25  9.04728794979874E-26 -3.67241024034861E-09
+  3.00499206904030E-25 -8.03754599111585E-26 -3.67241024059032E-09
+ -1.62265532760341E-25 -3.02922587604869E-27 -3.67241023952035E-09
+  5.54348335316909E-26 -1.21471957629552E-25 -3.67241024426136E-09
+  1.21370983433684E-25  1.47826222751176E-25 -3.67241022324977E-09
+  5.45260657688763E-26 -5.45260657688763E-27 -3.67241031637190E-09
+ -8.42124793541535E-26  3.69565556877940E-26 -3.67240990368664E-09
+ -6.65419950772028E-26 -3.08981039356966E-26 -3.67241173242214E-09
+  9.10787246731971E-26  6.68449176648077E-26 -3.67240362942430E-09
+ -6.67439434689394E-26  1.80036991233160E-25 -3.67243952988475E-09
+  1.13494996155957E-25  9.79449699922408E-26 -3.67228048833603E-09
+  4.87705366043838E-26 -1.09355054125358E-25 -3.67298497952106E-09
+  4.30150074398913E-26 -4.81646914291741E-26 -3.66986473778849E-09
+  4.82656656250424E-26 -8.99680085186460E-26 -3.68368333162175E-09
+ -2.03563978870472E-25 -1.43383358132971E-25 -3.62250163492353E-09
+ -2.25879276157364E-25  1.02791731393919E-25 -3.89352501416930E-09
+  1.92456817324960E-25 -4.20557525791426E-26 -2.69653362574500E-09
+ -1.04205370136075E-25  1.51158371214829E-25 -8.05177318462775E-09
+  6.93692725615149E-26  1.16978605913413E-25  1.47323925086573E-08
+  8.82009600909509E-26 -4.58422849242034E-26 -1.31703887083274E-07
+  1.71858081367829E-25  3.72877510502420E-24 -4.47711692554017E-06
+  4.30230853755608E-24  7.67727006025779E-24 -1.71768467880227E-05
+  1.62851183096377E-23 -1.30280946477102E-23 -2.91071220692787E-05
+ -1.52511425439464E-24 -9.88739325942291E-24 -3.43072691406396E-05
+  9.90031795649405E-24  8.91157863055176E-24 -3.48986932858369E-05
+  8.42690249038397E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  324445       1       0       0       0     -67       0       2
+                  
+  324433
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  4.03896783473158E-28 -3.67241024039266E-09 -2.01948391736579E-28
+  3.67241024039396E-09 -7.06819371078027E-27 -3.67241024039318E-09
+  2.19517901817661E-25  3.91375983185490E-25 -3.67241024039363E-09
+  2.98883619770137E-26  2.74043967586538E-25 -3.67241024039320E-09
+  3.13423903975171E-25  1.32276196587459E-25 -3.67241024039321E-09
+ -3.39879143292662E-25  2.20527643776344E-25 -3.67241024039299E-09
+  7.31053178086416E-26  9.99644539096066E-26 -3.67241024039332E-09
+ -1.89225643057175E-25 -7.51248017260074E-26 -3.67241024039353E-09
+ -1.55500261637166E-26  3.50986304838174E-25 -3.67241024039341E-09
+ -1.49643758276805E-25 -3.76633750588720E-25 -3.67241024039375E-09
+ -6.10086091436205E-25  1.69232752275253E-25 -3.67241024039374E-09
+  1.24602157701469E-25 -5.25065818515105E-26 -3.67241024039331E-09
+ -1.41161925823869E-25  5.31124270267203E-26 -3.67241024039318E-09
+ -5.31124270267203E-26 -1.77916533119926E-25 -3.67241024039374E-09
+ -3.09182987748702E-25  1.84580830047233E-25 -3.67241024039299E-09
+  1.99525011035740E-25 -1.40758029040396E-25 -3.67241024039320E-09
+ -1.52672984152854E-25  2.35067927981378E-25 -3.67241024039331E-09
+  1.62972352131419E-25  1.60750919822317E-25 -3.67241024039360E-09
+ -1.60750919822317E-25 -4.48325429655205E-26 -3.67241024039337E-09
+ -2.48800418619465E-25 -3.73604524712671E-26 -3.67241024039337E-09
+ -3.73604524712671E-26 -2.86766716265942E-26 -3.67241024039351E-09
+ -2.64754341566655E-25  1.22380725392367E-25 -3.67241024039318E-09
+ -1.22380725392367E-25  1.40556080648659E-25 -3.67241024039309E-09
+  3.89760396051598E-26  2.08006843488676E-26 -3.67241024039337E-09
+  2.72630328844382E-25  1.55500261637166E-26 -3.67241024039361E-09
+ -1.57519745554532E-26  2.01342546561369E-25 -3.67241024039340E-09
+  9.20884666318800E-26 -3.47351233786916E-26 -3.67241024039318E-09
+  8.80494987971485E-26  9.30982085905629E-26 -3.67241024039379E-09
+ -4.28130590481548E-26  6.19981562631298E-26 -3.67241024039318E-09
+  2.31432856930120E-25  5.27085302432471E-26 -3.67241024039297E-09
+ -1.77714584728190E-25 -6.07864659127103E-26 -3.67241024039335E-09
+ -2.33250392455749E-25  1.11879409022065E-25 -3.67241024039297E-09
+  1.43787254916444E-25 -8.30007890037340E-26 -3.67241024039325E-09
+ -4.69328062395810E-25 -1.05820957269967E-25 -3.67241024039351E-09
+  2.68389412617914E-25  2.46175089526890E-25 -3.67241024039333E-09
+ -1.13696944547694E-25  2.25576353569759E-25 -3.67241024039290E-09
+  6.80566080152271E-26  8.01735115194219E-26 -3.67241024039355E-09
+ -5.79591884283982E-26 -1.27833331969255E-25 -3.67241024039304E-09
+  1.27833331969255E-25 -1.36719061205664E-25 -3.67241024039327E-09
+ -1.40758029040396E-25 -5.06890463258813E-26 -3.67241024039312E-09
+  3.57448653373745E-25  1.66203526399205E-25 -3.67241024039318E-09
+  2.69197206184860E-25 -1.23592415742786E-25 -3.67241024039355E-09
+ -1.97909423901847E-26 -1.08042389579070E-25 -3.67241024039300E-09
+  1.08042389579070E-25  2.36279618331797E-26 -3.67241024039322E-09
+ -2.44357554001261E-26 -2.64552393174919E-26 -3.67241024039334E-09
+  3.22410607407448E-25  6.00796465416323E-26 -3.67241024039345E-09
+ -2.04169824045681E-25 -1.09052131537753E-26 -3.67241024039340E-09
+  1.10061873496436E-26 -1.29448919103147E-25 -3.67241024039350E-09
+ -1.14100841331167E-26 -2.04573720829155E-25 -3.67241024039329E-09
+ -8.89582665599631E-26  5.28095044391154E-26 -3.67241024039303E-09
+  2.22749076085447E-25 -1.72060029759565E-25 -3.67241024039339E-09
+ -1.21471957629552E-25 -1.15413505877455E-25 -3.67241024039314E-09
+ -1.78118481511663E-25  2.32240650497066E-27 -3.67241024039357E-09
+ -2.96864135852771E-26  8.85543697764899E-26 -3.67241024039315E-09
+  7.35092145921148E-26  1.10768692867514E-25 -3.67241024039331E-09
+  1.82763294521604E-25 -1.71656132976092E-25 -3.67241024039292E-09
+ -2.50416005753358E-25  6.83595306028320E-26 -3.67241024039372E-09
+  5.86660077994762E-26  2.24162714827603E-26 -3.67241024039316E-09
+ -2.24162714827603E-26 -7.14897306747490E-26 -3.67241024039341E-09
+ -9.82478925798457E-26  2.31331882734251E-25 -3.67241024039326E-09
+  2.33654289239222E-25 -1.02892705589787E-25 -3.67241024039318E-09
+  1.02993679785655E-25  1.25208002876679E-26 -3.67241024039314E-09
+ -1.31771325608118E-25  9.13816472608020E-26 -3.67241024039318E-09
+  2.62532909257553E-26 -1.68424958708307E-25 -3.67241024039326E-09
+ -6.37147175928907E-26  3.79662976464769E-26 -3.67241024039306E-09
+  1.41161925823869E-25 -1.21169035041947E-25 -3.67241024039328E-09
+  1.21169035041947E-25 -1.65900603811600E-25 -3.67241024039341E-09
+ -2.39005921620241E-25  3.33214846365355E-26 -3.67241024039337E-09
+ -1.43080435545366E-25 -1.24198260917996E-25 -3.67241024039298E-09
+  1.58529487513215E-26  1.59539229471897E-26 -3.67241024039336E-09
+ -1.22683647979972E-25  1.08244337970806E-25 -3.67241024039254E-09
+ -1.91346101170409E-25 -1.75998023398429E-25 -3.67241024039370E-09
+ -1.17533963990689E-25 -1.61558713389263E-27 -3.67241024038972E-09
+  1.93062662500170E-25 -1.69333726471122E-25 -3.67241024040869E-09
+ -7.79520792103195E-26  1.00974195868290E-27 -3.67241024032582E-09
+ -1.00974195868290E-27  9.00689827145142E-26 -3.67241024068759E-09
+  7.27014210251684E-27  6.54312789226516E-26 -3.67241023910895E-09
+ -1.16322273640270E-25 -1.06224854053441E-25 -3.67241024599752E-09
+  5.36172980060617E-26 -2.86766716265942E-26 -3.67241021594828E-09
+  1.21471957629552E-25 -2.32240650497066E-27 -3.67241034697880E-09
+ -1.44494074287522E-25 -1.56308055204112E-25 -3.67240977583599E-09
+  9.59254860748750E-27 -6.57342015102565E-26 -3.67241226438450E-09
+  6.58351757061248E-26  1.59135332688424E-25 -3.67240142586568E-09
+  4.77607946457009E-26 -2.26788043920178E-25 -3.67244861142530E-09
+  7.99715631276853E-26 -1.57519745554532E-26 -3.67224327981185E-09
+  9.91566603426603E-26  2.01948391736579E-28 -3.67313638625655E-09
+  6.48254337474419E-26 -1.75897049202560E-25 -3.66925364726284E-09
+ -1.17634938186557E-25  4.02887041514475E-26 -3.68612589317128E-09
+ -1.87004210748072E-25 -1.78017507315794E-25 -3.61285901818769E-09
+ -1.15514480073323E-25  1.39344390298240E-26 -3.93107607298909E-09
+ -1.39344390298240E-26  2.99893361728820E-26 -2.55409793676906E-09
+  1.16221299444401E-25  1.72665874934775E-26 -8.59351902682153E-09
+  5.54348335316909E-26 -2.57989070443480E-26  1.65320211216982E-08
+  2.04977617612628E-26  2.05078591808496E-25 -1.45795184154468E-07
+  6.69458918606759E-26 -4.74982617364434E-25 -4.52527430267040E-06
+ -2.39914689383056E-24  9.19592196611686E-24 -1.52793802463874E-05
+  4.96308367531817E-24 -2.01625274309800E-24 -2.38252364893103E-05
+  7.35415263347926E-24 -1.25886549472914E-23 -2.73069401262468E-05
+ -6.35895095900140E-24  1.18777966083786E-23 -2.80706164505380E-05
+  1.67374827071277E-23 -1.19488824422699E-23 -2.81234131669965E-05
+ -9.61597462092895E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  324223       1       0       0       0     -68       0       2
+                  
+  324211
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  2.01948391736579E-28 -3.67241024039310E-09  2.13459450065564E-25
+  3.67241024039375E-09 -1.29852815886620E-25 -3.67241024039340E-09
+ -4.57211158891615E-25  0.00000000000000E+00 -3.67241024039319E-09
+  0.00000000000000E+00 -1.44191151699917E-25 -3.67241024039299E-09
+ -6.46234853557053E-26  1.32276196587459E-25 -3.67241024039342E-09
+  2.47184831485573E-25  2.04977617612628E-25 -3.67241024039354E-09
+  2.63542651216236E-25  1.04407318527811E-25 -3.67241024039330E-09
+  9.99644539096066E-26 -1.76704842769507E-25 -3.67241024039288E-09
+ -2.62532909257553E-26 -1.38334648339557E-25 -3.67241024039373E-09
+ -4.60442333159400E-26  1.20765138258474E-25 -3.67241024039331E-09
+ -2.14267243632510E-25  7.39131113755879E-26 -3.67241024039286E-09
+ -1.69030803883517E-25  1.42777512957761E-25 -3.67241024039321E-09
+  1.51057397018961E-25  2.48396521835992E-25 -3.67241024039243E-09
+  4.50344913572571E-26  2.14671140415984E-25 -3.67241024039339E-09
+ -2.14469192024247E-25 -1.07032647620387E-25 -3.67241024039277E-09
+ -2.88180355008098E-25  3.53611633930750E-25 -3.67241024039329E-09
+  2.33452340847485E-25  2.31634805321856E-25 -3.67241024039308E-09
+ -3.36446020633141E-25 -2.14065295240774E-25 -3.67241024039309E-09
+ -1.60750919822317E-25 -4.96793043671984E-26 -3.67241024039306E-09
+ -3.51592150013384E-25 -1.46816480792493E-25 -3.67241024039350E-09
+ -3.30993414056253E-25 -1.26621641618835E-25 -3.67241024039329E-09
+ -2.77477090246060E-25 -2.61725115690606E-25 -3.67241024039340E-09
+ -1.00368350693080E-25  4.26111106564182E-26 -3.67241024039331E-09
+  1.36921009597401E-25  2.08006843488676E-26 -3.67241024039294E-09
+  2.72630328844382E-25  1.51865190585907E-25 -3.67241024039278E-09
+  8.21929954367877E-26  1.03397576569128E-25 -3.67241024039362E-09
+  1.89831488232384E-25 -2.36279618331797E-26 -3.67241024039302E-09
+  1.97101630334901E-25 -2.98277774594927E-25 -3.67241024039341E-09
+  5.49299625523495E-26 -1.08244337970806E-25 -3.67241024039318E-09
+ -3.55631117848116E-25 -1.29448919103147E-25 -3.67241024039325E-09
+ -3.33012897973619E-25  5.14968398928277E-26 -3.67241024039315E-09
+  7.47209049425342E-26 -9.87527635591871E-26 -3.67241024039352E-09
+ -6.68449176648077E-26 -1.64991836048785E-25 -3.67241024039297E-09
+ -2.57888096247611E-25  7.33072662003782E-26 -3.67241024039350E-09
+ -7.35092145921148E-26 -2.08410740272150E-25 -3.67241024039349E-09
+ -2.46377037918626E-25  9.14826214566703E-26 -3.67241024039333E-09
+  3.61487621208476E-25  1.77916533119926E-25 -3.67241024039312E-09
+ -2.01948391736579E-26  2.82929696822947E-25 -3.67241024039357E-09
+  1.47624274359439E-25  1.77310687944716E-25 -3.67241024039305E-09
+  1.16322273640270E-25  1.23794364134523E-25 -3.67241024039307E-09
+ -1.10667718671645E-25 -1.49441809885068E-25 -3.67241024039355E-09
+ -1.44191151699917E-25 -4.84676140167790E-26 -3.67241024039329E-09
+ -9.47137957244556E-26 -1.08042389579070E-25 -3.67241024039318E-09
+ -1.81753552562921E-25 -6.05845175209737E-28 -3.67241024039329E-09
+  6.05845175209737E-28  2.11036069364725E-26 -3.67241024039364E-09
+ -2.13055553282091E-26 -2.13055553282091E-25 -3.67241024039352E-09
+ -8.04764341070267E-26  4.24091622646816E-26 -3.67241024039343E-09
+  2.51223799320304E-25 -1.56812926183454E-25 -3.67241024039339E-09
+ -1.36719061205664E-25 -1.34901525680035E-25 -3.67241024039315E-09
+ -3.12818058799961E-25  7.08838854995392E-26 -3.67241024039374E-09
+  2.22850050281315E-25 -7.41150597673245E-26 -3.67241024039331E-09
+ -6.19981562631298E-26 -1.21673906021289E-25 -3.67241024039314E-09
+ -1.31266454628776E-26 -1.97404552922506E-25 -3.67241024039317E-09
+  6.41186143763638E-26  1.88114926902623E-25 -3.67241024039347E-09
+  3.99150996267348E-25  1.75796075006692E-25 -3.67241024039363E-09
+ -4.58422849242034E-26  1.89124668861306E-25 -3.67241024039306E-09
+ -2.40318586166529E-26  1.66203526399205E-25 -3.67241024039351E-09
+ -3.92789621927646E-26  1.20260267279133E-25 -3.67241024039339E-09
+  5.14968398928277E-27  3.65526589043208E-26 -3.67241024039286E-09
+  4.26817925935260E-25 -3.78653234506086E-26 -3.67241024039349E-09
+  3.78653234506086E-26  6.98741435408563E-26 -3.67241024039294E-09
+  1.02993679785655E-25 -8.54241697045729E-26 -3.67241024039304E-09
+ -3.38263556158770E-26  1.30458661061830E-25 -3.67241024039321E-09
+ -1.28237228752728E-26  2.41227353929344E-25 -3.67241024039295E-09
+  5.24056076556423E-26 -4.32169558316279E-26 -3.67241024039306E-09
+  1.57721693946268E-25 -1.21169035041947E-25 -3.67241024039339E-09
+  1.21169035041947E-25 -5.45260657688763E-26 -3.67241024039331E-09
+  5.46270399647446E-26  3.32205104406672E-26 -3.67241024039326E-09
+ -2.16993546920954E-25  2.78688780596479E-26 -3.67241024039330E-09
+  2.65663109329470E-25  7.81540276020561E-26 -3.67241024039303E-09
+  2.85756974307259E-26 -4.95783301713301E-26 -3.67241024039386E-09
+ -1.38738545123030E-25  7.14897306747490E-26 -3.67241024038983E-09
+ -7.14897306747490E-26  9.84498409715823E-26 -3.67241024040755E-09
+  4.84676140167790E-26  2.62532909257553E-26 -3.67241024033189E-09
+ -7.24994726334319E-26 -9.62284086624799E-26 -3.67241024065768E-09
+  1.95284094809272E-25  4.13994203059987E-26 -3.67241023925444E-09
+  1.05417060486494E-25  2.14166269436642E-25 -3.67241024529885E-09
+  2.77073193462586E-25 -4.84676140167790E-27 -3.67241021927662E-09
+  5.72523690573202E-26 -8.01735115194219E-26 -3.67241033124529E-09
+  1.72968797522380E-25  4.00867557597109E-26 -3.67240984970189E-09
+  1.42474590370157E-25 -5.44250915730080E-26 -3.67241191965898E-09
+ -9.24923634153532E-26  2.72630328844382E-26 -3.67240302615240E-09
+  1.19452473712186E-25  1.64789887657048E-25 -3.67244121793013E-09
+ -1.04609266919548E-25 -2.13964321044905E-25 -3.67227729050905E-09
+ -1.78724326686872E-26 -2.28201682662334E-26 -3.67298055619205E-09
+  1.06426802445177E-25 -7.77501308185829E-27 -3.66996497593654E-09
+  1.54692468070220E-25 -7.78511050144512E-26 -3.68288972557482E-09
+ -5.49299625523495E-26 -1.06830699228650E-25 -3.62752750801944E-09
+ -2.54555947783958E-25  2.94844651935405E-26 -3.86467195985321E-09
+  2.64249470587314E-25 -3.33214846365355E-26 -2.85145474916674E-09
+ -1.88619797881965E-25 -3.68555814919257E-26 -7.21816146023862E-09
+ -3.77643492547403E-26 -1.37476367674676E-25  1.09455213600853E-08
+ -1.42474590370157E-25 -2.09824379014306E-25 -8.41961845613521E-08
+ -2.02251314324184E-25 -4.02765872479433E-24 -3.61381822391411E-06
+ -4.64804418420910E-24 -2.27927032849573E-23 -1.30446904442429E-05
+ -2.28508644217774E-23 -4.00665609205373E-25 -2.04805546634357E-05
+  3.87740912134232E-25 -5.82128356084193E-23 -2.37828557507934E-05
+ -3.93169284904111E-23 -1.23043116117263E-23 -2.47708212411996E-05
+  1.23043116117263E-23  4.96308367531817E-24 -2.49723748419041E-05
+ -4.95015897824702E-24 -6.91471293306047E-25 -2.49869805498920E-05
+ -3.53490464895708E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  323905       1       0       0       0     -69       0       2
+                  
+  323893
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  7.08838854995392E-26 -3.67241024039375E-09 -2.84747232348576E-25
+  3.67241024039331E-09  8.23949438285242E-26 -3.67241024039275E-09
+  1.29852815886620E-25  9.02709311062508E-26 -3.67241024039362E-09
+  4.96793043671984E-25 -3.97838331721061E-26 -3.67241024039363E-09
+ -1.69232752275253E-25 -1.43383358132971E-25 -3.67241024039299E-09
+ -2.36077669940061E-25  2.20527643776344E-25 -3.67241024039397E-09
+ -2.20527643776344E-25  1.55500261637166E-26 -3.67241024039321E-09
+  9.99644539096066E-26 -7.87598727772658E-26 -3.67241024039322E-09
+  1.69232752275253E-25 -4.32169558316279E-26 -3.67241024039331E-09
+  4.32169558316279E-26 -7.89618211690024E-26 -3.67241024039373E-09
+ -2.14267243632510E-25  3.66132434218418E-25 -3.67241024039362E-09
+  2.28201682662334E-26 -1.51865190585907E-25 -3.67241024039332E-09
+ -4.35198784192328E-25  1.50855448627225E-25 -3.67241024039288E-09
+ -2.49204315402939E-25 -1.78926275078609E-25 -3.67241024039330E-09
+ -5.08102153609233E-25  9.04728794979874E-26 -3.67241024039334E-09
+ -9.04728794979874E-26 -3.33820691540565E-25 -3.67241024039332E-09
+ -8.40105309624169E-25  1.33891783721352E-25 -3.67241024039319E-09
+  1.59741177863634E-25 -2.30625063363173E-25 -3.67241024039297E-09
+ -6.30078982218127E-26 -4.26515003347655E-25 -3.67241024039357E-09
+  1.32680093370932E-25 -2.38904947424373E-25 -3.67241024039348E-09
+ -2.38904947424373E-25 -1.39546338689976E-25 -3.67241024039327E-09
+ -3.75624008630037E-25  2.42338070083895E-26 -3.67241024039276E-09
+  2.00130856210950E-25 -7.12877822830124E-26 -3.67241024039360E-09
+ -4.28130590481548E-26 -1.66001578007468E-25 -3.67241024039344E-09
+ -1.27429435185781E-25  4.44286461820474E-26 -3.67241024039325E-09
+ -2.20931540559817E-25  1.91043178582804E-25 -3.67241024039308E-09
+  1.02589783002182E-25 -1.85792520397653E-25 -3.67241024039344E-09
+ -2.81314109689055E-25 -7.89618211690024E-26 -3.67241024039330E-09
+ -2.14873088807720E-25  8.76456020136753E-26 -3.67241024039292E-09
+  1.58933384296688E-25 -1.42979461349498E-25 -3.67241024039301E-09
+ -5.69090567913680E-25  1.06628750836914E-25 -3.67241024039286E-09
+ -6.58351757061248E-26  1.79734068645555E-25 -3.67241024039320E-09
+  2.41732224908685E-25  1.47422325967703E-26 -3.67241024039286E-09
+  1.49239861493332E-25  5.69494464697153E-26 -3.67241024039355E-09
+  2.68187464226177E-25 -1.10667718671645E-25 -3.67241024039343E-09
+ -3.44120059519131E-25  1.45806738833810E-25 -3.67241024039322E-09
+  1.47624274359439E-25  8.01735115194219E-26 -3.67241024039388E-09
+ -5.79591884283982E-26 -3.62901259950633E-25 -3.67241024039331E-09
+  6.94702467573832E-26 -1.16322273640270E-25 -3.67241024039306E-09
+ -2.24162714827603E-26  4.70539752746229E-26 -3.67241024039339E-09
+ -4.70539752746229E-26 -1.05417060486494E-25 -3.67241024039343E-09
+ -1.88215901098492E-25 -4.84676140167790E-26 -3.67241024039339E-09
+ -9.47137957244556E-26 -1.35103474071771E-25 -3.67241024039343E-09
+  1.35305422463508E-25  2.36279618331797E-26 -3.67241024039350E-09
+ -1.70646391017409E-25 -1.49441809885068E-25 -3.67241024039354E-09
+  3.93799363886329E-27 -6.35127692011541E-26 -3.67241024039335E-09
+ -8.03754599111585E-26  2.11439966148198E-25 -3.67241024039310E-09
+  2.33250392455749E-25 -2.27393889095388E-25 -3.67241024039292E-09
+  8.64339116632558E-26  1.30458661061830E-25 -3.67241024039311E-09
+  8.78475504054119E-27 -2.70610844927016E-26 -3.67241024039364E-09
+  2.69601102968333E-26  2.37289360290480E-26 -3.67241024039318E-09
+  1.33790809525484E-25 -1.15413505877455E-25 -3.67241024039350E-09
+  1.15312531681587E-25 -6.42195885722321E-26 -3.67241024039357E-09
+ -9.62284086624799E-26  8.85543697764899E-26 -3.67241024039304E-09
+  7.35092145921148E-26  7.79520792103195E-26 -3.67241024039331E-09
+ -2.41530276516949E-25 -6.66429692730711E-27 -3.67241024039336E-09
+  1.71656132976092E-25 -9.28962601988264E-26 -3.67241024039339E-09
+  9.28962601988264E-26 -4.01877299555792E-26 -3.67241024039320E-09
+  1.65698655419863E-25 -7.13887564788807E-26 -3.67241024039328E-09
+  7.14897306747490E-26  2.32240650497066E-26 -3.67241024039348E-09
+ -2.32240650497066E-26 -1.25914822247757E-25 -3.67241024039305E-09
+ -1.67718139337229E-25  1.25208002876679E-26 -3.67241024039314E-09
+ -1.31771325608118E-25  9.13816472608020E-26 -3.67241024039297E-09
+ -9.14826214566703E-26 -7.05809629119344E-26 -3.67241024039318E-09
+ -4.54383881407303E-26 -2.39005921620241E-25 -3.67241024039338E-09
+ -2.33755263435090E-25 -3.83701944299500E-26 -3.67241024039339E-09
+  3.83701944299500E-26 -5.45260657688763E-26 -3.67241024039319E-09
+ -1.27530409381650E-25 -1.50350577647883E-25 -3.67241024039325E-09
+ -3.32205104406672E-26 -2.22042256714369E-25 -3.67241024039298E-09
+  5.45260657688763E-27 -2.84747232348576E-26 -3.67241024039369E-09
+ -1.58226564925610E-25  1.00873221672421E-25 -3.67241024039133E-09
+  1.92759739912565E-25 -1.75897049202560E-25 -3.67241024040141E-09
+ -4.11166925575675E-25  5.14968398928277E-26 -3.67241024035965E-09
+  2.42035147496290E-25  2.36279618331797E-26 -3.67241024053529E-09
+  1.23188518959313E-25 -4.73568978622278E-26 -3.67241023979269E-09
+  4.73568978622278E-26 -7.37111629838513E-27 -3.67241024293143E-09
+ -1.88720772077833E-25  6.35127692011541E-26 -3.67241022968243E-09
+ -2.61119270515397E-25 -2.00534752994423E-25 -3.67241028554567E-09
+  1.21169035041947E-27 -7.26004468293002E-26 -3.67241005027592E-09
+ -7.43170081590611E-26  9.56225634872702E-26 -3.67241103992250E-09
+  1.42474590370157E-25 -1.76704842769507E-26 -3.67240688233238E-09
+  1.76704842769507E-26 -6.08874401085786E-26 -3.67242432529182E-09
+ -1.15413505877455E-25 -1.51259345410698E-25 -3.67235124816126E-09
+ -2.02453262715920E-25 -1.28944048123806E-25 -3.67265694278546E-09
+ -1.78724326686872E-26 -8.48183245293632E-27 -3.67138022513029E-09
+ -5.48289883564812E-26 -7.87598727772658E-27 -3.67670326507487E-09
+  7.87598727772658E-27 -6.93692725615149E-26 -3.65455170399297E-09
+ -1.43787254916444E-25 -3.84711686258183E-26 -3.74656700307015E-09
+  3.83701944299500E-26 -4.27120848522865E-26 -3.36544160750021E-09
+  4.27120848522865E-26 -7.01770661284612E-26 -4.94496986160892E-09
+  7.00760919325929E-26  4.94773559754619E-26  1.50941977348801E-09
+ -4.55393623365986E-26  2.10026327406042E-26 -2.62460699675534E-08
+ -2.10026327406042E-26 -1.40394521935270E-24 -2.38137905264311E-06
+ -1.99848128462519E-24  1.58973773975035E-24 -1.06758299602607E-05
+  7.42523846737054E-24 -6.96641172134503E-24 -1.77464553400661E-05
+  6.96641172134503E-24 -1.98523347012727E-23 -2.12523037521504E-05
+ -4.16175245690742E-24 -9.64699389389969E-23 -2.25349190595267E-05
+ -7.33605805757966E-23  3.02696405406124E-23 -2.29138857114356E-05
+  2.58752435364244E-23  3.45089411799466E-23 -2.29966672954561E-05
+  3.31001491991922E-23 -3.98080669791145E-24 -2.30030697359713E-05
+  3.98080669791145E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  323653       1       0       0       0     -70       0       2
+                  
+  323635
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -3.91174034793754E-25 -3.67241024039354E-09  1.77714584728190E-26
+  3.67241024039354E-09  2.78082935421269E-25 -3.67241024039319E-09
+  9.69352280335579E-26  3.01105052079239E-25 -3.67241024039363E-09
+ -9.02709311062508E-26 -1.30862557845303E-25 -3.67241024039343E-09
+ -7.81540276020561E-26 -1.43383358132971E-25 -3.67241024039299E-09
+ -2.36077669940061E-25  2.20527643776344E-25 -3.67241024039353E-09
+  7.31053178086416E-26  1.11071615455118E-26 -3.67241024039385E-09
+  1.04407318527811E-25 -7.51248017260074E-26 -3.67241024039353E-09
+ -1.55500261637166E-26 -4.32169558316279E-26 -3.67241024039278E-09
+  1.52269087369381E-25 -7.89618211690024E-26 -3.67241024039384E-09
+ -2.14267243632510E-25 -2.64552393174919E-26 -3.67241024039374E-09
+ -2.66975773875757E-25  1.42979461349498E-25 -3.67241024039352E-09
+  3.47149285395179E-25  5.29104786349837E-26 -3.67241024039286E-09
+ -5.29104786349837E-26 -7.89618211690024E-26 -3.67241024039384E-09
+  1.78926275078609E-25 -3.95213002628485E-25 -3.67241024039334E-09
+ -4.85483933734736E-25  3.50986304838174E-25 -3.67241024039320E-09
+  3.39071349725716E-25 -6.17962078713932E-26 -3.67241024039341E-09
+ -4.28130590481548E-26 -1.24400209309733E-25 -3.67241024039331E-09
+  1.24400209309733E-25 -2.30625063363173E-25 -3.67241024039322E-09
+  1.52672984152854E-25  4.88715108002521E-26 -3.67241024039322E-09
+  6.03825691292371E-26 -3.53409685539013E-26 -3.67241024039351E-09
+ -3.69161660094466E-25  1.15110583289850E-25 -3.67241024039307E-09
+ -2.62532909257553E-27 -2.51021850928568E-25 -3.67241024039330E-09
+ -3.36244072241404E-25  2.96864135852771E-26 -3.67241024039305E-09
+ -2.96864135852771E-26  1.13494996155957E-25 -3.67241024039296E-09
+ -1.13494996155957E-25 -4.64481300994132E-27 -3.67241024039314E-09
+ -2.88988148575045E-25 -1.96899681943165E-25 -3.67241024039370E-09
+ -9.65313312500848E-26 -7.87598727772658E-26 -3.67241024039331E-09
+ -3.36446020633141E-25 -1.33689835329615E-25 -3.67241024039293E-09
+  1.02993679785655E-26  5.27085302432471E-26 -3.67241024039302E-09
+  1.15918376856796E-25  4.14398099843460E-25 -3.67241024039286E-09
+  3.39879143292662E-25 -9.89547119509237E-26 -3.67241024039314E-09
+ -2.32644547280539E-25  1.47422325967703E-26 -3.67241024039297E-09
+  1.49239861493332E-25 -1.05820957269967E-25 -3.67241024039346E-09
+  1.05619008878231E-25 -1.10667718671645E-25 -3.67241024039354E-09
+ -3.44120059519131E-25  6.60371240978613E-26 -3.67241024039300E-09
+  2.01948391736579E-25  1.36921009597401E-25 -3.67241024039366E-09
+  2.12045811323408E-26 -1.27833331969255E-25 -3.67241024039365E-09
+ -1.65597681223995E-25 -1.36719061205664E-25 -3.67241024039328E-09
+ -1.40556080648659E-25 -5.08909947176179E-26 -3.67241024039339E-09
+ -2.29817269796227E-25  2.42338070083895E-27 -3.67241024039329E-09
+  1.49239861493332E-25 -2.21537385735027E-25 -3.67241024039339E-09
+ -7.20955758499587E-26  1.85590572005916E-25 -3.67241024039334E-09
+  2.56676405897192E-25  9.73391248170311E-26 -3.67241024039339E-09
+  4.92754075837253E-26  1.69232752275253E-25 -3.67241024039342E-09
+  2.72327406256777E-25 -6.36137433970224E-26 -3.67241024039349E-09
+ -8.03754599111585E-26  1.57519745554532E-26 -3.67241024039310E-09
+ -1.58226564925610E-25 -1.59539229471897E-26 -3.67241024039340E-09
+ -1.36618087009796E-25 -1.48331093730517E-25 -3.67241024039297E-09
+ -2.84646258152708E-25  2.77477090246060E-25 -3.67241024039361E-09
+  1.53783700307405E-25  2.19214979230057E-25 -3.67241024039328E-09
+  2.31533831125988E-25  1.41262900019737E-25 -3.67241024039314E-09
+  1.52269087369381E-25  9.62284086624799E-26 -3.67241024039352E-09
+ -9.62284086624799E-26 -1.05518034682363E-25 -3.67241024039336E-09
+  1.05619008878231E-25  1.10061873496436E-25 -3.67241024039344E-09
+ -1.43585306524708E-25  9.12806730649337E-26 -3.67241024039304E-09
+  7.39131113755879E-26  3.92789621927646E-26 -3.67241024039341E-09
+  2.54253025196353E-25 -5.14968398928277E-27 -3.67241024039341E-09
+  1.30458661061830E-25 -9.73391248170311E-26 -3.67241024039339E-09
+  2.67177722267494E-25  2.33250392455749E-26 -3.67241024039373E-09
+ -2.32240650497066E-26  5.51319109440861E-26 -3.67241024039308E-09
+  6.55322531185199E-26  3.37253814200087E-26 -3.67241024039282E-09
+  2.59806605969109E-25  1.30458661061830E-25 -3.67241024039342E-09
+ -1.28237228752728E-26 -1.90033436624121E-25 -3.67241024039306E-09
+ -1.03599524960865E-25 -8.38085825706803E-26 -3.67241024039328E-09
+  8.37076083748120E-26 -1.06022905661704E-25 -3.67241024039340E-09
+  2.19013030838320E-25  7.19946016540904E-26 -3.67241024039353E-09
+ -1.21169035041947E-27 -6.45225111598370E-26 -3.67241024039358E-09
+ -4.53374139448620E-26  1.25712873856020E-25 -3.67241024039309E-09
+  1.67920087728965E-25 -7.30043436127733E-26 -3.67241024039282E-09
+  2.86564767874206E-25  9.35021053740361E-26 -3.67241024039386E-09
+ -9.35021053740361E-26 -1.30054764278357E-25 -3.67241024039184E-09
+  2.62532909257553E-26  1.00570299084816E-25 -3.67241024039684E-09
+  4.63471559035449E-26  2.36279618331797E-26 -3.67241024039191E-09
+  1.23087544763445E-25  1.00974195868290E-27 -3.67241024033383E-09
+ -1.00974195868290E-27 -7.57306469012171E-27 -3.67241024092811E-09
+ -1.39243416102371E-25 -3.33214846365355E-26 -3.67241023687463E-09
+ -3.11000523274332E-25  9.13816472608020E-26 -3.67241026083658E-09
+ -9.13816472608020E-26 -1.78017507315794E-25 -3.67241012933954E-09
+  3.12010265233015E-26 -1.03498550764997E-25 -3.67241081869888E-09
+  4.79627430374375E-26  3.53409685539013E-26 -3.67240731517883E-09
+  1.11475512238592E-25 -6.57342015102565E-26 -3.67242472581844E-09
+ -8.10822792822365E-26  5.56367819234275E-26 -3.67233966420491E-09
+  3.08981039356966E-26  3.06961555439600E-26 -3.67274972667956E-09
+ -5.40211947895349E-26 -1.06325828249309E-25 -3.67079412118892E-09
+ -4.03896783473158E-26  9.79449699922408E-26 -3.68003817324006E-09
+  4.89724849961204E-26  1.93870456067116E-26 -3.63667281930962E-09
+  6.09884143044469E-26  2.09016585447359E-26 -3.83893444180349E-09
+ -2.09016585447359E-26 -5.69494464697153E-26 -2.90324120563540E-09
+  5.69494464697153E-26 -1.14605712310509E-25 -7.26494328943760E-09
+ -1.08143363774938E-25 -2.05987359571311E-26  1.20013250860920E-08
+ -4.28130590481548E-26 -8.32835167521652E-25 -1.22383101138835E-06
+  8.32835167521652E-25  7.27014210251684E-24 -8.29640894066741E-06
+  6.48819792971281E-24  2.14032983498096E-23 -1.53829689953952E-05
+  1.76292868050364E-23  4.32977351883225E-23 -1.92533983599839E-05
+  9.97528119950667E-23 -1.76034374108941E-23 -2.08622916601194E-05
+  1.75775880167518E-23 -2.58493941422821E-24 -2.14340684376363E-05
+  6.56316117272543E-23  4.00665609205373E-24 -2.16101573362036E-05
+  2.32644547280539E-24  3.13294657004459E-23 -2.16507499071785E-05
+  2.52677827740808E-23  1.00941884125612E-23 -2.16540105355530E-05
+  8.46567658159739E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  323455       1       0       0       0     -71       0       2
+                  
+  323443
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  1.07234596012123E-25 -3.67241024039310E-09  1.06830699228650E-25
+  3.67241024039376E-09  6.58351757061248E-26 -3.67241024039340E-09
+ -6.58351757061248E-26 -1.49441809885068E-26 -3.67241024039320E-09
+  1.49441809885068E-26 -2.21941282518500E-25 -3.67241024039321E-09
+ -1.55904158420639E-25  4.05916267390524E-26 -3.67241024039343E-09
+ -4.05916267390524E-26  1.12485254197275E-25 -3.67241024039364E-09
+ -4.06118215782260E-25 -1.89023694665438E-25 -3.67241024039342E-09
+ -1.04407318527811E-25  1.17130067207216E-26 -3.67241024039333E-09
+ -1.24198260917996E-25 -2.41934173300422E-25 -3.67241024039309E-09
+ -3.45331749869550E-25 -1.74685358852141E-25 -3.67241024039344E-09
+ -2.12449708106881E-25 -1.21774880217157E-25 -3.67241024039351E-09
+ -1.71656132976092E-25  2.40924431341739E-25 -3.67241024039311E-09
+ -4.42266977903108E-26 -4.46305945737840E-26 -3.67241024039350E-09
+ -5.37182722019300E-26  1.15716428465060E-25 -3.67241024039363E-09
+ -1.15716428465060E-25 -1.11071615455118E-26 -3.67241024039321E-09
+ -1.91850972149750E-25  5.73533432531884E-26 -3.67241024039352E-09
+ -5.75552916449250E-26  3.93799363886329E-26 -3.67241024039277E-09
+ -3.93799363886329E-26 -3.06961555439600E-26 -3.67241024039289E-09
+ -1.56711951987585E-25  1.55702210028902E-25 -3.67241024039335E-09
+ -1.55904158420639E-25  2.61725115690606E-25 -3.67241024039307E-09
+  2.50416005753358E-25  2.77679038637796E-25 -3.67241024039326E-09
+ -1.66809371574414E-25 -3.66940227785364E-25 -3.67241024039340E-09
+ -2.20123746992871E-25  4.26111106564182E-26 -3.67241024039374E-09
+  2.51021850928568E-25  2.07602946705203E-25 -3.67241024039317E-09
+  8.58280664880461E-26 -1.51259345410698E-25 -3.67241024039295E-09
+ -3.18674562160322E-25  7.24994726334319E-26 -3.67241024039345E-09
+  1.02387834610446E-25  9.65313312500848E-26 -3.67241024039340E-09
+  2.36279618331797E-26  9.30982085905629E-26 -3.67241024039330E-09
+  2.50819902536831E-25 -1.33689835329615E-25 -3.67241024039326E-09
+  1.02993679785655E-26 -3.38869401333980E-25 -3.67241024039326E-09
+ -5.42231431812715E-25 -7.49228533342708E-26 -3.67241024039296E-09
+ -1.77714584728190E-25  1.11879409022065E-25 -3.67241024039331E-09
+ -2.77679038637796E-25 -1.65193784440522E-25 -3.67241024039352E-09
+ -2.57888096247611E-25 -7.87598727772658E-27 -3.67241024039383E-09
+  1.70444442625673E-25  6.78546596234906E-26 -3.67241024039311E-09
+ -6.78546596234906E-26  4.78617688415692E-26 -3.67241024039295E-09
+ -4.80637172333058E-26 -2.54656921979826E-25 -3.67241024039327E-09
+ -1.74685358852141E-25  2.82929696822947E-25 -3.67241024039324E-09
+  1.47624274359439E-25 -2.71216690102226E-25 -3.67241024039328E-09
+ -1.77108739552980E-25  1.66001578007468E-25 -3.67241024039350E-09
+  1.27631383577518E-25  1.44191151699917E-25 -3.67241024039306E-09
+  1.49239861493332E-25  9.49157441161921E-26 -3.67241024039312E-09
+  1.98717217468794E-25 -1.08042389579070E-25 -3.67241024039306E-09
+ -3.69565556877940E-26 -6.05845175209737E-28 -3.67241024039362E-09
+  6.05845175209737E-28  7.13887564788807E-26 -3.67241024039357E-09
+  7.66394146640317E-26 -2.39005921620241E-25 -3.67241024039337E-09
+ -2.04169824045681E-25 -5.55358077275592E-26 -3.67241024039350E-09
+  6.42195885722321E-26 -1.29448919103147E-25 -3.67241024039350E-09
+ -1.14100841331167E-26 -2.04573720829155E-25 -3.67241024039329E-09
+ -8.89582665599631E-26  8.18900728491828E-26 -3.67241024039368E-09
+  1.93668507675379E-25 -2.31533831125988E-25 -3.67241024039328E-09
+  7.42160339631928E-26  8.02744857152902E-26 -3.67241024039314E-09
+ -8.03754599111585E-26 -1.97404552922506E-25 -3.67241024039338E-09
+  6.41186143763638E-26  9.01699569103825E-26 -3.67241024039314E-09
+  2.03463004674603E-25 -5.28095044391154E-26 -3.67241024039349E-09
+  1.92860714108433E-26 -1.71656132976092E-25 -3.67241024039282E-09
+ -2.50416005753358E-25  2.00635727190291E-25 -3.67241024039375E-09
+  9.29972343946946E-26 -1.65698655419863E-25 -3.67241024039359E-09
+  1.65698655419863E-25  4.03896783473158E-28 -3.67241024039306E-09
+  1.69232752275253E-25 -3.78653234506086E-26 -3.67241024039347E-09
+ -1.33386912742010E-25  3.24127168737209E-26 -3.67241024039315E-09
+  2.61119270515397E-25 -1.61962610172736E-25 -3.67241024039340E-09
+ -1.31670351412250E-25  9.13816472608020E-26 -3.67241024039297E-09
+  2.62532909257553E-26  1.43383358132971E-25 -3.67241024039338E-09
+  1.50249603452015E-25  9.52186667037970E-26 -3.67241024039340E-09
+  1.98212346489452E-25 -3.83701944299500E-26 -3.67241024039306E-09
+  3.83701944299500E-26 -1.10263821888172E-25 -3.67241024039353E-09
+ -3.65425614847340E-25  2.42338070083895E-27 -3.67241024039304E-09
+  1.07537518599728E-25 -3.19886252510741E-25 -3.67241024039429E-09
+ -2.67278696463362E-25  2.20527643776344E-25 -3.67241024038884E-09
+  1.79734068645555E-25 -3.35840175457931E-25 -3.67241024041174E-09
+ -1.46109661421415E-25 -1.29953790082489E-25 -3.67241024031090E-09
+ -1.63578197306629E-25 -1.61558713389263E-27 -3.67241024075875E-09
+  2.37693257073954E-25  2.76669296679113E-26 -3.67241023877604E-09
+  1.19048576928713E-25  4.87705366043838E-26 -3.67241024754211E-09
+  1.45806738833810E-25 -5.65455496862421E-26 -3.67241020882449E-09
+  5.65455496862421E-26 -1.30357686865962E-25 -3.67241037967676E-09
+ -1.63174300523156E-25  4.24091622646816E-26 -3.67240962639660E-09
+  1.04306344331943E-25 -2.61523167298870E-26 -3.67241294477900E-09
+ -2.13459450065564E-25  1.11071615455118E-27 -3.67239833845266E-09
+ -9.22904150236166E-26  1.78017507315794E-25 -3.67246257907850E-09
+  2.05280540200233E-25 -1.58731435904951E-25 -3.67218025945504E-09
+ -1.75695100810824E-26 -1.02286860414577E-25 -3.67342003876045E-09
+ -1.80743810604238E-26 -1.71454184584356E-25 -3.66797977224315E-09
+ -4.15609790193880E-25  2.62532909257553E-26 -3.69183616047043E-09
+  5.72523690573202E-26  4.17023428936036E-26 -3.58732216990726E-09
+  2.30221166579700E-26 -4.74578720580961E-26 -4.04535763371071E-09
+ -1.79936017037292E-25 -1.14403763918772E-25 -2.04852851387296E-09
+ -9.84498409715823E-26  1.54086622895010E-25 -1.09323357115035E-08
+  1.48230119534649E-25  4.18538041874060E-26  2.57837085903936E-08
+ -4.18033170894719E-26  1.14908634898113E-25 -4.43951352575007E-07
+ -1.14706686506377E-25  8.17487089749672E-25 -5.91945509373330E-06
+ -2.45569244351680E-24  1.59749255799303E-23 -1.31189825371438E-05
+  1.15417544845290E-23 -3.12260681238768E-23 -1.74735215059716E-05
+ -7.66434536318665E-23 -6.20902447297616E-23 -1.94715448771937E-05
+ -3.20790981305721E-23 -3.85155972720004E-24 -2.02675213940120E-05
+ -4.02992054678178E-23 -7.52217369540410E-23 -2.05577972547547E-05
+ -9.07572228335525E-23  3.49742302745077E-23 -2.06512160672106E-05
+  3.94720248552648E-23  1.16322273640270E-23 -2.06736310268074E-05
+  2.45052256468834E-23 -5.79349546213898E-24 -2.06754791963205E-05
+ -9.63213049226787E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  323275       1       0       0       0     -72       0       2
+                  
+  323245
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -3.91174034793754E-25 -3.67241024039265E-09 -5.69292516305416E-25
+  3.67241024039396E-09  1.72060029759565E-25 -3.67241024039275E-09
+ -1.71858081367829E-25  3.91375983185490E-25 -3.67241024039320E-09
+  4.06320164173997E-25 -2.21941282518500E-25 -3.67241024039277E-09
+ -1.55904158420639E-25 -6.34117950052858E-26 -3.67241024039276E-09
+ -5.23854128164686E-25  1.12485254197275E-25 -3.67241024039311E-09
+  2.68591361009650E-25 -8.68378084467290E-26 -3.67241024039398E-09
+ -9.12806730649337E-26  1.06224854053441E-25 -3.67241024039364E-09
+  2.77880987029533E-25 -1.40959977432132E-25 -3.67241024039319E-09
+ -1.52471035761117E-25 -7.91637695607390E-26 -3.67241024039331E-09
+ -3.08173245790020E-25  2.64552393174919E-25 -3.67241024039265E-09
+  2.19921798601135E-25 -2.48800418619465E-25 -3.67241024039398E-09
+ -3.38263556158770E-25 -1.41969719390815E-25 -3.67241024039318E-09
+ -3.46745388611706E-25  4.11368873967411E-25 -3.67241024039322E-09
+  3.75624008630037E-25 -1.99525011035740E-25 -3.67241024039312E-09
+ -9.41079505492458E-26  6.01806207375005E-26 -3.67241024039288E-09
+ -2.50416005753358E-25  3.22915478386790E-25 -3.67241024039350E-09
+  5.49299625523495E-26 -1.20361241475001E-25 -3.67241024039268E-09
+ -6.70468660565442E-26  5.29104786349837E-26 -3.67241024039306E-09
+  5.33951547751515E-25  1.41161925823869E-25 -3.67241024039322E-09
+ -4.34592939017118E-25 -1.26621641618835E-25 -3.67241024039384E-09
+ -1.66809371574414E-25 -2.83737490389894E-25 -3.67241024039308E-09
+ -1.91043178582804E-25  2.46377037918626E-25 -3.67241024039302E-09
+  1.60952868214053E-25 -7.71442856433732E-26 -3.67241024039304E-09
+ -3.83701944299500E-26 -2.58897838206294E-25 -3.67241024039316E-09
+ -3.49370717704282E-26 -1.90033436624121E-25 -3.67241024039352E-09
+ -2.78486832204742E-25 -1.96899681943165E-25 -3.67241024039283E-09
+ -9.65313312500848E-26 -2.74447864370011E-25 -3.67241024039320E-09
+ -1.40758029040396E-25  1.47220377575966E-25 -3.67241024039302E-09
+ -1.47220377575966E-25 -3.38869401333980E-25 -3.67241024039347E-09
+ -5.42231431812715E-25 -8.90592407558313E-26 -3.67241024039303E-09
+ -4.57211158891615E-25 -1.81551604171185E-25 -3.67241024039341E-09
+ -1.49845706668542E-25 -1.49441809885068E-25 -3.67241024039335E-09
+ -1.44191151699917E-25 -3.34426536715775E-25 -3.67241024039367E-09
+ -3.83499995907764E-25  6.78546596234906E-26 -3.67241024039366E-09
+ -6.78546596234906E-26  9.14826214566703E-26 -3.67241024039365E-09
+  3.61487621208476E-25  1.99928907819213E-26 -3.67241024039339E-09
+ -1.55702210028902E-25  3.02720639213132E-25 -3.67241024039358E-09
+  2.65158238350128E-25 -1.16322273640270E-25 -3.67241024039306E-09
+ -2.24162714827603E-26  8.94631375393045E-26 -3.67241024039328E-09
+  5.08909947176179E-26 -5.14968398928277E-26 -3.67241024039328E-09
+ -1.00368350693080E-25 -4.84676140167790E-26 -3.67241024039367E-09
+ -9.47137957244556E-26 -1.35103474071771E-25 -3.67241024039365E-09
+ -1.58327539121478E-25  9.73391248170311E-26 -3.67241024039351E-09
+ -9.71371764252945E-26 -1.49441809885068E-25 -3.67241024039361E-09
+  3.83701944299500E-27 -8.94631375393045E-26 -3.67241024039331E-09
+ -5.45260657688763E-26  1.13595970351826E-25 -3.67241024039354E-09
+  3.75624008630037E-26 -1.29448919103147E-25 -3.67241024039329E-09
+ -1.14100841331167E-26  1.92860714108433E-26 -3.67241024039319E-09
+ -1.93870456067116E-26  8.18900728491828E-26 -3.67241024039346E-09
+  1.93668507675379E-25 -6.05845175209737E-27 -3.67241024039326E-09
+  1.42373616174288E-25 -1.15413505877455E-25 -3.67241024039339E-09
+  1.15312531681587E-25 -1.97404552922506E-25 -3.67241024039338E-09
+  6.41186143763638E-26  9.01699569103825E-26 -3.67241024039336E-09
+  2.03463004674603E-25  2.08612688663886E-25 -3.67241024039361E-09
+ -4.51354655531254E-26  1.21875854413025E-25 -3.67241024039308E-09
+  1.71656132976092E-25  1.37223932185005E-25 -3.67241024039319E-09
+  1.56510003595849E-25 -4.02887041514475E-26 -3.67241024039329E-09
+  4.03896783473158E-26  3.65526589043208E-26 -3.67241024039351E-09
+  4.26817925935260E-25  1.21068060846079E-25 -3.67241024039338E-09
+  1.21169035041947E-27  1.53076880936327E-25 -3.67241024039297E-09
+ -3.23117426778526E-26 -4.44286461820474E-27 -3.67241024039295E-09
+  1.23592415742786E-25  1.30458661061830E-25 -3.67241024039308E-09
+ -1.29246970711411E-26 -2.48093599248387E-25 -3.67241024039327E-09
+ -1.61558713389263E-25 -2.96258290677561E-25 -3.67241024039328E-09
+ -2.90906658296542E-25 -2.19013030838320E-25 -3.67241024039339E-09
+ -7.45189565507977E-26 -1.94476301242326E-25 -3.67241024039308E-09
+  1.24198260917996E-26  4.53374139448620E-26 -3.67241024039425E-09
+ -1.19048576928713E-25  3.82692202340817E-26 -3.67241024038944E-09
+  2.55262767155036E-25  1.75998023398429E-25 -3.67241024041189E-09
+  3.74614266671354E-26 -4.95783301713301E-26 -3.67241024031195E-09
+ -1.38738545123030E-25  1.38334648339557E-26 -3.67241024074168E-09
+ -2.21234463147422E-25 -4.62461817076766E-26 -3.67241023889439E-09
+  1.93163636696038E-25  2.36279618331797E-26 -3.67241024683450E-09
+ -7.79520792103195E-26  1.00974195868290E-27 -3.67241021273262E-09
+  1.93567533479511E-25 -5.67474980779787E-26 -3.67241035907448E-09
+ -2.36885463507007E-25  6.44215369639687E-26 -3.67240973161857E-09
+ -1.66203526399205E-25 -4.74578720580961E-27 -3.67241241950008E-09
+ -4.76598204498326E-26  7.16916790664856E-26 -3.67240091578795E-09
+  2.21840308322632E-25  1.00974195868290E-27 -3.67245010328658E-09
+ -1.47725248555308E-25 -7.06819371078027E-26 -3.67223999333808E-09
+ -4.35198784192328E-26 -5.60406787069007E-26 -3.67313659561857E-09
+ -1.20260267279133E-25  9.89547119509237E-26 -3.66931456831474E-09
+  1.34396654700693E-25 -3.74614266671354E-26 -3.68558986379105E-09
+ -1.71050287800882E-25 -6.50273821391784E-26 -3.61637372058597E-09
+ -1.44897971070995E-25 -5.83630852118713E-26 -3.91057079186339E-09
+ -2.36279618331797E-26 -2.19114005034188E-26 -2.66488595451117E-09
+ -1.24299235113864E-25 -5.60406787069007E-26 -7.99200438801827E-09
+ -1.71858081367829E-25  3.58458395332428E-26  1.38268645244581E-08
+ -3.59468137291111E-26  1.70242494233936E-25 -1.06378433713651E-07
+ -1.70242494233936E-25 -5.39444544006750E-24 -3.63018616491496E-06
+ -3.69646336234634E-24  8.59492355230880E-25 -1.08242147884387E-05
+ -4.32977351883225E-25 -1.61946454301397E-23 -1.57718481429636E-05
+ -1.15934532728135E-23  1.90510034828619E-23 -1.82435119926449E-05
+  1.82496722644512E-23  6.69757802226530E-23 -1.93068569886447E-05
+  8.88702170611659E-23 -4.99927282711736E-23 -1.97331217072005E-05
+ -5.78767934845697E-23 -3.71714287766017E-23 -1.98931622614145E-05
+ -2.24889729037854E-23  5.14402943431414E-23 -1.99462274840331E-05
+  6.21419435180462E-23 -2.74003577908190E-24 -1.99592671714597E-05
+  2.75296047615305E-24 -5.96151652406381E-24 -1.99603584460771E-05
+  1.13091099372484E-25
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  323095       1       0       0       0     -73       0       2
+                  
+  323083
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -1.06628750836914E-25 -3.67241024039288E-09  1.06628750836914E-25
+  3.67241024039353E-09  2.78082935421269E-25 -3.67241024039296E-09
+  5.21228799072110E-25 -1.05417060486494E-25 -3.67241024039340E-09
+  1.05215112094758E-25  7.83559759937927E-26 -3.67241024039277E-09
+ -7.81540276020561E-26 -6.34117950052858E-26 -3.67241024039298E-09
+ -1.44191151699917E-25 -7.79520792103195E-26 -3.67241024039311E-09
+ -2.15276985591193E-25  1.09052131537753E-26 -3.67241024039298E-09
+ -1.89023694665438E-25 -1.73069771718248E-25 -3.67241024039352E-09
+ -2.11238017756462E-25  2.53041334845934E-25 -3.67241024039297E-09
+ -5.16987882845642E-26 -2.72630328844382E-25 -3.67241024039340E-09
+ -1.14504738114640E-25 -2.19517901817661E-25 -3.67241024039405E-09
+ -7.37111629838513E-26 -1.49845706668542E-25 -3.67241024039342E-09
+ -3.37253814200087E-25  2.48396521835992E-25 -3.67241024039254E-09
+  4.50344913572571E-26  2.15680882374666E-25 -3.67241024039333E-09
+ -1.57519745554532E-26 -7.27014210251684E-27 -3.67241024039288E-09
+ -9.41079505492458E-26 -3.77643492547403E-26 -3.67241024039332E-09
+  3.77643492547403E-26  3.93799363886329E-26 -3.67241024039320E-09
+ -3.93799363886329E-26 -1.28439177144464E-25 -3.67241024039330E-09
+ -6.45830956773580E-25  1.55702210028902E-25 -3.67241024039314E-09
+ -1.55904158420639E-25  1.58327539121478E-25 -3.67241024039337E-09
+  6.03825691292371E-26  2.77679038637796E-25 -3.67241024039315E-09
+ -1.66809371574414E-25 -1.92860714108433E-25 -3.67241024039309E-09
+  8.05774083028950E-26 -3.91779879968963E-26 -3.67241024039317E-09
+  3.32810949581882E-25  2.08006843488676E-26 -3.67241024039348E-09
+  2.72630328844382E-25 -5.52328851399544E-25 -3.67241024039310E-09
+ -7.98503940926433E-25 -1.02589783002182E-25 -3.67241024039362E-09
+ -1.59539229471897E-26 -1.96899681943165E-25 -3.67241024039359E-09
+ -9.65313312500848E-26  1.89831488232384E-26 -3.67241024039326E-09
+ -3.12616110408224E-25 -3.03730381171815E-25 -3.67241024039258E-09
+ -2.83333593606420E-25 -1.42979461349498E-25 -3.67241024039301E-09
+ -5.69090567913680E-25  1.06628750836914E-25 -3.67241024039308E-09
+ -6.58351757061248E-26 -3.11000523274332E-26 -3.67241024039330E-09
+ -9.69352280335579E-26 -1.49441809885068E-25 -3.67241024039309E-09
+ -1.44191151699917E-25 -2.20123746992871E-25 -3.67241024039329E-09
+ -2.04371772437418E-25 -1.45200893658600E-25 -3.67241024039355E-09
+ -2.80910212905581E-25 -1.66001578007468E-25 -3.67241024039295E-09
+  3.17058975026429E-26  2.34664031197905E-25 -3.67241024039333E-09
+  5.10323585918335E-25 -1.08446286362543E-25 -3.67241024039309E-09
+ -4.82656656250424E-26 -1.36921009597401E-25 -3.67241024039321E-09
+  1.36719061205664E-25 -5.08909947176179E-26 -3.67241024039339E-09
+ -2.29817269796227E-25  6.84605047987003E-26 -3.67241024039329E-09
+  2.25172456786286E-25 -1.23592415742786E-25 -3.67241024039367E-09
+ -1.95889939984482E-26 -8.44144277458900E-26 -3.67241024039350E-09
+  2.33048444064012E-25  9.73391248170311E-26 -3.67241024039335E-09
+ -9.71371764252945E-26  2.42035147496290E-25 -3.67241024039338E-09
+  3.45129801477814E-25 -8.94631375393045E-26 -3.67241024039342E-09
+ -5.45260657688763E-26  1.13595970351826E-25 -3.67241024039307E-09
+  3.75624008630037E-26  3.88750654092915E-26 -3.67241024039328E-09
+  2.54858870371563E-25 -1.34901525680035E-25 -3.67241024039315E-09
+ -3.12818058799961E-25  8.18900728491828E-26 -3.67241024039368E-09
+  1.93668507675379E-25 -6.05845175209737E-27 -3.67241024039342E-09
+  6.05845175209737E-27 -1.15413505877455E-25 -3.67241024039328E-09
+  1.15312531681587E-25 -1.97404552922506E-25 -3.67241024039338E-09
+ -9.62284086624799E-26 -1.05518034682363E-25 -3.67241024039358E-09
+  1.05619008878231E-25  2.08612688663886E-25 -3.67241024039372E-09
+ -4.51354655531254E-26 -1.71656132976092E-25 -3.67241024039303E-09
+ -2.50416005753358E-25  1.66203526399205E-25 -3.67241024039351E-09
+ -3.92789621927646E-26 -5.14968398928277E-27 -3.67241024039352E-09
+  1.30458661061830E-25  2.64552393174919E-26 -3.67241024039318E-09
+ -1.96192862572087E-25 -9.89547119509237E-26 -3.67241024039371E-09
+ -2.32240650497066E-26  6.98741435408563E-26 -3.67241024039294E-09
+  1.02993679785655E-25  1.25208002876679E-26 -3.67241024039304E-09
+ -1.31771325608118E-25  1.30458661061830E-25 -3.67241024039309E-09
+ -1.28237228752728E-26 -9.22904150236166E-26 -3.67241024039328E-09
+  3.85721428216866E-25  5.46270399647446E-26 -3.67241024039307E-09
+  5.98776981498957E-26 -2.33250392455749E-26 -3.67241024039285E-09
+  2.33250392455749E-26  1.41161925823869E-25 -3.67241024039374E-09
+  1.52471035761117E-25  1.86095442985258E-25 -3.67241024039183E-09
+  2.17397443704427E-25 -5.35163238101934E-27 -3.67241024040013E-09
+  1.13696944547694E-25  2.20527643776344E-25 -3.67241024036626E-09
+  7.30043436127733E-26  2.43953657217787E-25 -3.67241024050241E-09
+  4.96793043671984E-26  1.11677460630328E-25 -3.67241023994561E-09
+  2.67985515834440E-25  1.47321351771834E-25 -3.67241024221549E-09
+ -5.04870979341448E-28 -2.18205237271374E-25 -3.67241023300174E-09
+ -2.22042256714369E-25  1.61558713389263E-27 -3.67241027023173E-09
+ -1.48331093730517E-25  1.39647312885844E-25 -3.67241012057040E-09
+  2.03261056282867E-25 -1.29347944907279E-25 -3.67241071869039E-09
+ -1.64285016677707E-25  1.89124668861306E-25 -3.67240834440596E-09
+  2.51122825124436E-25 -2.19315953425925E-25 -3.67241769497842E-09
+ -1.67011319966151E-25  5.33143754184569E-26 -3.67238121542601E-09
+  2.32240650497066E-27 -6.66429692730711E-26 -3.67252191540260E-09
+ -1.37324906380874E-25 -2.07703920901072E-25 -3.67198689341817E-09
+ -1.15413505877455E-25 -1.34396654700693E-25 -3.67398479101008E-09
+ -9.89547119509237E-26 -6.09884143044469E-26 -3.66670167477851E-09
+  3.76633750588720E-26  1.38637570927161E-25 -3.69237884649335E-09
+  9.17855440442752E-26  7.06819371078027E-28 -3.60631339505549E-09
+ -7.06819371078027E-28  4.12984461101304E-26 -3.87214898067419E-09
+  2.54454973588090E-26 -1.81551604171185E-25 -3.18477037194116E-09
+ -1.89528565644779E-25  3.16049233067746E-26 -4.27913275502452E-09
+  4.00867557597109E-26 -1.09698366391310E-24 -1.70308071332777E-06
+  1.09617587034615E-24  2.71418638493962E-24 -8.28937201405086E-06
+ -2.71418638493962E-24  1.16968508493827E-23 -1.38170743729813E-05
+  2.93390623514902E-24  6.85008944770476E-24 -1.68883280063703E-05
+ -4.89070537171978E-23 -1.25886549472914E-23 -1.83295843332460E-05
+  4.59343733908353E-23 -1.73449434694713E-23 -1.89586559438941E-05
+ -2.79690444619492E-23 -2.66507253606929E-23 -1.92212312789595E-05
+  1.13220346343196E-23 -5.70754622661589E-23 -1.93239142361664E-05
+ -6.95607196368812E-23  5.96604016803871E-23 -1.93590336395289E-05
+  6.54765153624006E-23 -1.88829824209371E-23 -1.93678253171371E-05
+ -2.56296742920727E-23 -3.78047389330876E-24 -1.93685666714608E-05
+  1.61235595962485E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  322915       1       0       0       0     -74       0       2
+                  
+  322897
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -2.13459450065564E-25 -3.67241024039266E-09 -2.01948391736579E-28
+  3.67241024039396E-09 -2.92421271234566E-25 -3.67241024039341E-09
+ -2.94642703543669E-25  1.95687991592745E-25 -3.67241024039320E-09
+  1.47422325967703E-26  7.81540276020561E-26 -3.67241024039341E-09
+  1.30862557845303E-25 -1.55298313245429E-25 -3.67241024039320E-09
+ -2.24364663219339E-25  2.20527643776344E-25 -3.67241024039397E-09
+ -2.20527643776344E-25 -1.80339913820765E-25 -3.67241024039343E-09
+ -2.91411529275884E-25 -1.73069771718248E-25 -3.67241024039330E-09
+ -2.11238017756462E-25  1.52471035761117E-25 -3.67241024039232E-09
+ -1.52269087369381E-25  1.16928118815479E-25 -3.67241024039351E-09
+ -1.16524222032006E-25 -2.19517901817661E-25 -3.67241024039351E-09
+ -7.37111629838513E-26 -1.51057397018961E-25 -3.67241024039356E-09
+ -1.42575564566025E-25  4.44690358603947E-25 -3.67241024039286E-09
+  2.40520534558266E-25  2.12449708106881E-25 -3.67241024039374E-09
+  8.11832534781048E-26  2.86160871090732E-25 -3.67241024039364E-09
+ -1.95687991592745E-25 -1.40959977432132E-25 -3.67241024039321E-09
+  1.40556080648659E-25  1.30660609453567E-25 -3.67241024039320E-09
+  5.83630852118713E-26  6.70468660565442E-26 -3.67241024039298E-09
+ -3.60679827641530E-25 -2.25576353569759E-25 -3.67241024039382E-09
+ -2.53647180021143E-25  1.52471035761117E-25 -3.67241024039326E-09
+  4.34592939017118E-25  1.79734068645555E-25 -3.67241024039310E-09
+ -6.90663499739100E-26  2.12853604890354E-25 -3.67241024039297E-09
+  8.05774083028950E-26 -3.91779879968963E-26 -3.67241024039299E-09
+  1.52874932544590E-25  3.14231697542117E-25 -3.67241024039315E-09
+  4.50950758747781E-25 -2.68389412617914E-25 -3.67241024039329E-09
+ -3.18876510552058E-25 -1.49441809885068E-26 -3.67241024039308E-09
+  1.49441809885068E-26 -2.08006843488676E-25 -3.67241024039346E-09
+ -8.56261180963095E-26  9.30982085905629E-26 -3.67241024039298E-09
+ -2.14873088807720E-25  1.34497628896562E-25 -3.67241024039285E-09
+  1.58933384296688E-25 -1.15918376856796E-25 -3.67241024039356E-09
+  1.16120325248533E-25  1.06628750836914E-25 -3.67241024039304E-09
+ -6.58351757061248E-26 -1.00974195868290E-27 -3.67241024039287E-09
+ -2.92623219626303E-25  4.64481300994132E-26 -3.67241024039345E-09
+ -4.62461817076766E-26 -2.20325695384608E-25 -3.67241024039339E-09
+ -2.04371772437418E-25  2.46175089526890E-25 -3.67241024039312E-09
+ -1.13696944547694E-25 -6.80566080152271E-26 -3.67241024039284E-09
+  2.02150340128316E-25  3.89760396051598E-26 -3.67241024039350E-09
+  3.90164292835071E-25 -1.08244337970806E-25 -3.67241024039341E-09
+  2.45367295959944E-25 -3.89760396051598E-26 -3.67241024039301E-09
+  5.51319109440861E-26  6.82585564069637E-26 -3.67241024039312E-09
+  3.78653234506086E-25 -5.14968398928277E-26 -3.67241024039313E-09
+ -9.04728794979874E-26  1.17533963990689E-25 -3.67241024039322E-09
+  1.76098997594297E-25  1.11273563846855E-25 -3.67241024039307E-09
+  3.30993414056253E-25 -4.92754075837253E-26 -3.67241024039314E-09
+ -3.90770138010280E-25  9.39060021575092E-26 -3.67241024039300E-09
+  1.99625985231608E-25  6.00796465416323E-26 -3.67241024039357E-09
+  2.38904947424373E-25  4.23081880688133E-26 -3.67241024039340E-09
+  1.08749208950148E-25 -1.56913900379322E-25 -3.67241024039339E-09
+ -1.36618087009796E-25  4.74578720580961E-26 -3.67241024039271E-09
+  1.06729725032782E-25  3.35537252870326E-25 -3.67241024039339E-09
+  2.51728670299646E-25  3.23117426778526E-26 -3.67241024039325E-09
+ -5.34153496143252E-26  1.04609266919548E-25 -3.67241024039339E-09
+ -8.03754599111585E-26 -1.62164558564473E-25 -3.67241024039316E-09
+ -1.31468403020513E-25 -7.57306469012171E-27 -3.67241024039347E-09
+  7.77501308185829E-27 -8.56261180963095E-26 -3.67241024039329E-09
+ -7.78511050144512E-26 -1.04407318527811E-25 -3.67241024039331E-09
+ -2.40318586166529E-26 -9.28962601988264E-26 -3.67241024039328E-09
+  9.28962601988264E-26 -1.73271720109985E-25 -3.67241024039328E-09
+  5.14968398928277E-27 -9.73391248170311E-26 -3.67241024039317E-09
+  2.67177722267494E-25  1.82157449346394E-25 -3.67241024039337E-09
+  2.33654289239222E-25  6.98741435408563E-26 -3.67241024039283E-09
+  1.02993679785655E-25  1.31569377216381E-25 -3.67241024039304E-09
+ -1.31771325608118E-25  9.13816472608020E-26 -3.67241024039318E-09
+  2.62532909257553E-26 -2.66268954504679E-25 -3.67241024039327E-09
+ -1.43383358132971E-25 -3.12818058799961E-25 -3.67241024039350E-09
+ -2.74245915978274E-25 -1.62568455347946E-25 -3.67241024039328E-09
+ -1.80743810604238E-26 -1.23794364134523E-25 -3.67241024039418E-09
+ -2.81112161297318E-25 -9.59254860748750E-27 -3.67241024039017E-09
+ -1.74079513676931E-25 -6.99751177367246E-26 -3.67241024040621E-09
+ -2.23556869652393E-25  1.58529487513215E-26 -3.67241024033184E-09
+ -1.59539229471897E-26 -2.00029882015082E-25 -3.67241024067214E-09
+ -3.87034092763154E-25  6.56332273143882E-26 -3.67241023912656E-09
+  1.24198260917996E-25  4.72559236663595E-26 -3.67241024612763E-09
+ -2.52435489670724E-27  2.49406263794675E-26 -3.67241021451625E-09
+ -1.25510925464284E-25  1.48936938905727E-25 -3.67241035681707E-09
+ -9.76420474046360E-26 -5.64445754903738E-26 -3.67240971801033E-09
+ -1.39647312885844E-25 -8.13852018698413E-26 -3.67241257848886E-09
+ -6.54312789226516E-26 -1.02690757198050E-25 -3.67239979925296E-09
+ -1.90942204386935E-25  2.28201682662334E-26 -3.67245676938233E-09
+ -2.28201682662334E-26  4.33179300274962E-26 -3.67220329428053E-09
+ -4.33179300274962E-26  7.61345436846903E-26 -3.67332902047773E-09
+ -1.33184964350274E-25 -7.16916790664856E-27 -3.66833799748444E-09
+ -1.69131778079385E-25  9.34011311781678E-26 -3.69043237250492E-09
+  2.86665742070074E-25 -1.89831488232384E-26 -3.59278934569882E-09
+  8.05774083028950E-26 -1.24198260917996E-26 -4.02411894015268E-09
+  9.58245118790067E-26 -9.48147699203239E-26 -2.12899188816421E-09
+ -1.15110583289850E-25  1.64587939265312E-26 -1.06040100329596E-08
+  6.39166659846273E-26  1.15766915562994E-25  2.47757728731998E-08
+  7.42160339631928E-26 -8.27988406119974E-25 -5.29319265901949E-07
+ -4.76598204498326E-25  4.68520268828863E-24 -5.80759890841139E-06
+ -4.68520268828863E-24 -1.77973078669612E-23 -1.19788991956802E-05
+ -1.82625969615223E-23  4.03250548619601E-24 -1.56808927965243E-05
+ -1.27179019180028E-23 -5.97121004686717E-24 -1.74868094137154E-05
+  5.99705944100945E-24 -7.33605805757966E-23 -1.83017529352925E-05
+ -8.90253134260196E-23  3.48449833037963E-23 -1.86586083909881E-05
+  6.66138887046610E-23 -6.71825753757912E-23 -1.88103486194971E-05
+ -1.33382873774176E-23  2.85635805272217E-23 -1.88711001879951E-05
+  3.75333202945936E-23 -5.87556728854072E-23 -1.88924337160956E-05
+ -8.45275188452625E-23 -1.04431552334820E-23 -1.88979314879779E-05
+  1.04560799305531E-23  3.48320586067251E-24 -1.88984064073565E-05
+  6.46557970983831E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  322171       1       0       0       0     -75       0       2
+                  
+  322123
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -3.91174034793754E-25 -3.67241024039309E-09 -5.69292516305416E-25
+  3.67241024039374E-09 -9.69352280335579E-26 -3.67241024039296E-09
+ -6.58351757061248E-26  9.04728794979874E-26 -3.67241024039342E-09
+ -9.02709311062508E-26 -3.95818847803695E-26 -3.67241024039407E-09
+  3.97838331721061E-26 -6.34117950052858E-26 -3.67241024039342E-09
+ -1.44191151699917E-25 -2.68591361009650E-25 -3.67241024039384E-09
+ -1.12283305805538E-25  2.22143230910237E-27 -3.67241024039376E-09
+ -2.22143230910237E-27 -1.76704842769507E-25 -3.67241024039343E-09
+ -1.16928118815479E-25 -1.38334648339557E-25 -3.67241024039297E-09
+ -4.60442333159400E-26  1.16928118815479E-25 -3.67241024039351E-09
+ -1.16524222032006E-25 -2.19517901817661E-25 -3.67241024039383E-09
+ -7.37111629838513E-26  4.35602680975801E-25 -3.67241024039353E-09
+  4.45094255387420E-25  1.51057397018961E-25 -3.67241024039329E-09
+  1.42373616174288E-25  1.16726170423743E-25 -3.67241024039375E-09
+ -1.16726170423743E-25 -1.11071615455118E-26 -3.67241024039353E-09
+ -1.91850972149750E-25  5.73533432531884E-26 -3.67241024039344E-09
+ -3.50986304838174E-25  3.93799363886329E-26 -3.67241024039277E-09
+ -3.93799363886329E-26 -3.06961555439600E-26 -3.67241024039289E-09
+ -1.56711951987585E-25 -2.30625063363173E-25 -3.67241024039355E-09
+  1.52672984152854E-25 -3.75624008630037E-26 -3.67241024039332E-09
+ -3.75624008630037E-26  1.66809371574414E-25 -3.67241024039348E-09
+  1.57519745554532E-26 -2.61927064082343E-25 -3.67241024039316E-09
+ -2.12853604890354E-25  3.47351233786916E-26 -3.67241024039331E-09
+ -3.28166136571941E-25  2.08006843488676E-26 -3.67241024039304E-09
+  2.72630328844382E-25  2.40116637774792E-25 -3.67241024039292E-09
+  1.70444442625673E-25 -2.87776458224625E-25 -3.67241024039308E-09
+ -2.99287516553610E-25 -2.34260134414432E-26 -3.67241024039363E-09
+ -3.90164292835071E-25  1.89831488232384E-26 -3.67241024039348E-09
+ -3.12616110408224E-25  6.19981562631298E-26 -3.67241024039274E-09
+  1.08244337970806E-25  7.95676663442121E-26 -3.67241024039341E-09
+ -7.97696147359487E-26  2.30221166579700E-26 -3.67241024039320E-09
+ -1.49441809885068E-25 -1.00974195868290E-27 -3.67241024039287E-09
+ -2.92623219626303E-25 -1.49441809885068E-25 -3.67241024039352E-09
+ -1.44191151699917E-25 -8.92611891475679E-26 -3.67241024039339E-09
+ -2.04371772437418E-25  1.65597681223995E-25 -3.67241024039377E-09
+  1.27833331969255E-25 -1.66001578007468E-25 -3.67241024039274E-09
+  3.17058975026429E-26 -1.75695100810824E-26 -3.67241024039323E-09
+  3.33416794757092E-25 -3.00903103687503E-26 -3.67241024039331E-09
+  3.00903103687503E-26 -1.57317797162795E-25 -3.67241024039322E-09
+  3.12010265233015E-25  2.63744599607972E-25 -3.67241024039328E-09
+  2.96864135852771E-26  2.42136121692158E-25 -3.67241024039317E-09
+  5.12948915010911E-26  9.49157441161921E-26 -3.67241024039375E-09
+  1.98919165860530E-25 -1.35103474071771E-25 -3.67241024039332E-09
+  1.35305422463508E-25 -6.05845175209737E-28 -3.67241024039318E-09
+  6.05845175209737E-28  1.69232752275253E-25 -3.67241024039342E-09
+  2.72327406256777E-25 -8.94631375393045E-26 -3.67241024039363E-09
+ -5.45260657688763E-26  4.23081880688133E-26 -3.67241024039361E-09
+  1.08749208950148E-25 -5.90699045829494E-26 -3.67241024039349E-09
+ -8.17890986533145E-26  1.17130067207216E-25 -3.67241024039264E-09
+  3.30690491468648E-25  1.29246970711411E-26 -3.67241024039329E-09
+  1.24804106093206E-25 -1.72060029759565E-25 -3.67241024039317E-09
+ -1.21471957629552E-25 -1.74685358852141E-26 -3.67241024039314E-09
+  1.74685358852141E-26 -1.66102552203336E-25 -3.67241024039339E-09
+  3.29175878530624E-26 -4.12984461101304E-26 -3.67241024039371E-09
+ -9.01699569103825E-26 -8.56261180963095E-26 -3.67241024039329E-09
+ -7.78511050144512E-26  9.12806730649337E-26 -3.67241024039304E-09
+  7.39131113755879E-26 -1.90841230191067E-25 -3.67241024039362E-09
+  1.90841230191067E-25 -1.30357686865962E-25 -3.67241024039326E-09
+ -1.20159293083265E-25  4.03896783473158E-28 -3.67241024039339E-09
+  1.69232752275253E-25  2.80203393534503E-25 -3.67241024039360E-09
+ -3.54419427497696E-26  1.53076880936327E-25 -3.67241024039286E-09
+ -3.23117426778526E-26  9.34011311781678E-26 -3.67241024039306E-09
+  2.57484199464138E-26 -6.52293305309150E-26 -3.67241024039321E-09
+  1.82864268717472E-25  1.43383358132971E-25 -3.67241024039316E-09
+  1.50249603452015E-25 -1.17130067207216E-25 -3.67241024039294E-09
+  2.62532909257553E-27 -2.34058186022695E-25 -3.67241024039429E-09
+ -3.52904814559672E-25  1.41161925823869E-25 -3.67241024038932E-09
+ -2.97873877811454E-26 -9.55215892914019E-26 -3.67241024041112E-09
+ -3.81783434578003E-25  9.24923634153532E-26 -3.67241024031789E-09
+ -9.23913892194849E-26 -2.85756974307259E-26 -3.67241024071875E-09
+ -1.58226564925610E-25 -8.74436536219387E-26 -3.67241023898999E-09
+  4.34189042233645E-27  7.14897306747490E-26 -3.67241024642919E-09
+  1.18341757557635E-25 -4.62461817076766E-26 -3.67241021445703E-09
+  4.62461817076766E-26  2.22042256714369E-25 -3.67241035173176E-09
+  1.17735912382426E-25 -9.57235376831385E-26 -3.67240976292330E-09
+  2.01948391736579E-28  1.88720772077833E-25 -3.67241228591498E-09
+ -9.00689827145142E-26  6.64410208813345E-26 -3.67240148630648E-09
+  8.03754599111585E-26  8.77465762095436E-26 -3.67244766498289E-09
+  6.46234853557053E-27 -2.61523167298870E-26 -3.67225042056438E-09
+ -2.78688780596479E-26 -2.53546205825275E-25 -3.67309198072820E-09
+ -1.51057397018961E-25  9.23913892194849E-26 -3.66950555093795E-09
+  5.44250915730080E-26  1.34901525680035E-25 -3.68477191885339E-09
+  1.58630461709083E-25 -4.25101364605499E-26 -3.61987798016023E-09
+  4.25101364605499E-26 -3.59468137291111E-26 -3.89554067850580E-09
+ -1.10970641259250E-25 -5.98776981498957E-26 -2.72924382412303E-09
+  5.99786723457640E-26 -3.73604524712671E-26 -7.71363301018465E-09
+ -2.60917322123660E-25 -5.96757497581591E-26  1.26613011808582E-08
+ -1.59438255276029E-25  8.60300148797827E-26 -1.00650861534201E-07
+  7.06819371078027E-26 -3.95818847803695E-25 -3.37618415512387E-06
+ -1.56227275847418E-24 -9.94555439624304E-24 -9.87864129022174E-06
+ -6.17154285146985E-24 -3.19498511598607E-23 -1.43085331856003E-05
+ -1.86115637824431E-23  1.69313531631948E-23 -1.66011565380491E-05
+ -1.69055037690525E-23 -9.15844034461055E-23 -1.76706025932332E-05
+ -1.58198292150767E-23 -5.54986492234797E-23 -1.81553546586637E-05
+  5.54727998293374E-23  4.34528315531762E-23 -1.83723150365528E-05
+  1.16580767581692E-23 -6.36153589841563E-23 -1.84669084631304E-05
+ -2.48929665590177E-23  5.24742701088327E-24 -1.85056653765262E-05
+  8.34935430795712E-24 -8.40105309624169E-25 -1.85195466602756E-05
+ -7.23783035983899E-25 -2.10801809230311E-23 -1.85231818848799E-05
+ -1.29246970711411E-24  2.07247517535747E-23 -1.85234992186534E-05
+  1.91964063249123E-23
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  321949       1       0       0       0     -76       0       2
+                  
+  321937
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -8.86553439723582E-26 -3.67241024039332E-09  8.86553439723582E-26
+  3.67241024039397E-09  3.67748021352310E-25 -3.67241024039297E-09
+  2.19517901817661E-25  2.86160871090732E-25 -3.67241024039319E-09
+  3.01105052079239E-25  7.81540276020561E-26 -3.67241024039320E-09
+  1.30862557845303E-25 -1.55298313245429E-25 -3.67241024039320E-09
+ -2.24364663219339E-25  1.45402842050337E-26 -3.67241024039342E-09
+  3.66536331001891E-25  3.04538174738761E-25 -3.67241024039299E-09
+  1.04407318527811E-25 -7.87598727772658E-26 -3.67241024039354E-09
+  1.69232752275253E-25  4.88715108002521E-26 -3.67241024039309E-09
+ -3.42504472385238E-25 -1.74685358852141E-25 -3.67241024039333E-09
+ -2.12449708106881E-25 -1.21774880217157E-25 -3.67241024039329E-09
+ -1.71656132976092E-25  4.56403365324669E-26 -3.67241024039321E-09
+  1.50855448627225E-25  5.35163238101934E-26 -3.67241024039329E-09
+ -3.47149285395179E-25 -2.73640070803065E-25 -3.67241024039372E-09
+ -3.13423903975171E-25 -9.08767762814606E-27 -3.67241024039320E-09
+ -1.93668507675379E-25  3.53611633930750E-25 -3.67241024039329E-09
+  2.33452340847485E-25  3.33012897973619E-25 -3.67241024039254E-09
+  6.50273821391784E-26 -1.28439177144464E-25 -3.67241024039243E-09
+ -6.45830956773580E-25  6.78546596234906E-26 -3.67241024039365E-09
+  3.99857815638426E-26 -2.44761450784734E-25 -3.67241024039266E-09
+ -2.33250392455749E-25 -1.26621641618835E-25 -3.67241024039353E-09
+ -4.60644281551137E-25 -2.61725115690606E-25 -3.67241024039361E-09
+ -1.00368350693080E-25 -3.48764872529072E-25 -3.67241024039364E-09
+ -5.89689303870811E-26 -7.69423372516366E-26 -3.67241024039319E-09
+ -2.16488675941613E-25  4.44286461820474E-26 -3.67241024039303E-09
+ -2.20931540559817E-25  7.24994726334319E-26 -3.67241024039334E-09
+  1.02387834610446E-25  1.07638492795597E-25 -3.67241024039341E-09
+ -1.60952868214053E-25  9.30982085905629E-26 -3.67241024039314E-09
+ -4.28130590481548E-26 -1.33487886937879E-25 -3.67241024039334E-09
+ -1.59943126255371E-25 -1.42979461349498E-25 -3.67241024039352E-09
+  1.81753552562921E-26  3.71585040795305E-26 -3.67241024039290E-09
+  2.56070560721982E-25 -9.89547119509237E-26 -3.67241024039347E-09
+ -2.32644547280539E-25 -8.30007890037340E-26 -3.67241024039304E-09
+ -4.69328062395810E-25 -3.01307000470976E-25 -3.67241024039361E-09
+ -2.85756974307259E-25 -1.45200893658600E-25 -3.67241024039333E-09
+  1.25208002876679E-26  2.96864135852771E-26 -3.67241024039274E-09
+  1.29650867494884E-25  2.15680882374666E-25 -3.67241024039355E-09
+  7.77501308185829E-26 -2.25778301961495E-25 -3.67241024039326E-09
+ -5.17795676412589E-25 -3.89760396051598E-26 -3.67241024039323E-09
+  5.51319109440861E-26  2.21537385735027E-25 -3.67241024039329E-09
+  3.78451286114349E-25  2.46377037918626E-26 -3.67241024039339E-09
+ -1.66203526399205E-25  1.95889939984482E-26 -3.67241024039349E-09
+  2.73842019194801E-25  8.78475504054119E-26 -3.67241024039350E-09
+  6.09884143044469E-26 -9.83488667757140E-26 -3.67241024039333E-09
+ -1.95082146417535E-25  1.69232752275253E-25 -3.67241024039332E-09
+  2.72327406256777E-25 -1.15312531681587E-25 -3.67241024039320E-09
+ -1.78219455707531E-25  2.82525800039474E-25 -3.67241024039280E-09
+  4.46911790913049E-25  6.61380982937296E-26 -3.67241024039350E-09
+  2.27292914899520E-25  1.58630461709083E-25 -3.67241024039290E-09
+ -1.58630461709083E-25 -1.13797918743562E-25 -3.67241024039343E-09
+  9.59254860748750E-26  6.19981562631298E-26 -3.67241024039317E-09
+ -6.19981562631298E-26  1.41262900019737E-25 -3.67241024039292E-09
+  1.52269087369381E-25 -1.93365585087774E-25 -3.67241024039327E-09
+ -1.00065428105475E-25 -7.67403888599000E-27 -3.67241024039350E-09
+  7.67403888599000E-27 -8.56261180963095E-26 -3.67241024039318E-09
+ -7.78511050144512E-26  1.86196417181126E-25 -3.67241024039283E-09
+  2.35976695744193E-25  3.92789621927646E-26 -3.67241024039339E-09
+  8.76456020136753E-26 -5.14968398928277E-27 -3.67241024039330E-09
+  1.30458661061830E-25  9.83488667757140E-26 -3.67241024039306E-09
+  7.14897306747490E-26 -1.35305422463508E-26 -3.67241024039349E-09
+ -1.57822668142137E-25 -5.14968398928277E-27 -3.67241024039329E-09
+  5.25065818515105E-27 -4.71549494704912E-26 -3.67241024039315E-09
+  2.21537385735027E-25 -1.23996312526260E-25 -3.67241024039374E-09
+ -5.19007366763008E-26 -2.08309766076281E-25 -3.67241024039317E-09
+ -2.01241572365501E-25  1.11879409022065E-25 -3.67241024039319E-09
+  2.52435489670724E-27  1.16019351052665E-25 -3.67241024039184E-09
+  3.58357421136559E-25 -1.10263821888172E-25 -3.67241024039837E-09
+ -1.83369139696814E-25  3.33214846365355E-26 -3.67241024037210E-09
+  2.60311476948450E-25  1.25712873856020E-25 -3.67241024047737E-09
+  1.67920087728965E-25 -1.35305422463508E-25 -3.67241024005577E-09
+ -1.58226564925610E-25  4.82656656250424E-26 -3.67241024173497E-09
+  2.45367295959944E-25  6.57342015102565E-26 -3.67241023510011E-09
+  1.24097286722128E-25 -5.04870979341448E-26 -3.67241026106876E-09
+ -2.42943915259105E-25 -2.39308844207846E-26 -3.67241016058758E-09
+  7.01770661284612E-26 -4.79627430374375E-26 -3.67241054388886E-09
+ -5.10929431093545E-26  9.01699569103825E-26 -3.67240910812809E-09
+  1.54086622895010E-25 -8.04764341070267E-26 -3.67241435744275E-09
+ -2.13257501673827E-25 -5.72523690573202E-26 -3.67239580438676E-09
+ -8.95641117351728E-26 -7.76491566227146E-26 -3.67245812754514E-09
+ -1.23188518959313E-25 -1.42474590370157E-25 -3.67226587249112E-09
+ -4.24091622646816E-27 -1.15514480073323E-25 -3.67276429984079E-09
+ -8.83524213847533E-26 -6.08874401085786E-26 -3.67204256148415E-09
+  3.14029749150380E-26 -9.66323054459531E-26 -3.66899800156275E-09
+ -5.00832011506716E-26 -3.73604524712671E-26 -3.70867363661401E-09
+ -2.44357554001261E-26  2.43347812042578E-26 -3.42374203898481E-09
+ -1.50552526039620E-25  8.98670343227777E-26 -5.15252803383940E-09
+  1.23188518959313E-25 -2.26182198744969E-26  4.20938191100619E-09
+  2.26182198744969E-26  5.17795676412589E-25 -1.41564736595331E-06
+  1.46372194330672E-24  5.53823269498394E-24 -7.51502709705433E-06
+  4.07127957740943E-25 -1.34546096510578E-23 -1.27022004617453E-05
+ -3.86448442427118E-23  3.28545799548406E-23 -1.56051647397627E-05
+  6.04875822929401E-24 -7.97195315347980E-23 -1.70132529917506E-05
+ -8.52771512753887E-23  3.98597657673990E-23 -1.76701491577021E-05
+  5.99964438042368E-23 -9.59012522678666E-24 -1.79744532224153E-05
+ -4.53139879314205E-23 -3.83088021188621E-23 -1.81142387519693E-05
+ -3.47157363330849E-23 -1.85857143883008E-23 -1.81766611461520E-05
+  5.37667398159468E-23  1.98523347012727E-23 -1.82027577838721E-05
+  5.02253728184541E-23 -3.19627758569318E-23 -1.82122577438387E-05
+ -6.07073021431495E-23 -2.28637891188485E-23 -1.82147769293799E-05
+ -4.89846018996246E-24  4.95015897824702E-24 -1.82149985647079E-05
+  5.05355655481615E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  321727       1       0       0       0     -77       0       2
+                  
+  321715
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  8.90592407558313E-26 -3.67241024039331E-09  1.24602157701469E-25
+  3.67241024039331E-09 -1.13494996155957E-25 -3.67241024039296E-09
+  1.13494996155957E-25 -1.20361241475001E-25 -3.67241024039364E-09
+ -9.02709311062508E-26 -1.30862557845303E-25 -3.67241024039299E-09
+ -7.81540276020561E-26 -5.14968398928277E-26 -3.67241024039276E-09
+ -3.27964188180204E-25 -2.63340702824499E-25 -3.67241024039354E-09
+  8.84533955806216E-26 -2.22143230910237E-27 -3.67241024039331E-09
+  9.12806730649337E-26 -7.87598727772658E-26 -3.67241024039311E-09
+  1.69232752275253E-25 -2.36077669940061E-25 -3.67241024039331E-09
+ -2.41732224908685E-25  2.16488675941613E-25 -3.67241024039355E-09
+  2.76871245070850E-25 -2.76669296679113E-26 -3.67241024039353E-09
+ -3.68555814919257E-25 -1.50855448627225E-25 -3.67241024039366E-09
+  5.41221689854032E-26  5.29104786349837E-26 -3.67241024039285E-09
+  1.42373616174288E-25  1.87812004315018E-26 -3.67241024039318E-09
+ -2.12449708106881E-25  9.04728794979874E-26 -3.67241024039353E-09
+ -1.92052920541487E-25  1.55500261637166E-25 -3.67241024039310E-09
+ -1.55298313245429E-25  2.25172456786286E-25 -3.67241024039307E-09
+  2.57282251072402E-25  6.70468660565442E-26 -3.67241024039266E-09
+ -3.60679827641530E-25 -2.35673773156588E-25 -3.67241024039346E-09
+ -2.43953657217787E-25 -3.75624008630037E-26 -3.67241024039375E-09
+ -3.75624008630037E-26 -1.59539229471897E-26 -3.67241024039342E-09
+ -4.60442333159400E-25  9.89547119509237E-27 -3.67241024039329E-09
+ -1.91245126974540E-25 -1.44998945266864E-25 -3.67241024039316E-09
+ -3.28368084963677E-25 -1.66001578007468E-25 -3.67241024039372E-09
+  1.66203526399205E-25 -3.46947337003443E-25 -3.67241024039328E-09
+ -7.10252493737548E-25 -1.90235385015857E-25 -3.67241024039311E-09
+  4.83464449817370E-25  9.65313312500848E-26 -3.67241024039308E-09
+  2.36279618331797E-26 -2.12247759715145E-25 -3.67241024039320E-09
+ -3.24531065520682E-25 -1.46210635617283E-25 -3.67241024039334E-09
+ -1.47220377575966E-25 -4.50344913572571E-26 -3.67241024039369E-09
+  3.38465504550506E-25 -8.90592407558313E-26 -3.67241024039325E-09
+ -1.63780145698366E-25 -6.86624531904369E-26 -3.67241024039313E-09
+  3.24329117128946E-25 -1.64991836048785E-25 -3.67241024039352E-09
+ -2.57888096247611E-25 -1.05820957269967E-25 -3.67241024039324E-09
+ -1.87812004315018E-25 -2.77679038637796E-25 -3.67241024039289E-09
+ -4.42065029511371E-25 -1.65799629615731E-25 -3.67241024039360E-09
+ -2.87170613049415E-25  1.99928907819213E-26 -3.67241024039339E-09
+ -1.55702210028902E-25  3.02720639213132E-25 -3.67241024039358E-09
+  2.65158238350128E-25  9.99644539096066E-26 -3.67241024039306E-09
+  3.48764872529072E-25  4.70539752746229E-26 -3.67241024039306E-09
+  2.59503683381504E-25 -1.49441809885068E-25 -3.67241024039355E-09
+ -1.44191151699917E-25 -4.84676140167790E-26 -3.67241024039307E-09
+ -9.47137957244556E-26 -2.05785411179574E-25 -3.67241024039322E-09
+ -3.81278563598661E-25  2.36279618331797E-26 -3.67241024039350E-09
+ -1.70646391017409E-25 -1.49441809885068E-25 -3.67241024039332E-09
+  3.93799363886329E-27  8.38085825706803E-27 -3.67241024039331E-09
+  1.41161925823869E-25  4.24091622646816E-26 -3.67241024039343E-09
+  2.51223799320304E-25 -1.29549893299015E-25 -3.67241024039310E-09
+ -1.64184042481839E-25 -6.52293305309150E-26 -3.67241024039323E-09
+  5.03861237382765E-26  4.19042912853401E-26 -3.67241024039340E-09
+ -4.19042912853401E-26 -6.05845175209737E-27 -3.67241024039315E-09
+  1.42373616174288E-25 -1.21673906021289E-25 -3.67241024039347E-09
+ -1.31266454628776E-26  6.49264079433102E-26 -3.67241024039305E-09
+ -6.49264079433102E-26  2.44357554001261E-26 -3.67241024039314E-09
+  1.07133621816255E-25 -5.21026850680374E-26 -3.67241024039352E-09
+ -7.79520792103195E-26  1.21875854413025E-25 -3.67241024039297E-09
+  1.71656132976092E-25 -2.94844651935405E-26 -3.67241024039341E-09
+ -1.00974195868290E-26 -5.14968398928277E-27 -3.67241024039352E-09
+  1.30458661061830E-25 -1.59135332688424E-25 -3.67241024039304E-09
+ -1.34295680504825E-25  3.55429169456379E-26 -3.67241024039306E-09
+ -3.56438911415062E-26  1.07234596012123E-25 -3.67241024039261E-09
+  6.55322531185199E-26  1.31670351412250E-25 -3.67241024039304E-09
+  1.61962610172736E-25  3.26146652654575E-26 -3.67241024039353E-09
+ -2.08612688663886E-25 -1.26217744835362E-26 -3.67241024039306E-09
+ -1.03599524960865E-25 -8.38085825706803E-26 -3.67241024039416E-09
+  1.92860714108433E-26  1.80743810604238E-26 -3.67241024038976E-09
+ -1.79734068645555E-26 -1.52370061565249E-25 -3.67241024040874E-09
+ -3.23319375170263E-25 -6.45225111598370E-26 -3.67241024032398E-09
+ -4.52364397489937E-26 -1.57418771358663E-25 -3.67241024070688E-09
+ -2.78688780596479E-26  1.22683647979972E-25 -3.67241023898450E-09
+  1.70949313605014E-25  1.46210635617283E-25 -3.67241024670657E-09
+  4.22072138729450E-26  1.96899681943165E-26 -3.67241021216743E-09
+ -1.96899681943165E-26  1.00469324888948E-25 -3.67241036629365E-09
+  4.62461817076766E-26  2.50416005753358E-26 -3.67240968006121E-09
+  2.12045811323408E-26 -9.80459441881091E-26 -3.67241272907087E-09
+ -1.47725248555308E-25  1.39344390298240E-25 -3.67239920851796E-09
+  5.66465238821104E-26 -3.24127168737209E-26 -3.67245905333379E-09
+ -1.14302789722904E-25 -5.72523690573202E-26 -3.67219463136957E-09
+  4.74578720580961E-27 -2.11036069364725E-26 -3.67336102911222E-09
+ -7.16916790664856E-26 -5.12948915010911E-26 -3.66822412598072E-09
+ -1.51057397018961E-25  8.83524213847533E-26 -3.69081425509343E-09
+  1.15514480073323E-25 -6.67439434689394E-26 -3.59163832889835E-09
+ -7.99715631276853E-26  6.36137433970224E-26 -4.02685766887640E-09
+ -3.33214846365355E-27  3.70575298836623E-26 -2.12758930404420E-09
+  4.80637172333058E-26 -1.26722615814703E-25 -1.05669531351905E-08
+ -1.07537518599728E-25 -3.72594782753988E-26  2.44028666277002E-08
+ -9.71371764252945E-26  1.43787254916444E-25 -3.55567622349283E-07
+ -1.43787254916444E-25 -1.88054342385102E-24 -4.98138494292076E-06
+ -1.70929118765840E-24 -2.36263462460459E-23 -1.08450822193204E-05
+ -2.18039639590150E-23 -2.34970992753344E-23 -1.44560557198934E-05
+ -7.23783035983899E-24  7.02586532787228E-23 -1.62947009153702E-05
+  1.39379933215185E-22  2.38848401874687E-23 -1.71758381229689E-05
+  5.04063185774501E-23  5.33531495096703E-23 -1.75941360872711E-05
+  1.04069660816828E-22  4.10488378979440E-23 -1.77928203206775E-05
+  7.12926290444141E-23  2.66248759665506E-23 -1.78863175128531E-05
+  3.51293266393614E-23  4.49262470192863E-23 -1.79289491078582E-05
+  2.22821777506472E-23 -4.90363006879092E-23 -1.79470832766236E-05
+ -2.25665210862123E-23 -1.54320883029424E-23 -1.79537785986010E-05
+ -4.03638289531735E-23  1.23301610058686E-23 -1.79555741694020E-05
+  7.14735748034100E-24  5.87750599310140E-24 -1.79557332960063E-05
+  1.12541799746961E-23
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  321541       1       0       0       0     -78       0       2
+                  
+  321511
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -3.91174034793754E-25 -3.67241024039354E-09  1.77714584728190E-26
+  3.67241024039354E-09 -2.38299102249163E-26 -3.67241024039318E-09
+  2.38299102249163E-26  1.95687991592745E-25 -3.67241024039298E-09
+  1.47422325967703E-26 -3.97838331721061E-26 -3.67241024039320E-09
+  3.97838331721061E-26 -5.14968398928277E-26 -3.67241024039343E-09
+  5.14968398928277E-26  1.47422325967703E-26 -3.67241024039354E-09
+ -1.47422325967703E-26  9.99644539096066E-26 -3.67241024039310E-09
+ -1.89225643057175E-25 -1.83974984872023E-25 -3.67241024039300E-09
+ -4.03290938297948E-25 -2.36077669940061E-25 -3.67241024039321E-09
+ -2.41732224908685E-25  2.18710108250715E-25 -3.67241024039332E-09
+  2.74851761153484E-25  7.00760919325929E-26 -3.67241024039319E-09
+ -1.73069771718248E-25  4.48325429655205E-26 -3.67241024039334E-09
+ -3.38263556158770E-25  1.51057397018961E-25 -3.67241024039253E-09
+ -1.51057397018961E-25  1.16726170423743E-25 -3.67241024039351E-09
+ -3.10192729707385E-25  9.04728794979874E-26 -3.67241024039288E-09
+ -1.92052920541487E-25  1.55500261637166E-25 -3.67241024039310E-09
+ -1.55298313245429E-25 -1.59539229471897E-25 -3.67241024039330E-09
+  1.59741177863634E-25 -1.20159293083265E-25 -3.67241024039320E-09
+  3.91779879968963E-26 -2.30625063363173E-25 -3.67241024039344E-09
+  1.52672984152854E-25  1.58125590729741E-25 -3.67241024039330E-09
+ -4.90734591919887E-26 -1.13696944547694E-25 -3.67241024039296E-09
+ -6.90663499739100E-26  4.15811738585616E-25 -3.67241024039306E-09
+ -9.89547119509237E-27 -3.91779879968963E-26 -3.67241024039339E-09
+  3.32810949581882E-25 -7.71442856433732E-26 -3.67241024039337E-09
+ -3.83701944299500E-26 -3.66132434218418E-25 -3.67241024039316E-09
+ -3.97636383329324E-25 -1.51461293802434E-26 -3.67241024039320E-09
+ -1.03801473352602E-25 -1.21169035041947E-27 -3.67241024039318E-09
+ -1.72261978151302E-25  1.16928118815479E-25 -3.67241024039331E-09
+ -2.38702999032636E-25 -4.01675351164056E-25 -3.67241024039334E-09
+ -3.55833066239852E-25 -1.29448919103147E-25 -3.67241024039351E-09
+ -1.64184042481839E-25  2.18710108250715E-25 -3.67241024039286E-09
+ -5.14968398928277E-26 -6.86624531904369E-26 -3.67241024039323E-09
+  3.24329117128946E-25  1.44191151699917E-25 -3.67241024039302E-09
+ -1.44191151699917E-25 -2.44357554001261E-26 -3.67241024039319E-09
+ -2.68995257793123E-25 -2.77679038637796E-25 -3.67241024039332E-09
+ -4.42065029511371E-25  1.63982094090102E-25 -3.67241024039312E-09
+  1.29650867494884E-25 -7.77501308185829E-26 -3.67241024039339E-09
+  7.77501308185829E-26 -8.86553439723582E-26 -3.67241024039341E-09
+ -6.78546596234906E-26 -1.36719061205664E-25 -3.67241024039338E-09
+ -1.40758029040396E-25  1.29246970711411E-26 -3.67241024039329E-09
+ -1.66001578007468E-25  2.46377037918626E-26 -3.67241024039317E-09
+ -1.66203526399205E-25  7.20955758499587E-26 -3.67241024039339E-09
+  3.71786989187042E-25 -1.00974195868290E-26 -3.67241024039350E-09
+ -2.79698522555162E-25 -2.44963399176470E-25 -3.67241024039303E-09
+ -1.95082146417535E-25  7.12877822830124E-26 -3.67241024039339E-09
+  2.24667585806944E-25  8.04764341070267E-26 -3.67241024039359E-09
+  2.13156527477959E-25  1.57519745554532E-26 -3.67241024039332E-09
+ -1.58226564925610E-25 -1.29448919103147E-25 -3.67241024039307E-09
+ -1.14100841331167E-26 -1.34901525680035E-25 -3.67241024039289E-09
+  1.34901525680035E-25  7.08838854995392E-26 -3.67241024039339E-09
+  8.49192987252315E-26 -2.31533831125988E-25 -3.67241024039317E-09
+  7.42160339631928E-26  8.02744857152902E-26 -3.67241024039314E-09
+ -8.03754599111585E-26 -1.71656132976092E-27 -3.67241024039360E-09
+  1.61558713389263E-27  2.44357554001261E-26 -3.67241024039317E-09
+ -2.44357554001261E-26  1.10061873496436E-25 -3.67241024039365E-09
+ -1.43585306524708E-25  1.21875854413025E-25 -3.67241024039352E-09
+  1.71656132976092E-25  4.94773559754619E-27 -3.67241024039340E-09
+  2.88685225987440E-25  1.55500261637166E-25 -3.67241024039341E-09
+  1.38132699947820E-25  5.04870979341448E-28 -3.67241024039317E-09
+  1.69232752275253E-25 -3.78653234506086E-26 -3.67241024039305E-09
+  3.78653234506086E-26  1.07234596012123E-25 -3.67241024039282E-09
+  6.55322531185199E-26  1.25208002876679E-26 -3.67241024039314E-09
+ -1.31771325608118E-25 -6.52293305309150E-26 -3.67241024039299E-09
+ -1.10667718671645E-25 -3.06052787676786E-25 -3.67241024039406E-09
+ -2.19618876013530E-25 -5.98776981498957E-26 -3.67241024038965E-09
+  5.98776981498957E-26 -3.83701944299500E-26 -3.67241024040750E-09
+ -7.45189565507977E-26  1.69838597450463E-25 -3.67241024033254E-09
+  1.24198260917996E-26 -1.50350577647883E-25 -3.67241024065134E-09
+  7.66394146640317E-26  2.78688780596479E-26 -3.67241023929447E-09
+  1.57418771358663E-25  1.13797918743562E-25 -3.67241024506611E-09
+  1.79734068645555E-25  2.06088333767179E-25 -3.67241022055434E-09
+  1.92658765716696E-25  1.39344390298240E-26 -3.67241032447343E-09
+ -2.21234463147422E-25  4.72559236663595E-26 -3.67240988467279E-09
+  9.95605571261335E-26 -7.14897306747490E-26 -3.67241174257651E-09
+ -1.21471957629552E-25 -4.79627430374375E-26 -3.67240390907499E-09
+  4.79627430374375E-26  9.06748278897240E-26 -3.67243687051309E-09
+ -2.37592282878085E-25 -3.04942071522234E-26 -3.67229847617418E-09
+  3.04942071522234E-26 -8.38085825706803E-27 -3.67287821070506E-09
+  1.96899681943165E-25 -2.86766716265942E-26 -3.67045572285493E-09
+  2.86766716265942E-26  5.00832011506716E-26 -3.68055153739757E-09
+  1.52168113173512E-25 -1.36315164422191E-26 -3.63860299048642E-09
+  7.07829113036709E-26 -5.35163238101934E-27 -3.81241466280927E-09
+  6.40176401804956E-26 -1.48634016318122E-25 -3.09590192026221E-09
+ -5.76562658407933E-26 -1.48129145338781E-25 -6.05788840844362E-09
+ -1.70141520038068E-25  1.00974195868290E-28  5.82604926563694E-09
+  0.00000000000000E+00 -5.20017108721691E-26 -4.87380196349415E-08
+  2.32745521476407E-25  7.51248017260074E-25 -2.58132668003456E-06
+  1.40717639362048E-24  9.31224423975713E-24 -8.71919391713703E-06
+  3.55429169456379E-24  6.43649914142825E-24 -1.31019766382739E-05
+ -6.43649914142825E-24  1.03914564451974E-23 -1.54793032716819E-05
+ -4.35820785238876E-23  9.46087825607525E-23 -1.66505032150849E-05
+  1.44136221737365E-22 -3.65510433171869E-23 -1.72168609640783E-05
+ -3.62408505874795E-23  1.05155335370804E-22 -1.74916386198471E-05
+  9.97786613892090E-23  1.37311981683803E-22 -1.76251455312477E-05
+  3.05022850878929E-23  1.05982515983357E-24 -1.76892303828135E-05
+ -4.28582954879037E-23 -6.57867080921080E-23 -1.77189319913074E-05
+ -7.23007554159631E-23 -8.03916157824974E-24 -1.77317330604970E-05
+  2.25665210862123E-23 -3.28287305606983E-24 -1.77365089130635E-05
+  2.33290782134096E-23 -6.46234853557053E-24 -1.77378002342758E-05
+  6.47527323264167E-24 -4.29746177615440E-25 -1.77379152761724E-05
+  4.32977351883225E-25
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  321007       1       0       0       0     -79       0       2
+                  
+  320989
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  7.10858338912758E-26 -3.67241024039287E-09  8.86553439723582E-26
+  3.67241024039352E-09  3.67546072960574E-25 -3.67241024039317E-09
+  5.94334116880752E-25 -1.95687991592745E-25 -3.67241024039319E-09
+ -1.80743810604238E-25 -1.17533963990689E-25 -3.67241024039320E-09
+  1.17533963990689E-25  3.27964188180204E-25 -3.67241024039321E-09
+  5.14968398928277E-26  1.47422325967703E-26 -3.67241024039332E-09
+ -1.47422325967703E-26 -4.73770927014014E-25 -3.67241024039364E-09
+ -3.80470770031715E-25 -1.73069771718248E-25 -3.67241024039309E-09
+ -2.11238017756462E-25  1.55298313245429E-25 -3.67241024039342E-09
+ -4.62461817076766E-26 -3.70373350444886E-25 -3.67241024039375E-09
+ -3.10192729707385E-25  6.86624531904369E-26 -3.67241024039320E-09
+  2.17296469508559E-25  4.52364397489937E-26 -3.67241024039330E-09
+  1.51461293802434E-25  2.48598470227729E-25 -3.67241024039275E-09
+ -5.35163238101934E-26  1.15716428465060E-25 -3.67241024039342E-09
+ -1.55500261637166E-26 -1.07032647620387E-25 -3.67241024039322E-09
+ -3.89558447659861E-25  5.49299625523495E-26 -3.67241024039311E-09
+  1.35507370855245E-25  1.40556080648659E-25 -3.67241024039333E-09
+ -2.24970508394549E-25  7.95676663442121E-26 -3.67241024039309E-09
+  2.66571877092284E-26 -4.21668241945977E-25 -3.67241024039293E-09
+  4.96793043671984E-26 -1.52471035761117E-25 -3.67241024039317E-09
+ -3.19078458943795E-26 -3.53409685539013E-26 -3.67241024039372E-09
+ -3.69161660094466E-25  2.05785411179574E-25 -3.67241024039285E-09
+  8.78475504054119E-26 -7.12877822830124E-26 -3.67241024039349E-09
+ -4.28130590481548E-26 -4.50748810356044E-25 -3.67241024039362E-09
+ -6.07864659127103E-25 -2.68187464226177E-25 -3.67241024039329E-09
+ -4.35804629367538E-25 -2.98277774594927E-25 -3.67241024039338E-09
+  1.79734068645555E-25  3.79057131289559E-25 -3.67241024039323E-09
+  3.28368084963677E-25  1.16928118815479E-25 -3.67241024039320E-09
+ -2.38702999032636E-25  1.72665874934775E-25 -3.67241024039336E-09
+  2.44155605609524E-25 -4.50142965180835E-25 -3.67241024039345E-09
+ -2.62129012474080E-25  5.14968398928277E-26 -3.67241024039336E-09
+  7.47209049425342E-26  1.11879409022065E-25 -3.67241024039286E-09
+  1.43787254916444E-25 -1.80945758995975E-25 -3.67241024039330E-09
+ -3.71383092403569E-25 -2.20123746992871E-25 -3.67241024039319E-09
+ -3.66940227785364E-25  1.65597681223995E-25 -3.67241024039377E-09
+  1.27833331969255E-25  9.14826214566703E-26 -3.67241024039268E-09
+  3.61487621208476E-25  3.32609001190146E-25 -3.67241024039322E-09
+  4.12580564317831E-25  4.80637172333058E-26 -3.67241024039324E-09
+  2.45569244351680E-25 -2.71216690102226E-25 -3.67241024039306E-09
+ -1.77108739552980E-25 -1.27429435185781E-25 -3.67241024039334E-09
+ -1.29246970711411E-26  4.66500784911498E-26 -3.67241024039317E-09
+ -1.88215901098492E-25 -1.23592415742786E-25 -3.67241024039296E-09
+ -1.95889939984482E-26  1.35305422463508E-26 -3.67241024039307E-09
+ -1.58327539121478E-25 -9.83488667757140E-26 -3.67241024039344E-09
+ -1.95082146417535E-25  2.11036069364725E-26 -3.67241024039299E-09
+ -2.13055553282091E-26 -6.36137433970224E-26 -3.67241024039327E-09
+ -8.03754599111585E-26  4.23081880688133E-26 -3.67241024039318E-09
+  1.08749208950148E-25 -1.60548971430580E-26 -3.67241024039321E-09
+  1.56913900379322E-25 -1.48331093730517E-25 -3.67241024039318E-09
+ -2.84646258152708E-25 -4.50344913572571E-26 -3.67241024039382E-09
+  2.71620586885699E-26  2.19416927621793E-25 -3.67241024039306E-09
+  7.41150597673245E-26 -1.15413505877455E-25 -3.67241024039361E-09
+  1.15312531681587E-25 -6.42195885722321E-26 -3.67241024039309E-09
+ -2.29312398816885E-25 -7.67403888599000E-27 -3.67241024039283E-09
+  1.39344390298240E-25  1.28237228752728E-26 -3.67241024039364E-09
+ -1.29246970711411E-26 -1.71656132976092E-25 -3.67241024039303E-09
+ -2.50416005753358E-25 -9.28962601988264E-26 -3.67241024039386E-09
+  9.29972343946946E-26  2.24162714827603E-26 -3.67241024039316E-09
+ -2.24162714827603E-26  2.64552393174919E-26 -3.67241024039306E-09
+ -2.64552393174919E-26 -9.89547119509237E-26 -3.67241024039349E-09
+ -2.32240650497066E-26 -1.63376248914892E-25 -3.67241024039317E-09
+ -2.51021850928568E-25  1.31569377216381E-25 -3.67241024039295E-09
+ -1.31670351412250E-25 -6.53303047267833E-26 -3.67241024039322E-09
+ -1.10768692867514E-25 -1.50148629256147E-25 -3.67241024039327E-09
+  3.42302523993501E-26  3.07567400614810E-25 -3.67241024039438E-09
+  2.79496574163425E-25  1.87509081727414E-25 -3.67241024038987E-09
+ -7.44179823549294E-26  1.27732357773386E-25 -3.67241024040381E-09
+ -1.27732357773386E-25  7.62355178805586E-26 -3.67241024037322E-09
+ -7.61345436846903E-26 -1.57418771358663E-25 -3.67241024036949E-09
+ -3.21400865448766E-25  2.65057264154260E-25 -3.67241024097486E-09
+  1.35204448267640E-25  2.43953657217787E-25 -3.67241023581727E-09
+  2.37895205465690E-25 -3.21097942861161E-26 -3.67241026900556E-09
+  3.21097942861161E-26 -1.01479066847631E-25 -3.67241007889590E-09
+ -1.47321351771834E-25  2.77679038637796E-26 -3.67241110014965E-09
+ -2.75659554720430E-26  1.48331093730517E-25 -3.67240583402067E-09
+  1.45301867854469E-25  4.18033170894719E-26 -3.67243222729908E-09
+  7.57306469012171E-27  1.65597681223995E-26 -3.67230269536434E-09
+ -6.73497886441491E-26  2.88685225987440E-25 -3.67292820288685E-09
+  1.51562267998303E-25 -7.76491566227146E-26 -3.66994637327746E-09
+ -2.15983804962271E-25  4.67510526870180E-26 -3.68401209400644E-09
+ -4.67510526870180E-26  7.59325952929537E-26 -3.61825614792585E-09
+  1.37324906380874E-26  8.91602149516996E-26 -3.92357309176902E-09
+  5.76562658407933E-26 -2.38198128053295E-25 -2.51973002858363E-09
+ -1.40657054844527E-25  7.57306469012171E-27 -9.03659666759729E-09
+ -7.57306469012171E-27 -6.47244595515736E-26  1.93364806333660E-08
+ -1.54995390657824E-25 -1.13091099372484E-25 -8.62449047588266E-07
+  7.31457074869889E-25  6.24262868536113E-24 -6.30426733966067E-06
+  6.15861815439871E-24  3.15362608535842E-24 -1.14752219114733E-05
+ -3.14070138828728E-24  3.67319890761829E-23 -1.45088873074069E-05
+  3.09934235765963E-23  8.07535073004893E-23 -1.60559227018534E-05
+  1.91052872105607E-22 -5.17504870728488E-23 -1.68176487092601E-05
+ -6.58642562745348E-23 -1.05930817195072E-22 -1.71930230383733E-05
+ -5.85230283381267E-23 -1.03397576569128E-23 -1.73793423962185E-05
+  2.32644547280539E-24 -3.67578384703252E-23 -1.74717988743411E-05
+ -9.77107098578264E-23 -5.23708725322636E-23 -1.75169794511500E-05
+ -1.06163461742353E-22  3.76884166594473E-23 -1.75382275855495E-05
+ -3.76625672653050E-23  2.30059607866311E-23 -1.75474928523005E-05
+ -1.10635406928967E-23 -3.94203260669802E-24 -1.75509812426694E-05
+  2.15842441088056E-23 -4.56241806611279E-24 -1.75519308993191E-05
+  4.54949336904165E-24  3.41858237531681E-24 -1.75520158509334E-05
+ -3.41858237531681E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  320323       1       0       0       0     -80       0       2
+                  
+  320311
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -2.13459450065564E-25 -3.67241024039309E-09 -2.01948391736579E-28
+  3.67241024039374E-09 -2.92421271234566E-25 -3.67241024039319E-09
+ -2.94642703543669E-25  2.86160871090732E-25 -3.67241024039364E-09
+  3.01105052079239E-25  6.48254337474419E-26 -3.67241024039385E-09
+ -2.73842019194801E-25 -5.14968398928277E-26 -3.67241024039342E-09
+  5.12948915010911E-26  1.17533963990689E-25 -3.67241024039278E-09
+  1.76098997594297E-25  9.99644539096066E-26 -3.67241024039321E-09
+ -1.89225643057175E-25  1.20563189866738E-25 -3.67241024039363E-09
+  1.72867823326512E-25  1.46816480792493E-25 -3.67241024039298E-09
+  1.46614532400756E-25  1.67617165141361E-26 -3.67241024039320E-09
+ -2.16690624333349E-25 -2.20931540559817E-25 -3.67241024039350E-09
+ -3.66132434218418E-25 -1.51865190585907E-25 -3.67241024039387E-09
+ -4.35198784192328E-25 -4.46305945737840E-26 -3.67241024039273E-09
+  3.38263556158770E-25  2.11439966148198E-25 -3.67241024039333E-09
+ -1.79734068645555E-26 -3.95213002628485E-25 -3.67241024039323E-09
+ -4.85483933734736E-25  2.55868612330246E-25 -3.67241024039329E-09
+ -2.55868612330246E-25  2.92825168018040E-26 -3.67241024039340E-09
+ -1.34093732113088E-25  7.95676663442121E-26 -3.67241024039320E-09
+  2.66571877092284E-26 -2.45569244351680E-25 -3.67241024039263E-09
+ -4.49537120005625E-25 -3.48360975745599E-25 -3.67241024039310E-09
+ -2.38904947424373E-25 -1.33083990154406E-25 -3.67241024039350E-09
+ -2.71216690102226E-25 -2.76467348287377E-25 -3.67241024039354E-09
+  3.88750654092915E-25 -4.72559236663595E-26 -3.67241024039368E-09
+  4.72559236663595E-26  4.72559236663595E-26 -3.67241024039257E-09
+ -2.25374405178022E-25  4.44286461820474E-26 -3.67241024039325E-09
+ -2.20931540559817E-25  8.27988406119974E-26 -3.67241024039327E-09
+ -8.27988406119974E-26  3.79057131289559E-25 -3.67241024039268E-09
+  3.28368084963677E-25  1.16928118815479E-25 -3.67241024039320E-09
+ -2.38702999032636E-25  1.85186675222443E-25 -3.67241024039330E-09
+  2.31432856930120E-25 -5.87669819953445E-26 -3.67241024039384E-09
+ -2.35067927981378E-25  8.68378084467290E-27 -3.67241024039324E-09
+  1.58327539121478E-25  1.39344390298240E-26 -3.67241024039292E-09
+ -5.21026850680374E-26  1.47422325967703E-26 -3.67241024039329E-09
+  1.49239861493332E-25 -8.92611891475679E-26 -3.67241024039350E-09
+ -2.04371772437418E-25 -6.48254337474419E-26 -3.67241024039371E-09
+  2.25778301961495E-25 -1.66001578007468E-25 -3.67241024039274E-09
+  3.17058975026429E-26  2.15680882374666E-25 -3.67241024039345E-09
+  7.77501308185829E-26 -2.06189307963047E-25 -3.67241024039321E-09
+ -2.43751708826051E-25 -1.57317797162795E-25 -3.67241024039311E-09
+  3.12010265233015E-25 -1.27429435185781E-25 -3.67241024039334E-09
+  1.27429435185781E-25  4.66500784911498E-26 -3.67241024039350E-09
+ -1.88215901098492E-25  1.95889939984482E-26 -3.67241024039349E-09
+  2.73842019194801E-25 -8.44144277458900E-26 -3.67241024039307E-09
+  2.33048444064012E-25  2.36279618331797E-26 -3.67241024039333E-09
+ -2.44357554001261E-26 -1.31266454628776E-27 -3.67241024039328E-09
+  3.83701944299500E-27 -2.85151129132050E-25 -3.67241024039370E-09
+ -3.01912845646186E-25 -1.35507370855245E-25 -3.67241024039350E-09
+ -1.58125590729741E-25 -1.13797918743562E-25 -3.67241024039310E-09
+  2.54757896175694E-25  1.92860714108433E-26 -3.67241024039329E-09
+  4.28433513069152E-25 -1.13797918743562E-25 -3.67241024039357E-09
+  9.59254860748750E-26  2.19416927621793E-25 -3.67241024039317E-09
+  7.41150597673245E-26 -1.74685358852141E-26 -3.67241024039328E-09
+  1.52168113173512E-25 -1.97404552922506E-25 -3.67241024039338E-09
+  6.41186143763638E-26  9.01699569103825E-26 -3.67241024039336E-09
+  2.03463004674603E-25  2.08612688663886E-25 -3.67241024039361E-09
+ -4.51354655531254E-26 -1.71656132976092E-25 -3.67241024039325E-09
+ -2.50416005753358E-25  4.94773559754619E-27 -3.67241024039340E-09
+  2.88685225987440E-25 -6.78546596234906E-26 -3.67241024039328E-09
+  6.78546596234906E-26 -2.31028960146646E-25 -3.67241024039304E-09
+  6.13923110879200E-26  2.33250392455749E-26 -3.67241024039340E-09
+ -2.32240650497066E-26 -6.54312789226516E-26 -3.67241024039315E-09
+ -1.07335570207992E-25  3.37253814200087E-26 -3.67241024039294E-09
+  8.54241697045729E-26  7.16916790664856E-26 -3.67241024039398E-09
+  4.59432591200717E-26  8.53231955087046E-26 -3.67241024038919E-09
+  9.21894408277483E-26 -2.39005921620241E-25 -3.67241024041104E-09
+ -5.46270399647446E-26  1.87509081727414E-25 -3.67241024031642E-09
+  1.06022905661704E-25 -3.05951813480917E-25 -3.67241024072959E-09
+ -1.69838597450463E-25  1.74180487872799E-25 -3.67241023891744E-09
+  1.19452473712186E-25  8.20920212409194E-26 -3.67241024685356E-09
+  2.63542651216236E-26  2.73942993390669E-25 -3.67241021215777E-09
+  2.33149418259880E-25 -3.43110317560448E-25 -3.67241036359670E-09
+ -2.43953657217787E-25  2.61422193103002E-25 -3.67240970367814E-09
+  3.21097942861161E-26 -1.61558713389263E-27 -3.67241257480795E-09
+  4.63471559035449E-26 -2.12045811323408E-26 -3.67240010274169E-09
+ -1.71757107171960E-25  4.92754075837253E-26 -3.67245419782431E-09
+  1.45200893658600E-25 -1.05417060486494E-25 -3.67221992641354E-09
+  5.60406787069007E-26  2.11238017756462E-25 -3.67323297628686E-09
+  1.27328460989913E-25 -5.72523690573202E-26 -3.66885884315946E-09
+  1.51562267998303E-25  1.20664164062606E-25 -3.68771828403649E-09
+  1.72867823326512E-25 -1.07436544403860E-25 -3.60653905820639E-09
+ -3.53409685539013E-27 -1.72665874934775E-26 -3.95572769114796E-09
+ -4.00867557597109E-26  1.69636649058726E-26 -2.46093046140696E-09
+  4.09955235225255E-26  2.40318586166529E-26 -8.94597395804359E-09
+ -1.73978539481063E-25 -1.91850972149750E-26  1.76720761351751E-08
+  1.90841230191067E-26  1.00772247476553E-25 -1.57101478830654E-07
+ -1.00772247476553E-25  4.36531643577789E-24 -3.76874819374065E-06
+  4.79344702625944E-24  7.90991460753833E-24 -9.56990432756230E-06
+  3.07607790293157E-24 -6.30208229188838E-23 -1.33474547521495E-05
+ -3.80761575715816E-23  7.13443278326986E-24 -1.53673825057499E-05
+ -7.13443278326986E-24  6.01773895632328E-23 -1.63810943817335E-05
+ -3.87223924251386E-23 -5.73856549958663E-23 -1.68863543377959E-05
+ -2.93132129573479E-23 -3.12260681238768E-23 -1.71404709690535E-05
+ -4.44609579247252E-24 -8.91287110025887E-23 -1.72691960877876E-05
+ -8.65954703766451E-23 -1.03397576569128E-25 -1.73342214396680E-05
+  1.03397576569128E-25 -3.70163324117480E-23 -1.73664762356253E-05
+ -5.95828534979603E-23 -1.52511425439464E-24 -1.73818355972555E-05
+ -3.82054045422930E-23 -7.90991460753833E-24 -1.73886031515654E-05
+  2.06278165255411E-23  2.27733162393505E-23 -1.73911737238040E-05
+ -2.27603915422794E-23  1.57422810326498E-23 -1.73918787514344E-05
+  1.34158355598444E-23  3.91618321255574E-24 -1.73919421406017E-05
+  2.93713740941681E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  320137       1       0       0       0     -81       0       2
+                  
+  320125
+ FLUX    
+ REAL*8           
+       1     301       0       0
+ -3.91174034793754E-25 -3.67241024039310E-09  1.77714584728190E-26
+  3.67241024039375E-09  2.94642703543669E-25 -3.67241024039318E-09
+ -8.23949438285242E-26  2.85958922698996E-25 -3.67241024039340E-09
+  3.01105052079239E-25  7.83559759937927E-26 -3.67241024039298E-09
+  1.31064506237040E-25  1.32276196587459E-25 -3.67241024039320E-09
+  2.47184831485573E-25  1.45402842050337E-26 -3.67241024039321E-09
+ -2.20527643776344E-25 -8.68378084467290E-26 -3.67241024039311E-09
+ -5.89285407087338E-25  1.53480777719800E-26 -3.67241024039308E-09
+ -1.55500261637166E-26  2.47386779877309E-25 -3.67241024039320E-09
+ -3.39677194900926E-25  1.67617165141361E-26 -3.67241024039287E-09
+  7.69423372516366E-26  7.12877822830124E-26 -3.67241024039361E-09
+  6.11095833394888E-25  1.42575564566025E-25 -3.67241024039364E-09
+ -1.42575564566025E-25  2.48396521835992E-25 -3.67241024039264E-09
+  4.50344913572571E-26  2.13459450065564E-25 -3.67241024039309E-09
+  5.67273032388050E-25  9.04728794979874E-26 -3.67241024039290E-09
+ -9.04728794979874E-26 -3.77643492547403E-26 -3.67241024039354E-09
+ -2.55868612330246E-25  3.93799363886329E-26 -3.67241024039277E-09
+ -3.93799363886329E-26 -3.06961555439600E-26 -3.67241024039311E-09
+ -1.56711951987585E-25 -1.28035280360991E-25 -3.67241024039304E-09
+  4.96793043671984E-26 -4.32169558316279E-26 -3.67241024039326E-09
+ -1.41161925823869E-25 -1.13696944547694E-25 -3.67241024039318E-09
+ -6.90663499739100E-26  1.71656132976092E-26 -3.67241024039294E-09
+  3.88548705701178E-25  4.26111106564182E-26 -3.67241024039341E-09
+  1.36921009597401E-25 -4.50748810356044E-25 -3.67241024039318E-09
+ -6.07864659127103E-25 -1.70444442625673E-25 -3.67241024039325E-09
+ -5.33547650968042E-25  2.88786200183308E-25 -3.67241024039316E-09
+  4.16821480544299E-25  1.83167191305077E-25 -3.67241024039312E-09
+  5.23854128164686E-25 -9.06748278897240E-26 -3.67241024039319E-09
+ -3.24531065520682E-25  1.34497628896562E-25 -3.67241024039296E-09
+  1.58933384296688E-25 -3.38869401333980E-25 -3.67241024039347E-09
+ -5.42231431812715E-25 -8.90592407558313E-26 -3.67241024039281E-09
+ -4.57211158891615E-25 -1.00974195868290E-27 -3.67241024039347E-09
+ -3.30589517272780E-25 -2.47184831485573E-25 -3.67241024039357E-09
+ -6.33512104877648E-25 -1.05820957269967E-25 -3.67241024039333E-09
+ -1.87812004315018E-25 -2.08410740272150E-25 -3.67241024039360E-09
+ -2.46377037918626E-25  1.45806738833810E-25 -3.67241024039301E-09
+  1.47826222751176E-25  9.89547119509237E-26 -3.67241024039355E-09
+  3.52399943580330E-25 -1.08446286362543E-25 -3.67241024039309E-09
+ -4.82656656250424E-26 -1.36719061205664E-25 -3.67241024039317E-09
+ -1.40758029040396E-25  1.10667718671645E-25 -3.67241024039307E-09
+ -2.64148496391445E-25 -5.14968398928277E-26 -3.67241024039313E-09
+ -9.04728794979874E-26 -3.02922587604869E-27 -3.67241024039365E-09
+  1.46412584009020E-25  8.78475504054119E-26 -3.67241024039306E-09
+  6.09884143044469E-26  9.73391248170311E-26 -3.67241024039324E-09
+ -9.71371764252945E-26  1.69232752275253E-25 -3.67241024039310E-09
+  2.72327406256777E-25 -1.35608345051113E-25 -3.67241024039378E-09
+  1.41060951628000E-25 -2.77981961225401E-25 -3.67241024039335E-09
+ -1.58024616533873E-25 -1.29448919103147E-25 -3.67241024039361E-09
+ -1.14100841331167E-26 -8.88572923640948E-27 -3.67241024039326E-09
+  8.88572923640948E-27  1.50653500235488E-25 -3.67241024039369E-09
+  1.24804106093206E-25 -7.41150597673245E-26 -3.67241024039342E-09
+ -6.19981562631298E-26  1.41464848411474E-25 -3.67241024039292E-09
+  1.52168113173512E-25 -1.93365585087774E-25 -3.67241024039305E-09
+ -1.00065428105475E-25 -4.12984461101304E-26 -3.67241024039361E-09
+ -9.02709311062508E-26  2.08612688663886E-25 -3.67241024039350E-09
+ -4.51354655531254E-26  2.40318586166529E-26 -3.67241024039306E-09
+  1.41060951628000E-25  4.94773559754619E-27 -3.67241024039329E-09
+  2.88685225987440E-25  1.27934306165123E-25 -3.67241024039306E-09
+  1.65597681223995E-25  5.04870979341448E-28 -3.67241024039328E-09
+  1.69232752275253E-25  3.55429169456379E-26 -3.67241024039361E-09
+ -3.55429169456379E-26 -2.98681671378400E-25 -3.67241024039295E-09
+ -1.67718139337229E-25 -1.23693389938655E-25 -3.67241024039403E-09
+ -1.69939571646331E-25 -2.22042256714369E-25 -3.67241024039131E-09
+  4.59432591200717E-26  5.65455496862421E-27 -3.67241024040122E-09
+ -1.21774880217157E-25  2.50214057361621E-25 -3.67241024036101E-09
+  1.57620719750400E-25 -6.47244595515736E-26 -3.67241024052816E-09
+  6.46234853557053E-26 -1.23188518959313E-26 -3.67241023983736E-09
+  1.23794364134523E-25  7.63364920764269E-26 -3.67241024267130E-09
+  1.07436544403860E-25 -6.99751177367246E-26 -3.67241023109740E-09
+ -3.83701944299500E-26 -8.19910470450511E-26 -3.67241027813751E-09
+  8.18900728491828E-26  1.91346101170409E-25 -3.67241008800946E-09
+  1.02185886218709E-25  1.11778434826196E-25 -3.67241085158396E-09
+ -2.54454973588090E-26  9.83488667757140E-26 -3.67240780761044E-09
+  9.30982085905629E-26  2.76669296679113E-26 -3.67241983701927E-09
+ -2.76669296679113E-26 -2.01948391736579E-28 -3.67237279248025E-09
+  1.94678249634062E-25 -5.62426270986373E-26 -3.67255443421343E-09
+ -9.05738536938557E-26 -3.04942071522234E-26 -3.67186430405844E-09
+ -6.54312789226516E-26 -2.00534752994423E-25 -3.67443210311541E-09
+ -9.30982085905629E-26  2.28201682662334E-26 -3.66514584687697E-09
+  6.99751177367246E-26  9.91566603426603E-26 -3.69738444431288E-09
+  2.50012108969885E-25 -1.37324906380874E-26 -3.59248313420328E-09
+ -4.32169558316279E-26  8.83524213847533E-26 -3.89663772383702E-09
+ -2.95854393894088E-26  1.02993679785655E-25 -3.23917083348685E-09
+ -1.63578197306629E-26 -1.26217744835362E-25 -3.34224756146355E-09
+ -2.02958133695262E-26 -5.65455496862421E-25 -1.58487825807655E-06
+  5.65455496862421E-27  1.26209666899692E-23 -7.34757047487880E-06
+  1.38940493514766E-24  3.31130738962634E-23 -1.19246462832618E-05
+  1.97747865188458E-24 -7.36707733055040E-24 -1.45377675536451E-05
+ -3.64993445289023E-23 -2.48671171648754E-23 -1.58804450984939E-05
+ -3.37076099615359E-23 -1.03397576569128E-23 -1.65567383079266E-05
+  8.87151206963122E-23 -6.26072326126073E-23 -1.68997832054134E-05
+ -1.48634016318122E-22  1.24697477342369E-22 -1.70757506132719E-05
+  2.06019671313988E-22 -5.19572822259871E-23 -1.71664911720159E-05
+ -7.02586532787228E-23  5.94536065272489E-23 -1.72130164990172E-05
+  6.56574611213966E-24 -6.96899666075926E-23 -1.72363736891111E-05
+ -3.08124778176003E-23  1.60783231564995E-23 -1.72476021977859E-05
+ -1.61041725506418E-23 -3.25702366192755E-24 -1.72525863793049E-05
+ -1.02105106862014E-23  1.06241009924779E-23 -1.72544902576826E-05
+  6.59159550628194E-24 -1.31056428301370E-23 -1.72550146062442E-05
+ -2.65473277841237E-23 -5.90012421297589E-24 -1.72550618687095E-05
+ -8.95035272176518E-25
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  319987       1       0       0       0     -82       0       2
+                  
+  319969
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  7.08838854995392E-26 -3.67241024039375E-09  8.86553439723582E-26
+  3.67241024039375E-09 -3.08981039356966E-25 -3.67241024039363E-09
+ -6.58351757061248E-26  3.01105052079239E-25 -3.67241024039341E-09
+ -9.02709311062508E-26 -2.21941282518500E-25 -3.67241024039299E-09
+ -1.55904158420639E-25  1.32276196587459E-25 -3.67241024039298E-09
+  2.47184831485573E-25  2.20527643776344E-25 -3.67241024039397E-09
+ -2.20527643776344E-25  1.04407318527811E-25 -3.67241024039385E-09
+  9.99644539096066E-26 -1.83773036480287E-25 -3.67241024039331E-09
+ -1.09859925104699E-25  2.47386779877309E-25 -3.67241024039321E-09
+  2.47386779877309E-25 -2.72630328844382E-25 -3.67241024039311E-09
+ -1.14504738114640E-25 -3.18876510552058E-25 -3.67241024039242E-09
+  2.54454973588090E-26  2.40924431341739E-25 -3.67241024039355E-09
+ -4.42266977903108E-26 -2.40520534558266E-25 -3.67241024039289E-09
+ -4.44892306995684E-25 -7.79520792103195E-26 -3.67241024039373E-09
+ -1.15514480073323E-25 -2.02958133695262E-25 -3.67241024039290E-09
+ -9.04728794979874E-26  5.73533432531884E-26 -3.67241024039341E-09
+ -5.75552916449250E-26  3.93799363886329E-26 -3.67241024039387E-09
+  6.52293305309150E-26 -1.28439177144464E-25 -3.67241024039330E-09
+ -6.45830956773580E-25 -2.30625063363173E-25 -3.67241024039355E-09
+  1.52672984152854E-25  1.40959977432132E-25 -3.67241024039380E-09
+ -1.40959977432132E-25 -1.26621641618835E-25 -3.67241024039329E-09
+ -4.60442333159400E-25 -8.78475504054119E-26 -3.67241024039319E-09
+ -4.99216424372823E-25 -3.91779879968963E-26 -3.67241024039317E-09
+  3.32810949581882E-25  2.08006843488676E-26 -3.67241024039283E-09
+  2.72630328844382E-25  2.52435489670724E-26 -3.67241024039314E-09
+ -2.52435489670724E-26  5.65455496862421E-27 -3.67241024039338E-09
+  1.12687202589011E-25 -2.36279618331797E-26 -3.67241024039280E-09
+  1.97101630334901E-25 -2.98277774594927E-25 -3.67241024039324E-09
+ -1.16928118815479E-25  1.34497628896562E-25 -3.67241024039274E-09
+  1.58933384296688E-25 -3.15039491109063E-26 -3.67241024039348E-09
+  2.00332804602686E-25 -8.90592407558313E-26 -3.67241024039336E-09
+ -4.57211158891615E-25 -1.81551604171185E-25 -3.67241024039330E-09
+ -1.49845706668542E-25 -1.64991836048785E-25 -3.67241024039308E-09
+ -2.57888096247611E-25 -5.13554760186120E-25 -3.67241024039329E-09
+ -4.98004734022404E-25 -2.08410740272150E-25 -3.67241024039371E-09
+ -2.46377037918626E-25  5.00832011506716E-25 -3.67241024039300E-09
+  3.79662976464769E-25  1.17937860774162E-25 -3.67241024039371E-09
+ -1.17937860774162E-25 -1.08446286362543E-25 -3.67241024039331E-09
+ -4.82656656250424E-26  1.97707475510111E-25 -3.67241024039322E-09
+  3.89558447659861E-25  4.70539752746229E-26 -3.67241024039323E-09
+ -3.39273298117453E-26  1.88215901098492E-25 -3.67241024039329E-09
+  1.05215112094758E-25  9.47137957244556E-26 -3.67241024039340E-09
+  5.55358077275592E-26  1.34699577288298E-25 -3.67241024039314E-09
+  3.07365452223073E-25 -4.92754075837253E-26 -3.67241024039358E-09
+ -9.71371764252945E-26  4.62461817076766E-26 -3.67241024039350E-09
+ -4.63471559035449E-26 -8.94631375393045E-26 -3.67241024039342E-09
+ -5.45260657688763E-26  2.11439966148198E-25 -3.67241024039343E-09
+  2.33250392455749E-25 -1.29448919103147E-25 -3.67241024039339E-09
+ -1.14100841331167E-26 -8.88572923640948E-27 -3.67241024039294E-09
+  8.88572923640948E-27  3.35537252870326E-25 -3.67241024039328E-09
+  2.51728670299646E-25  1.89629539840648E-25 -3.67241024039321E-09
+  1.03902447548470E-25 -3.11101497470200E-25 -3.67241024039335E-09
+ -2.76063451503904E-25 -6.42195885722321E-26 -3.67241024039336E-09
+ -9.62284086624799E-26 -7.34082403962465E-26 -3.67241024039372E-09
+  7.35092145921148E-26  2.08612688663886E-25 -3.67241024039350E-09
+ -4.51354655531254E-26  2.19820824405266E-25 -3.67241024039319E-09
+  3.67344124568837E-25  3.92789621927646E-26 -3.67241024039330E-09
+  2.54253025196353E-25 -4.01877299555792E-26 -3.67241024039342E-09
+  1.65698655419863E-25  2.65562135133601E-26 -3.67241024039339E-09
+ -2.64552393174919E-26 -1.35305422463508E-26 -3.67241024039338E-09
+ -1.57822668142137E-25  1.90538307603462E-25 -3.67241024039319E-09
+  1.02993679785655E-25 -2.57484199464138E-26 -3.67241024039371E-09
+  2.57484199464138E-26 -6.52293305309150E-26 -3.67241024039053E-09
+ -2.28403631054071E-25  1.03599524960865E-25 -3.67241024040508E-09
+  1.90134410819989E-25  2.33553315043354E-25 -3.67241024033732E-09
+  3.53409685539013E-25 -1.21270009237816E-25 -3.67241024065173E-09
+ -1.72362952347170E-25  4.34189042233645E-26 -3.67241023920345E-09
+ -2.25475379373890E-25  1.86095442985258E-25 -3.67241024584230E-09
+ -7.62355178805586E-26  7.15907048706173E-26 -3.67241021555809E-09
+ -7.14897306747490E-26  2.48396521835992E-26 -3.67241035310348E-09
+  1.88619797881965E-25  1.04003421744338E-26 -3.67240973087812E-09
+ -9.35021053740361E-26  1.95889939984482E-26 -3.67241253548250E-09
+  1.70141520038068E-25 -4.62461817076766E-26 -3.67239993610636E-09
+ -1.00469324888948E-25  2.76669296679113E-26 -3.67245636348669E-09
+ -2.76669296679113E-26  2.01948391736579E-27 -3.67220437136776E-09
+  2.43751708826051E-25 -1.05013163703021E-25 -3.67332670213141E-09
+ -4.17023428936036E-26 -3.35234330282721E-26 -3.66834069401166E-09
+  1.80339913820765E-25  1.40455106452791E-25 -3.69043945245977E-09
+  6.36137433970224E-27 -7.49228533342708E-26 -3.59273939212511E-09
+ -7.17926532623538E-26  9.29972343946946E-26 -4.02415810440273E-09
+  8.95641117351728E-26 -2.05987359571311E-26 -2.13124439066565E-09
+  2.05987359571311E-26  3.63507105125842E-26 -1.05842241240242E-08
+  2.63542651216236E-26  5.43241173771398E-26  2.45538906686823E-08
+ -1.41060951628000E-25  1.39748287081713E-25 -3.71339668052768E-07
+ -1.39546338689976E-25  4.45578931527588E-24 -4.84934751541688E-06
+  7.16674452594772E-24  5.23450231381213E-24 -1.02029946298330E-05
+  1.68150308895545E-23  2.85377311330795E-23 -1.35198853655860E-05
+ -2.85377311330795E-23 -8.47860127866853E-24 -1.52841049549540E-05
+  8.47860127866853E-24  7.85304594042531E-23 -1.61842703320657E-05
+ -8.73709522009135E-24 -3.99114645556836E-23 -1.66438853440873E-05
+ -4.81315718929293E-23 -4.60119215732622E-24 -1.68815258698557E-05
+  4.60119215732622E-24 -1.08205563879593E-22 -1.70057202381540E-05
+ -1.88648878450375E-22 -4.63738130912541E-23 -1.70708183521222E-05
+ -6.33827144368757E-23 -6.82940993239093E-23 -1.71046598950915E-05
+ -1.14047526955749E-22 -1.06757997807625E-23 -1.71218466010197E-05
+ -1.31961157096350E-22 -9.43502886193297E-24 -1.71301886836876E-05
+  7.20939602628248E-23 -1.19682694878766E-23 -1.71339215111459E-05
+ -2.61854362661318E-23 -2.30059607866311E-24 -1.71353569402940E-05
+ -1.47600040552431E-23 -2.68833699079734E-24 -1.71357544042738E-05
+ -1.71898471046176E-23 -2.19073615355841E-24 -1.71357903542904E-05
+  2.19073615355841E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  319099       1       0       0       0     -83       0       2
+                  
+  319087
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  7.08838854995392E-26 -3.67241024039331E-09 -2.84747232348576E-25
+  3.67241024039352E-09  6.58351757061248E-26 -3.67241024039340E-09
+ -6.58351757061248E-26 -1.49441809885068E-26 -3.67241024039320E-09
+  1.49441809885068E-26 -2.62532909257553E-26 -3.67241024039233E-09
+  2.64552393174919E-26  3.27964188180204E-25 -3.67241024039299E-09
+  5.14968398928277E-26 -2.68591361009650E-25 -3.67241024039384E-09
+ -1.12283305805538E-25  1.93668507675379E-25 -3.67241024039364E-09
+  1.89023694665438E-25 -7.51248017260074E-26 -3.67241024039288E-09
+ -1.55500261637166E-26 -3.31195362447990E-25 -3.67241024039339E-09
+ -1.46816480792493E-25 -7.49228533342708E-26 -3.67241024039298E-09
+  2.74851761153484E-25  2.68389412617914E-25 -3.67241024039300E-09
+  1.20563189866738E-25 -1.49845706668542E-25 -3.67241024039331E-09
+ -3.37253814200087E-25  5.29104786349837E-26 -3.67241024039340E-09
+  1.42373616174288E-25  1.15716428465060E-25 -3.67241024039342E-09
+  1.77916533119926E-25  1.86398365572862E-25 -3.67241024039352E-09
+ -9.59254860748750E-26  1.57923642338005E-25 -3.67241024039331E-09
+  1.35507370855245E-25  2.28403631054071E-25 -3.67241024039296E-09
+ -2.28403631054071E-25 -1.28439177144464E-25 -3.67241024039342E-09
+ -1.64991836048785E-25 -4.48325429655205E-26 -3.67241024039261E-09
+ -2.48800418619465E-25  1.46816480792493E-25 -3.67241024039363E-09
+ -1.46816480792493E-25 -1.13696944547694E-25 -3.67241024039348E-09
+  2.24566611611076E-25  1.22380725392367E-25 -3.67241024039327E-09
+ -9.89547119509237E-27 -4.72559236663595E-26 -3.67241024039346E-09
+  4.72559236663595E-26 -1.66001578007468E-25 -3.67241024039366E-09
+ -1.27429435185781E-25  1.32680093370932E-25 -3.67241024039338E-09
+ -1.32680093370932E-25  2.78486832204742E-25 -3.67241024039323E-09
+ -1.03599524960865E-25  1.83167191305077E-25 -3.67241024039334E-09
+  5.23854128164686E-25  1.16928118815479E-25 -3.67241024039341E-09
+ -2.38702999032636E-25 -3.54621375889433E-25 -3.67241024039329E-09
+ -6.22001046548663E-26 -1.42979461349498E-25 -3.67241024039362E-09
+  1.81753552562921E-26  5.14968398928277E-26 -3.67241024039336E-09
+  7.47209049425342E-26  1.94880198025799E-25 -3.67241024039309E-09
+ -6.68449176648077E-26  1.47422325967703E-26 -3.67241024039341E-09
+  1.49239861493332E-25  5.69494464697153E-26 -3.67241024039366E-09
+  2.68187464226177E-25 -1.10667718671645E-25 -3.67241024039354E-09
+ -3.44120059519131E-25  1.63982094090102E-25 -3.67241024039333E-09
+  1.29650867494884E-25  3.89760396051598E-26 -3.67241024039291E-09
+ -1.96899681943165E-25 -1.27833331969255E-25 -3.67241024039299E-09
+ -1.65597681223995E-25  5.87669819953445E-26 -3.67241024039327E-09
+ -4.28130590481548E-26 -2.03967875653945E-25 -3.67241024039356E-09
+  2.04169824045681E-25 -2.94844651935405E-26 -3.67241024039317E-09
+  1.71252236192619E-25  1.95889939984482E-26 -3.67241024039349E-09
+  2.73842019194801E-25  1.35305422463508E-26 -3.67241024039332E-09
+  1.35305422463508E-25  2.20123746992871E-25 -3.67241024039333E-09
+  2.20527643776344E-25 -2.64552393174919E-26 -3.67241024039334E-09
+  3.22410607407448E-25  8.04764341070267E-26 -3.67241024039359E-09
+  2.13156527477959E-25 -8.21929954367877E-26 -3.67241024039354E-09
+ -6.02815949333688E-26  8.17890986533145E-26 -3.67241024039317E-09
+  5.90699045829494E-26  6.06854917168420E-26 -3.67241024039325E-09
+  7.85579243855292E-26  7.08838854995392E-26 -3.67241024039350E-09
+  8.49192987252315E-26 -2.31533831125988E-25 -3.67241024039328E-09
+  7.42160339631928E-26  4.35198784192328E-26 -3.67241024039346E-09
+ -4.34189042233645E-26  6.49264079433102E-26 -3.67241024039327E-09
+ -6.49264079433102E-26  2.44357554001261E-26 -3.67241024039325E-09
+  1.07133621816255E-25  7.79520792103195E-26 -3.67241024039344E-09
+  5.21026850680374E-26  1.21875854413025E-25 -3.67241024039330E-09
+  1.71656132976092E-25  6.83595306028320E-26 -3.67241024039383E-09
+  5.86660077994762E-26 -1.30357686865962E-25 -3.67241024039337E-09
+ -1.20159293083265E-25 -1.59236306884293E-25 -3.67241024039317E-09
+ -8.83524213847533E-26 -1.11273563846855E-25 -3.67241024039327E-09
+ -5.99786723457640E-26  1.53177855132195E-25 -3.67241024039372E-09
+  1.40556080648659E-25 -4.44286461820474E-27 -3.67241024038987E-09
+ -5.06890463258813E-26  1.69434700666990E-25 -3.67241024040675E-09
+ -2.87170613049415E-25  1.43282383937103E-25 -3.67241024033535E-09
+  3.41292782034819E-26 -8.39095567665486E-26 -3.67241024064094E-09
+ -9.53196408996653E-26  1.30963532041171E-25 -3.67241023933420E-09
+  2.75457606328694E-25 -9.66323054459531E-26 -3.67241024491125E-09
+ -8.54241697045729E-26  8.83524213847533E-26 -3.67241022116221E-09
+  2.17094521116822E-26 -1.58529487513215E-26 -3.67241032208835E-09
+  1.58529487513215E-26  6.92682983656466E-26 -3.67240989409041E-09
+ -6.92682983656466E-26 -2.37895205465690E-25 -3.67241170507284E-09
+ -1.38738545123030E-25  1.96899681943165E-26 -3.67240406019146E-09
+ -1.96899681943165E-26 -9.30982085905629E-26 -3.67243625254478E-09
+ -9.83488667757140E-26 -7.00760919325929E-26 -3.67230104777980E-09
+ -7.65384404681634E-26 -4.79627430374375E-26 -3.67286730035930E-09
+ -9.88537377550554E-26  9.08767762814606E-26 -3.67050294932171E-09
+  7.67403888599000E-27  1.64587939265312E-26 -3.68034309677901E-09
+  7.93657179524756E-26 -6.86624531904369E-27 -3.63953940370348E-09
+  1.01176144260026E-25  6.90663499739100E-26 -3.80814396863972E-09
+  2.38299102249163E-26 -5.76562658407933E-26 -3.11559076103677E-09
+ -3.36244072241404E-26  4.08945493266573E-26 -5.96568804190029E-09
+  1.07335570207992E-25  5.20017108721691E-26  5.40793843461567E-09
+ -5.20017108721691E-26  4.11974719142621E-26 -4.63554694216671E-08
+ -2.21032514755686E-25  5.23450231381213E-25 -2.46859301522864E-06
+ -5.25065818515105E-25 -1.31314922242793E-23 -8.17994673947346E-06
+ -1.11087771326457E-23 -3.99890127381104E-23 -1.22031078427885E-05
+ -8.24595673138799E-23  1.99557322778418E-23 -1.44736911737957E-05
+  5.94794559213911E-23 -6.99484605490154E-23 -1.56675841684025E-05
+ -5.06648125188729E-23  8.01331218410746E-24 -1.62916910416744E-05
+  3.87740912134232E-23  3.68095372586097E-23 -1.66229696591420E-05
+ -1.18597020324790E-22 -1.47289847822723E-22 -1.68019811749265E-05
+ -7.29469902695201E-23  2.74003577908190E-24 -1.68998767144953E-05
+  5.95570041038180E-23  7.02586532787228E-23 -1.69535115578035E-05
+  6.09528713875012E-23  2.65214783899814E-23 -1.69825628902453E-05
+ -2.64697796016969E-23  9.83569447113834E-23 -1.69978525747682E-05
+  4.68132527916729E-23  5.97637992569562E-23 -1.70054836021629E-05
+  1.53028413322310E-23  5.86781247029804E-24 -1.70089587961893E-05
+  7.12150808619872E-23  1.10506159958256E-23 -1.70103018167796E-05
+  1.13608087255330E-23  8.01331218410746E-25 -1.70106703692447E-05
+ -7.88406521339605E-25 -1.19585759650733E-23 -1.70107032160216E-05
+ -8.23626320858464E-24
+       1      -1       6       8
+                                                                SCALAIRE
+     -22  318379       1       0       0       0     -84       0       2
+                  
+  318289
+ FLUX    
+ REAL*8           
+       1     301       0       0
+  8.90592407558313E-26 -3.67241024039287E-09  1.24602157701469E-25
+  3.67241024039352E-09 -1.30054764278357E-25 -3.67241024039383E-09
+  1.29852815886620E-25  1.95687991592745E-25 -3.67241024039298E-09
+  2.25576353569759E-25 -3.97838331721061E-26 -3.67241024039320E-09
+ -1.69232752275253E-25  2.24162714827603E-25 -3.67241024039320E-09
+ -5.25065818515105E-26  2.05179566004364E-25 -3.67241024039267E-09
+  8.84533955806216E-26  2.22143230910237E-27 -3.67241024039330E-09
+ -9.12806730649337E-26  1.91850972149750E-26 -3.67241024039277E-09
+ -4.03290938297948E-25  5.45260657688763E-26 -3.67241024039310E-09
+ -2.39106895816110E-25 -7.89618211690024E-26 -3.67241024039373E-09
+ -2.14267243632510E-25  3.63709053517579E-25 -3.67241024039288E-09
+  1.20563189866738E-25  2.40924431341739E-25 -3.67241024039387E-09
+  2.49406263794675E-25  1.50653500235488E-25 -3.67241024039341E-09
+ -5.37182722019300E-26 -7.77501308185829E-26 -3.67241024039329E-09
+  7.79520792103195E-26 -3.95213002628485E-25 -3.67241024039297E-09
+ -9.04728794979874E-26  2.50617954145095E-25 -3.67241024039234E-09
+  2.33654289239222E-25  2.94844651935405E-26 -3.67241024039329E-09
+ -2.92825168018040E-26  1.64991836048785E-25 -3.67241024039319E-09
+ -1.64991836048785E-25 -2.25576353569759E-25 -3.67241024039360E-09
+ -2.53647180021143E-25  1.40959977432132E-25 -3.67241024039358E-09
+ -1.40959977432132E-25 -1.26621641618835E-25 -3.67241024039332E-09
+ -4.60644281551137E-25  1.00570299084816E-25 -3.67241024039293E-09
+  8.07793566946316E-26  4.26111106564182E-26 -3.67241024039334E-09
+ -2.22547127693710E-25 -8.58280664880461E-26 -3.67241024039349E-09
+ -2.94844651935405E-26 -2.68389412617914E-25 -3.67241024039350E-09
+ -3.18876510552058E-25 -1.90033436624121E-25 -3.67241024039338E-09
+ -2.78688780596479E-25 -1.21169035041947E-27 -3.67241024039329E-09
+ -1.72261978151302E-25  9.30982085905629E-26 -3.67241024039341E-09
+ -2.14873088807720E-25 -3.03932329563551E-25 -3.67241024039308E-09
+ -4.53374139448620E-25 -3.17058975026429E-26 -3.67241024039331E-09
+ -2.61927064082343E-25  5.14968398928277E-26 -3.67241024039304E-09
+  7.47209049425342E-26  1.79734068645555E-25 -3.67241024039341E-09
+  2.41732224908685E-25 -1.64991836048785E-25 -3.67241024039319E-09
+ -2.57888096247611E-25 -1.38738545123030E-25 -3.67241024039307E-09
+ -1.23188518959313E-25  6.78546596234906E-26 -3.67241024039344E-09
+ -6.78546596234906E-26 -4.98812527589350E-26 -3.67241024039295E-09
+  4.98812527589350E-26 -5.87669819953445E-26 -3.67241024039360E-09
+ -9.91566603426603E-26 -3.00903103687503E-26 -3.67241024039315E-09
+  3.00903103687503E-26 -3.89760396051598E-26 -3.67241024039290E-09
+  5.51319109440861E-26  1.66001578007468E-25 -3.67241024039339E-09
+  1.27631383577518E-25 -1.93264610891906E-25 -3.67241024039355E-09
+  5.12948915010911E-26 -1.23592415742786E-25 -3.67241024039355E-09
+ -1.97909423901847E-26  1.35305422463508E-26 -3.67241024039339E-09
+  1.35305422463508E-25  4.86695624085155E-26 -3.67241024039317E-09
+ -4.86695624085155E-26  1.69232752275253E-25 -3.67241024039288E-09
+  2.72327406256777E-25  5.44250915730080E-26 -3.67241024039341E-09
+ -2.04068849849813E-25  4.24091622646816E-26 -3.67241024039332E-09
+  2.51223799320304E-25 -1.29549893299015E-25 -3.67241024039321E-09
+ -1.64184042481839E-25  1.30559635257698E-25 -3.67241024039296E-09
+  3.02417716625527E-25  2.77477090246060E-25 -3.67241024039350E-09
+  1.53783700307405E-25  3.17159949222297E-25 -3.67241024039317E-09
+  1.33790809525484E-25  8.02744857152902E-26 -3.67241024039314E-09
+ -8.03754599111585E-26 -1.71656132976092E-27 -3.67241024039273E-09
+  1.62063584368605E-25 -7.67403888599000E-27 -3.67241024039326E-09
+  1.39344390298240E-25  1.29246970711411E-26 -3.67241024039364E-09
+ -1.42979461349498E-25 -7.38121371797196E-26 -3.67241024039292E-09
+ -5.46270399647446E-26  2.00635727190291E-25 -3.67241024039364E-09
+  9.29972343946946E-26 -5.14968398928277E-27 -3.67241024039352E-09
+  1.30458661061830E-25 -7.14897306747490E-26 -3.67241024039330E-09
+ -9.82478925798457E-26 -9.89547119509237E-26 -3.67241024039371E-09
+ -2.32240650497066E-26  5.51319109440861E-26 -3.67241024039262E-09
+  6.56332273143882E-26 -1.61861635976868E-25 -3.67241024039337E-09
+ -1.31670351412250E-25 -6.36137433970224E-27 -3.67241024039123E-09
+  6.46234853557053E-27  5.65455496862421E-27 -3.67241024039619E-09
+ -5.75552916449250E-27 -1.81652578367053E-25 -3.67241024040573E-09
+ -2.90906658296542E-25  1.57317797162795E-25 -3.67241024022561E-09
+  2.33250392455749E-26 -6.80566080152271E-26 -3.67241024160476E-09
+  6.80566080152271E-26  7.63364920764269E-26 -3.67241023306524E-09
+ -7.61345436846903E-26 -1.67920087728965E-25 -3.67241028106278E-09
+  5.95747755622908E-26  6.03825691292371E-26 -3.67241002603596E-09
+ -6.03825691292371E-26 -4.22072138729450E-26 -3.67241133200713E-09
+  1.47523300163571E-25  1.38334648339557E-26 -3.67240481681615E-09
+ -1.17533963990689E-25 -1.97404552922506E-25 -3.67243668973012E-09
+ -1.53682726111537E-25  1.70444442625673E-25 -3.67228312479706E-09
+  2.25172456786286E-26 -2.43044889454973E-25 -3.67301398726464E-09
+ -2.45064373372339E-25 -2.02655211107657E-25 -3.66957063503093E-09
+ -4.16013686977353E-26  1.84782778438970E-26 -3.68565649821336E-09
+ -1.14403763918772E-25 -1.53783700307405E-25 -3.61107060340678E-09
+ -1.39647312885844E-25  1.97909423901847E-26 -3.95500079421264E-09
+  1.27126512598176E-25  1.35103474071771E-25 -2.38380011125780E-09
+  6.58351757061248E-26  9.99644539096066E-26 -9.65215721567542E-09
+  2.00736701386160E-25 -8.27988406119974E-27  2.16853787387587E-08
+ -7.99715631276853E-26 -3.78855182897822E-25 -7.20589351035437E-07
+ -2.81919954864264E-25  3.16655078242956E-24 -5.73897194747979E-06
+  7.77097411402356E-24 -1.29246970711411E-25 -1.06742920457331E-05
+  1.42171667782552E-25 -3.77918142360165E-23 -1.36752360237257E-05
+ -5.97896486510985E-23 -2.52290086828673E-23 -1.52655577613042E-05
+ -3.43279954209506E-23  8.30799527732947E-23 -1.60797779363244E-05
+  6.05392810812247E-23  8.11153988184813E-23 -1.64968021832648E-05
+  8.32350491381484E-24  6.04358835046556E-23 -1.67121660879859E-05
+  4.39956688301642E-23 -1.11410888753236E-22 -1.68239894520389E-05
+ -1.22422730657848E-22 -1.78360819581747E-23 -1.68819784812890E-05
+ -1.75000398343250E-22  1.56440533349091E-22 -1.69117743413299E-05
+  1.67297278888850E-22 -1.81979734761666E-23 -1.69268098587926E-05
+  1.82496722644512E-23  5.40510831515119E-23 -1.69341821237591E-05
+  1.95679913657076E-23 -3.01662429640432E-23 -1.69376405790826E-05
+  3.01920923581855E-23 -5.73856549958663E-24 -1.69391502063646E-05
+  2.94166105339170E-23  3.08771013029560E-23 -1.69397268550251E-05
+ -3.08771013029560E-23  3.75462449916648E-23 -1.69398893856616E-05
+  2.23985000242875E-23  1.72221588472955E-24 -1.69399044986060E-05
+ -8.37843487636719E-24
+       1      -1       6       0
+     -19  323929       1       0       0       0       0       0       0
+  323923
+ SCAL    
+ REAL*8           
+       4     100       0       0
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00
+       1      -1       6       0
+     -20  322843       1       0       0       0       0       0       0
+  322837
+ SCAL    
+ REAL*8           
+       4     100       0       0
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00 -1.00000000000000E+00  1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00 -1.00000000000000E+00
+  1.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+ -1.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  40NBRE OBJETS NOMMES       0NBRE OBJETS      84
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       4       1
+  4.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+ -2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01  2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  2.50000000000000E-01 -2.50000000000000E-01
+  2.50000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+       2       1
+  2.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  5.00000000000000E-01 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  5.00000000000000E-01  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00
+ ENREGISTREMENT DE TYPE   5
+LABEL AUTOMATIQUE :   1                                                 
diff --git a/resources/portico_3subs.sauv b/resources/portico_3subs.sauv
new file mode 100644 (file)
index 0000000..d672a11
--- /dev/null
@@ -0,0 +1,182 @@
+ ENREGISTREMENT DE TYPE   4
+ NIVEAU  18 NIVEAU ERREUR   0 DIMENSION   3
+ DENSITE 0.00000E+00
+ ENREGISTREMENT DE TYPE   7
+ NOMBRE INFO CASTEM2000   8
+ IFOUR   2 NIFOUR   0 IFOMOD   2 IECHO   1 IIMPI   0 IOSPI   0 ISOTYP   1
+ NSDPGE     0
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO   1NBRE OBJETS NOMMES       6NBRE OBJETS       6
+ PBAS     POT1     POT2     POUTL    STOT     EL1     
+       4       1       2       3       5       6
+       2       0       0       2       2
+       0       0
+       1       2       2       3
+       2       0       0       2       3
+       0       0       0
+       4       5       5       6       6       7
+       2       0       0       2       1
+       0
+       3       7
+       1       0       0       1       2
+       0       0
+       1       4
+       2       0       0       2       6
+       0       0       0       0       0       0
+       1       2       2       3       4       5       5       6       6       7
+       3       7
+       1       0       0       1       7
+       0       0       0       0       0       0       0
+       1       2       4       5       6       3       7
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  32NBRE OBJETS NOMMES       4NBRE OBJETS       7
+ 0P0      0P1      1P0      1P1     
+       1       4       3       7
+       7
+       1       2       6       3       4       5       7
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  33NBRE OBJETS NOMMES       0NBRE OBJETS       1
+      96
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  5.00000000000000E-01  5.00000000000000E-01  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  3.33333333333333E-01
+  3.33333333333333E-01  1.00000000000000E+00  0.00000000000000E+00
+  6.66666666666667E-01  3.33333333333333E-01  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  3.00000000000000E+03 -2.00000000000000E+04
+  6.00000000000000E+03  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00 -1.00000000000000E+03
+ -1.00000000000000E+03  1.00000000000000E+03  0.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  39NBRE OBJETS NOMMES       1NBRE OBJETS       1
+ CHAM1D  
+       1
+       3       2       6      11
+                                                             CONTRAINTES
+      -1   27665       6       0       0       0      -1       0       5      -2
+   27882       6       0       0       0      -2       0       5      -3   27931
+       6       0       0       0      -3       0       5
+             19363             19664             19888
+                           
+   27833   27840   27847   27854   27861   27868
+ EFFX     EFFY     EFFZ     MOMX     MOMY     MOMZ    
+ REAL*8            REAL*8            REAL*8            REAL*8           
+ REAL*8            REAL*8           
+       2       2       0       0
+ -7.68749999999959E-03 -7.68749999999959E-03 -4.56249999999959E-03
+ -4.56249999999959E-03
+       2       2       0       0
+ -6.11141334691317E-07 -6.11141334691317E-07 -6.11141334690612E-07
+ -6.11141334690612E-07
+       2       2       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       2       2       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       2       2       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00
+       2       2       0       0
+  1.32422443924838E-04  1.32728014592183E-04  1.32728014592184E-04
+  1.33033585259529E-04
+   27826   27903   27889   27917   27910   27896
+ EFFX     EFFY     EFFZ     MOMX     MOMY     MOMZ    
+ REAL*8            REAL*8            REAL*8            REAL*8           
+ REAL*8            REAL*8           
+       2       3       0       0
+ -8.20833333333293E-03 -8.20833333333293E-03 -6.12499999999960E-03
+ -6.12499999999960E-03 -4.04166666666627E-03 -4.04166666666627E-03
+       2       3       0       0
+  6.11141334689149E-07  6.11141334689149E-07  6.11141334689393E-07
+  6.11141334689393E-07  6.11141334688905E-07  6.11141334688905E-07
+       2       3       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+       2       3       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+       2       3       0       0
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+       2       3       0       0
+ -1.32422443922787E-04 -1.32626157701017E-04 -1.32626157701017E-04
+ -1.32829871479247E-04 -1.32829871479247E-04 -1.33033585257476E-04
+   27651   27952   27938   27966   27959   27945
+ EFFX     EFFY     EFFZ     MOMX     MOMY     MOMZ    
+ REAL*8            REAL*8            REAL*8            REAL*8           
+ REAL*8            REAL*8           
+       2       1       0       0
+ -6.11141334691013E-07 -6.11141334691013E-07
+       2       1       0       0
+  0.00000000000000E+00  0.00000000000000E+00
+       2       1       0       0
+ -5.81088996365331E-15 -5.81088996365331E-15
+       2       1       0       0
+  0.00000000000000E+00  0.00000000000000E+00
+       2       1       0       0
+ -3.66966414738893E-04 -3.66966414744704E-04
+       2       1       0       0
+  0.00000000000000E+00  0.00000000000000E+00
+ ENREGISTREMENT DE TYPE   2
+ PILE NUMERO  40NBRE OBJETS NOMMES       0NBRE OBJETS       3
+       2       2
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00
+       2       2
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00
+       2       2
+  1.00000000000000E+00 -1.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00 -5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  1.00000000000000E+00  0.00000000000000E+00  5.00000000000000E-01
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00  1.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  0.00000000000000E+00
+ -5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  0.00000000000000E+00  1.00000000000000E+00
+  5.00000000000000E-01  0.00000000000000E+00  0.00000000000000E+00
+  0.00000000000000E+00  1.00000000000000E+00
+ ENREGISTREMENT DE TYPE   5
+LABEL_AUTOMATIQUE_2                                                     
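The two SAUV files added above are plain-text Cast3M/GIBI archives: a sequence of "ENREGISTREMENT DE TYPE n" records, each followed by fixed-width "PILE" object dumps like the numeric blocks shown. As a purely illustrative, standalone sketch (not part of this commit; the resource path is an assumption), the record headers of such a file can be counted with nothing but the standard library:

// sauv_record_scan.cxx -- toy scanner for the SAUV text dumps added above.
// It only counts "ENREGISTREMENT DE TYPE" headers; the real parsing is done
// by MEDLoader's SAUV reader, not by this sketch.
#include <fstream>
#include <iostream>
#include <string>

int main(int argc, char** argv)
{
  const char* path = (argc > 1) ? argv[1] : "resources/portico_3subs.sauv"; // assumed location
  std::ifstream in(path);
  if (!in)
    {
      std::cerr << "cannot open " << path << std::endl;
      return 1;
    }
  std::string line;
  int records = 0;
  while (std::getline(in, line))
    if (line.find("ENREGISTREMENT DE TYPE") != std::string::npos)
      ++records;
  std::cout << records << " record header(s) in " << path << std::endl;
  return 0;
}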
diff --git a/src/INTERP_KERNELTest/CMakeLists.txt b/src/INTERP_KERNELTest/CMakeLists.txt
index af3ddab5f0f1e0615b9a6125367fcc2930da97cb..e783ed6d4fcc9c3d8b8e5e0967d166752ce319cb 100644 (file)
@@ -86,8 +86,6 @@ TARGET_LINK_LIBRARIES(InterpKernelTest ${InterpKernelTest_LIBS})
 ADD_EXECUTABLE(TestINTERP_KERNEL ${TestINTERP_KERNEL_SOURCES})
 TARGET_LINK_LIBRARIES(TestINTERP_KERNEL InterpKernelTest ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestINTERP_KERNEL TestINTERP_KERNEL)
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
-SET_TESTS_PROPERTIES(TestINTERP_KERNEL PROPERTIES ENVIRONMENT "${tests_env}")
 
 INSTALL(TARGETS TestINTERP_KERNEL DESTINATION ${MEDTOOL_INSTALL_BINS})
 INSTALL(TARGETS InterpKernelTest DESTINATION ${MEDTOOL_INSTALL_LIBS})
diff --git a/src/INTERP_KERNELTest/Interpolation3DTest.cxx b/src/INTERP_KERNELTest/Interpolation3DTest.cxx
index db32ed895be7bfef57c67df92968125c334ab8fc..dbeb6ec67bfca737ad8fd066156d7d7e48de3a6f 100644 (file)
@@ -31,7 +31,7 @@
 
 #include "VectorUtils.hxx"
 
-// levels : 
+// levels :
 // 1 - titles and volume results
 // 2 - symmetry / diagonal results and intersection matrix output
 // 3 - empty
@@ -79,7 +79,7 @@ void Interpolation3DTest::getVolumes(ParaMEDMEM::MEDCouplingUMesh& mesh, double
 
 double Interpolation3DTest::sumVolume(const IntersectionMatrix& m) const
 {
-  
+
   std::vector<double> volumes;
   for(IntersectionMatrix::const_iterator iter = m.begin() ; iter != m.end() ; ++iter)
     {
@@ -89,7 +89,7 @@ double Interpolation3DTest::sumVolume(const IntersectionMatrix& m) const
           //    vol += std::abs(iter2->second);
         }
     }
-  
+
   // sum in ascending order to avoid rounding errors
 
   sort(volumes.begin(), volumes.end());
@@ -163,7 +163,7 @@ bool Interpolation3DTest::areCompatitable(const IntersectionMatrix& m1, const In
     }
   return compatitable;
 }
-      
+
 bool Interpolation3DTest::testSymmetric(const IntersectionMatrix& m1, const IntersectionMatrix& m2) const
 {
 
@@ -242,9 +242,9 @@ void Interpolation3DTest::dumpIntersectionMatrix(const IntersectionMatrix& m) co
     {
       for(std::map<int, double>::const_iterator iter2 = iter->begin() ; iter2 != iter->end() ; ++iter2)
         {
-    
+
           std::cout << "V(" << i << ", " << iter2->first << ") = " << iter2->second << std::endl;
-    
+
         }
       ++i;
     }
@@ -259,18 +259,25 @@ void Interpolation3DTest::setUp()
 void Interpolation3DTest::tearDown()
 {
   delete interpolator;
-} 
+}
 
 void Interpolation3DTest::calcIntersectionMatrix(const char* mesh1path, const char* mesh1, const char* mesh2path, const char* mesh2, IntersectionMatrix& m) const
 {
-  const string dataBaseDir = getenv("MED_ROOT_DIR");
-  const string dataDir = dataBaseDir + "/share/salome/resources/med/";
+  string dataDir = "";
+  if ( getenv("MEDTOOL_ROOT_DIR") ) {
+    dataDir = getenv("MEDTOOL_ROOT_DIR");
+    dataDir += "/share/resources/med/";
+  }
+  else {
+    dataDir = get_current_dir_name();
+    dataDir += "/../../resources/";
+  }
 
   LOG(1, std::endl << "=== -> intersecting src = " << mesh1 << ", target = " << mesh2 );
 
   LOG(5, "Loading " << mesh1 << " from " << mesh1path);
   MESH sMesh(MED_DRIVER, dataDir+mesh1path, mesh1);
-  
+
   LOG(5, "Loading " << mesh2 << " from " << mesh2path);
   MESH tMesh(MED_DRIVER, dataDir+mesh2path, mesh2);
 
@@ -284,7 +291,7 @@ void Interpolation3DTest::calcIntersectionMatrix(const char* mesh1path, const ch
     }
 
   LOG(1, "Intersection calculation done. " << std::endl );
-  
+
 }
 
 void Interpolation3DTest::intersectMeshes(const char* mesh1path, const char* mesh1, const char* mesh2path, const char* mesh2, const double correctVol, const double prec, bool doubleTest) const
@@ -300,7 +307,7 @@ void Interpolation3DTest::intersectMeshes(const char* mesh1path, const char* mes
   IntersectionMatrix matrix1;
   calcIntersectionMatrix(mesh1path, mesh1, mesh2path, mesh2, matrix1);
 
-#if LOG_LEVEL >= 2 
+#if LOG_LEVEL >= 2
   dumpIntersectionMatrix(matrix1);
 #endif
 
@@ -320,14 +327,14 @@ void Interpolation3DTest::intersectMeshes(const char* mesh1path, const char* mes
     }
   else
     {
-      
+
       IntersectionMatrix matrix2;
-      calcIntersectionMatrix(mesh2path, mesh2, mesh1path, mesh1, matrix2);    
+      calcIntersectionMatrix(mesh2path, mesh2, mesh1path, mesh1, matrix2);
 
 #if LOG_LEVEL >= 2
       dumpIntersectionMatrix(matrix2);
 #endif
-      
+
       const double vol2 = sumVolume(matrix2);
 
       LOG(1, "vol1 =  " << vol1 << ", vol2 = " << vol2 << ", correctVol = " << correctVol );
diff --git a/src/INTERP_KERNELTest/MeshTestToolkit.txx b/src/INTERP_KERNELTest/MeshTestToolkit.txx
index 9096963ce1a9f6377d988c1d8d86c2f70b255fef..de2934321592e68fb9d10041e01d8638e27a482b 100644 (file)
@@ -38,7 +38,7 @@
 #include <algorithm>
 
 
-// levels : 
+// levels :
 // 1 - titles and volume results
 // 2 - symmetry / diagonal results and intersection matrix output
 // 3 - empty
@@ -138,7 +138,7 @@ namespace INTERP_TEST
   }
 
   /**
-   * Verifies if for a given intersection matrix the sum of each row is equal to the volumes 
+   * Verifies if for a given intersection matrix the sum of each row is equal to the volumes
    * of the corresponding source elements and the sum of each column is equal to the volumes
    * of the corresponding target elements. This will be true as long as the meshes correspond
    * to the same geometry. The equalities are in the "epsilon-sense", making sure the relative
@@ -252,7 +252,7 @@ namespace INTERP_TEST
             //if(m2[j - 1].count(i+1) > 0)
             //  {
             std::map<int, double> theMap =  m2.at(j);
-            const double v2 = fabs(theMap[i]); 
+            const double v2 = fabs(theMap[i]);
             if(v1 != v2)
               {
                 LOG(2, "V1( " << i << ", " << j << ") = " << v1 << " which is different from V2( " << j << ", " << i << ") = " << v2 << " | diff = " << v1 - v2 );
@@ -267,7 +267,7 @@ namespace INTERP_TEST
       }
     if(!isSymmetric)
       {
-        LOG(1, "*** matrices are not symmetric"); 
+        LOG(1, "*** matrices are not symmetric");
       }
     return isSymmetric;
   }
@@ -335,9 +335,9 @@ namespace INTERP_TEST
    * Calculates the intersection matrix for two meshes.
    * If the source and target meshes are the same, a CppUnit assertion raised if testVolumes() returns false.
    *
-   * @param  mesh1path   the path to the file containing the source mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+   * @param  mesh1path   the path to the file containing the source mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
    * @param  mesh1       the name of the source mesh
-   * @param  mesh2path   the path to the file containing the target mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+   * @param  mesh2path   the path to the file containing the target mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
    * @param  mesh2       the name of the target mesh
    * @param  m           intersection matrix in which to store the result of the intersection
    */
@@ -397,9 +397,9 @@ namespace INTERP_TEST
    * it will be confirmed that the intersection matrix is diagonal, otherwise the intersection matrices will be
    * calculated once which each mesh as source mesh, and it will be verified that the they are each others' transpose.
    *
-   * @param  mesh1path   the path to the file containing the source mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+   * @param  mesh1path   the path to the file containing the source mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
    * @param  mesh1       the name of the source mesh
-   * @param  mesh2path   the path to the file containing the target mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+   * @param  mesh2path   the path to the file containing the target mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
    * @param  mesh2       the name of the target mesh
    * @param  correctVol  the total volume of the intersection of the two meshes
    * @param  prec        maximum relative error to be tolerated in volume comparisions
@@ -420,7 +420,7 @@ namespace INTERP_TEST
     IntersectionMatrix matrix1;
     calcIntersectionMatrix(mesh1path, mesh1, mesh2path, mesh2, matrix1);
 
-#if LOG_LEVEL >= 2 
+#if LOG_LEVEL >= 2
     dumpIntersectionMatrix(matrix1);
 #endif
 
@@ -441,7 +441,7 @@ namespace INTERP_TEST
     else
       {
         IntersectionMatrix matrix2;
-        calcIntersectionMatrix(mesh2path, mesh2, mesh1path, mesh1, matrix2);    
+        calcIntersectionMatrix(mesh2path, mesh2, mesh1path, mesh1, matrix2);
 
 #if LOG_LEVEL >= 2
         dumpIntersectionMatrix(matrix2);
@@ -460,7 +460,7 @@ namespace INTERP_TEST
 
   /**
    * Utility method used to facilitate the call to intersect meshes.
-   * It calls intersectMeshes, using "mesh1.med" as file name for the mesh with name "mesh1" and 
+   * It calls intersectMeshes, using "mesh1.med" as file name for the mesh with name "mesh1" and
    * "mesh2.med" as file name for the mesh with name "mesh2". The rest of the arguments are passed
    * along as they are.
    *
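The contract described in the comments above (each row of the intersection matrix sums to the volume of the corresponding source element, each column to that of the corresponding target element, up to a relative epsilon) can be illustrated with a minimal row-sum check. This is a sketch only, assuming the IntersectionMatrix layout visible in dumpIntersectionMatrix and testSymmetric above (one std::map<int,double> per source element):

// Hypothetical helper (not part of the commit): verifies that every row of the
// intersection matrix reproduces the volume of its source element in the
// "epsilon-sense" described above: |sum_j m[i][j] - vol_i| <= relPrec * |vol_i|.
#include <cmath>
#include <map>
#include <vector>

typedef std::vector<std::map<int, double> > IntersectionMatrix; // assumed layout

bool rowsMatchSourceVolumes(const IntersectionMatrix& m,
                            const std::vector<double>& srcVolumes,
                            double relPrec)
{
  for (std::size_t i = 0; i < m.size(); ++i)
    {
      double rowSum = 0.0;
      for (std::map<int, double>::const_iterator it = m[i].begin(); it != m[i].end(); ++it)
        rowSum += it->second;
      const double ref = srcVolumes[i];
      if (std::fabs(rowSum - ref) > relPrec * std::fabs(ref))
        return false; // row i does not reproduce its source element volume
    }
  return true;
}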
diff --git a/src/INTERP_KERNELTest/PerfTest.cxx b/src/INTERP_KERNELTest/PerfTest.cxx
index 85d67be1ac61bdf43d7ba752128f8967a11240dc..8c5c8d6412cc3c08c55340516cc20a463762440e 100644 (file)
 
 /**
  * \file PerfTest.cxx
- * Test program which takes two meshes and calculates their intersection matrix. 
- * 
- * USAGE : PerfTest mesh1 mesh2 
+ * Test program which takes two meshes and calculates their intersection matrix.
+ *
+ * USAGE : PerfTest mesh1 mesh2
  *         where mesh1 and mesh2 are the names of two meshes located in
- *         the files mesh1.med, mesh2.med in {$MED_ROOT_DIR}/share/salome/resources/med/
+ *         the files mesh1.med, mesh2.med in {$MEDTOOL_ROOT_DIR}/share/resources/med/
  *
  */
 
@@ -47,48 +47,48 @@ namespace INTERP_TEST
    */
   class PerfTestToolkit : public MeshTestToolkit<3,3>
   {
-    
+
   public:
 
     /**
      * Calculates the intersection matrix for two meshes.
-     * Outputs the names of the meshes intersected, the number of elements in each mesh, 
+     * Outputs the names of the meshes intersected, the number of elements in each mesh,
      * the number of matrix elements and the number of non-zero matrix elements, etc.
      * These values help to determine how well the filtering algorithm is working.
      *
-     * @param  mesh1path   the path to the file containing the source mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+     * @param  mesh1path   the path to the file containing the source mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
      * @param  mesh1       the name of the source mesh
-     * @param  mesh2path   the path to the file containing the target mesh, relative to {$MED_ROOT_DIR}/share/salome/resources/med/
+     * @param  mesh2path   the path to the file containing the target mesh, relative to {$MEDTOOL_ROOT_DIR}/share/resources/med/
      * @param  mesh2       the name of the target mesh
      * @param  m           intersection matrix in which to store the result of the intersection
      */
-    void calcIntersectionMatrix(const char* mesh1path, const char* mesh1, const char* mesh2path, const char* mesh2, IntersectionMatrix& m) 
+    void calcIntersectionMatrix(const char* mesh1path, const char* mesh1, const char* mesh2path, const char* mesh2, IntersectionMatrix& m)
     {
       LOG(1, std::endl << "=== -> intersecting src = " << mesh1 << ", target = " << mesh2 );
-      
+
       LOG(5, "Loading " << mesh1 << " from " << mesh1path);
       MEDCouplingAutoRefCountObjectPtr<MEDFileUMesh> sMeshML=MEDFileUMesh::New(INTERP_TEST::getResourceFile(mesh1path).c_str(),mesh1);
       MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> sMesh=sMeshML->getMeshAtLevel(0);
-    
-    
+
+
       LOG(5, "Loading " << mesh2 << " from " << mesh2path);
       MEDCouplingAutoRefCountObjectPtr<MEDFileUMesh> tMeshML=MEDFileUMesh::New(INTERP_TEST::getResourceFile(mesh2path).c_str(),mesh2);
     MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> tMesh=tMeshML->getMeshAtLevel(0);
-      
+
       MEDCouplingNormalizedUnstructuredMesh<3,3> sMesh_wrapper(sMesh);
       MEDCouplingNormalizedUnstructuredMesh<3,3> tMesh_wrapper(tMesh);
-      
+
       Interpolation3D interpolator;
       interpolator.interpolateMeshes(sMesh_wrapper, tMesh_wrapper,m,"P0P0");
-    
+
       std::pair<int, int> eff = countNumberOfMatrixEntries(m);
-      LOG(1, eff.first << " of " << numTargetElems * numSrcElems << " intersections calculated : ratio = " 
+      LOG(1, eff.first << " of " << numTargetElems * numSrcElems << " intersections calculated : ratio = "
           << double(eff.first) / double(numTargetElems * numSrcElems));
-      LOG(1, eff.second << " non-zero elements of " << eff.first << " total : filter efficiency = " 
+      LOG(1, eff.second << " non-zero elements of " << eff.first << " total : filter efficiency = "
           << double(eff.second) / double(eff.first));
-    
+
       LOG(1, "Intersection calculation done. " << std::endl );
-    
+
     }
 
     /**
@@ -100,7 +100,7 @@ namespace INTERP_TEST
      */
     std::pair<int,int> countNumberOfMatrixEntries(const IntersectionMatrix& m)
     {
-      
+
       int numElems = 0;
       int numNonZero = 0;
       for(IntersectionMatrix::const_iterator iter = m.begin() ; iter != m.end() ; ++iter)
@@ -116,12 +116,12 @@ namespace INTERP_TEST
         }
       return std::make_pair(numElems, numNonZero);
     }
-    
+
   };
 }
 
 /**
- * Main method of the program. 
+ * Main method of the program.
  * Intersects the meshes and outputs some information about the calculation as well as the
  * intersection matrix on std::cout.
  *
@@ -133,7 +133,7 @@ int main(int argc, char** argv)
   using INTERP_TEST::PerfTestToolkit;
 
   assert(argc == 3);
-  
+
   // load meshes
   const std::string mesh1 = argv[1];
   const std::string mesh2 = argv[2];
@@ -148,7 +148,7 @@ int main(int argc, char** argv)
   testTools.calcIntersectionMatrix(mesh1path.c_str(), mesh1.c_str(), mesh2path.c_str(), mesh2.c_str(), m);
 
   testTools.dumpIntersectionMatrix(m);
-    
+
   return 0;
 
 }
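To make the two figures logged above concrete, with purely illustrative numbers: for a 1000 x 1000 element pair (1 000 000 candidate couples), eff.first = 12 000 computed intersections and eff.second = 9 000 non-zero volumes would give a ratio of 12 000 / 1 000 000 = 0.012 and a filter efficiency of 9 000 / 12 000 = 0.75; the lower the ratio and the higher the efficiency, the better the element filtering is working.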
diff --git a/src/INTERP_KERNELTest/TestInterpKernelUtils.cxx b/src/INTERP_KERNELTest/TestInterpKernelUtils.cxx
index d82a4c273dd761f36cc7789c72df68519562e9ab..6aa73854a29f61700afca6dedac3f2d4e706f8dd 100644 (file)
 #include "TestInterpKernelUtils.hxx"
 
 #include <cstdlib>
+#include <unistd.h>
 
 namespace INTERP_TEST
 {
   std::string getResourceFile( const std::string& filename )
   {
     std::string resourceFile = "";
-    
-    if ( getenv("top_srcdir") ) {
-      // we are in 'make test' step
-      resourceFile = getenv("top_srcdir");
-      resourceFile += "/resources/";
+
+    if ( getenv("MEDTOOL_ROOT_DIR") ) {
+      // use MEDTOOL_ROOT_DIR env.var
+      resourceFile = getenv("MEDTOOL_ROOT_DIR");
+      resourceFile += "/share/resources/med/";
     }
-    else if ( getenv("MED_ROOT_DIR") ) {
-      // use MED_ROOT_DIR env.var
-      resourceFile = getenv("MED_ROOT_DIR");
-      resourceFile += "/share/salome/resources/med/";
+    else {
+      resourceFile = get_current_dir_name();
+      resourceFile += "/../../resources/";
     }
+
     resourceFile += filename;
     return resourceFile;
   }
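The updated getResourceFile above prefers an installed MEDTOOL_ROOT_DIR layout and otherwise falls back to a path relative to the working directory. The same lookup pattern, reduced to a standalone sketch (helper name and fallback path are illustrative; getcwd is used here instead of the GNU-specific get_current_dir_name):

// Resource lookup sketch (illustrative only, not the committed code):
// prefer the installed layout pointed to by an environment variable,
// otherwise fall back to a source-tree relative directory.
#include <cstdlib>
#include <string>
#include <unistd.h>   // getcwd (POSIX)

std::string findResource(const std::string& filename)
{
  std::string dir;
  if (const char* root = std::getenv("MEDTOOL_ROOT_DIR"))
    {
      dir = std::string(root) + "/share/resources/med/";   // installed layout
    }
  else
    {
      char cwd[4096];
      if (getcwd(cwd, sizeof(cwd)))
        dir = std::string(cwd) + "/../../resources/";      // build-tree fallback
    }
  return dir + filename;
}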
diff --git a/src/MEDCoupling/Test/CMakeLists.txt b/src/MEDCoupling/Test/CMakeLists.txt
index 2b8adefc7f76d01279440e98cc56c051004c4986..5deb19f7fac012ace79a59b677e36c1ab4b10fa2 100644 (file)
@@ -54,22 +54,17 @@ SET(TestMEDCouplingExamples_SOURCES
   MEDCouplingBasicsTest0.cxx
   )
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
-
 ADD_EXECUTABLE(TestMEDCoupling ${TestMEDCoupling_SOURCES})
 TARGET_LINK_LIBRARIES(TestMEDCoupling medcoupling ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestMEDCoupling TestMEDCoupling)
-SET_TESTS_PROPERTIES(TestMEDCoupling PROPERTIES ENVIRONMENT "${tests_env}")
 
 ADD_EXECUTABLE(TestMEDCouplingRemapper ${TestMEDCouplingRemapper_SOURCES})
 TARGET_LINK_LIBRARIES(TestMEDCouplingRemapper medcouplingremapper ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestMEDCouplingRemapper TestMEDCouplingRemapper)
-SET_TESTS_PROPERTIES(TestMEDCouplingRemapper PROPERTIES ENVIRONMENT "${tests_env}")
 
 ADD_EXECUTABLE(TestMEDCouplingExamples ${TestMEDCouplingExamples_SOURCES})
 TARGET_LINK_LIBRARIES(TestMEDCouplingExamples medcoupling ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestMEDCouplingExamples TestMEDCouplingExamples)
-SET_TESTS_PROPERTIES(TestMEDCouplingExamples PROPERTIES ENVIRONMENT "${tests_env}")
 
 INSTALL(TARGETS TestMEDCoupling TestMEDCouplingRemapper TestMEDCouplingExamples DESTINATION ${MEDTOOL_INSTALL_BINS})
 
diff --git a/src/MEDCoupling_Swig/CMakeLists.txt b/src/MEDCoupling_Swig/CMakeLists.txt
index e0186342ac849dfe979cda8e2dd38987c2491125..59cef51d21e19984ce37120a3ffdd6b4cabb4423 100644 (file)
@@ -89,20 +89,13 @@ INSTALL(FILES MEDCoupling.i MEDCouplingCommon.i MEDCouplingRefCountObject.i MEDC
 INSTALL(FILES MEDCouplingBasicsTest.py MEDCouplingRemapperTest.py MEDCouplingDataForTest.py MEDCouplingNumPyTest.py MEDCouplingPickleTest.py DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHON})
 INSTALL(FILES MEDCouplingExamplesTest.py DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHON})
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
-
 ADD_TEST(MEDCouplingBasicsTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDCouplingBasicsTest.py)
-SET_TESTS_PROPERTIES(MEDCouplingBasicsTest PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDCouplingExamplesTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDCouplingExamplesTest.py)
-SET_TESTS_PROPERTIES(MEDCouplingExamplesTest PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDCouplingRemapperTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDCouplingRemapperTest.py)
-SET_TESTS_PROPERTIES(MEDCouplingRemapperTest PROPERTIES ENVIRONMENT "${tests_env}")
 
 IF(NUMPY_FOUND)
   ADD_TEST(MEDCouplingNumPyTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDCouplingNumPyTest.py)
-  SET_TESTS_PROPERTIES(MEDCouplingNumPyTest PROPERTIES ENVIRONMENT "${tests_env}")
   ADD_TEST(MEDCouplingPickleTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDCouplingPickleTest.py)
-  SET_TESTS_PROPERTIES(MEDCouplingPickleTest PROPERTIES ENVIRONMENT "${tests_env}")
 ENDIF(NUMPY_FOUND)
 
 # Application tests
diff --git a/src/MEDLoader/Swig/CMakeLists.txt b/src/MEDLoader/Swig/CMakeLists.txt
index 54a9a602cfec85148e204d15063d1d630d904924..99a2821683d5dc82d76fd0e8fb7f68015a5dd619 100644 (file)
@@ -81,31 +81,27 @@ INSTALL(FILES sauv2med PERMISSIONS OWNER_EXECUTE OWNER_WRITE OWNER_READ GROUP_EX
 INSTALL(FILES case2med PERMISSIONS OWNER_EXECUTE OWNER_WRITE OWNER_READ GROUP_EXECUTE GROUP_READ WORLD_EXECUTE WORLD_READ DESTINATION ${MEDTOOL_INSTALL_BINS} )
 INSTALL(FILES med2case PERMISSIONS OWNER_EXECUTE OWNER_WRITE OWNER_READ GROUP_EXECUTE GROUP_READ WORLD_EXECUTE WORLD_READ DESTINATION ${MEDTOOL_INSTALL_BINS} )
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
-
 ADD_TEST(MEDLoaderTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderTest.py)
-SET_TESTS_PROPERTIES(MEDLoaderTest PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDLoaderTest2 ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderTest2.py)
-SET_TESTS_PROPERTIES(MEDLoaderTest2 PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDLoaderTest3 ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderTest3.py)
-SET_TESTS_PROPERTIES(MEDLoaderTest3 PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDLoaderTest4 ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderTest4.py)
-SET_TESTS_PROPERTIES(MEDLoaderTest4 PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(MEDLoaderExamplesTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderExamplesTest.py)
-SET_TESTS_PROPERTIES(MEDLoaderExamplesTest PROPERTIES ENVIRONMENT "${tests_env}")
 ADD_TEST(SauvLoaderTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/SauvLoaderTest.py)
-SET_TESTS_PROPERTIES(SauvLoaderTest PROPERTIES ENVIRONMENT "${tests_env}")
 
 IF(NUMPY_FOUND)
   ADD_TEST(MEDLoaderCouplingTrainingSession ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDLoaderCouplingTrainingSession.py)
-  SET_TESTS_PROPERTIES(MEDLoaderCouplingTrainingSession PROPERTIES ENVIRONMENT "${tests_env}")
 ENDIF(NUMPY_FOUND)
 
 # Application tests
 
 SET(TEST_INSTALL_DIRECTORY ${MEDTOOL_INSTALL_SCRIPT_SCRIPTS}/test/MEDCoupling/MEDLoader/Swig)
 
-INSTALL(FILES MEDLoaderDataForTest.py MEDLoaderTest.py MEDLoaderTest2.py MEDLoaderTest3.py MEDLoaderTest4.py SauvLoaderTest.py MEDLoaderExamplesTest.py MEDLoaderCouplingTrainingSession.py CaseIO.py CaseReader.py CaseWriter.py VTKReader.py MEDLoaderSplitter.py medutilities.py DESTINATION ${TEST_INSTALL_DIRECTORY})
+SET(MEDLOADER_TEST_FILES MEDLoaderDataForTest.py MEDLoaderTest.py MEDLoaderTest2.py MEDLoaderTest3.py MEDLoaderTest4.py SauvLoaderTest.py MEDLoaderExamplesTest.py MEDLoaderCouplingTrainingSession.py CaseIO.py CaseReader.py CaseWriter.py VTKReader.py MEDLoaderSplitter.py medutilities.py)
+
+FOREACH(testfile ${MEDLOADER_TEST_FILES})
+  CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/${testfile}" "${CMAKE_CURRENT_BINARY_DIR}/${testfile}" COPYONLY)
+ENDFOREACH(testfile)
+INSTALL(FILES ${MEDLOADER_TEST_FILES} DESTINATION ${TEST_INSTALL_DIRECTORY})
 
 INSTALL(FILES CTestTestfileInstall.cmake
         DESTINATION ${TEST_INSTALL_DIRECTORY}
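
Note: the FOREACH/CONFIGURE_FILE loop added above copies every listed test script unchanged from the source tree into the build tree, so the Python tests can run in place before installation. A minimal Python sketch of the same copy step (directories and file list are placeholders, not taken from the commit):

    import os
    import shutil

    def copy_test_files(files, source_dir, binary_dir):
        # Equivalent of CONFIGURE_FILE(... COPYONLY): copy each script verbatim.
        os.makedirs(binary_dir, exist_ok=True)
        for name in files:
            shutil.copyfile(os.path.join(source_dir, name),
                            os.path.join(binary_dir, name))

    # Hypothetical usage:
    # copy_test_files(["MEDLoaderTest.py", "SauvLoaderTest.py"],
    #                 "src/MEDLoader/Swig", "build/src/MEDLoader/Swig")
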
index 8001e2798476be6b5de89d7291d4dd778fe228b9..668b56260484b614556016d1399b4938da34c181 100644 (file)
@@ -480,8 +480,13 @@ print "IntegralGlobConstraint %lf == %lf"%(srcField.getArray().accumulate()[0],t
 from numpy import *
 from math import acos
 
-med_root_dir=os.getenv("MED_ROOT_DIR")
-agitateur_file=os.path.join(os.getenv("MED_ROOT_DIR"),"share","salome","resources","med","agitateur.med")
+med_root_dir=os.getenv("MEDTOOL_ROOT_DIR")
+if med_root_dir:
+  agitateur_file=os.path.join(os.getenv("MEDTOOL_ROOT_DIR"),"share","resources","med","agitateur.med")
+else:
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  agitateur_file=os.path.join(current_dir, "..", "..", "..", "resources","agitateur.med")
+  pass
 data=MEDFileData(agitateur_file)
 ts=data.getFields()[0].getTimeSteps()
 print ts
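
Note: this hunk drops the hard requirement on MED_ROOT_DIR; the training-session script now prefers MEDTOOL_ROOT_DIR and otherwise falls back to the resources directory located relative to the script itself. A standalone sketch of that lookup pattern (the helper name and the relative depth are illustrative, not part of the commit):

    import os

    def resource_path(filename, env_var="MEDTOOL_ROOT_DIR"):
        # Prefer the installed location named by the environment variable,
        # otherwise fall back to the source-tree resources directory.
        root = os.getenv(env_var)
        if root:
            return os.path.join(root, "share", "resources", "med", filename)
        here = os.path.dirname(os.path.realpath(__file__))
        return os.path.join(here, "..", "..", "..", "resources", filename)

    # resource_path("agitateur.med") then feeds MEDFileData as in the script above.
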
index 502e4e4681f40743965cd47646bc828323f75bd3..b60a4fd3d63959d9b065fdb21b6f857eb0535895 100644 (file)
@@ -25,11 +25,18 @@ from MEDLoaderDataForTest import MEDLoaderDataForTest
 
 class SauvLoaderTest(unittest.TestCase):
 
+    def __getResourcesDirectory(self):
+        med_root_dir=os.getenv("MEDTOOL_ROOT_DIR")
+        if med_root_dir:
+            return os.path.join( os.getenv("MEDTOOL_ROOT_DIR"), "share","resources","med")
+        else:
+            current_dir = os.path.dirname(os.path.realpath(__file__))
+            return os.path.join(current_dir, "..", "..", "..", "resources")
+        pass
+
     def testSauv2Med(self):
         # get a file containing all types of readable piles
-        self.assertTrue( os.getenv("MED_ROOT_DIR") )
-        sauvFile = os.path.join( os.getenv("MED_ROOT_DIR"), "share","salome",
-                                 "resources","med","allPillesTest.sauv")
+        sauvFile = os.path.join( self.__getResourcesDirectory(),"allPillesTest.sauv")
         self.assertTrue( os.access( sauvFile, os.F_OK))
 
         # read SAUV and write MED
@@ -38,7 +45,7 @@ class SauvLoaderTest(unittest.TestCase):
         d2=sr.loadInMEDFileDS();
         d2.write(medFile,0);
 
-        # check 
+        # check
         self.assertEqual(1,d2.getNumberOfMeshes())
         self.assertEqual(8+97,d2.getNumberOfFields())
         mm = d2.getMeshes()
@@ -50,9 +57,7 @@ class SauvLoaderTest(unittest.TestCase):
 
     def testMed2Sauv(self):
         # read pointe.med
-        self.assertTrue( os.getenv("MED_ROOT_DIR") )
-        medFile = os.path.join( os.getenv("MED_ROOT_DIR"), "share","salome",
-                                "resources","med","pointe.med")
+        medFile = os.path.join(self.__getResourcesDirectory(),"pointe.med")
         self.assertTrue( os.access( medFile, os.F_OK))
         pointeMed = MEDFileData.New( medFile )
 
@@ -241,9 +246,7 @@ class SauvLoaderTest(unittest.TestCase):
     @unittest.skipUnless(MEDLoader.HasXDR(),"requires XDR")
     def testMissingGroups(self):
         """test for issue 0021749: [CEA 601] Some missing groups in mesh after reading a SAUV file with SauvReader."""
-        self.assertTrue( os.getenv("MED_ROOT_DIR") )
-        sauvFile = os.path.join( os.getenv("MED_ROOT_DIR"), "share","salome",
-                                 "resources","med","BDC-714.sauv")
+        sauvFile = os.path.join(self.__getResourcesDirectory(),"BDC-714.sauv")
         self.assertTrue( os.access( sauvFile, os.F_OK))
         name_of_group_on_cells='Slice10:ABSORBER'
         name_of_group_on_cells2='Slice10:00LR'
@@ -336,13 +339,13 @@ class SauvLoaderTest(unittest.TestCase):
         sw.setCpyGrpIfOnASingleFamilyStatus(True)
         self.assertTrue(sw.getCpyGrpIfOnASingleFamilyStatus())
         sw.write(sauvFile)
-        
+
         f = open(sauvFile)
         # String pattern for the header of the sub meshes record ("PILE" number, number of named objects, number of objects)
         pattern_pile= re.compile(r'\sPILE\sNUMERO\s+(?P<number>[0-9]+)NBRE\sOBJETS\sNOMMES\s+(?P<nbnamed>[0-9]+)NBRE\sOBJETS\s+(?P<nbobjects>[0-9]+)')
         # String pattern for a sub mesh header (cell type, number of components and three numbers)
         pattern_header=re.compile(r'\s+(?P<type>[0-9]+)\s+(?P<nbsubs>[0-9]+)\s+[0-9]+\s+[0-9]+\s+[0-9]+')
-        
+
         nbobjects=0
         line = f.readline()
         while(line):
@@ -356,13 +359,13 @@ class SauvLoaderTest(unittest.TestCase):
                 pass
             line=f.readline()
             pass
-        
+
         # Skipping the objects names
         f.readline()
         # Skipping the objects ids
         f.readline()
 
-        # Looking for each sub-mesh header 
+        # Looking for each sub-mesh header
         line = f.readline()
         cur_object=0
         while(line and cur_object < nbobjects):
index 96d121401a33b6b68faebc17995a0310135947ea..e61fc9378a3c0bbc7c1e6bad65f6f39ed0bc2526 100644 (file)
@@ -39,18 +39,14 @@ SET(TestSauvLoader_SOURCES
   SauvLoaderTest.cxx
   )
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
-
 ADD_EXECUTABLE(TestMEDLoader ${TestMEDLoader_SOURCES})
 TARGET_LINK_LIBRARIES(TestMEDLoader medloader ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestMEDLoader TestMEDLoader)
-SET_TESTS_PROPERTIES(TestMEDLoader PROPERTIES ENVIRONMENT "${tests_env}")
 
 ADD_EXECUTABLE(TestSauvLoader ${TestSauvLoader_SOURCES})
 
 TARGET_LINK_LIBRARIES(TestSauvLoader medloader ${CPPUNIT_LIBRARIES} ${PLATFORM_LIBS})
 ADD_TEST(TestSauvLoader TestSauvLoader)
-SET_TESTS_PROPERTIES(TestSauvLoader PROPERTIES ENVIRONMENT "${tests_env}")
 
 INSTALL(TARGETS TestMEDLoader TestSauvLoader DESTINATION ${MEDTOOL_INSTALL_BINS})
 
index b6195f6df1abfefeea187b429a92e2c50e308779..df15700080148f7af9dc20d63b80682902622136 100644 (file)
@@ -44,7 +44,7 @@ void SauvLoaderTest::testSauv2Med()
   MEDCouplingAutoRefCountObjectPtr<MEDFileData> d2=sr->loadInMEDFileDS();
   // write MED
   d2->write("allPillesTest.med",0);
-  // check 
+  // check
   CPPUNIT_ASSERT_EQUAL(1,d2->getNumberOfMeshes());
   CPPUNIT_ASSERT_EQUAL(8+97,d2->getNumberOfFields());
   MEDFileMesh * m = d2->getMeshes()->getMeshAtPos(0);
@@ -332,16 +332,16 @@ std::string SauvLoaderTest::getResourceFile( const std::string& filename )
 {
   std::string resourceFile = "";
 
-  if ( getenv("top_srcdir") ) {
-    // we are in 'make test' step
-    resourceFile = getenv("top_srcdir");
-    resourceFile += "/resources/";
+  if ( getenv("MEDTOOL_ROOT_DIR") ) {
+    // use MEDTOOL_ROOT_DIR env.var
+    resourceFile = getenv("MEDTOOL_ROOT_DIR");
+    resourceFile += "/share/resources/med/";
   }
-  else if ( getenv("MED_ROOT_DIR") ) {
-    // use MED_ROOT_DIR env.var
-    resourceFile = getenv("MED_ROOT_DIR");
-    resourceFile += "/share/salome/resources/med/";
+  else {
+    resourceFile = get_current_dir_name();
+    resourceFile += "/../../../resources/";
   }
+
   resourceFile += filename;
 #ifdef WIN32
   std::string fixedpath = resourceFile;
index d1642c5cf12daf84adb5e2429fb250d5802f6410..a57506609cb6d568c42139a6e91554a9f4b2273a 100644 (file)
@@ -131,7 +131,7 @@ IF(${SALOME_USE_MPI})
   SET(medpartitionercpp_LDFLAGS ${medpartitionercpp_LDFLAGS} ${MPI_LIBRARIES})
   SET_TARGET_PROPERTIES(medpartitioner_para PROPERTIES COMPILE_FLAGS "${medpartitionercpp_DEFINITIONS}")
   TARGET_LINK_LIBRARIES(medpartitioner_para medpartitionercpp ${medpartitionercpp_LDFLAGS})
-  INSTALL(TARGETS medpartitioner_para DESTINATION ${SALOME_INSTALL_BINS})
+  INSTALL(TARGETS medpartitioner_para DESTINATION ${MEDTOOL_INSTALL_BINS})
 ENDIF(${SALOME_USE_MPI})
 
 ADD_DEFINITIONS(${medpartitionercpp_DEFINITIONS})
index f67c84932e761e6bec16a099a198b49746ea8163..7967cab1c64b01b086894b6c6de5fed0a56e5c6b 100644 (file)
   typedef int idxtype;
 #endif // defined(MED_ENABLE_METIS) & !defined(MED_ENABLE_PARMETIS)
 
-void MEDPARTITIONER_METIS_PartGraphRecursive(int *nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt, 
-                                             idxtype *adjwgt, int *wgtflag, int *numflag, int *nparts, 
+void MEDPARTITIONER_METIS_PartGraphRecursive(int *nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
+                                             idxtype *adjwgt, int *wgtflag, int *numflag, int *nparts,
                                              int *options, int *edgecut, idxtype *part)
 {
 #if defined(MED_ENABLE_METIS)
   #ifndef MED_ENABLE_METIS_V5
-  METIS_PartGraphRecursive(nvtxs, xadj, adjncy, vwgt, 
-    adjwgt, wgtflag, numflag, nparts, 
+  METIS_PartGraphRecursive(nvtxs, xadj, adjncy, vwgt,
+    adjwgt, wgtflag, numflag, nparts,
                            options, edgecut, part);
   #else
   int ncon=1;
   options[METIS_OPTION_NCUTS]=1;
   options[METIS_OPTION_NITER]=1;
   options[METIS_OPTION_UFACTOR]=1;
-  METIS_PartGraphRecursive(nvtxs, &ncon, xadj, adjncy, vwgt, 0 /* vsize*/, 
+  METIS_PartGraphRecursive(nvtxs, &ncon, xadj, adjncy, vwgt, 0 /* vsize*/,
                            adjwgt, nparts,/* tpwgts*/ 0,/* ubvec */ 0,
                            options, edgecut, part);
   #endif
 #endif
 }
 
-void MEDPARTITIONER_METIS_PartGraphKway(int *nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt, 
-                                        idxtype *adjwgt, int *wgtflag, int *numflag, int *nparts, 
+void MEDPARTITIONER_METIS_PartGraphKway(int *nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
+                                        idxtype *adjwgt, int *wgtflag, int *numflag, int *nparts,
                                         int *options, int *edgecut, idxtype *part)
 {
 #if defined(MED_ENABLE_METIS)
   #ifndef MED_ENABLE_METIS_V5
-  METIS_PartGraphKway(nvtxs, xadj, adjncy, vwgt, 
-    adjwgt, wgtflag, numflag, nparts, 
+  METIS_PartGraphKway(nvtxs, xadj, adjncy, vwgt,
+    adjwgt, wgtflag, numflag, nparts,
     options, edgecut, part);
   #else
   int ncon=1;
   options[METIS_OPTION_NCUTS]=1;
   options[METIS_OPTION_NITER]=1;
   options[METIS_OPTION_UFACTOR]=1;
-  METIS_PartGraphKway(nvtxs, &ncon, xadj, adjncy, vwgt, 0 /* vsize*/, 
+  METIS_PartGraphKway(nvtxs, &ncon, xadj, adjncy, vwgt, 0 /* vsize*/,
                       adjwgt, nparts, 0 , 0 /* ubvec */,
                       options, edgecut, part);
   #endif
index 25e10b994f47bf10f66d3b841faade900517389b..c1dd6f3bfa87701a0d90965f2b77b83d50ce43f6 100644 (file)
@@ -51,9 +51,7 @@ INSTALL(TARGETS TestMEDPARTITIONER DESTINATION ${MEDTOOL_INSTALL_BINS})
 
 INSTALL(FILES ${MEDPARTITIONERTest_HEADERS_HXX} DESTINATION ${MEDTOOL_INSTALL_HEADERS})
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
 ADD_TEST(TestMEDPARTITIONER TestMEDPARTITIONER)
-SET_TESTS_PROPERTIES(TestMEDPARTITIONER PROPERTIES ENVIRONMENT "${tests_env}")
 
 # Application tests
 
index 5144d40d31709d64715ebd10a6947815027c625e..9b6743a1c3b67c9c726b4118132e272be301fc5e 100644 (file)
 #include <cppunit/TestAssert.h>
 
 #include <sstream>
+#include <fstream>
 #include <cmath>
 #include <list>
 #include <stdexcept>
 #include <cstdlib>
 #include <vector>
+#include <unistd.h>
 
 #ifdef HAVE_MPI
 #include <mpi.h>
@@ -83,19 +85,17 @@ void MEDPARTITIONERTest::setbigSize()
 std::string MEDPARTITIONERTest::getPartitionerExe() const
 {
   std::string execName;
-  if ( getenv("top_builddir")) // make distcheck
+  if ( getenv("MEDTOOL_ROOT_DIR") )
     {
-      execName = getenv("top_builddir");
-      execName += "/src/MEDPartitioner/medpartitioner";
-    }
-  else if ( getenv("MED_ROOT_DIR") )
-    {
-      execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-      execName+="/bin/salome/medpartitioner";
+      execName=getenv("MEDTOOL_ROOT_DIR");  //.../INSTALL/MED
+      execName+="/bin/medpartitioner";
     }
   else
     {
-      CPPUNIT_FAIL("Can't find medpartitioner, neither MED_ROOT_DIR nor top_builddir is set");
+      execName = get_current_dir_name();
+      execName += "/../../MEDPartitioner/medpartitioner";
+      if (! std::ifstream(execName.c_str()))
+        CPPUNIT_FAIL("Can't find medpartitioner, please set MEDTOOL_ROOT_DIR");
     }
   return execName;
 }
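
Note: getPartitionerExe now resolves the medpartitioner executable from MEDTOOL_ROOT_DIR and, failing that, from a path relative to the build tree, aborting the test only when neither yields an existing file. The same resolution logic, sketched in Python for readability (paths are illustrative, not part of the commit):

    import os

    def find_partitioner(env_var="MEDTOOL_ROOT_DIR"):
        root = os.getenv(env_var)
        if root:
            return os.path.join(root, "bin", "medpartitioner")
        # Build-tree fallback, mirroring the relative path used in the test.
        candidate = os.path.join(os.getcwd(), "..", "..", "MEDPartitioner", "medpartitioner")
        if not os.path.isfile(candidate):
            raise RuntimeError("Can't find medpartitioner, please set " + env_var)
        return candidate
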
@@ -179,7 +179,7 @@ ParaMEDMEM::MEDCouplingUMesh * MEDPARTITIONERTest::buildCUBE3DMesh()
           ii=ii + _ni + 2 ;
           conn.push_back(ii);
           conn.push_back(ii-1);
-    
+
           ii=i + j*(_ni+1) + (k+1)*(_ni+1)*(_nj+1);
           conn.push_back(ii);
           conn.push_back(ii+1);
@@ -197,7 +197,7 @@ ParaMEDMEM::MEDCouplingUMesh * MEDPARTITIONERTest::buildCUBE3DMesh()
       cout << endl;
       cout << "\nnb conn " << (_ni)*(_nj)*(_nk)*8 << " " << conn.size() << endl;
       for (int i=0; i<(int)conn.size(); i=i+8)
-        { 
+        {
           for (int j=0; j<8; j++)
             cout << conn[i+j] << " ";
           cout << endl;
@@ -205,7 +205,7 @@ ParaMEDMEM::MEDCouplingUMesh * MEDPARTITIONERTest::buildCUBE3DMesh()
       cout << endl;
     }
   */
-  
+
   MEDCouplingUMesh *mesh=MEDCouplingUMesh::New();
   mesh->setMeshDimension(3);
   int nbc=conn.size()/8; //nb of cells
@@ -267,13 +267,13 @@ ParaMEDMEM::MEDCouplingUMesh * MEDPARTITIONERTest::buildCARRE3DMesh()
       cout<<endl;
       cout<<"\nnb conn "<<(_ni)*(_nj)*4<<" "<<conn.size()<<endl;
       for (int i=0; i<(int)conn.size(); i=i+4)
-        { 
+        {
           for (int j=0; j<4; j++) cout<<conn[i+j]<<" ";
           cout<<endl;
         }
       cout<<endl;
     }
-  
+
   MEDCouplingUMesh *mesh=MEDCouplingUMesh::New();
   mesh->setMeshDimension(2);
   int nbc=conn.size()/4; //nb of cells
@@ -335,14 +335,14 @@ ParaMEDMEM::MEDCouplingUMesh * MEDPARTITIONERTest::buildFACE3DMesh()
       cout<<endl;
       cout<<"\nnb conn "<<(_ni)*(_nj)*4<<" "<<conn.size()<<endl;
       for (int i=0; i<(int)conn.size(); i=i+4)
-        { 
+        {
           for (int j=0; j<4; j++)
             cout << conn[i+j] << " ";
           cout << endl;
         }
       cout << endl;
     }
-  
+
   MEDCouplingUMesh *mesh=MEDCouplingUMesh::New();
   mesh->setMeshDimension(2);
   int nbc=conn.size()/4; //nb of cells
@@ -415,7 +415,7 @@ MEDCouplingFieldDouble * MEDPARTITIONERTest::buildVecFieldOnNodes()
           field.push_back(j+.2);
           field.push_back(k+.3);
         }
-  
+
   MEDCouplingUMesh *mesh=MEDLoader::ReadUMeshFromFile(_file_name.c_str(),_mesh_name.c_str(),0);
   int nbOfNodes=mesh->getNumberOfNodes();
   MEDCouplingFieldDouble *f1=MEDCouplingFieldDouble::New(ON_NODES,ONE_TIME);
@@ -452,7 +452,7 @@ void MEDPARTITIONERTest::createTestMeshWithoutField()
       }
     mesh->decrRef();
   }
-  
+
   {
     vector<const ParaMEDMEM::MEDCouplingUMesh*> meshes;
     MEDCouplingUMesh * mesh1 = buildCUBE3DMesh();
@@ -465,7 +465,7 @@ void MEDPARTITIONERTest::createTestMeshWithoutField()
     meshes.push_back(mesh1);
     meshes.push_back(mesh2);
     MEDLoader::WriteUMeshes(_file_name_with_faces.c_str(), meshes, true);
-  
+
     ParaMEDMEM::MEDFileUMesh* mfm=ParaMEDMEM::MEDFileUMesh::New(_file_name_with_faces.c_str(), mesh1->getName().c_str());
     DataArrayInt* FacesFam=DataArrayInt::New();
     FacesFam->alloc(mfm->getSizeAtLevel(-1),1);
@@ -487,7 +487,7 @@ void MEDPARTITIONERTest::createTestMeshWithoutField()
     mfm->write(_file_name_with_faces.c_str(),0);
     FacesFam->decrRef();
     CellsFam->decrRef();
-  
+
     /*ce truc marche pas!
       ParaMEDMEM::MEDFileUMesh* mfm=ParaMEDMEM::MEDFileUMesh::New(_file_name_with_faces.c_str(), mesh1->getName());
       vector<const ParaMEDMEM::MEDCouplingUMesh*> ms;
@@ -495,7 +495,7 @@ void MEDPARTITIONERTest::createTestMeshWithoutField()
       mfm->setGroupsFromScratch(-1, ms);
       mfm->write(_file_name_with_faces.c_str(),0);
     */
-  
+
     if (_verbose) cout<<endl<<_file_name_with_faces<<" created"<<endl;
     if (_ntot<1000000) //too long
       {
@@ -508,7 +508,7 @@ void MEDPARTITIONERTest::createTestMeshWithoutField()
     mesh2->decrRef();
     mfm->decrRef();
   }
-   
+
   {
     MEDCouplingUMesh * mesh = buildCARRE3DMesh();
     MEDLoader::WriteUMesh(_file_name2.c_str(),mesh,true);
@@ -553,7 +553,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
   <mapping>\n$tagMesh \
   </mapping>\n \
 </root>\n";
-  
+
   string tagSubfiles, tagSubfile="\
     <subfile id=\"$xyz\">\n \
       <name>$fileName</name>\n \
@@ -565,7 +565,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
         <name>testMesh</name>\n \
       </chunk>\n \
     </mesh>\n";
-  
+
   int xyz=1;
   string sxyz;
   DataArrayDouble* coordsInit=mesh->getCoords()->deepCpy();
@@ -573,7 +573,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
   double deltax=cooFin[0]-cooDep[0];
   double deltay=cooFin[1]-cooDep[1];
   double deltaz=cooFin[2]-cooDep[2];
-  
+
   double dz=0.;
   for (int z=0; z<nbz; z++)
     {
@@ -586,7 +586,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
               string fileName;
               sxyz=IntToStr(xyz);
               fileName="tmp_testMeshHuge_"+IntToStr(_ni)+"x"+IntToStr(_nj)+"x"+IntToStr(_nk)+"_"+sxyz+".med";
-        
+
               DataArrayDouble* coords=mesh->getCoords();
               //int nbOfComp=coords->getNumberOfComponents();  //be 3D
               int nbOfTuple=coords->getNumberOfTuples();
@@ -600,11 +600,11 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
                 }
 
               MEDLoader::WriteUMesh(fileName.c_str(),mesh,true);
-        
+
               tagSubfiles+=tagSubfile;
               tagSubfiles.replace(tagSubfiles.find("$xyz"),4,sxyz);
               tagSubfiles.replace(tagSubfiles.find("$fileName"),9,fileName);
-        
+
               tagMeshes+=tagMesh;
               tagMeshes.replace(tagMeshes.find("$xyz"),4,sxyz);
               xyz++;
@@ -615,7 +615,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
       dz+=deltaz;
     }
   coordsInit->decrRef();
-  
+
   tagXml.replace(tagXml.find("$subdomainNumber"),16,sxyz);
   tagXml.replace(tagXml.find("$tagSubfile"),11,tagSubfiles);
   tagXml.replace(tagXml.find("$tagMesh"),8,tagMeshes);
@@ -626,7 +626,7 @@ void MEDPARTITIONERTest::createHugeTestMesh(int ni, int nj, int nk, int nbx, int
   f<<tagXml;
   f.close();
   //cout<<"\n"<<tagXml<<endl;
-  if (_verbose) 
+  if (_verbose)
     cout<<endl<<nameFileXml<<" created"<<endl;
   mesh->decrRef();
 }
@@ -666,17 +666,17 @@ void MEDPARTITIONERTest::createTestMeshWithVecFieldOnCells()
     f3->setDescription("MyDescriptionNE");
     DataArrayDouble *array=DataArrayDouble::New();
     //int nb=f1->getMesh()->getNumberOfNodes();
-  
+
     /*8 pt de gauss by cell
       int nb=f3->getMesh()->getNumberOfCells()*8;
       array->alloc(nb,2);
       double *ptr=array->getPointer();
       for (int i=0; i<nb*2; i=i+2) {ptr[i]=(double)(i/8) ; ptr[i]=2.*(double)(i/8);}
     */
-  
+
     //more nbptgauss=8 by default needs set MEDCouplingFieldDiscretizationPerCell
     //theory: (may be) http://www.code-aster.org/V2/doc/v9/fr/man_r/r3/r3.06.03.pdf
-    int nbptgauss=8; //nb pt de gauss by cell 
+    int nbptgauss=8; //nb pt de gauss by cell
     int nbcell=f3->getMesh()->getNumberOfCells();
     int nb=nbcell*nbptgauss;
     int nbcomp=2;
@@ -753,7 +753,7 @@ void MEDPARTITIONERTest::verifyTestMeshWithVecFieldOnNodes()
     {
       cout<<"\n types in "<<name<<" : ";
       //for (std::set<INTERP_KERNEL::NormalizedCellType>::iterator t=types.begin(); t!=types.end(); ++t) cout<<" "<<*t;
-      for (std::set<INTERP_KERNEL::NormalizedCellType>::const_iterator t=types.begin(); t!=types.end(); ++t) 
+      for (std::set<INTERP_KERNEL::NormalizedCellType>::const_iterator t=types.begin(); t!=types.end(); ++t)
         {
           //INTERP_KERNEL::CellModel essai=INTERP_KERNEL::CellModel::GetCellModel(*t);
           cout<<" "<<(INTERP_KERNEL::CellModel::GetCellModel(*t)).getRepr();
@@ -761,7 +761,7 @@ void MEDPARTITIONERTest::verifyTestMeshWithVecFieldOnNodes()
       cout<<endl;
     }
   m->decrRef();
-  
+
   MEDFileUMesh * mf = MEDFileUMesh::New(_file_name.c_str(),_mesh_name.c_str(),-1,-1);
   vector<int> lev;
   lev=mf->getNonEmptyLevels();
@@ -839,7 +839,7 @@ void MEDPARTITIONERTest::testMeshCollectionSinglePartitionMetis()
   bool empty_groups=false;
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(fileName,parallelizer);
-  
+
   MEDPARTITIONER::ParallelTopology* aPT = (MEDPARTITIONER::ParallelTopology*) collection.getTopology();
   aPT->setGlobalNumerotationDefault(collection.getParaDomainSelector());
   //Creating the graph and partitioning it
@@ -847,13 +847,13 @@ void MEDPARTITIONERTest::testMeshCollectionSinglePartitionMetis()
   new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::METIS) );
   //Creating a new mesh collection from the partitioning
   MEDPARTITIONER::MeshCollection new_collection(collection,new_topo.get(),split_family,empty_groups);
-  
+
   //example to create files
   //MyGlobals::_General_Informations.clear();
   //MyGlobals::_General_Informations.push_back(SerializeFromString("finalMeshName=Merge"));
   //if (MyGlobals::_Verbose>100) cout << "generalInformations : \n"<<ReprVectorOfString(MyGlobals::_General_Informations);
   //new_collection.write("ttmp")
-  
+
   CPPUNIT_ASSERT(new_collection.isParallelMode());
   CPPUNIT_ASSERT_EQUAL(3, new_collection.getMeshDimension());
   CPPUNIT_ASSERT(new_collection.getName()==collection.getName());
@@ -872,10 +872,10 @@ void MEDPARTITIONERTest::testMeshCollectionComplexPartitionMetis()
   bool empty_groups=false;
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(fileName,parallelizer);
-  
+
   MEDPARTITIONER::ParallelTopology* aPT = (MEDPARTITIONER::ParallelTopology*) collection.getTopology();
   aPT->setGlobalNumerotationDefault(collection.getParaDomainSelector());
-  
+
   for (int ndomains=2 ; ndomains<=16 ; ndomains++)
     {
       //Creating the graph and partitioning it
@@ -883,7 +883,7 @@ void MEDPARTITIONERTest::testMeshCollectionComplexPartitionMetis()
       new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::METIS) );
       //Creating a new mesh collection from the partitioning
       MEDPARTITIONER::MeshCollection new_collection(collection,new_topo.get(),split_family,empty_groups);
-      
+
       CPPUNIT_ASSERT_EQUAL(ndomains,new_collection.getNbOfLocalMeshes());
       CPPUNIT_ASSERT_EQUAL(ndomains,new_collection.getNbOfGlobalMeshes());
       CPPUNIT_ASSERT_EQUAL(collection.getNbOfLocalCells(),new_collection.getNbOfLocalCells());
@@ -921,7 +921,7 @@ void MEDPARTITIONERTest::testMeshCollectionSinglePartitionScotch()
   bool empty_groups=false;
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(fileName,parallelizer);
-  
+
   MEDPARTITIONER::ParallelTopology* aPT = (MEDPARTITIONER::ParallelTopology*) collection.getTopology();
   aPT->setGlobalNumerotationDefault(collection.getParaDomainSelector());
   //Creating the graph and partitioning it
@@ -929,13 +929,13 @@ void MEDPARTITIONERTest::testMeshCollectionSinglePartitionScotch()
   new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::SCOTCH) );
   //Creating a new mesh collection from the partitioning
   MEDPARTITIONER::MeshCollection new_collection(collection,new_topo.get(),split_family,empty_groups);
-  
+
   //example to create files
   //MyGlobals::_General_Informations.clear();
   //MyGlobals::_General_Informations.push_back(SerializeFromString("finalMeshName=Merge"));
   //if (MyGlobals::_Verbose>100) cout << "generalInformations : \n"<<ReprVectorOfString(MyGlobals::_General_Informations);
   //new_collection.write("ttmp")
-  
+
   CPPUNIT_ASSERT(new_collection.isParallelMode());
   CPPUNIT_ASSERT_EQUAL(3, new_collection.getMeshDimension());
   CPPUNIT_ASSERT(new_collection.getName()==collection.getName());
@@ -954,10 +954,10 @@ void MEDPARTITIONERTest::testMeshCollectionComplexPartitionScotch()
   bool empty_groups=false;
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(fileName,parallelizer);
-  
+
   MEDPARTITIONER::ParallelTopology* aPT = (MEDPARTITIONER::ParallelTopology*) collection.getTopology();
   aPT->setGlobalNumerotationDefault(collection.getParaDomainSelector());
-  
+
   for (int ndomains=2 ; ndomains<=16 ; ndomains++)
     {
       //Creating the graph and partitioning it
@@ -965,7 +965,7 @@ void MEDPARTITIONERTest::testMeshCollectionComplexPartitionScotch()
       new_topo.reset( collection.createPartition(ndomains,MEDPARTITIONER::Graph::SCOTCH) );
       //Creating a new mesh collection from the partitioning
       MEDPARTITIONER::MeshCollection new_collection(collection,new_topo.get(),split_family,empty_groups);
-      
+
       CPPUNIT_ASSERT_EQUAL(ndomains,new_collection.getNbOfLocalMeshes());
       CPPUNIT_ASSERT_EQUAL(ndomains,new_collection.getNbOfGlobalMeshes());
       CPPUNIT_ASSERT_EQUAL(collection.getNbOfLocalCells(),new_collection.getNbOfLocalCells());
@@ -991,13 +991,13 @@ void MEDPARTITIONERTest::launchMetisOrScotchMedpartitionerOnTestMeshes(std::stri
 {
   int res;
   string cmd,execName,sourceName,targetName;
-  
+
   execName=getPartitionerExe();
-  
+
   cmd="which "+execName+" 2>/dev/null 1>/dev/null";  //no trace
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL_MESSAGE(execName + " - INVALID PATH TO medpartitioner", 0, res);
-  
+
   cmd=execName+" --ndomains=2 --split-method="+MetisOrScotch;  //on same proc
   sourceName=_file_name;
   targetName=_file_name;
@@ -1006,7 +1006,7 @@ void MEDPARTITIONERTest::launchMetisOrScotchMedpartitionerOnTestMeshes(std::stri
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   cmd=execName+" --ndomains=5 --split-method="+MetisOrScotch; //on less proc
   sourceName=_file_name;
   targetName=_file_name;
@@ -1015,7 +1015,7 @@ void MEDPARTITIONERTest::launchMetisOrScotchMedpartitionerOnTestMeshes(std::stri
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   cmd=execName+" --ndomains=1 --split-method="+MetisOrScotch;  //on 1 proc
   sourceName=targetName+".xml";
   targetName=_file_name;
@@ -1033,7 +1033,7 @@ void MEDPARTITIONERTest::launchMetisOrScotchMedpartitionerOnTestMeshes(std::stri
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-}  
+}
 
 void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std::string MetisOrScotch)
 {
@@ -1041,11 +1041,11 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   string fileName,cmd,execName,sourceName,targetName,input;
   execName=getPartitionerExe();
   fileName=_file_name_with_faces;
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
   ParaMEDMEM::MEDCouplingUMesh* faceMesh=initialMesh->getLevelM1Mesh(false);
-  
+
   cmd=execName+" --ndomains=5 --split-method="+MetisOrScotch;  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -1055,7 +1055,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(input,parallelizer);
   CPPUNIT_ASSERT_EQUAL(3, collection.getMeshDimension());
@@ -1065,14 +1065,14 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   for (std::size_t i = 0; i < cellMeshes.size(); i++)
     nbcells+=cellMeshes[i]->getNumberOfCells();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
-  
+
   std::vector<ParaMEDMEM::MEDCouplingUMesh*>faceMeshes=collection.getFaceMesh();
   CPPUNIT_ASSERT_EQUAL(5, (int) faceMeshes.size());
   int nbfaces=0;
   for (std::size_t i=0; i < faceMeshes.size(); i++)
     nbfaces+=faceMeshes[i]->getNumberOfCells();
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), nbfaces);
-  
+
   //merge split meshes and test equality
   cmd=execName+" --ndomains=1 --split-method="+MetisOrScotch;  //on same proc
   sourceName=targetName+".xml";
@@ -1082,25 +1082,25 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
   ParaMEDMEM::MEDCouplingUMesh* refusedFaceMesh=refusedMesh->getLevelM1Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), refusedFaceMesh->getNumberOfCells());
-  
+
   /*not the good job
     ParaMEDMEM::MEDCouplingMesh* mergeCell=cellMesh->mergeMyselfWith(refusedCellMesh);
     CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), mergeCell->getNumberOfCells());
-  
+
     ParaMEDMEM::MEDCouplingMesh* mergeFace=faceMesh->mergeMyselfWith(refusedFaceMesh);
     CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), mergeFace->getNumberOfCells());
-  
+
     CPPUNIT_ASSERT(faceMesh->isEqual(refusedFaceMesh,1e-12));
   */
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -1108,7 +1108,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   meshes.resize(0);
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
@@ -1118,7 +1118,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForMesh(std
   meshes.push_back(refusedFaceMesh);
   MEDCouplingUMesh* fusedFace=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), fusedFace->getNumberOfCells());
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   fusedFace->decrRef();
@@ -1141,10 +1141,10 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnC
   execName=getPartitionerExe();
   fileName=_file_name;
   fileName.replace(fileName.find(".med"),4,"_WithVecFieldOnCells.med");
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
-  
+
   cmd=execName+" --ndomains=5 --split-method="+MetisOrScotch;  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -1154,7 +1154,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnC
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   //merge split meshes and test equality
   cmd=execName+" --ndomains=1 --split-method="+MetisOrScotch;  //on same proc
   sourceName=targetName+".xml";
@@ -1164,13 +1164,13 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnC
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -1178,22 +1178,22 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnC
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   MEDCouplingFieldDouble* field1=MEDLoader::ReadFieldCell(fileName.c_str(),initialMesh->getName().c_str(),0,"VectorFieldOnCells",0,1);
   MEDCouplingFieldDouble* field2=MEDLoader::ReadFieldCell(refusedName.c_str(),refusedCellMesh->getName().c_str(),0,"VectorFieldOnCells",0,1);
-  
+
   int nbcells=corr[1]->getNumberOfTuples();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
   //use corr to test equality of field
   DataArrayDouble* f1=field1->getArray();
   DataArrayDouble* f2=field2->getArray();
-  if (_verbose>300) 
+  if (_verbose>300)
     {
       cout<<"\nf1 : "<<f1->reprZip();
       cout<<"\nf2 : "<<f2->reprZip(); //field2->advancedRepradvancedRepr();
       for (std::size_t i = 0; i < corr.size(); i++)
         cout << "\ncorr " << i << " : " << corr[i]->reprZip();
-    
+
     }
   int nbequal=0;
   int nbcomp=field1->getNumberOfComponents();
@@ -1211,7 +1211,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnC
         }
     }
   CPPUNIT_ASSERT_EQUAL(nbcells*nbcomp, nbequal);
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   field1->decrRef();
@@ -1230,10 +1230,10 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnG
   execName=getPartitionerExe();
   fileName=_file_name;
   fileName.replace(fileName.find(".med"),4,"_WithVecFieldOnGaussNe.med");
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
-  
+
   cmd=execName+" --ndomains=5 --split-method="+MetisOrScotch;  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -1243,7 +1243,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnG
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   //merge split meshes and test equality
   cmd=execName+" --ndomains=1 --split-method="+MetisOrScotch;  //on same proc
   sourceName=targetName+".xml";
@@ -1253,13 +1253,13 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnG
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -1267,22 +1267,22 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnG
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   MEDCouplingFieldDouble* field1=MEDLoader::ReadField(ON_GAUSS_NE,fileName.c_str(),initialMesh->getName().c_str(),0,"MyFieldOnGaussNE",5,6);
   MEDCouplingFieldDouble* field2=MEDLoader::ReadField(ON_GAUSS_NE,refusedName.c_str(),refusedCellMesh->getName().c_str(),0,"MyFieldOnGaussNE",5,6);
-  
+
   int nbcells=corr[1]->getNumberOfTuples();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
   //use corr to test equality of field
   DataArrayDouble* f1=field1->getArray();
   DataArrayDouble* f2=field2->getArray();
-  if (_verbose>300) 
+  if (_verbose>300)
     {
       cout << "\nf1 : " << f1->reprZip(); //123.4 for 12th cell,3rd component, 4th gausspoint
       cout << "\nf2 : " << f2->reprZip(); //field2->advancedRepradvancedRepr();
       for (std::size_t i = 0; i < corr.size(); i++)
         cout << "\ncorr " << i << " : " << corr[i]->reprZip();
-    
+
     }
   int nbequal=0;
   int nbptgauss=8;
@@ -1301,7 +1301,7 @@ void MEDPARTITIONERTest::verifyMetisOrScotchMedpartitionerOnSmallSizeForFieldOnG
         }
     }
   CPPUNIT_ASSERT_EQUAL(nbcells*nbcomp*nbptgauss, nbequal);
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   field1->decrRef();
index 5e437cb32ffc886c81ae9193bca6e6a0193c8183..f6cd5a13bc1e2997e1e9fd1f999f85ddd3f800f2 100644 (file)
@@ -56,7 +56,7 @@ class MEDPARTITIONERTEST_EXPORT MEDPARTITIONERTest : public CppUnit::TestFixture
   CPPUNIT_TEST( testMeshCollectionComplexPartitionScotch );
   CPPUNIT_TEST( testScotchSmallSize );
 #endif
-  
+
 #if defined(HAVE_MPI)
 #if defined(MED_ENABLE_PARMETIS)
   //test with mpi on system
@@ -85,13 +85,14 @@ public:
   int _nb_target_huge;
   std::string _mesh_name; //initial test mesh file med
   int _verbose;
-  
+
   //for utils
   void setSize(int ni, int nj, int nk);
   void setSmallSize();
   void setMedianSize();
   void setbigSize();
   std::string getPartitionerExe() const;
+  std::string getPartitionerParaExe() const;
   ParaMEDMEM::MEDCouplingUMesh * buildCUBE3DMesh();
   ParaMEDMEM::MEDCouplingUMesh * buildFACE3DMesh();
   ParaMEDMEM::MEDCouplingUMesh * buildCARRE3DMesh();
@@ -113,7 +114,7 @@ public:
   void launchMedpartitionerOnTestMeshes();
   void launchMedpartitionerOnHugeTestMeshes();
   void deleteTestMeshes();
-  
+
   //for CPPUNIT_TEST
   void setUp();
   void tearDown();
@@ -129,7 +130,7 @@ public:
   void testMeshCollectionComplexPartitionScotch();
   void testScotchSmallSize();
 #endif
-  
+
 #if defined(HAVE_MPI)
   void testMpirunSmallSize();
   void testMpirunMedianSize();
index 5daaa525dcbdbcb462ae8ee37c424b5c9562f471..c38d51755ae558fce2c99e5af0d9d070d53c1d73 100644 (file)
@@ -50,18 +50,35 @@ using namespace ParaMEDMEM;
 using namespace MEDPARTITIONER;
 
 #if defined(HAVE_MPI)
+std::string MEDPARTITIONERTest::getPartitionerParaExe() const
+{
+  std::string execName;
+  if ( getenv("MEDTOOL_ROOT_DIR") )
+    {
+      execName=getenv("MEDTOOL_ROOT_DIR");  //.../INSTALL/MED
+      execName+="/bin/medpartitioner_para";
+    }
+  else
+    {
+      execName = get_current_dir_name();
+      execName += "/../../MEDPartitioner/medpartitioner_para";
+      if (! std::ifstream(execName.c_str()))
+        CPPUNIT_FAIL("Can't find medpartitioner_para, please set MEDTOOL_ROOT_DIR");
+    }
+  return execName;
+}
+
 void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
 {
   int res;
   string fileName,cmd,execName,sourceName,targetName,input;
-  execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-  execName+="/bin/salome/medpartitioner_para";
+  execName=getPartitionerParaExe();
   fileName=_file_name_with_faces;
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
   ParaMEDMEM::MEDCouplingUMesh* faceMesh=initialMesh->getLevelM1Mesh(false);
-  
+
   cmd="mpirun -np 5 "+execName+" --ndomains=5 --split-method=metis";  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -71,7 +88,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   MEDPARTITIONER::ParaDomainSelector parallelizer(false);
   MEDPARTITIONER::MeshCollection collection(input,parallelizer);
   CPPUNIT_ASSERT_EQUAL(3, collection.getMeshDimension());
@@ -81,14 +98,14 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
   for (std::size_t i = 0; i < cellMeshes.size(); i++)
     nbcells+=cellMeshes[i]->getNumberOfCells();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
-  
+
   std::vector<ParaMEDMEM::MEDCouplingUMesh*>faceMeshes=collection.getFaceMesh();
   CPPUNIT_ASSERT_EQUAL(5, (int) faceMeshes.size());
   int nbfaces=0;
   for (std::size_t i=0; i < faceMeshes.size(); i++)
     nbfaces+=faceMeshes[i]->getNumberOfCells();
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), nbfaces);
-  
+
   //merge split meshes and test equality
   cmd="mpirun -np 1 "+execName+" --ndomains=1 --split-method=metis";  //on same proc
   sourceName=targetName+".xml";
@@ -98,25 +115,25 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
   ParaMEDMEM::MEDCouplingUMesh* refusedFaceMesh=refusedMesh->getLevelM1Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), refusedFaceMesh->getNumberOfCells());
-  
+
   /*not the good job
     ParaMEDMEM::MEDCouplingMesh* mergeCell=cellMesh->mergeMyselfWith(refusedCellMesh);
     CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), mergeCell->getNumberOfCells());
-  
+
     ParaMEDMEM::MEDCouplingMesh* mergeFace=faceMesh->mergeMyselfWith(refusedFaceMesh);
     CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), mergeFace->getNumberOfCells());
-  
+
     CPPUNIT_ASSERT(faceMesh->isEqual(refusedFaceMesh,1e-12));
   */
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -124,7 +141,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   meshes.resize(0);
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
@@ -134,7 +151,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForMesh()
   meshes.push_back(refusedFaceMesh);
   MEDCouplingUMesh* fusedFace=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(faceMesh->getNumberOfCells(), fusedFace->getNumberOfCells());
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   fusedFace->decrRef();
@@ -152,14 +169,13 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnCells()
 {
   int res;
   string fileName,cmd,execName,sourceName,targetName,input;
-  execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-  execName+="/bin/salome/medpartitioner_para";
+  execName=getPartitionerParaExe();
   fileName=_file_name;
   fileName.replace(fileName.find(".med"),4,"_WithVecFieldOnCells.med");
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
-  
+
   cmd="mpirun -np 5 "+execName+" --ndomains=5 --split-method=metis";  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -169,7 +185,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnCells()
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   //merge split meshes and test equality
   cmd="mpirun -np 1 "+execName+" --ndomains=1 --split-method=metis";  //on same proc
   sourceName=targetName+".xml";
@@ -179,13 +195,13 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnCells()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -193,22 +209,22 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnCells()
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   MEDCouplingFieldDouble* field1=MEDLoader::ReadFieldCell(fileName.c_str(),initialMesh->getName().c_str(),0,"VectorFieldOnCells",0,1);
   MEDCouplingFieldDouble* field2=MEDLoader::ReadFieldCell(refusedName.c_str(),refusedCellMesh->getName().c_str(),0,"VectorFieldOnCells",0,1);
-  
+
   int nbcells=corr[1]->getNumberOfTuples();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
   //use corr to test equality of field
   DataArrayDouble* f1=field1->getArray();
   DataArrayDouble* f2=field2->getArray();
-  if (_verbose>300) 
+  if (_verbose>300)
     {
       cout<<"\nf1 : "<<f1->reprZip();
       cout<<"\nf2 : "<<f2->reprZip(); //field2->advancedRepradvancedRepr();
       for (std::size_t i = 0; i < corr.size(); i++)
         cout << "\ncorr " << i << " : " << corr[i]->reprZip();
-    
+
     }
   int nbequal=0;
   int nbcomp=field1->getNumberOfComponents();
@@ -226,7 +242,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnCells()
         }
     }
   CPPUNIT_ASSERT_EQUAL(nbcells*nbcomp, nbequal);
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   field1->decrRef();
@@ -240,14 +256,13 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
 {
   int res;
   string fileName,cmd,execName,sourceName,targetName,input;
-  execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-  execName+="/bin/salome/medpartitioner_para";
+  execName=getPartitionerParaExe();
   fileName=_file_name;
   fileName.replace(fileName.find(".med"),4,"_WithVecFieldOnGaussNe.med");
-  
+
   ParaMEDMEM::MEDFileUMesh* initialMesh=ParaMEDMEM::MEDFileUMesh::New(fileName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* cellMesh=initialMesh->getLevel0Mesh(false);
-  
+
   cmd="mpirun -np 5 "+execName+" --ndomains=5 --split-method=metis";  //on same proc
   sourceName=fileName;
   targetName=fileName;
@@ -257,7 +272,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
   input=targetName+".xml";
-  
+
   //merge split meshes and test equality
   cmd="mpirun -np 1 "+execName+" --ndomains=1 --split-method=metis";  //on same proc
   sourceName=targetName+".xml";
@@ -267,13 +282,13 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   string refusedName=targetName+"1.med";
   ParaMEDMEM::MEDFileUMesh* refusedMesh=ParaMEDMEM::MEDFileUMesh::New(refusedName.c_str(),_mesh_name.c_str());
   ParaMEDMEM::MEDCouplingUMesh* refusedCellMesh=refusedMesh->getLevel0Mesh(false);
-  
+
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), refusedCellMesh->getNumberOfCells());
-  
+
   std::vector<const MEDCouplingUMesh *> meshes;
   std::vector<DataArrayInt *> corr;
   meshes.push_back(cellMesh);
@@ -281,22 +296,22 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
   meshes.push_back(refusedCellMesh);
   MEDCouplingUMesh* fusedCell=MEDCouplingUMesh::FuseUMeshesOnSameCoords(meshes,0,corr);
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), fusedCell->getNumberOfCells());
-  
+
   MEDCouplingFieldDouble* field1=MEDLoader::ReadField(ON_GAUSS_NE,fileName.c_str(),initialMesh->getName().c_str(),0,"MyFieldOnGaussNE",5,6);
   MEDCouplingFieldDouble* field2=MEDLoader::ReadField(ON_GAUSS_NE,refusedName.c_str(),refusedCellMesh->getName().c_str(),0,"MyFieldOnGaussNE",5,6);
-  
+
   int nbcells=corr[1]->getNumberOfTuples();
   CPPUNIT_ASSERT_EQUAL(cellMesh->getNumberOfCells(), nbcells);
   //use corr to test equality of field
   DataArrayDouble* f1=field1->getArray();
   DataArrayDouble* f2=field2->getArray();
-  if (_verbose>300) 
+  if (_verbose>300)
     {
       cout << "\nf1 : " << f1->reprZip(); //123.4 for 12th cell,3rd component, 4th gausspoint
       cout << "\nf2 : " << f2->reprZip(); //field2->advancedRepradvancedRepr();
       for (std::size_t i = 0; i < corr.size(); i++)
         cout << "\ncorr " << i << " : " << corr[i]->reprZip();
-    
+
     }
   int nbequal=0;
   int nbptgauss=8;
@@ -315,7 +330,7 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
         }
     }
   CPPUNIT_ASSERT_EQUAL(nbcells*nbcomp*nbptgauss, nbequal);
-  
+
   for (std::size_t i = 0; i < corr.size(); i++)
     corr[i]->decrRef();
   field1->decrRef();
@@ -327,8 +342,8 @@ void MEDPARTITIONERTest::verifyMedpartitionerOnSmallSizeForFieldOnGaussNe()
 
 void MEDPARTITIONERTest::launchMedpartitionerOnTestMeshes()
 {
-  
-  /* examples 
+
+  /* examples
      export INFI=/home/vb144235/resources/blade.med
      //no need export MESH=Fuse_1
      export INFI=tmp_testMeshxxx.med
@@ -339,17 +354,16 @@ void MEDPARTITIONERTest::launchMedpartitionerOnTestMeshes()
   */
   int res;
   string cmd,execName,sourceName,targetName;
-  
+
   res=system("which mpirun 2>/dev/null 1>/dev/null"); //no trace
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
-  execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-  execName+="/bin/salome/medpartitioner_para";
-  
+
+  execName=getPartitionerParaExe();
+
   cmd="which "+execName+" 2>/dev/null 1>/dev/null";  //no trace
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   cmd="mpirun -np 2 "+execName+" --ndomains=2 --split-method=metis";  //on same proc
   sourceName=_file_name;
   targetName=_file_name;
@@ -358,7 +372,7 @@ void MEDPARTITIONERTest::launchMedpartitionerOnTestMeshes()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   cmd="mpirun -np 3 "+execName+" --ndomains=5 --split-method=metis"; //on less proc
   sourceName=_file_name;
   targetName=_file_name;
@@ -367,7 +381,7 @@ void MEDPARTITIONERTest::launchMedpartitionerOnTestMeshes()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-  
+
   cmd="mpirun -np 1 "+execName+" --ndomains=1 --split-method=metis";  //on 1 proc
   sourceName=targetName+".xml";
   targetName=_file_name;
@@ -385,14 +399,13 @@ void MEDPARTITIONERTest::launchMedpartitionerOnTestMeshes()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-}  
+}
 
 void MEDPARTITIONERTest::launchMedpartitionerOnHugeTestMeshes()
 {
   int res=0;
   string cmd,execName,sourceName,targetName;
-  execName=getenv("MED_ROOT_DIR");  //.../INSTALL/MED
-  execName+="/bin/salome/medpartitioner_para";
+  execName=getPartitionerParaExe();
 
   string snbTarget=IntToStr(_nb_target_huge);
   cmd="mpirun -np "+snbTarget+" "+execName+" --ndomains="+snbTarget+" --split-method=metis";  //on same proc
@@ -404,7 +417,7 @@ void MEDPARTITIONERTest::launchMedpartitionerOnHugeTestMeshes()
   if (_verbose) cout<<endl<<cmd<<endl;
   res=system(cmd.c_str());
   CPPUNIT_ASSERT_EQUAL(0, res);
-}  
+}
 
 void MEDPARTITIONERTest::testMpirunSmallSize()
 {
index a8079c25094a0feeeadf3aa8445314c9862d71d7..2715371a5e75d46d3774b29d8769634102d4a9b1 100644 (file)
@@ -62,9 +62,9 @@ INSTALL(FILES "${PYFILES_TO_INSTALL}" DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHO
 INSTALL(FILES MEDPartitioner.i MEDPartitionerCommon.i DESTINATION ${MEDTOOL_INSTALL_HEADERS})
 INSTALL(FILES MEDPartitionerTest.py DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHON})
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
 ADD_TEST(MEDPartitionerTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDPartitionerTest.py)
-SET_TESTS_PROPERTIES(MEDPartitionerTest PROPERTIES ENVIRONMENT "${tests_env}")
+SET(MEDPartitionerTest_PYTHONPATH "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:${CMAKE_CURRENT_BINARY_DIR}/../MEDLoader/Swig")
+SET_TESTS_PROPERTIES(MEDPartitionerTest PROPERTIES ENVIRONMENT "${MEDPartitionerTest_PYTHONPATH}")
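
Note: the test registration above now builds its own PYTHONPATH from the build tree (the current binary directory plus the MEDLoader Swig directory) instead of relying on the generated SALOME test environment. A rough sketch of how that environment reaches the test process (paths are hypothetical, for illustration only):

    import os
    import subprocess
    import sys

    env = dict(os.environ)
    # CTest applies the ENVIRONMENT property before launching the interpreter,
    # so the freshly built modules shadow any installed ones.
    env["PYTHONPATH"] = os.pathsep.join([
        "build/src/MEDPartitioner_Swig",
        "build/src/MEDLoader/Swig",
    ])
    subprocess.call([sys.executable, "MEDPartitionerTest.py"], env=env)
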
 
 # Application tests
 
index 8ad3c3064219ba180bae8ee592c62eca439f4f35..9985d19866b12f42116496c1a39f4ce4564f8391 100644 (file)
@@ -54,7 +54,7 @@ class MEDPartitionerTest(unittest.TestCase):
         c=MEDCouplingCMesh() ; c.setCoords(arr,arr)
         m=c.buildUnstructured()
         m.setName("mesh")
-        mm=MEDFileUMesh() 
+        mm=MEDFileUMesh()
         mm.setMeshAtLevel(0,m)
         ms=MEDFileMeshes() ; ms.pushMesh(mm)
         data=MEDFileData()
@@ -66,7 +66,7 @@ class MEDPartitionerTest(unittest.TestCase):
         self.assertEqual( 3, meshes.getMeshAtPos(0).getJoints().getNumberOfJoints())
         self.assertEqual( 3, meshes.getMeshAtPos(1).getJoints().getNumberOfJoints())
         self.assertEqual( 3, meshes.getMeshAtPos(2).getJoints().getNumberOfJoints())
-        self.assertEqual( 3, meshes.getMeshAtPos(3).getJoints().getNumberOfJoints())        
+        self.assertEqual( 3, meshes.getMeshAtPos(3).getJoints().getNumberOfJoints())
         joints=meshes.getMeshAtPos(0).getJoints()
         self.assertEqual( 1, joints.getJointAtPos(0).getDomainNumber(), 1)
         self.assertEqual( 2, joints.getJointAtPos(1).getDomainNumber(), 2)
diff --git a/src/ParaMEDLoader/CMakeLists.txt b/src/ParaMEDLoader/CMakeLists.txt
new file mode 100644 (file)
index 0000000..14d3e92
--- /dev/null
@@ -0,0 +1,49 @@
+# Copyright (C) 2012-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author : Anthony Geay (CEA/DEN)
+
+ADD_DEFINITIONS(${HDF5_DEFINITIONS} ${MEDFILE_DEFINITIONS} ${MPI_DEFINITIONS})
+
+IF(HDF5_ENABLE_PARALLEL OR HDF5_IS_PARALLEL)
+  ADD_DEFINITIONS("-DHDF5_IS_PARALLEL")
+ENDIF(HDF5_ENABLE_PARALLEL OR HDF5_IS_PARALLEL)
+
+INCLUDE_DIRECTORIES(
+  ${MPI_INCLUDE_DIRS}
+  ${MEDFILE_INCLUDE_DIRS}
+  ${HDF5_INCLUDE_DIRS}
+  ${CMAKE_CURRENT_SOURCE_DIR}/../ParaMEDMEM
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDLoader
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDCoupling
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/Bases
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/Geometric2D
+  )
+
+SET(paramedloader_SOURCES
+  ParaMEDLoader.cxx
+  ParaMEDFileMesh.cxx
+  )
+
+ADD_LIBRARY(paramedloader SHARED ${paramedloader_SOURCES})
+TARGET_LINK_LIBRARIES(paramedloader paramedmem medloader ${PLATFORM_LIBS})
+INSTALL(TARGETS paramedloader EXPORT ${PROJECT_NAME}TargetGroup DESTINATION ${MEDTOOL_INSTALL_LIBS})
+
+FILE(GLOB paramedloader_HEADERS_HXX "${CMAKE_CURRENT_SOURCE_DIR}/*.hxx")
+INSTALL(FILES ${paramedloader_HEADERS_HXX} DESTINATION ${MEDTOOL_INSTALL_HEADERS})
diff --git a/src/ParaMEDLoader/ParaMEDFileMesh.cxx b/src/ParaMEDLoader/ParaMEDFileMesh.cxx
new file mode 100644 (file)
index 0000000..043f62d
--- /dev/null
@@ -0,0 +1,128 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (EDF R&D)
+
+#include "ParaMEDFileMesh.hxx"
+#include "MEDCouplingAutoRefCountObjectPtr.hxx"
+#include "MEDFileMesh.hxx"
+#include "MEDFileMeshLL.hxx"
+#include "MEDLoader.hxx"
+
+using namespace ParaMEDMEM;
+
+MEDFileMesh *ParaMEDFileMesh::New(int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs)
+{
+  MEDFileUtilities::CheckFileForRead(fileName);
+  ParaMEDMEM::MEDCouplingMeshType meshType;
+  MEDFileUtilities::AutoFid fid(MEDfileOpen(fileName.c_str(),MED_ACC_RDONLY));
+  int dummy0,dummy1;
+  std::string dummy2;
+  MEDFileMeshL2::GetMeshIdFromName(fid,mName,meshType,dummy0,dummy1,dummy2);
+  switch(meshType)
+  {
+    case UNSTRUCTURED:
+      {
+        return ParaMEDFileUMesh::New(iPart,nbOfParts,fileName,mName,dt,it,mrs);
+      }
+    default:
+      throw INTERP_KERNEL::Exception("ParaMEDFileMesh::New : only unstructured mesh supported for the moment !");
+  }
+}
+
+MEDFileMesh *ParaMEDFileMesh::ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs)
+{
+  MEDFileUtilities::CheckFileForRead(fileName);
+  ParaMEDMEM::MEDCouplingMeshType meshType;
+  MEDFileUtilities::AutoFid fid(MEDfileOpen(fileName.c_str(),MED_ACC_RDONLY));
+  int dummy0,dummy1;
+  std::string dummy2;
+  MEDFileMeshL2::GetMeshIdFromName(fid,mName,meshType,dummy0,dummy1,dummy2);
+  switch(meshType)
+  {
+    case UNSTRUCTURED:
+      {
+        return ParaMEDFileUMesh::ParaNew(iPart,nbOfParts,com,nfo,fileName,mName,dt,it,mrs);
+      }
+    default:
+      throw INTERP_KERNEL::Exception("ParaMEDFileMesh::ParaNew : only unstructured mesh supported for the moment !");
+  }
+}
+
+MEDFileUMesh *ParaMEDFileUMesh::New(int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs)
+{
+  MEDFileUtilities::CheckFileForRead(fileName);
+  MEDFileUtilities::AutoFid fid(MEDfileOpen(fileName.c_str(),MED_ACC_RDONLY));
+  return ParaMEDFileUMesh::NewPrivate(fid,iPart,nbOfParts,fileName,mName,dt,it,mrs);
+}
+
+// Typically called with com=MPI_COMM_WORLD and nfo=MPI_INFO_NULL.
+MEDFileUMesh *ParaMEDFileUMesh::ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs)
+{
+  MEDFileUtilities::CheckFileForRead(fileName);
+#ifdef HDF5_IS_PARALLEL
+  MEDFileUtilities::AutoFid fid(MEDparFileOpen(fileName.c_str(),MED_ACC_RDONLY,com,nfo));
+#else
+  MEDFileUtilities::AutoFid fid(MEDfileOpen(fileName.c_str(),MED_ACC_RDONLY));
+#endif
+  return ParaMEDFileUMesh::NewPrivate(fid,iPart,nbOfParts,fileName,mName,dt,it,mrs);
+}
+
+MEDFileUMesh *ParaMEDFileUMesh::NewPrivate(med_idt fid, int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs)
+{
+  MEDCouplingAutoRefCountObjectPtr<MEDFileUMesh> ret;
+  int meshDim, spaceDim, numberOfNodes;
+  std::vector< std::vector< std::pair<INTERP_KERNEL::NormalizedCellType,int> > > typesDistrib(MEDLoader::GetUMeshGlobalInfo(fileName,mName,meshDim,spaceDim,numberOfNodes));
+  std::vector<INTERP_KERNEL::NormalizedCellType> types;
+  std::vector<int> distrib;
+  for(std::vector< std::vector< std::pair<INTERP_KERNEL::NormalizedCellType,int> > >::const_iterator it0=typesDistrib.begin();it0!=typesDistrib.end();it0++)
+    for(std::vector< std::pair<INTERP_KERNEL::NormalizedCellType,int> >::const_iterator it1=(*it0).begin();it1!=(*it0).end();it1++)
+      {
+        types.push_back((*it1).first);
+        int tmp[3];
+        DataArray::GetSlice(0,(*it1).second,1,iPart,nbOfParts,tmp[0],tmp[1]);
+        tmp[2]=1;
+        distrib.insert(distrib.end(),tmp,tmp+3);
+      }
+  ret=MEDFileUMesh::LoadPartOf(fid,mName,types,distrib,dt,it,mrs);
+  return ret.retn();
+}
+
+MEDFileMeshes *ParaMEDFileMeshes::New(int iPart, int nbOfParts, const std::string& fileName)
+{
+  std::vector<std::string> ms(MEDLoader::GetMeshNames(fileName));
+  MEDCouplingAutoRefCountObjectPtr<MEDFileMeshes> ret(MEDFileMeshes::New());
+  for(std::vector<std::string>::const_iterator it=ms.begin();it!=ms.end();it++)
+    {
+      MEDCouplingAutoRefCountObjectPtr<MEDFileMesh> mesh(ParaMEDFileMesh::New(iPart,nbOfParts,fileName,(*it)));
+      ret->pushMesh(mesh);
+    }
+  return ret.retn();
+}
+
+MEDFileMeshes *ParaMEDFileMeshes::ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName)
+{
+  std::vector<std::string> ms(MEDLoader::GetMeshNames(fileName));
+  MEDCouplingAutoRefCountObjectPtr<MEDFileMeshes> ret(MEDFileMeshes::New());
+  for(std::vector<std::string>::const_iterator it=ms.begin();it!=ms.end();it++)
+    {
+      MEDCouplingAutoRefCountObjectPtr<MEDFileMesh> mesh(ParaMEDFileMesh::ParaNew(iPart,nbOfParts,com,nfo,fileName,(*it)));
+      ret->pushMesh(mesh);
+    }
+  return ret.retn();
+}
diff --git a/src/ParaMEDLoader/ParaMEDFileMesh.hxx b/src/ParaMEDLoader/ParaMEDFileMesh.hxx
new file mode 100644 (file)
index 0000000..f034c47
--- /dev/null
@@ -0,0 +1,61 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (EDF R&D)
+
+#ifndef __PARAMEDFILEMESH_HXX__
+#define __PARAMEDFILEMESH_HXX__
+
+#include "med.h"
+
+#include "mpi.h"
+
+#include <string>
+
+namespace ParaMEDMEM
+{
+  class MEDFileMesh;
+  class MEDFileUMesh;
+  class MEDFileMeshes;
+  class MEDFileMeshReadSelector;
+
+  class ParaMEDFileMesh
+  {
+  public:
+    static MEDFileMesh *New(int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt=-1, int it=-1, MEDFileMeshReadSelector *mrs=0);
+    static MEDFileMesh *ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName, const std::string& mName, int dt=-1, int it=-1, MEDFileMeshReadSelector *mrs=0);
+  };
+
+  class ParaMEDFileUMesh
+  {
+  public:
+    static MEDFileUMesh *New(int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt=-1, int it=-1, MEDFileMeshReadSelector *mrs=0);
+    static MEDFileUMesh *ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName, const std::string& mName, int dt=-1, int it=-1, MEDFileMeshReadSelector *mrs=0);
+  private:
+    static MEDFileUMesh *NewPrivate(med_idt fid, int iPart, int nbOfParts, const std::string& fileName, const std::string& mName, int dt, int it, MEDFileMeshReadSelector *mrs);
+  };
+
+  class ParaMEDFileMeshes
+  {
+  public:
+    static MEDFileMeshes *New(int iPart, int nbOfParts, const std::string& fileName);
+    static MEDFileMeshes *ParaNew(int iPart, int nbOfParts, const MPI_Comm& com, const MPI_Info& nfo, const std::string& fileName);
+  };
+}
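+
+// Minimal usage sketch (illustrative only: the file name is a placeholder
+// and error handling is omitted; each rank loads its own part of every
+// mesh stored in the file):
+//
+//   int rank,size;
+//   MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+//   MPI_Comm_size(MPI_COMM_WORLD,&size);
+//   ParaMEDMEM::MEDFileMeshes *ms=
+//     ParaMEDMEM::ParaMEDFileMeshes::ParaNew(rank,size,MPI_COMM_WORLD,MPI_INFO_NULL,"mesh.med");
+//   // ... use the per-part meshes, then release the reference ...
+//   ms->decrRef();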
+
+#endif
diff --git a/src/ParaMEDLoader/ParaMEDLoader.cxx b/src/ParaMEDLoader/ParaMEDLoader.cxx
new file mode 100644 (file)
index 0000000..43a4eec
--- /dev/null
@@ -0,0 +1,65 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#include "ParaMEDLoader.hxx"
+#include "MEDLoader.hxx"
+#include "ParaMESH.hxx"
+#include "BlockTopology.hxx"
+#include "MEDCouplingUMesh.hxx"
+
+#include <fstream>
+
+using namespace ParaMEDMEM;
+
+ParaMEDLoader::ParaMEDLoader()
+{
+}
+
+void ParaMEDLoader::WriteParaMesh(const char *fileName, ParaMEDMEM::ParaMESH *mesh)
+{
+  if(!mesh->getBlockTopology()->getProcGroup()->containsMyRank())
+    return ;
+  int myRank=mesh->getBlockTopology()->getProcGroup()->myRank();
+  int nbDomains=mesh->getBlockTopology()->getProcGroup()->size();
+  std::vector<std::string> fileNames(nbDomains);
+  for(int i=0;i<nbDomains;i++)
+    {
+      std::ostringstream sstr;
+      sstr << fileName << i+1 << ".med";
+      fileNames[i]=sstr.str();
+    }
+  if(myRank==0)
+    WriteMasterFile(fileName,fileNames,mesh->getCellMesh()->getName().c_str());
+  MEDLoader::WriteUMesh(fileNames[myRank].c_str(),dynamic_cast<MEDCouplingUMesh *>(mesh->getCellMesh()),true);
+}
+
+/*!
+ * This method builds the master file 'fileName' of a distributed MED file whose per-domain files are listed in 'fileNames'.
+ */
+void ParaMEDLoader::WriteMasterFile(const char *fileName, const std::vector<std::string>& fileNames, const char *meshName)
+{
+  int nbOfDom=fileNames.size();
+  std::ofstream fs(fileName);
+  fs << "#MED Fichier V 2.3" << " " << std::endl;
+  fs << "#"<<" " << std::endl;
+  fs << nbOfDom <<" " << std::endl;
+  for(int i=0;i<nbOfDom;i++)
+    fs << meshName << " " << i+1 << " " << meshName << "_" << i+1 << " localhost " << fileNames[i] << " " << std::endl;
+}
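+
+// As an illustration, the master file written above for two domains of a
+// mesh named "mesh", with fileName "out", would look like this (names are
+// illustrative only):
+//
+//   #MED Fichier V 2.3
+//   #
+//   2
+//   mesh 1 mesh_1 localhost out1.med
+//   mesh 2 mesh_2 localhost out2.med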
diff --git a/src/ParaMEDLoader/ParaMEDLoader.hxx b/src/ParaMEDLoader/ParaMEDLoader.hxx
new file mode 100644 (file)
index 0000000..6a49a47
--- /dev/null
@@ -0,0 +1,42 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#ifndef __PARAMEDLOADER_HXX__
+#define __PARAMEDLOADER_HXX__
+
+#include <string>
+#include <vector>
+
+namespace ParaMEDMEM
+{
+  class ParaMESH;
+  class ParaFIELD;
+}
+
+class ParaMEDLoader
+{
+public:
+  static void WriteParaMesh(const char *fileName, ParaMEDMEM::ParaMESH *mesh);
+  static void WriteMasterFile(const char *fileName, const std::vector<std::string>& fileNames, const char *meshName);
+private:
+  ParaMEDLoader();
+};
+
+#endif
diff --git a/src/ParaMEDMEM/BASICS_JR b/src/ParaMEDMEM/BASICS_JR
new file mode 100644 (file)
index 0000000..61a724d
--- /dev/null
@@ -0,0 +1,339 @@
+
+The specification document:
+===========================
+
+Overall, the specification document matches the implementation
+that was carried out with:
+
+. Transport-ParaMEDMEM, which was extended with the MPI_Access class
+
+. Presentation-ParaMEDMEM, which was extended with the
+  MPI_AccessDEC class
+
+
+The design corresponding to this specification has remained
+the same:
+
+. MPI_Access manages, for a ProcessorGroup (IntraCommunicator):
+  - The MPI_Request and MPI_Status structures
+  - The values of the MPI "tags"
+  - The asynchronous write and read requests
+  - The "point to point" communications [I]Send, [I]Recv as well
+    as [I]SendRecv.
+  - Unlike the MPI API, [I]SendRecv only deals with one and the
+    same "target".
+  - The asynchronous communication controls Wait, Test,
+    WaitAll, TestAll, [I]Probe, Cancel and CancelAll.
+  - As requested, only the "useful" methods were
+    implemented.
+  - Calls to [I]Send or [I]Recv with a NULL sendbuff/recvbuff
+    or with a zero sendcount/recvcount are ignored.
+  - The collective communication methods are not
+    implemented in MPI_Access.
+  - The two "Cancel" methods deal either with an IRecv that has
+    already been submitted or with a pending message (without an
+    IRecv already submitted). They group together the MPI API
+    calls that are needed (IProbe, IRecv, Wait, Test_Canceled ...).
+
+. MPI_AccessDEC uses the services of MPI_Access for a
+  ProcessorGroup (IntraCommunicator) and manages:
+  - The "point to point" collective communications
+    (synchronous or asynchronous AllToAll[v]).
+  - The times and the interpolation
+  - The [I]Send calls with their buffers (delete [])
+  - The [I]Recv calls
+  - The finalization of message sends and receives in the
+    destructor, so that no message is left pending and the
+    buffers are freed
+
+
+MPI_Access and "tags" (or "MPITags"):
+=====================================
+
+. The constructor optionally allows a range of tags to be set
+    for use: [BaseTag , MaxTag].
+  By default it is [ 0 , MPI_TAG_UB], MPI_TAG_UB being the maximum
+    value of an MPI implementation (minimum value 32767,
+    i.e. 2**15-1). On awa with the lam implementation, MPI_TAG_UB
+    is 7353944. The MPI standard specifies that this value must
+    be the same in all the processes started with mpirun.
+  In the case of simultaneous use of the same IntraCommunicator
+    within the same process (or of several IntraCommunicators
+    with a non-empty intersection), this can avoid any ambiguity
+    and help with debugging.
+
+. In MPI_Access, the tags are made of two parts
+    (#define ModuloTag 10):
+  + The last decimal digit corresponds to the MPI_DataType (1 for
+    the "time" messages, 2 for MPI_INT and 3 for MPI_DOUBLE)
+  + The value of the other digits corresponds to a circular
+    numbering of the messages.
+  + A "time" message and its associated data message have the
+    same message number (but different types and therefore
+    different tags).
+
+. For a message sent from a "source" process to a "target"
+    process, _SendMPITag[target] is available in the source
+    process (it holds the last "tag" used for sending
+    messages to the target process).
+  And in the "target" process that receives this message,
+    _RecvMPITag[source] is available (it holds the last "tag"
+    used for receiving messages from the source process).
+  Naturally, according to the MPI standard, the values of these
+    tags are the same.
+
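+  As an illustration, a tag built this way could be computed as in the
+  following sketch (the helper function is hypothetical; only ModuloTag
+  and the datatype codes come from the text above):
+
+    #define ModuloTag 10
+    // dataTypeCode : 1 = "time" message, 2 = MPI_INT, 3 = MPI_DOUBLE
+    inline int makeMPITag(int messageNumber, int dataTypeCode)
+    {
+      return messageNumber*ModuloTag+dataTypeCode; // last digit = datatype
+    }
+    // e.g. makeMPITag(42,3) == 423 : message number 42 carrying MPI_DOUBLE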
+
+MPI_Access et "RequestIds" :
+============================
+
+. ATTENTION : Dans le document de specification, la distinction
+  n'est pas faite clairement entre les "MPITags" (voir ci-dessus)
+  qui sont un argument des appels a MPI et les "RequestIds" qui
+  ne concernent pas les appels MPI. Ces "RequestIds" figurent
+  en effet sous le nom de tag comme argument d'entree/sortie dans l'API
+  de MPI_Access decrite dans le document de specification. Mais
+  dans l'implementation on a bien le nom RequestId (ou bien
+  RecvRequestId/SendRequestId).
+
+. Lors de la soumission d'une requete d'ecriture ou de lecture MPI
+    via MPI_Access, on obtient un identifieur "RequestId".
+  Cet identifieur "RequestId" correspond a une structure RequestStruct
+    de MPI_Access a laquelle on accede avec la map
+    "_MapOfRequestStruct".
+  Cette structure RequestStruct permet de gerer MPI_Request et
+    MPI_Status * de MPI et permet d'obtenir des informations sur
+    la requete : target, send/recv, tag, [a]synchrone, type, outcount.
+
+. C'est cet identifieur qui peut etre utilise pour controler une
+    requete asynchrone via MPI_Access : Wait, Test, Probe, etc...
+
+. En pratique "RequestId" est simplement un entier de l'intervalle
+    [0 , 2**32-1]. Il y a uniquement un compteur cyclique global
+    aussi bien pour les [I]Send que pour les [I]Recv.
+
+. Ces "RequestIds" et leur structures associees facilitent les
+    communications asynchrones.
+  Par exemple on a mpi_access->Wait( int RequestId )
+  au lieu de MPI_Wait(MPI_Request *request, MPI_Status *status)
+  avec gestion de status.
+
+. L'API de MPI_Access peut fournir les "SendRequestIds" d'un "target",
+    les "RecvRequestIds" d'un "source" ou bien les "SendRequestIds" de
+    tous les "targets" ou les "RecvRequestIds" de tous les "sources".
+  Cela permet d'eviter leur gestion au niveau de Presentation-ParaMEDMEM.
+
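+  A minimal sketch of this pattern (the exact MPI_Access signatures
+  below are assumptions used for illustration, not the real API):
+
+    // Submit an asynchronous send and keep only the integer RequestId;
+    // the MPI_Request/MPI_Status bookkeeping stays inside MPI_Access.
+    int sendRequestId;
+    mpi_access->ISend(sendbuf,sendcount,MPI_DOUBLE,target,sendRequestId);
+    // ... other work ...
+    mpi_access->Wait(sendRequestId); // completes that single request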
+
+MPI_AccessDEC:
+==============
+
+. Like the DEC class, it is based on local_group and distant_group,
+  which together form an MPI_union_group and therefore an
+  IntraCommunicator.
+
+. It allows choosing between the synchronous and the asynchronous
+  (default) mode. The same program can run synchronously or
+  asynchronously without having to be modified.
+
+. It allows choosing an interpolation mode (currently only a
+  linear interpolation) or a mode without interpolation (the
+  default). This applies to the collective communications. With
+  interpolation, the collective communications send and receive
+  a "time" message in addition to the data.
+
+. It implements AllToAll[v] in "point to point" mode, with or
+  without interpolation.
+
+. It manages the message send buffers and therefore destroys
+  them once they become available.
+
+. It creates and uses MPI_Access.
+
+
+MPI_AccessDEC and the management of the SendBuffers:
+====================================================
+
+. Since in the collective communications only parts of the same
+  buffer are sent to each "target" process, in asynchronous mode
+  one must make sure that all these parts are available before
+  the buffer can be freed.
+
+. These buffers are assumed to have been allocated with new double[]
+
+. The SendBuffStruct structure keeps the address of the buffer and
+  manages a reference counter for this buffer. It also holds the
+  MPI_Datatype so that a delete [] (double *) ... can be done
+  when the counter reaches zero.
+
+. The _MapOfSendBuffers map associates each RequestId obtained from
+  MPI_Access->ISend(...) with a SendBuffStruct, for each "target"
+  of a part of the buffer.
+
+. All of this only concerns the asynchronous sends. In synchronous
+  mode, sendbuf is destroyed right after it has been transmitted.
+
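+  A minimal sketch of such a reference-counted send buffer (the field
+  names and the exact layout are assumptions, not the actual
+  MPI_AccessDEC code):
+
+    struct SendBuffStruct
+    {
+      void         *buffer;   // address returned by new double[] / new TimeMessage
+      int           counter;  // pending ISend requests still using this buffer
+      MPI_Datatype  datatype; // tells which delete [] to apply when counter == 0
+    };
+    // CheckSent() decrements 'counter' each time Test/Wait reports one
+    // part as sent, and frees 'buffer' when the counter reaches zero.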
+
+MPI_AccessDEC and the management of the RecvBuffers:
+====================================================
+
+If there is no interpolation, nothing special is done.
+
+With interpolation, for each target:
+------------------------------------
+. There is _TimeMessages[target], which is a vector of TimesMessages.
+  In our case, with a linear interpolation, there are 2 of them,
+  holding the previous time(t0)/deltatime and the latest
+  time(t1)/deltatime.
+
+. There is _DataMessages[target], which is a vector of DatasMessages.
+  In our case, with a linear interpolation, there are 2 of them,
+  holding the data obtained by Recv at the previous time(t0)/deltatime
+  and at the latest time(t1)/deltatime.
+
+. At the time _t(t*) of the current process, the interpolation between
+  the values of the 2 DatasMessages is computed and stored in the part
+  of recvbuf corresponding to the target, provided that t0 < t* <= t1.
+
+. Because the "deltatimes" differ between processes, one may have
+  t0 < t1 < t*, in which case an extrapolation is performed.
+
+. The vectors _OutOfTime, _DataMessagesRecvCount and _DataMessagesType
+  contain, for each target, true if t* > the last t1, plus the
+  recvcount and the MPI_Datatype needed to finalize the message
+  handling at the end.
+
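+With a linear interpolator this amounts to the usual formula, sketched
+below (variable names are assumptions: t0/data0 are the previous time
+message and its data, t1/data1 the latest ones, tstar the current time
+of this process):
+
+    double alpha=(t1-tstar)/(t1-t0);
+    for (int i=0;i<recvcount;i++)
+      recvbuf[i]=alpha*data0[i]+(1.-alpha)*data1[i];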
+
+Steps of the collective communications of MPI_AccessDEC:
+=========================================================
+
+AllToAll[v] : The arguments are the same as in MPI except MPI_Comm,
+------------- which is not needed (already known to MPI_AccessDEC
+              and MPI_Access).
+
+              If a TimeInterpolator is set, AllToAll[v]Time is called.
+
+              Otherwise, CheckSent is called for the asynchronous
+                exchanges (see below) and SendRecv is called for
+                each "target".
+
+AllToAll[v]Time :
+-----------------
+
+. CheckSent() :
+  + calls SendRequestIds of MPI_Access in order to obtain all the
+    RequestIds of the messages sent to all the "targets".
+  + For each RequestId, calls Test of MPI_Access to know whether
+    the buffer is free (flag = true). In the case of the
+    FinalCheckSent, Wait is called instead of Test.
+  + If the buffer is free, the counter of the SendBuffStruct
+    structure obtained through _MapOfSendBuffers is decremented
+    (see MPI_AccessDEC and the management of the SendBuffers above).
+  + If the counter is zero, the TimeMessage or the SendBuffer is
+    destroyed, depending on the DataType.
+  + Then the SendBuffStruct structure is destroyed before this
+    item is removed (erase) from _MapOfSendBuffers.
+
+. DoSend :
+  + A TimeMessage is created (see this structure in MPI_Access).
+  + In asynchronous mode, two SendBuffStruct structures,
+    aSendTimeStruct and aSendDataStruct, are created and filled.
+  + The aSendTimeMessage structure is filled with the time/deltatime
+    of the current process. "deltatime" must be zero if this is the
+    last time step.
+  + Then, for each "target", the TimeMessage and the part of sendbuf
+    relevant to that target are sent.
+  + In asynchronous mode, the counter is incremented and
+    aSendTimeStruct and aSendDataStruct are added to _MapOfSendBuffers
+    with the SendTimeRequestId and SendDataRequestId identifiers
+    returned by MPI_Access->Send(...).
+  + Finally, in synchronous mode, the SendMessages are destroyed.
+
+. CheckTime(recvcount , recvtype , target , UntilEnd)
+  + At the start, the first "time message" is read into
+    &(*_TimeMessages)[target][1] and the first data message into
+    the allocated buffer (*_DataMessages)[target][1].
+  + By convention, the deltatime of a time message is zero if it
+    is the last one.
+  + While loop: _t(t*) is the current time of the process.
+    "while _t(t*) is greater than the time of the "target"
+     (*_TimeMessages)[target][1].time and
+     (*_TimeMessages)[target][1].deltatime is not zero",
+    so that at the end of the loop we have:
+     _t(t*) <= (*_TimeMessages)[target][1].time with
+     _t(t*) > (*_TimeMessages)[target][0].time
+    or else we have the last time message of the "target".
+  + If this is the finalization of the reception of the time and
+    data messages (UntilEnd is true), the loop is run until
+    (*_TimeMessages)[target][1].deltatime is found to be zero.
+  + Inside the loop:
+    The latest time message is copied into the previous time
+      message and the next time message is read.
+    The data buffer of the previous time is destroyed.
+    The pointer of the latest data buffer is copied into the
+      previous one.
+    A new latest data buffer (*_DataMessages)[target][1] is
+      allocated and the corresponding data are read into this
+      buffer.
+  + If the current time of the process is greater than the last
+    time (*_TimeMessages)[target][1].time of the target,
+    (*_OutOfTime)[target] is set to true.
+    (*_TimeMessages)[target][1].deltatime is then zero.
+
+. CheckTime + DoRecv + DoInterp
+  + CheckTime is called for each target.
+  + If there is a TimeInterpolator and if the time message of the
+    target is not the first one, the interpolator is called and
+    stores its results in the part of the reception buffer that
+    corresponds to the "target".
+  + Otherwise, the data received for this first time step are
+    copied into the part of the reception buffer that corresponds
+    to the "target".
+
+Presentation-ParaMEDMEM:
+========================
+
+. Minor modifications were made in Presentation-ParaMEDMEM so that
+  these new features can be used. Above all, nothing destabilizing
+  was changed. The previous mode of operation naturally remains
+  available.
+
+. This relies on three new options created with registerOption
+  in the constructor of InterpKernelDEC:
+  + Asynchronous : true or false (default)
+  + TimeInterpolation : WithoutTimeInterp (default) or LinearTimeInterp
+    typedef enum{WithoutTimeInterp,LinearTimeInterp} TimeInterpolationMethod;
+    in MPI_AccessDEC.hxx
+  + AllToAllMethod : Native (default) or PointToPoint
+    typedef enum{Native,PointToPoint} AllToAllMethod;
+    in MxN_Mapping.hxx
+
+. The options are chosen through the Data Exchange Channel (a fuller
+  usage sketch is given at the end of this note):
+  + ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+  + dec.setOption("Asynchronous",true);
+  + dec.setOption("TimeInterpolation",LinearTimeInterp);
+  + dec.setOption("AllToAllMethod",PointToPoint);
+
+. In dec.synchronize(),
+  + an InterpolationMatrix object is created,
+    which itself creates an MxN_Mapping object,
+    which itself now creates an MPI_AccessDEC object
+  + the chosen AllToAllMethod option is passed to MxN_Mapping
+    through the InterpolationMatrix
+  + the values of the Asynchronous and TimeInterpolation options
+    are passed to MPI_AccessDEC through its Asynchronous and
+    SetTimeInterpolator methods.
+
+. ParaMEDMEM::InterpKernelDEC now provides overloads of the
+  recvData() and sendData() methods:
+  + void InterpKernelDEC::recvData( double time ), which calls
+    SetTime(time) of MPI_AccessDEC and then
+    recvData()
+  + void InterpKernelDEC::sendData( double time , double deltatime ),
+    which calls
+    SetTime(time,deltatime) of MPI_AccessDEC and then
+    sendData()
+
+. recvData() and sendData() of ParaMEDMEM::InterpKernelDEC
+  call multiply and transposeMultiply of the InterpolationMatrix,
+  which call sendRecv and reverseSendRecv of MxN_Mapping,
+  which call comm_interface.allToAllV in "Native" mode
+  or MPI_AccessDEC::AllToAllv in "PointToPoint" mode.
+
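+A minimal usage sketch combining these options (the creation of
+source_group, target_group and parafield, as well as tmax and
+deltatime, is assumed and not shown):
+
+    ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+    dec.setOption("Asynchronous",true);
+    dec.setOption("TimeInterpolation",LinearTimeInterp);
+    dec.setOption("AllToAllMethod",PointToPoint);
+    dec.attachLocalField(parafield);
+    dec.synchronize();
+    for (double time=0.; time<tmax; time+=deltatime)
+      {
+        if (source_group->containsMyRank())
+          dec.sendData(time,deltatime); // SetTime(time,deltatime) + sendData()
+        else
+          dec.recvData(time);           // SetTime(time) + recvData()
+      }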
diff --git a/src/ParaMEDMEM/BlockTopology.cxx b/src/ParaMEDMEM/BlockTopology.cxx
new file mode 100644 (file)
index 0000000..8f6b4ce
--- /dev/null
@@ -0,0 +1,336 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "BlockTopology.hxx"
+#include "MEDCouplingMemArray.hxx"
+#include "MEDCouplingCMesh.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ComponentTopology.hxx"
+#include "InterpKernelUtilities.hxx"
+
+#include <vector>
+#include <algorithm>
+#include <utility>
+#include <iostream>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+
+  //!converts a global number to a pair <subdomain_id,local>
+  std::pair<int,int> BlockTopology::globalToLocal(const int global) const
+  {
+    int subdomain_id=0;
+    int position=global;
+    int size=_nb_elems;
+    int size_procs=_proc_group->size();
+    int increment=size;
+    vector<int>axis_position(_dimension);
+    vector<int>axis_offset(_dimension);
+    for (int idim=0; idim<_dimension; idim++)
+      {
+        int axis_size=_local_array_indices[idim].size()-1;
+        int axis_nb_elem=_local_array_indices[idim][axis_size];
+        increment=increment/axis_nb_elem;
+        int proc_increment = size_procs/(axis_size);
+        int axis_pos=position/increment;
+        position=position%increment;  
+        int iaxis=1;
+        while (_local_array_indices[idim][iaxis]<=axis_pos)
+          {
+            subdomain_id+=proc_increment;
+            iaxis++;
+          }
+        axis_position[idim]=axis_pos-_local_array_indices[idim][iaxis-1];
+        axis_offset[idim]=iaxis;
+      }
+    int local=0;
+    int local_increment=1;
+    for (int idim=_dimension-1; idim>=0; idim--)
+      {
+        local+=axis_position[idim]*local_increment;
+        local_increment*=_local_array_indices[idim][axis_offset[idim]]-_local_array_indices[idim][axis_offset[idim]-1];
+      }
+    return make_pair(subdomain_id,local);
+  }
+
+  //!converts local number to a global number
+  int BlockTopology::localToGlobal(const pair<int,int> local) const
+  {
+  
+    int subdomain_id=local.first;
+    int global=0;
+    int loc=local.second;
+    int increment=_nb_elems;
+    int proc_increment=_proc_group->size();
+    int local_increment=getNbLocalElements();
+    for (int idim=0; idim < _dimension; idim++)
+      {
+        int axis_size=_local_array_indices[idim].size()-1;
+        int axis_nb_elem=_local_array_indices[idim][axis_size];
+        increment=axis_nb_elem==0?0:increment/axis_nb_elem;
+        proc_increment = proc_increment/(axis_size);
+        int proc_axis=subdomain_id/proc_increment;
+        subdomain_id=subdomain_id%proc_increment;
+        int local_axis_nb_elem=_local_array_indices[idim][proc_axis+1]-_local_array_indices[idim][proc_axis];
+        local_increment = (local_axis_nb_elem==0)?0:(local_increment/local_axis_nb_elem);
+        int iaxis=((local_increment==0)?0:(loc/local_increment))+_local_array_indices[idim][proc_axis];
+        global+=increment*iaxis;
+        loc = (local_increment==0)?0:(loc%local_increment);
+      }
+    return global;
+  }
+
+  //Retrieves the local number of elements 
+  int BlockTopology::getNbLocalElements()const 
+  {
+    int position=_proc_group->myRank();
+    int nb_elem = 1;
+    int increment=1;
+    for (int i=_dimension-1; i>=0; i--)
+      {  
+        increment *=_nb_procs_per_dim[i];
+        int idim=position%increment;
+        position=position/increment;
+        int imin=_local_array_indices[i][idim];
+        int imax=_local_array_indices[i][idim+1];
+        nb_elem*=(imax-imin);
+      }
+    return nb_elem;
+  }
+
+  /*!
+   * Constructor of a block topology from a grid. 
+   * This preliminary version simply splits along the first axis
+   * instead of making the best choice with respect to the 
+   * values of the different axes. 
+   */
+  BlockTopology::BlockTopology(const ProcessorGroup& group, MEDCouplingCMesh *grid):
+    _dimension(grid->getSpaceDimension()), _proc_group(&group), _owns_processor_group(false)
+  {
+    vector <int> axis_length(_dimension);
+    _nb_elems=1;
+    for (int idim=0; idim <_dimension; idim++)
+      {
+        DataArrayDouble *arr=grid->getCoordsAt(idim);
+        axis_length[idim]=arr->getNbOfElems();
+        _nb_elems*=axis_length[idim];
+      }  
+    //default splitting along 1st dimension
+    _local_array_indices.resize(_dimension);
+    _nb_procs_per_dim.resize(_dimension);
+  
+    _local_array_indices[0].resize(_proc_group->size()+1);
+    _local_array_indices[0][0]=0;
+    _nb_procs_per_dim[0]=_proc_group->size();
+  
+    for (int i=1; i<=_proc_group->size(); i++)
+      {
+        _local_array_indices[0][i]=_local_array_indices[0][i-1]+
+          axis_length[0]/_proc_group->size();
+        if (i<= axis_length[0]%_proc_group->size())
+          _local_array_indices[0][i]+=1;
+      }
+    for (int i=1; i<_dimension; i++)
+      {
+        _local_array_indices[i].resize(2);
+        _local_array_indices[i][0]=0;
+        _local_array_indices[i][1]=axis_length[i];
+        _nb_procs_per_dim[i]=1;
+      }
+    _cycle_type.resize(_dimension);
+    for (int i=0; i<_dimension; i++)
+      _cycle_type[i]=ParaMEDMEM::Block;  
+  }
+
+  /*!
+   * Creation of a block topology by composing 
+   * a geometrical topology and a component topology.
+   * This constructor is intended for creating fields 
+   * for which the parallel distribution is made on the
+   * components of the field rather than on the geometrical 
+   * partitioning of the underlying mesh.
+   * 
+   */ 
+  BlockTopology::BlockTopology(const BlockTopology& geom_topo, const ComponentTopology& comp_topo):_owns_processor_group(false)
+  {
+    // so far, the block topology can only be created if the proc group 
+    // is either on geom_topo or on comp_topo
+    if (geom_topo.getProcGroup()->size()>1 && comp_topo.nbBlocks()>1)
+      throw INTERP_KERNEL::Exception(LOCALIZED("BlockTopology cannot yet be constructed with both complex geo and components topology"));
+
+    if (comp_topo.nbComponents()==1)
+      {
+        *this=geom_topo;
+        return;
+      }
+    else
+      {
+        _dimension = geom_topo.getDimension()+1;
+        if (comp_topo.nbBlocks()>1)
+          _proc_group=comp_topo.getProcGroup();
+        else
+          _proc_group=geom_topo.getProcGroup();
+        _local_array_indices=geom_topo._local_array_indices;
+        vector<int> comp_indices = *(comp_topo.getBlockIndices());
+        _local_array_indices.push_back(comp_indices);
+        _nb_procs_per_dim=geom_topo._nb_procs_per_dim;
+        _nb_procs_per_dim.push_back(comp_topo.nbBlocks());
+        _cycle_type=geom_topo._cycle_type;
+        _cycle_type.push_back(Block);
+        _nb_elems=geom_topo.getNbElements()*comp_topo.nbComponents();
+      }  
+  }
+
+  /*! Constructor for creating a one-dimensional
+   * topology from a processor group and a local 
+   * number of elements on each processor
+   * 
+   * The function must be called only by the processors belonging
+   * to group \a group. Calling it from a processor not belonging
+   * to \a group will cause an MPI error, while calling from a subset
+   * of \a group will result in a deadlock. 
+   */
+  BlockTopology::BlockTopology(const ProcessorGroup& group, int nb_elem):_dimension(1),_proc_group(&group),_owns_processor_group(false)
+  {
+    int* nbelems_per_proc = new int[group.size()];
+    const MPIProcessorGroup* mpi_group=dynamic_cast<const MPIProcessorGroup*>(_proc_group);
+    const MPI_Comm* comm=mpi_group->getComm();
+    int nbtemp=nb_elem;
+    mpi_group->getCommInterface().allGather(&nbtemp, 1, MPI_INT, 
+                                            nbelems_per_proc, 1, MPI_INT, 
+                                            *comm);
+    _nb_elems=0;  
+  
+    //splitting along only dimension
+    _local_array_indices.resize(1);
+    _nb_procs_per_dim.resize(1);  
+          
+    _local_array_indices[0].resize(_proc_group->size()+1);
+    _local_array_indices[0][0]=0;
+    _nb_procs_per_dim[0]=_proc_group->size();
+  
+    for (int i=1; i<=_proc_group->size(); i++)
+      {
+        _local_array_indices[0][i]=_local_array_indices[0][i-1]+
+          nbelems_per_proc[i-1];
+        _nb_elems+=nbelems_per_proc[i-1];
+      }
+    _cycle_type.resize(1);
+    _cycle_type[0]=ParaMEDMEM::Block;
+    delete[] nbelems_per_proc;
+  }
+
+  BlockTopology::~BlockTopology()
+  {
+    if (_owns_processor_group)
+      delete _proc_group;
+  }
+
+  /*! Retrieves the min and max indices of the domain stored locally
+   * for each dimension. The output vector has the topology dimension
+   * as a size and each pair <int,int> contains min and max. Indices 
+   * range from min to max-1.
+   */
+  std::vector<std::pair<int,int> > BlockTopology::getLocalArrayMinMax() const
+  {
+    vector<pair<int,int> > local_indices (_dimension);
+    int myrank=_proc_group->myRank();
+    int increment=1;
+    for (int i=_dimension-1; i>=0; i--)
+      {  
+        increment *=_nb_procs_per_dim[i];
+        int idim=myrank%increment;
+        local_indices[i].first=_local_array_indices[i][idim];
+        local_indices[i].second=_local_array_indices[i][idim+1];
+        cout << local_indices[i].first << " "<< local_indices[i].second<<endl;
+      }
+    return local_indices;
+  }
+
+  /*! Serializes the data contained in the Block Topology
+   * for communication purposes */
+  void BlockTopology::serialize(int* & serializer, int& size) const 
+  {
+    vector<int> buffer;
+  
+    buffer.push_back(_dimension);
+    buffer.push_back(_nb_elems);
+    for (int i=0; i<_dimension; i++)
+      {
+        buffer.push_back(_nb_procs_per_dim[i]);
+        buffer.push_back(_cycle_type[i]);
+        buffer.push_back(_local_array_indices[i].size());
+        for (int j=0; j<(int)_local_array_indices[i].size(); j++)
+          buffer.push_back(_local_array_indices[i][j]);
+      }
+  
+    //serializing the comm group
+    int size_comm=_proc_group->size();
+    buffer.push_back(size_comm);
+    MPIProcessorGroup world_group(_proc_group->getCommInterface());
+    for (int i=0; i<size_comm;i++)
+      {
+        int world_rank=world_group.translateRank(_proc_group, i);
+        buffer.push_back(world_rank);
+      }
+  
+    serializer=new int[buffer.size()];
+    size=buffer.size();
+    copy(buffer.begin(), buffer.end(), serializer);
+  }
+
+  /*!
+   *
+   * Unserializes the data contained in the Block Topology
+   * after communication. Uses the same structure as the one used for serialize() 
+   *
+   */
+  void BlockTopology::unserialize(const int* serializer,const CommInterface& comm_interface)
+  {
+    const int* ptr_serializer=serializer;
+    cout << "unserialize..."<<endl;
+    _dimension=*(ptr_serializer++);
+    cout << "dimension "<<_dimension<<endl;
+    _nb_elems=*(ptr_serializer++);
+    cout << "nbelems "<<_nb_elems<<endl;
+    _nb_procs_per_dim.resize(_dimension);
+    _cycle_type.resize(_dimension);
+    _local_array_indices.resize(_dimension);
+    for (int i=0; i<_dimension; i++)
+      {
+        _nb_procs_per_dim[i]=*(ptr_serializer++);
+        _cycle_type[i]=(CYCLE_TYPE)*(ptr_serializer++);
+        _local_array_indices[i].resize(*(ptr_serializer++));
+        for (int j=0; j<(int)_local_array_indices[i].size(); j++)
+          _local_array_indices[i][j]=*(ptr_serializer++);
+      }
+    set<int> procs;
+    int size_comm=*(ptr_serializer++);
+    for (int i=0; i<size_comm; i++)
+      procs.insert(*(ptr_serializer++));
+    cout << "unserialize..."<<procs.size()<<endl;
+    _proc_group=new MPIProcessorGroup(comm_interface,procs);
+    _owns_processor_group=true;
+    //TODO manage memory ownership of _proc_group  
+  }
+}
diff --git a/src/ParaMEDMEM/BlockTopology.hxx b/src/ParaMEDMEM/BlockTopology.hxx
new file mode 100644 (file)
index 0000000..eabc2ec
--- /dev/null
@@ -0,0 +1,70 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __BLOCKTOPOLOGY_HXX__
+#define __BLOCKTOPOLOGY_HXX__
+
+#include "Topology.hxx"
+#include "ProcessorGroup.hxx"
+
+#include <vector>
+
+namespace ParaMEDMEM
+{
+  class ComponentTopology;
+  class MEDCouplingCMesh;
+
+  typedef enum{Block,Cycle} CYCLE_TYPE; 
+
+  class BlockTopology : public Topology
+  {
+  public:
+    BlockTopology() { }
+    BlockTopology(const ProcessorGroup& group, MEDCouplingCMesh *grid); 
+    BlockTopology(const BlockTopology& geom_topo, const ComponentTopology& comp_topo);
+    BlockTopology(const ProcessorGroup& group, int nb_elem);
+    virtual ~BlockTopology();
+    //!Retrieves the number of elements for a given topology
+    int getNbElements()const { return _nb_elems; }
+    int getNbLocalElements() const;
+    const ProcessorGroup* getProcGroup()const { return _proc_group; }
+    std::pair<int,int> globalToLocal (const int) const ;
+    int localToGlobal (const std::pair<int,int>) const;
+    std::vector<std::pair<int,int> > getLocalArrayMinMax() const ;
+    int getDimension() const { return _dimension; }
+    void serialize(int* & serializer, int& size) const ;
+    void unserialize(const int* serializer, const CommInterface& comm_interface);
+  private:
+    //dimension : 2 or 3
+    int _dimension;
+    //proc array
+    std::vector<int> _nb_procs_per_dim;
+    //stores the offsets vector  
+    std::vector<std::vector<int> > _local_array_indices;
+    //stores the cycle type (block or cyclic)
+    std::vector<CYCLE_TYPE> _cycle_type;
+    //Processor group
+    const ProcessorGroup* _proc_group;
+    //nb of elements
+    int _nb_elems;
+    bool _owns_processor_group;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/CMakeLists.txt b/src/ParaMEDMEM/CMakeLists.txt
new file mode 100644 (file)
index 0000000..a94cb3e
--- /dev/null
@@ -0,0 +1,71 @@
+# Copyright (C) 2012-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author : Anthony Geay (CEA/DEN)
+
+ADD_DEFINITIONS(${MPI_DEFINITIONS})
+
+INCLUDE_DIRECTORIES(
+  ${MPI_INCLUDE_DIRS}
+  ${CMAKE_CURRENT_SOURCE_DIR}
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDCoupling
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/Bases
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/Geometric2D
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/ExprEval
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/GaussPoints
+  )
+
+SET(paramedmem_SOURCES
+  ProcessorGroup.cxx
+  MPIProcessorGroup.cxx
+  ParaMESH.cxx
+  ComponentTopology.cxx
+  MPIAccess.cxx
+  InterpolationMatrix.cxx
+  OverlapInterpolationMatrix.cxx
+  StructuredCoincidentDEC.cxx
+  ExplicitCoincidentDEC.cxx
+  InterpKernelDEC.cxx
+  ElementLocator.cxx
+  OverlapElementLocator.cxx
+  MPIAccessDEC.cxx
+  TimeInterpolator.cxx
+  LinearTimeInterpolator.cxx
+  DEC.cxx
+  DisjointDEC.cxx
+  OverlapDEC.cxx
+  ExplicitTopology.cxx
+  MxN_Mapping.cxx
+  OverlapMapping.cxx
+  ICoCoMEDField.cxx
+  ICoCoField.cxx
+  ParaFIELD.cxx
+  ParaGRID.cxx
+  BlockTopology.cxx
+  )
+
+ADD_LIBRARY(paramedmem SHARED ${paramedmem_SOURCES})
+TARGET_LINK_LIBRARIES(paramedmem medcoupling ${MPI_LIBRARIES})
+INSTALL(TARGETS paramedmem EXPORT ${PROJECT_NAME}TargetGroup DESTINATION ${MEDTOOL_INSTALL_LIBS})
+
+FILE(GLOB paramedmem_HEADERS_HXX "${CMAKE_CURRENT_SOURCE_DIR}/*.hxx")
+INSTALL(FILES ${paramedmem_HEADERS_HXX} DESTINATION ${MEDTOOL_INSTALL_HEADERS})
+
+# To allow usage as SWIG dependencies:
+SET(paramedmem_HEADERS_HXX PARENT_SCOPE)
diff --git a/src/ParaMEDMEM/CommInterface.cxx b/src/ParaMEDMEM/CommInterface.cxx
new file mode 100644 (file)
index 0000000..948f099
--- /dev/null
@@ -0,0 +1,66 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "CommInterface.hxx"
+
+namespace ParaMEDMEM
+{
+  /*! \anchor CommInterface-det
+     \class CommInterface
+
+    The class \a CommInterface is the gateway to the MPI library.
+
+    It is a helper class that centralizes the calls to the MPI
+    library made in the %ParaMEDMEM library. This centralization
+    makes it easier to collect information about the communication
+    performed by the library.
+
+    It is typically called after the MPI_Init() call in a program. It is afterwards passed as a parameter to the constructors of %ParaMEDMEM objects so that they access the MPI library via the CommInterface.
+
+    As an example, the following code excerpt initializes a processor group made of the zero processor.
+
+    \verbatim
+    #include "CommInterface.hxx"
+    #include "ProcessorGroup.hxx"
+
+    int main(int argc, char** argv)
+    {
+    //initialization
+    MPI_Init(&argc, &argv);
+    ParaMEDMEM::CommInterface comm_interface;
+
+    //setting up a processor group with proc 0
+    set<int> procs;
+    procs.insert(0);
+    ParaMEDMEM::MPIProcessorGroup group(comm_interface, procs);
+
+    //cleanup
+    MPI_Finalize();
+    }
+    \endverbatim
+  */
+
+  CommInterface::CommInterface()
+  {
+  }
+
+  CommInterface::~CommInterface()
+  {
+  }
+}
diff --git a/src/ParaMEDMEM/CommInterface.hxx b/src/ParaMEDMEM/CommInterface.hxx
new file mode 100644 (file)
index 0000000..ae430ed
--- /dev/null
@@ -0,0 +1,92 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __COMMINTERFACE_HXX__
+#define __COMMINTERFACE_HXX__
+
+#include <mpi.h>
+namespace ParaMEDMEM
+{
+
+  class CommInterface
+  {
+  public:
+    CommInterface(){}
+    virtual ~CommInterface(){}
+    int worldSize() const {
+      int size;
+      MPI_Comm_size(MPI_COMM_WORLD, &size);
+      return size;}
+    int commSize(MPI_Comm comm, int* size) const { return MPI_Comm_size(comm,size); }
+    int commRank(MPI_Comm comm, int* rank) const { return MPI_Comm_rank(comm,rank); }
+    int commGroup(MPI_Comm comm, MPI_Group* group) const { return MPI_Comm_group(comm, group); }
+    int groupIncl(MPI_Group group, int size, int* ranks, MPI_Group* group_output) const { return MPI_Group_incl(group, size, ranks, group_output); }
+    int commCreate(MPI_Comm comm, MPI_Group group, MPI_Comm* comm_output) const { return MPI_Comm_create(comm,group,comm_output); }
+    int groupFree(MPI_Group* group) const { return MPI_Group_free(group); }
+    int commFree(MPI_Comm* comm) const { return MPI_Comm_free(comm); }
+
+    int send(void* buffer, int count, MPI_Datatype datatype, int target, int tag, MPI_Comm comm) const { return MPI_Send(buffer,count, datatype, target, tag, comm); }
+    int recv(void* buffer, int count, MPI_Datatype datatype, int source, int tag, MPI_Comm comm, MPI_Status* status) const { return MPI_Recv(buffer,count, datatype, source, tag, comm, status); }
+    int sendRecv(void* sendbuf, int sendcount, MPI_Datatype sendtype, 
+                 int dest, int sendtag, void* recvbuf, int recvcount, 
+                 MPI_Datatype recvtype, int source, int recvtag, MPI_Comm comm,
+                 MPI_Status* status) { return MPI_Sendrecv(sendbuf, sendcount, sendtype, dest, sendtag, recvbuf, recvcount, recvtype, source, recvtag, comm,status); }
+
+    int Isend(void* buffer, int count, MPI_Datatype datatype, int target,
+              int tag, MPI_Comm comm, MPI_Request *request) const { return MPI_Isend(buffer,count, datatype, target, tag, comm, request); }
+    int Irecv(void* buffer, int count, MPI_Datatype datatype, int source,
+              int tag, MPI_Comm comm, MPI_Request* request) const { return MPI_Irecv(buffer,count, datatype, source, tag, comm, request); }
+
+    int wait(MPI_Request *request, MPI_Status *status) const { return MPI_Wait(request, status); }
+    int test(MPI_Request *request, int *flag, MPI_Status *status) const { return MPI_Test(request, flag, status); }
+    int requestFree(MPI_Request *request) const { return MPI_Request_free(request); }
+    int waitany(int count, MPI_Request *array_of_requests, int *index, MPI_Status *status) const { return MPI_Waitany(count, array_of_requests, index, status); }
+    int testany(int count, MPI_Request *array_of_requests, int *index, int *flag, MPI_Status *status) const { return MPI_Testany(count, array_of_requests, index, flag, status); }
+    int waitall(int count, MPI_Request *array_of_requests, MPI_Status *array_of_status) const { return MPI_Waitall(count, array_of_requests, array_of_status); }
+    int testall(int count, MPI_Request *array_of_requests, int *flag, MPI_Status *array_of_status) const { return MPI_Testall(count, array_of_requests, flag, array_of_status); }
+    int waitsome(int incount, MPI_Request *array_of_requests,int *outcount, int *array_of_indices, MPI_Status *array_of_status) const { return MPI_Waitsome(incount, array_of_requests, outcount, array_of_indices, array_of_status); }
+    int testsome(int incount, MPI_Request *array_of_requests, int *outcount,
+                 int *array_of_indices, MPI_Status *array_of_status) const { return MPI_Testsome(incount, array_of_requests, outcount, array_of_indices, array_of_status); }
+    int probe(int source, int tag, MPI_Comm comm, MPI_Status *status) const { return MPI_Probe(source, tag, comm, status) ; }
+    int Iprobe(int source, int tag, MPI_Comm comm, int *flag, MPI_Status *status) const { return MPI_Iprobe(source, tag, comm, flag, status) ; }
+    int cancel(MPI_Request *request) const { return MPI_Cancel(request); }
+    int testCancelled(MPI_Status *status, int *flag) const { return MPI_Test_cancelled(status, flag); }
+    int barrier(MPI_Comm comm) const { return MPI_Barrier(comm); }
+    int errorString(int errorcode, char *string, int *resultlen) const { return MPI_Error_string(errorcode, string, resultlen); }
+    int getCount(MPI_Status *status, MPI_Datatype datatype, int *count) const { return MPI_Get_count(status, datatype, count); }
+
+    int broadcast(void* buffer, int count, MPI_Datatype datatype, int root, MPI_Comm comm) const { return MPI_Bcast(buffer, count,  datatype, root, comm); }
+    int allGather(void* sendbuf, int sendcount, MPI_Datatype sendtype,
+                  void* recvbuf, int recvcount, MPI_Datatype recvtype,
+                  MPI_Comm comm) const { return MPI_Allgather(sendbuf,sendcount, sendtype, recvbuf, recvcount, recvtype, comm); }
+    int allToAll(void* sendbuf, int sendcount, MPI_Datatype sendtype,
+                 void* recvbuf, int recvcount, MPI_Datatype recvtype,
+                 MPI_Comm comm) const { return MPI_Alltoall(sendbuf, sendcount, sendtype, recvbuf, recvcount, recvtype, comm); }
+    int allToAllV(void* sendbuf, int* sendcounts, int* senddispls,
+                  MPI_Datatype sendtype, void* recvbuf, int* recvcounts,
+                  int* recvdispls, MPI_Datatype recvtype, 
+                  MPI_Comm comm) const { return MPI_Alltoallv(sendbuf, sendcounts, senddispls, sendtype, recvbuf, recvcounts, recvdispls, recvtype, comm); }
+
+    int reduce(void* sendbuf, void* recvbuf, int count, MPI_Datatype datatype,
+               MPI_Op op, int root, MPI_Comm comm) const { return MPI_Reduce(sendbuf, recvbuf, count, datatype, op, root, comm); }
+    int allReduce(void* sendbuf, void* recvbuf, int count, MPI_Datatype datatype, MPI_Op op, MPI_Comm comm) const { return MPI_Allreduce(sendbuf, recvbuf, count, datatype, op, comm); }
+  };
+}
+
+#endif /* __COMMINTERFACE_HXX__ */
diff --git a/src/ParaMEDMEM/ComponentTopology.cxx b/src/ParaMEDMEM/ComponentTopology.cxx
new file mode 100644 (file)
index 0000000..8af706e
--- /dev/null
@@ -0,0 +1,115 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ComponentTopology.hxx"
+#include "ProcessorGroup.hxx"
+#include "InterpolationUtils.hxx"
+
+namespace ParaMEDMEM
+{
+  /*!
+   * Generic constructor for \a nb_comp components split as evenly as
+   * possible across the processors of \a group (one block per processor)
+   */
+  ComponentTopology::ComponentTopology(int nb_comp, ProcessorGroup* group):_proc_group(group)
+  {
+    int nb_blocks=group->size();
+  
+    if (nb_blocks>nb_comp)
+      throw INTERP_KERNEL::Exception("ComponentTopology : the number of components must be at least the number of blocks");
+
+    _component_array.resize(nb_blocks+1);
+    _component_array[0]=0;
+    for (int i=1; i<=nb_blocks; i++)
+      {
+        _component_array[i]=_component_array[i-1]+nb_comp/nb_blocks;
+        if (i<=nb_comp%nb_blocks)
+          _component_array[i]++;
+      }
+  }
+  
+  /*!
+   * Generic constructor for \a nb_comp components split as evenly as
+   * possible across \a nb_blocks blocks
+   */
+  ComponentTopology::ComponentTopology(int nb_comp, int nb_blocks):_proc_group(0)
+  {
+    if (nb_blocks>nb_comp)
+      throw INTERP_KERNEL::Exception("ComponentTopology : the number of components must be at least the number of blocks");
+    
+    _component_array.resize(nb_blocks+1);
+    _component_array[0]=0;
+    for (int i=1; i<=nb_blocks; i++)
+      {
+        _component_array[i]=_component_array[i-1]+nb_comp/nb_blocks;
+        if (i<=nb_comp%nb_blocks)
+          _component_array[i]++;
+      }
+  
+  }
+  
+  //!Constructor for one block of \a nb_comp components
+  ComponentTopology::ComponentTopology(int nb_comp):_proc_group(0)
+  {
+    
+    _component_array.resize(2);
+    _component_array[0]=0;
+    _component_array[1]=nb_comp;
+  
+  }
+
+  //! Constructor for one component
+  ComponentTopology::ComponentTopology():_proc_group(0)
+  {
+    _component_array.resize(2);
+    _component_array[0]=0;
+    _component_array[1]=1;
+  
+  }
+  
+  ComponentTopology::~ComponentTopology()
+  {
+  }
+
+  int ComponentTopology::nbLocalComponents() const
+  {
+    if (_proc_group==0)
+      return nbComponents();
+  
+    int nbcomp;
+    int myrank = _proc_group->myRank();
+    if (myrank!=-1)
+      nbcomp = _component_array[myrank+1]-_component_array[myrank];
+    else 
+      nbcomp=0;
+    return nbcomp;
+  }
+
+  int ComponentTopology::firstLocalComponent() const
+  {
+    if (_proc_group==0)
+      return 0;
+  
+    int icomp;
+    int myrank = _proc_group->myRank();
+    if (myrank!=-1)
+      icomp = _component_array[myrank];
+    else 
+      icomp=-1;
+    return icomp;
+  }
+}
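
For illustration (not part of the patch), the partition built by the constructors above can be reproduced standalone: with nb_comp=7 and nb_blocks=3 the cumulative array is {0,3,5,7}, i.e. the blocks own 3, 2 and 2 components.

    #include <iostream>
    #include <vector>

    int main()
    {
      const int nb_comp = 7, nb_blocks = 3;           // illustrative values
      std::vector<int> comp(nb_blocks + 1, 0);        // cumulative component array, comp[0]=0
      for (int i = 1; i <= nb_blocks; i++)
        {
          comp[i] = comp[i-1] + nb_comp / nb_blocks;  // every block gets the base share
          if (i <= nb_comp % nb_blocks)
            comp[i]++;                                // the first (nb_comp % nb_blocks) blocks get one extra
        }
      for (int b = 0; b < nb_blocks; b++)             // block b owns comp[b+1]-comp[b] components starting at comp[b]
        std::cout << "block " << b << ": first=" << comp[b]
                  << " count=" << comp[b+1] - comp[b] << std::endl;
      return 0;
    }
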
diff --git a/src/ParaMEDMEM/ComponentTopology.hxx b/src/ParaMEDMEM/ComponentTopology.hxx
new file mode 100644 (file)
index 0000000..de11e3e
--- /dev/null
@@ -0,0 +1,56 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __COMPONENTTOPOLOGY_HXX__
+#define __COMPONENTTOPOLOGY_HXX__
+
+#include "Topology.hxx"
+
+#include <vector>
+
+namespace ParaMEDMEM
+{
+  class ProcessorGroup;
+
+  class ComponentTopology
+  {
+  public:
+    ComponentTopology(int nb_comp, ProcessorGroup* group);
+    ComponentTopology(int nb_comp, int nb_blocks);
+    ComponentTopology(int nb_comp);
+    ComponentTopology();
+    virtual ~ComponentTopology();
+    //!returns the number of MED components in the topology
+    int nbComponents() const { return _component_array.back(); }
+    //!returns the number of MED components on local processor
+    int nbLocalComponents() const ;
+    //!returns the number of the first MED component on local processor
+    int firstLocalComponent() const ;
+    //!returns the number of blocks in the topology
+    int nbBlocks()const {return _component_array.size()-1;}
+    //!returns the block structure
+    const std::vector<int>* getBlockIndices() const { return &_component_array; }
+    const ProcessorGroup* getProcGroup()const { return _proc_group; } 
+  private:
+    std::vector<int> _component_array;
+    ProcessorGroup* _proc_group;
+  };
+}
+
+#endif /*COMPONENTTOPOLOGY_HXX_*/
diff --git a/src/ParaMEDMEM/DEC.cxx b/src/ParaMEDMEM/DEC.cxx
new file mode 100644 (file)
index 0000000..cbd0ea4
--- /dev/null
@@ -0,0 +1,47 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "ParaMESH.hxx"
+#include "DEC.hxx"
+#include "ICoCoField.hxx"
+#include "ICoCoMEDField.hxx"
+#include "MPIProcessorGroup.hxx"
+
+#include <cmath>
+
+namespace ParaMEDMEM
+{
+  DEC::DEC():_comm_interface(0)
+  {
+  }
+
+  void DEC::copyFrom(const DEC& other)
+  {
+    _comm_interface=other._comm_interface;
+  }
+  
+  DEC::~DEC()
+  {
+  }
+}
diff --git a/src/ParaMEDMEM/DEC.hxx b/src/ParaMEDMEM/DEC.hxx
new file mode 100644 (file)
index 0000000..1b0a867
--- /dev/null
@@ -0,0 +1,43 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __DEC_HXX__
+#define __DEC_HXX__
+
+#include "MEDCouplingFieldDouble.hxx"
+#include "NormalizedUnstructuredMesh.hxx"
+#include "DECOptions.hxx"
+
+namespace ParaMEDMEM
+{
+  class CommInterface;
+  class DEC : public DECOptions
+  {
+  public:
+    DEC();
+    void copyFrom(const DEC& other);
+    virtual void synchronize() = 0;
+    virtual void sendRecvData(bool way=true) = 0;
+    virtual ~DEC();
+  protected:
+    const CommInterface* _comm_interface;
+  };
+}
+
+#endif
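
Concrete DECs implement the two pure virtual methods declared above; a hypothetical skeleton is sketched below (class name and bodies are illustrative only, the real implementations appear later in this commit, e.g. DisjointDEC):

    #include "DEC.hxx"

    namespace ParaMEDMEM
    {
      // Hypothetical subclass, for illustration only.
      class SketchDEC : public DEC
      {
      public:
        virtual void synchronize() { /* exchange the data layout between the two sides */ }
        virtual void sendRecvData(bool way=true) { /* way==true: source sends, target receives */ }
      };
    }
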
diff --git a/src/ParaMEDMEM/DECOptions.hxx b/src/ParaMEDMEM/DECOptions.hxx
new file mode 100644 (file)
index 0000000..5572ffd
--- /dev/null
@@ -0,0 +1,74 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __DECOPTIONS_HXX__
+#define __DECOPTIONS_HXX__
+
+#include <string>
+
+namespace ParaMEDMEM
+{
+  //Enum describing the allToAll method used in the communication pattern
+  typedef enum { Native, PointToPoint } AllToAllMethod;
+  typedef enum { WithoutTimeInterp, LinearTimeInterp } TimeInterpolationMethod;
+
+  class DECOptions
+  {
+  protected:
+    std::string _method;
+    bool _asynchronous;
+    TimeInterpolationMethod _timeInterpolationMethod;
+    AllToAllMethod _allToAllMethod;
+    bool _forcedRenormalization;
+  public:
+    DECOptions():_method("P0"),
+                 _asynchronous(false),
+                 _timeInterpolationMethod(WithoutTimeInterp),
+                 _allToAllMethod(Native),
+                 _forcedRenormalization(false)
+    {
+    }
+    
+    DECOptions(const DECOptions& deco)
+    {
+      _method=deco._method;
+      _timeInterpolationMethod=deco._timeInterpolationMethod;
+      _asynchronous=deco._asynchronous;
+      _forcedRenormalization=deco._forcedRenormalization;
+      _allToAllMethod=deco._allToAllMethod;
+    }
+    
+    const std::string& getMethod() const { return _method; }
+    void setMethod(const char *m) { _method=m; }
+
+    TimeInterpolationMethod getTimeInterpolationMethod() const { return DECOptions::_timeInterpolationMethod; }
+    void setTimeInterpolationMethod(TimeInterpolationMethod it) { DECOptions::_timeInterpolationMethod=it; }
+
+    bool getForcedRenormalization() const { return DECOptions::_forcedRenormalization; }
+    void setForcedRenormalization( bool dr) { DECOptions::_forcedRenormalization = dr; }
+
+    bool getAsynchronous() const { return DECOptions::_asynchronous; }
+    void setAsynchronous( bool dr) { DECOptions::_asynchronous = dr; }
+     
+    AllToAllMethod getAllToAllMethod() const { return _allToAllMethod; }
+    void setAllToAllMethod(AllToAllMethod sp) { _allToAllMethod=sp; }
+  };
+}
+
+#endif
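
A short sketch of how the setters above are typically called on a DECOptions-derived object (any DEC); the chosen values are illustrative only:

    #include "DECOptions.hxx"

    void configureOptions(ParaMEDMEM::DECOptions& opts)
    {
      opts.setMethod("P0");                                            // cell-supported interpolation
      opts.setAsynchronous(false);                                     // blocking exchanges
      opts.setTimeInterpolationMethod(ParaMEDMEM::WithoutTimeInterp);
      opts.setAllToAllMethod(ParaMEDMEM::PointToPoint);                // avoid the Native MPI_Alltoall path
      opts.setForcedRenormalization(true);                             // match source/target L2 norms after transfer
    }
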
diff --git a/src/ParaMEDMEM/DisjointDEC.cxx b/src/ParaMEDMEM/DisjointDEC.cxx
new file mode 100644 (file)
index 0000000..57e67fd
--- /dev/null
@@ -0,0 +1,403 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "DisjointDEC.hxx"
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "ParaMESH.hxx"
+#include "ICoCoField.hxx"
+#include "ICoCoMEDField.hxx"
+#include "MPIProcessorGroup.hxx"
+
+#include <cmath>
+#include <iostream>
+
+
+namespace ParaMEDMEM
+{
+
+  /*!
+   * \anchor DisjointDEC-det
+   * \class DisjointDEC
+   *
+   * Interface class for creating a link between two
+   * processor groups for exchanging mesh or field data.
+   * The \c DEC is defined by attaching a field on the receiving or on the
+   * sending side.
+   * On top of attaching a \c ParaMEDMEM::ParaFIELD, it is possible to
+   * attach an ICoCo::Field. This class is an abstract class that enables
+   * coupling of codes that respect the ICoCo interface \ref icoco. It has two implementations:
+   * one for codes that express their fields as \ref fields "MEDCoupling fields" (ICoCo::MEDField).
+   *
+   * \section dec_options DEC Options
+   * Options supported by \c DEC objects are
+   *
+   * <TABLE BORDER=1 >
+   * <TR><TD>Option</TD><TD>Description</TD><TD>Default value</TD></TR>
+   * <TR><TD>ForcedRenormalization</TD><TD>After receiving data, the target field is renormalized so that L2-norms of the source and target fields match.</TD><TD> false </TD></TR>
+   *</TABLE>
+
+
+   The following code excerpt shows how to set options for an object that inherits from \c DEC :
+
+   \code
+   InterpKernelDEC dec(source_group,target_group);
+   dec.setOptions("ForcedRenormalization",true);
+   dec.attachLocalField(field);
+   dec.synchronize();
+   if (source_group.containsMyRank())
+     dec.sendData();
+   else
+     dec.recvData();
+   \endcode
+  */
+
+
+  DisjointDEC::DisjointDEC(ProcessorGroup& source_group, ProcessorGroup& target_group):
+      _local_field(0),
+      _source_group(&source_group),
+      _target_group(&target_group),
+      _comm_interface(0),
+      _owns_field(false),
+      _owns_groups(false),
+      _union_comm(MPI_COMM_NULL)
+  {
+    _union_group = source_group.fuse(target_group);  
+  }
+  
+  DisjointDEC::DisjointDEC(const DisjointDEC& s):
+      DEC(s),
+      _local_field(0),
+      _union_group(0),
+      _source_group(0),
+      _target_group(0),
+      _comm_interface(0),
+      _owns_field(false),
+      _owns_groups(false),
+      _union_comm(MPI_COMM_NULL)
+  {
+    copyInstance(s);
+  }
+
+  DisjointDEC & DisjointDEC::operator=(const DisjointDEC& s)
+  {
+    cleanInstance();
+    copyInstance(s);
+    return *this;
+     
+  }
+
+  void DisjointDEC::copyInstance(const DisjointDEC& other)
+  {
+    DEC::copyFrom(other);
+    if(other._target_group)
+      {
+        _target_group=other._target_group->deepCpy();
+        _owns_groups=true;
+      }
+    if(other._source_group)
+      {
+        _source_group=other._source_group->deepCpy();
+        _owns_groups=true;
+      }
+    if (_source_group && _target_group)
+      _union_group = _source_group->fuse(*_target_group);
+  }
+
+  DisjointDEC::DisjointDEC(const std::set<int>& source_ids,
+                           const std::set<int>& target_ids,
+                           const MPI_Comm& world_comm):
+     _local_field(0),
+     _owns_field(false),
+     _owns_groups(true),
+     _comm_interface(0),
+     _union_comm(MPI_COMM_NULL)
+  {
+    ParaMEDMEM::CommInterface comm;
+    // Create the list of procs including source and target
+    std::set<int> union_ids; // source and target ids in world_comm
+    union_ids.insert(source_ids.begin(),source_ids.end());
+    union_ids.insert(target_ids.begin(),target_ids.end());
+    if(union_ids.size()!=(source_ids.size()+target_ids.size()))
+      throw INTERP_KERNEL::Exception("DisjointDEC constructor : source_ids and target_ids overlap partially or fully. This type of DEC does not support it ! OverlapDEC class could be the solution !");
+    int* union_ranks_world=new int[union_ids.size()]; // ranks of sources and targets in world_comm
+    std::copy(union_ids.begin(), union_ids.end(), union_ranks_world);
+
+    // Create a communicator on these procs
+    MPI_Group union_group,world_group;
+    comm.commGroup(world_comm,&world_group);
+    comm.groupIncl(world_group,union_ids.size(),union_ranks_world,&union_group);
+    comm.commCreate(world_comm,union_group,&_union_comm);
+    delete[] union_ranks_world;
+    if (_union_comm==MPI_COMM_NULL)
+      { // This process is not in union
+        _source_group=0;
+        _target_group=0;
+        _union_group=0;
+        comm.groupFree(&union_group);
+        comm.groupFree(&world_group);
+        return;
+      }
+
+    // Translate source_ids and target_ids from world_comm to union_comm
+    int* source_ranks_world=new int[source_ids.size()]; // ranks of sources in world_comm
+    std::copy(source_ids.begin(), source_ids.end(),source_ranks_world);
+    int* source_ranks_union=new int[source_ids.size()]; // ranks of sources in union_comm
+    int* target_ranks_world=new int[target_ids.size()]; // ranks of targets in world_comm
+    std::copy(target_ids.begin(), target_ids.end(),target_ranks_world);
+    int* target_ranks_union=new int[target_ids.size()]; // ranks of targets in union_comm
+    MPI_Group_translate_ranks(world_group,source_ids.size(),source_ranks_world,union_group,source_ranks_union);
+    MPI_Group_translate_ranks(world_group,target_ids.size(),target_ranks_world,union_group,target_ranks_union);
+    std::set<int> source_ids_union;
+    for (int i=0;i<(int)source_ids.size();i++)
+      source_ids_union.insert(source_ranks_union[i]);
+    std::set<int> target_ids_union;
+    for (int i=0;i<(int)target_ids.size();i++)
+      target_ids_union.insert(target_ranks_union[i]);
+    delete [] source_ranks_world;
+    delete [] source_ranks_union;
+    delete [] target_ranks_world;
+    delete [] target_ranks_union;
+
+    // Create the MPIProcessorGroups
+    _source_group = new MPIProcessorGroup(comm,source_ids_union,_union_comm);
+    _target_group = new MPIProcessorGroup(comm,target_ids_union,_union_comm);
+    _union_group = _source_group->fuse(*_target_group);
+    comm.groupFree(&union_group);
+    comm.groupFree(&world_group);
+  }
+
+  DisjointDEC::~DisjointDEC()
+  {
+    cleanInstance();
+  }
+
+  void DisjointDEC::cleanInstance()
+  {
+    if(_owns_field)
+      {
+        delete _local_field;
+      }
+    _local_field=0;
+    _owns_field=false;
+    if(_owns_groups)
+      {
+        delete _source_group;
+        delete _target_group;
+      }
+    _owns_groups=false;
+    _source_group=0;
+    _target_group=0;
+    delete _union_group;
+    _union_group=0;
+    if (_union_comm != MPI_COMM_NULL)
+      _comm_interface->commFree(&_union_comm);
+  }
+
+  void DisjointDEC::setNature(NatureOfField nature)
+  {
+    if(_local_field)
+      _local_field->getField()->setNature(nature);
+  }
+
+  /*! Attaches a local field to a DEC.
+    If the processor is on the receiving end of the DEC, the field
+    will be updated by a recvData() call.
+    Conversely, if the processor is on the sending end, the field will be read, possibly transformed, and sent appropriately to the other side.
+  */
+  void DisjointDEC::attachLocalField(const ParaFIELD *field, bool ownPt)
+  {
+    if(!isInUnion())
+      return ;
+    if(_owns_field)
+      delete _local_field;
+    _local_field=field;
+    _owns_field=ownPt;
+    _comm_interface=&(field->getTopology()->getProcGroup()->getCommInterface());
+    compareFieldAndMethod();
+  }
+
+  /*! Attaches a local field to a DEC. The method will test whether the processor
+    is on the source or the target side and will associate the mesh underlying the 
+    field to the local side.
+
+    If the processor is on the receiving end of the DEC, the field
+    will be updated by a recvData() call.
+    Conversely, if the processor is on the sending end, the field will be read, possibly transformed,
+    and sent appropriately to the other side.
+  */
+
+  void DisjointDEC::attachLocalField(MEDCouplingFieldDouble *field)
+  {
+    if(!isInUnion())
+      return ;
+    ProcessorGroup* local_group;
+    if (_source_group->containsMyRank())
+      local_group=_source_group;
+    else if (_target_group->containsMyRank())
+      local_group=_target_group;
+    else
+      throw INTERP_KERNEL::Exception("Invalid procgroup for field attachment to DEC");
+    ParaMESH *paramesh=new ParaMESH(static_cast<MEDCouplingPointSet *>(const_cast<MEDCouplingMesh *>(field->getMesh())),*local_group,field->getMesh()->getName());
+    ParaFIELD *tmp=new ParaFIELD(field, paramesh, *local_group);
+    tmp->setOwnSupport(true);
+    attachLocalField(tmp,true);
+    //_comm_interface=&(local_group->getCommInterface());
+  }
+
+  /*! 
+    Attaches a local field to a DEC.
+    If the processor is on the receiving end of the DEC, the field
+    will be updated by a recvData() call.
+    Conversely, if the processor is on the sending end, the field will be read, possibly transformed, and sent appropriately to the other side.
+    The field type is a generic ICoCo Field, so that the DEC can couple a number of different fields:
+    - an ICoCo::MEDField, created from a MEDCoupling structure
+    
+  */
+  void DisjointDEC::attachLocalField(const ICoCo::MEDField *field)
+  {
+    if(!isInUnion())
+      return ;
+    if(!field)
+      throw INTERP_KERNEL::Exception("DisjointDEC::attachLocalField : ICoCo::MEDField pointer is NULL !");
+    attachLocalField(field->getField());
+  }
+  
+  /*!
+    Computes the field norm over its support 
+    on the source side and renormalizes the field on the target side
+    so that the norms match.
+
+    \f[
+    I_{source}=\sum_{i=1}^{n_{source}}V_{i}.|\Phi^{source}_{i}|^2,
+    \f]
+
+    \f[
+    I_{target}=\sum_{i=1}^{n_{target}}V_{i}.|\Phi^{target}_{i}|^2,
+    \f]
+    
+    \f[
+    \Phi^{target}:=\Phi^{target}.\sqrt{I_{source}/I_{target}}.
+    \f]
+
+  */
+  void DisjointDEC::renormalizeTargetField(bool isWAbs)
+  {
+    if (_source_group->containsMyRank())
+      for (int icomp=0; icomp<_local_field->getField()->getArray()->getNumberOfComponents(); icomp++)
+        {
+          double total_norm = _local_field->getVolumeIntegral(icomp+1,isWAbs);
+          double source_norm = total_norm;
+          _comm_interface->broadcast(&source_norm, 1, MPI_DOUBLE, 0,* dynamic_cast<MPIProcessorGroup*>(_union_group)->getComm());
+
+        }
+    if (_target_group->containsMyRank())
+      {
+        for (int icomp=0; icomp<_local_field->getField()->getArray()->getNumberOfComponents(); icomp++)
+          {
+            double total_norm = _local_field->getVolumeIntegral(icomp+1,isWAbs);
+            double source_norm=total_norm;
+            _comm_interface->broadcast(&source_norm, 1, MPI_DOUBLE, 0,* dynamic_cast<MPIProcessorGroup*>(_union_group)->getComm());
+
+            if (fabs(total_norm)>1e-100)
+              _local_field->getField()->applyLin(source_norm/total_norm,0.0,icomp+1);
+          }
+      }
+  }
+
+  bool DisjointDEC::isInSourceSide() const
+  {
+    if(!_source_group)
+      return false;
+    return _source_group->containsMyRank();
+  }
+
+  bool DisjointDEC::isInTargetSide() const
+  {
+    if(!_target_group)
+      return false;
+    return _target_group->containsMyRank();
+  }
+
+  bool DisjointDEC::isInUnion() const
+  {
+    if(!_union_group)
+      return false;
+    return _union_group->containsMyRank();
+  }
+
+  void DisjointDEC::compareFieldAndMethod() const throw(INTERP_KERNEL::Exception)
+  {
+    if (_local_field)
+      {
+        TypeOfField entity = _local_field->getField()->getTypeOfField();
+        if ( getMethod() == "P0" )
+          {
+            if ( entity != ON_CELLS )
+              throw INTERP_KERNEL::Exception("Field support and interpolation method mismatch."
+                                             " For P0 interpolation, field must be on MED_CELL's");
+          }
+        else if ( getMethod() == "P1" )
+          {
+            if ( entity != ON_NODES )
+              throw INTERP_KERNEL::Exception("Field support and interpolation method mismatch."
+                                             " For P1 interpolation, field must be on MED_NODE's");
+          }
+        else if ( getMethod() == "P1d" )
+          {
+            if ( entity != ON_CELLS )
+              throw INTERP_KERNEL::Exception("Field support and interpolation method mismatch."
+                                             " For P1d interpolation, field must be on MED_CELL's");
+            if ( _target_group->containsMyRank() )
+              throw INTERP_KERNEL::Exception("Projection to P1d field not supported");
+          }
+        else
+          {
+            throw INTERP_KERNEL::Exception("Unknown interpolation method. Possible methods: P0, P1, P1d");
+          }
+      }
+  }
+
+  /*!
+    If way==true, source procs call sendData() and target procs call recvData().
+    If way==false, it is the other way round.
+  */
+  void DisjointDEC::sendRecvData(bool way)
+  {
+    if(!isInUnion())
+      return;
+    if(isInSourceSide())
+      {
+        if(way)
+          sendData();
+        else
+          recvData();
+      }
+    else if(isInTargetSide())
+      {
+        if(way)
+          recvData();
+        else
+          sendData();
+      }
+  }
+}
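
Putting the pieces together, a hedged sketch of the intended calling sequence, mirroring the usage excerpt in the DisjointDEC class documentation above; InterpKernelDEC is the concrete subclass named there and its header name is assumed here:

    #include "InterpKernelDEC.hxx"          // assumed header for the concrete DEC cited in the doc above
    #include "MEDCouplingFieldDouble.hxx"
    #include "ProcessorGroup.hxx"

    void couple(ParaMEDMEM::ProcessorGroup& source_group,
                ParaMEDMEM::ProcessorGroup& target_group,
                ParaMEDMEM::MEDCouplingFieldDouble *field)
    {
      ParaMEDMEM::InterpKernelDEC dec(source_group, target_group);
      dec.setForcedRenormalization(true);   // option inherited from DECOptions
      dec.attachLocalField(field);          // wraps the field in a ParaFIELD owning its ParaMESH support
      dec.synchronize();                    // builds the interpolation structures on both sides
      if (source_group.containsMyRank())
        dec.sendData();
      else
        dec.recvData();
    }
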
diff --git a/src/ParaMEDMEM/DisjointDEC.hxx b/src/ParaMEDMEM/DisjointDEC.hxx
new file mode 100644 (file)
index 0000000..aec6bb9
--- /dev/null
@@ -0,0 +1,90 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __DISJOINTDEC_HXX__
+#define __DISJOINTDEC_HXX__
+
+#include "MEDCouplingFieldDouble.hxx"
+#include "NormalizedUnstructuredMesh.hxx"
+#include "DEC.hxx"
+
+#include <mpi.h>
+#include <set>
+
+namespace ICoCo
+{
+  class MEDField;
+}
+
+namespace ParaMEDMEM
+{
+  class ProcessorGroup;
+  class ParaFIELD;
+  
+  class DisjointDEC : public DEC
+  {
+  public:
+    DisjointDEC():_local_field(0),_union_group(0),_source_group(0),_target_group(0),
+      _owns_field(false),_owns_groups(false),
+      _comm_interface(0), _union_comm(MPI_COMM_NULL)
+    { }
+    DisjointDEC(ProcessorGroup& source_group, ProcessorGroup& target_group);
+    DisjointDEC(const DisjointDEC&);
+    DisjointDEC &operator=(const DisjointDEC& s);
+    DisjointDEC(const std::set<int>& src_ids, const std::set<int>& trg_ids,
+                const MPI_Comm& world_comm=MPI_COMM_WORLD);
+    void setNature(NatureOfField nature);
+    void attachLocalField( MEDCouplingFieldDouble *field);
+    void attachLocalField(const ParaFIELD *field, bool ownPt=false);
+    void attachLocalField(const ICoCo::MEDField *field);
+    
+    virtual void prepareSourceDE() = 0;
+    virtual void prepareTargetDE() = 0;
+    virtual void recvData() = 0;
+    virtual void sendData() = 0;
+    void sendRecvData(bool way=true);
+    virtual void synchronize() = 0;
+    virtual ~DisjointDEC();
+    virtual void computeProcGroup() { }
+    void renormalizeTargetField(bool isWAbs);
+    //
+    ProcessorGroup *getSourceGrp() const { return _source_group; }
+    ProcessorGroup *getTargetGrp() const { return _target_group; }
+    bool isInSourceSide() const;
+    bool isInTargetSide() const;
+    bool isInUnion() const;
+  protected:
+    void compareFieldAndMethod() const throw(INTERP_KERNEL::Exception);
+    void cleanInstance();
+    void copyInstance(const DisjointDEC& other);
+  protected:
+    const ParaFIELD* _local_field;
+    //! Processor group representing the union of target and source processors
+    ProcessorGroup* _union_group;
+    ProcessorGroup* _source_group;
+    ProcessorGroup* _target_group;
+    
+    const CommInterface* _comm_interface;
+    bool _owns_field;
+    bool _owns_groups;
+    MPI_Comm _union_comm;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ElementLocator.cxx b/src/ParaMEDMEM/ElementLocator.cxx
new file mode 100644 (file)
index 0000000..a7fcfc2
--- /dev/null
@@ -0,0 +1,718 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <mpi.h>
+#include "CommInterface.hxx"
+#include "ElementLocator.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "ParaMESH.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "MEDCouplingAutoRefCountObjectPtr.hxx"
+#include "DirectedBoundingBox.hxx"
+
+#include <map>
+#include <set>
+#include <limits>
+
+using namespace std;
+
+//#define USE_DIRECTED_BB
+
+namespace ParaMEDMEM 
+{ 
+  ElementLocator::ElementLocator(const ParaFIELD& sourceField,
+                                 const ProcessorGroup& distant_group,
+                                 const ProcessorGroup& local_group)
+    : _local_para_field(sourceField),
+      _local_cell_mesh(sourceField.getSupport()->getCellMesh()),
+      _local_face_mesh(sourceField.getSupport()->getFaceMesh()),
+      _distant_group(distant_group),
+      _local_group(local_group)
+  { 
+    _union_group = _local_group.fuse(distant_group);
+    _computeBoundingBoxes();
+    _comm=getCommunicator();
+  }
+
+  ElementLocator::~ElementLocator()
+  {
+    delete _union_group;
+    delete [] _domain_bounding_boxes;
+  }
+
+  const MPI_Comm *ElementLocator::getCommunicator() const
+  {
+    MPIProcessorGroup* group=static_cast<MPIProcessorGroup*> (_union_group);
+    return group->getComm();
+  }
+
+  NatureOfField ElementLocator::getLocalNature() const
+  {
+    return _local_para_field.getField()->getNature();
+  }
+
+  // ==========================================================================
+  // Procedure for exchanging mesh between a distant proc and a local processor
+  // param idistantrank  proc id on distant group
+  // param distant_mesh on return, points to a local reconstruction of
+  //  the distant mesh
+  // param distant_ids on return, contains a vector defining a correspondence
+  // between the distant ids and the ids of the local reconstruction 
+  // ==========================================================================
+  void ElementLocator::exchangeMesh(int idistantrank,
+                                    MEDCouplingPointSet*& distant_mesh,
+                                    int*& distant_ids)
+  {
+    int rank = _union_group->translateRank(&_distant_group,idistantrank);
+
+    if (find(_distant_proc_ids.begin(), _distant_proc_ids.end(),rank)==_distant_proc_ids.end())
+      return;
+   
+    MEDCouplingAutoRefCountObjectPtr<DataArrayInt> elems;
+#ifdef USE_DIRECTED_BB
+    INTERP_KERNEL::DirectedBoundingBox dbb;
+    double* distant_bb = _domain_bounding_boxes+rank*dbb.dataSize(_local_cell_mesh_space_dim);
+    dbb.setData(distant_bb);
+    elems=_local_cell_mesh->getCellsInBoundingBox(dbb,getBoundingBoxAdjustment());
+#else
+    double* distant_bb = _domain_bounding_boxes+rank*2*_local_cell_mesh_space_dim;
+    elems=_local_cell_mesh->getCellsInBoundingBox(distant_bb,getBoundingBoxAdjustment());
+#endif
+    
+    DataArrayInt *distant_ids_send;
+    MEDCouplingPointSet *send_mesh = (MEDCouplingPointSet *)_local_para_field.getField()->buildSubMeshData(elems->begin(),elems->end(),distant_ids_send);
+    _exchangeMesh(send_mesh, distant_mesh, idistantrank, distant_ids_send, distant_ids);
+    distant_ids_send->decrRef();
+    
+    if(send_mesh)
+      send_mesh->decrRef();
+  }
+
+  void ElementLocator::exchangeMethod(const std::string& sourceMeth, int idistantrank, std::string& targetMeth)
+  {
+    CommInterface comm_interface=_union_group->getCommInterface();
+    MPIProcessorGroup* group=static_cast<MPIProcessorGroup*> (_union_group);
+    const MPI_Comm* comm=(group->getComm());
+    MPI_Status status;
+    // it must be converted to union numbering before communication
+    int idistRankInUnion = group->translateRank(&_distant_group,idistantrank);
+    char *recv_buffer=new char[4];
+    std::vector<char> send_buffer(4);
+    std::copy(sourceMeth.begin(),sourceMeth.end(),send_buffer.begin());
+    comm_interface.sendRecv(&send_buffer[0], 4, MPI_CHAR,idistRankInUnion, 1112,
+                            recv_buffer, 4, MPI_CHAR,idistRankInUnion, 1112,
+                            *comm, &status);
+    targetMeth=recv_buffer;
+    delete [] recv_buffer;
+  }
+
+
+  // ======================
+  // Compute bounding boxes
+  // ======================
+
+  void ElementLocator::_computeBoundingBoxes()
+  {
+    CommInterface comm_interface =_union_group->getCommInterface();
+    MPIProcessorGroup* group=static_cast<MPIProcessorGroup*> (_union_group);
+    const MPI_Comm* comm = group->getComm();
+    _local_cell_mesh_space_dim = -1;
+    if(_local_cell_mesh->getMeshDimension() != -1)
+      _local_cell_mesh_space_dim=_local_cell_mesh->getSpaceDimension();
+    int *spaceDimForAll=new int[_union_group->size()];
+    comm_interface.allGather(&_local_cell_mesh_space_dim, 1, MPI_INT,
+                             spaceDimForAll,1, MPI_INT, 
+                             *comm);
+    _local_cell_mesh_space_dim=*std::max_element(spaceDimForAll,spaceDimForAll+_union_group->size());
+    _is_m1d_corr=((*std::min_element(spaceDimForAll,spaceDimForAll+_union_group->size()))==-1);
+    for(int i=0;i<_union_group->size();i++)
+      if(spaceDimForAll[i]!=_local_cell_mesh_space_dim && spaceDimForAll[i]!=-1)
+        throw INTERP_KERNEL::Exception("Space dimensions do not match !");
+    delete [] spaceDimForAll;
+#ifdef USE_DIRECTED_BB
+    INTERP_KERNEL::DirectedBoundingBox dbb;
+    int bbSize = dbb.dataSize(_local_cell_mesh_space_dim);
+    _domain_bounding_boxes = new double[bbSize*_union_group->size()];
+    if(_local_cell_mesh->getMeshDimension() != -1)
+      dbb = INTERP_KERNEL::DirectedBoundingBox(_local_cell_mesh->getCoords()->getPointer(),
+                                               _local_cell_mesh->getNumberOfNodes(),
+                                               _local_cell_mesh_space_dim);
+    std::vector<double> dbbData = dbb.getData();
+    if ( dbbData.size() < bbSize ) dbbData.resize(bbSize,0);
+    double * minmax= &dbbData[0];
+#else
+    int bbSize = 2*_local_cell_mesh_space_dim;
+    _domain_bounding_boxes = new double[bbSize*_union_group->size()];
+    double * minmax=new double [bbSize];
+    if(_local_cell_mesh->getMeshDimension() != -1)
+      _local_cell_mesh->getBoundingBox(minmax);
+    else
+      for(int i=0;i<_local_cell_mesh_space_dim;i++)
+        {
+          minmax[i*2]=-std::numeric_limits<double>::max();
+          minmax[i*2+1]=std::numeric_limits<double>::max();
+        }
+#endif
+
+    comm_interface.allGather(minmax, bbSize, MPI_DOUBLE,
+                             _domain_bounding_boxes,bbSize, MPI_DOUBLE, 
+                             *comm);
+  
+    for (int i=0; i< _distant_group.size(); i++)
+      {
+        int rank=_union_group->translateRank(&_distant_group,i);
+
+        if (_intersectsBoundingBox(rank))
+          {
+            _distant_proc_ids.push_back(rank);
+          }
+      }
+#ifdef USE_DIRECTED_BB
+#else
+    delete [] minmax;
+#endif
+  }
+
+
+  // =============================================
+  // Intersect Bounding Box (with a given "irank")
+  // =============================================
+  bool ElementLocator::_intersectsBoundingBox(int irank)
+  {
+#ifdef USE_DIRECTED_BB
+    INTERP_KERNEL::DirectedBoundingBox local_dbb, distant_dbb;
+    local_dbb.setData( _domain_bounding_boxes+_union_group->myRank()*local_dbb.dataSize( _local_cell_mesh_space_dim ));
+    distant_dbb.setData( _domain_bounding_boxes+irank*distant_dbb.dataSize( _local_cell_mesh_space_dim ));
+    return !local_dbb.isDisjointWith( distant_dbb );
+#else
+    double*  local_bb = _domain_bounding_boxes+_union_group->myRank()*2*_local_cell_mesh_space_dim;
+    double*  distant_bb =  _domain_bounding_boxes+irank*2*_local_cell_mesh_space_dim;
+
+    for (int idim=0; idim < _local_cell_mesh_space_dim; idim++)
+      {
+        const double eps =  1e-12;
+        bool intersects = (distant_bb[idim*2]<local_bb[idim*2+1]+eps)
+          && (local_bb[idim*2]<distant_bb[idim*2+1]+eps);
+        if (!intersects) return false; 
+      }
+    return true;
+#endif
+  } 
+
+  // ======================
+  // Exchanging meshes data
+  // ======================
+  void ElementLocator::_exchangeMesh( MEDCouplingPointSet* local_mesh,
+                                      MEDCouplingPointSet*& distant_mesh,
+                                      int iproc_distant,
+                                      const DataArrayInt* distant_ids_send,
+                                      int*& distant_ids_recv)
+  {
+    CommInterface comm_interface=_union_group->getCommInterface();
+  
+    // First stage : exchanging sizes
+    // ------------------------------
+    vector<double> tinyInfoLocalD,tinyInfoDistantD(1);//not used for the moment
+    vector<int> tinyInfoLocal,tinyInfoDistant;
+    vector<string> tinyInfoLocalS;
+    //Getting tiny info of local mesh to allow the distant proc to initialize and allocate
+    //the transmitted mesh.
+    local_mesh->getTinySerializationInformation(tinyInfoLocalD,tinyInfoLocal,tinyInfoLocalS);
+    tinyInfoLocal.push_back(distant_ids_send->getNumberOfTuples());
+    tinyInfoDistant.resize(tinyInfoLocal.size());
+    std::fill(tinyInfoDistant.begin(),tinyInfoDistant.end(),0);
+    MPIProcessorGroup* group=static_cast<MPIProcessorGroup*> (_union_group);
+    const MPI_Comm* comm=group->getComm();
+    MPI_Status status; 
+    
+    // iproc_distant is the number of proc in distant group
+    // it must be converted to union numbering before communication
+    int iprocdistant_in_union = group->translateRank(&_distant_group,
+                                                     iproc_distant);
+    
+    comm_interface.sendRecv(&tinyInfoLocal[0], tinyInfoLocal.size(), MPI_INT, iprocdistant_in_union, 1112,
+                            &tinyInfoDistant[0], tinyInfoDistant.size(), MPI_INT,iprocdistant_in_union,1112,
+                            *comm, &status);
+    DataArrayInt *v1Local=0;
+    DataArrayDouble *v2Local=0;
+    DataArrayInt *v1Distant=DataArrayInt::New();
+    DataArrayDouble *v2Distant=DataArrayDouble::New();
+    //serialization of local mesh to send data to distant proc.
+    local_mesh->serialize(v1Local,v2Local);
+    //Building the right instance of copy of distant mesh.
+    MEDCouplingPointSet *distant_mesh_tmp=MEDCouplingPointSet::BuildInstanceFromMeshType((MEDCouplingMeshType)tinyInfoDistant[0]);
+    std::vector<std::string> unusedTinyDistantSts;
+    distant_mesh_tmp->resizeForUnserialization(tinyInfoDistant,v1Distant,v2Distant,unusedTinyDistantSts);
+    int nbLocalElems=0;
+    int nbDistElem=0;
+    int *ptLocal=0;
+    int *ptDist=0;
+    if(v1Local)
+      {
+        nbLocalElems=v1Local->getNbOfElems();
+        ptLocal=v1Local->getPointer();
+      }
+    if(v1Distant)
+      {
+        nbDistElem=v1Distant->getNbOfElems();
+        ptDist=v1Distant->getPointer();
+      }
+    comm_interface.sendRecv(ptLocal, nbLocalElems, MPI_INT,
+                            iprocdistant_in_union, 1111,
+                            ptDist, nbDistElem, MPI_INT,
+                            iprocdistant_in_union,1111,
+                            *comm, &status);
+    nbLocalElems=0;
+    double *ptLocal2=0;
+    double *ptDist2=0;
+    if(v2Local)
+      {
+        nbLocalElems=v2Local->getNbOfElems();
+        ptLocal2=v2Local->getPointer();
+      }
+    nbDistElem=0;
+    if(v2Distant)
+      {
+        nbDistElem=v2Distant->getNbOfElems();
+        ptDist2=v2Distant->getPointer();
+      }
+    comm_interface.sendRecv(ptLocal2, nbLocalElems, MPI_DOUBLE,
+                            iprocdistant_in_union, 1112,
+                            ptDist2, nbDistElem, MPI_DOUBLE,
+                            iprocdistant_in_union, 1112, 
+                            *comm, &status);
+    //
+    distant_mesh=distant_mesh_tmp;
+    //finish unserialization
+    distant_mesh->unserialization(tinyInfoDistantD,tinyInfoDistant,v1Distant,v2Distant,unusedTinyDistantSts);
+    //
+    distant_ids_recv=new int[tinyInfoDistant.back()];
+    comm_interface.sendRecv(const_cast<void *>(reinterpret_cast<const void *>(distant_ids_send->getConstPointer())),tinyInfoLocal.back(), MPI_INT,
+                            iprocdistant_in_union, 1113,
+                            distant_ids_recv,tinyInfoDistant.back(), MPI_INT,
+                            iprocdistant_in_union,1113,
+                            *comm, &status);
+    if(v1Local)
+      v1Local->decrRef();
+    if(v2Local)
+      v2Local->decrRef();
+    if(v1Distant)
+      v1Distant->decrRef();
+    if(v2Distant)
+      v2Distant->decrRef();
+  }
+  
+  /*!
+   * connected with ElementLocator::sendPolicyToWorkingSideL
+   */
+  void ElementLocator::recvPolicyFromLazySideW(std::vector<int>& policy)
+  {
+    policy.resize(_distant_proc_ids.size());
+    int procId=0;
+    CommInterface comm;
+    MPI_Status status;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        int toRecv;
+        comm.recv((void *)&toRecv,1,MPI_INT,*iter,1120,*_comm,&status);
+        policy[procId]=toRecv;
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvFromWorkingSideL
+   */
+  void ElementLocator::sendSumToLazySideW(const std::vector< std::vector<int> >& distantLocEltIds, const std::vector< std::vector<double> >& partialSumRelToDistantIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const vector<int>& eltIds=distantLocEltIds[procId];
+        const vector<double>& valued=partialSumRelToDistantIds[procId];
+        int lgth=eltIds.size();
+        comm.send(&lgth,1,MPI_INT,*iter,1114,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&eltIds[0])),lgth,MPI_INT,*iter,1115,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&valued[0])),lgth,MPI_DOUBLE,*iter,1116,*_comm);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::sendToWorkingSideL
+   */
+  void ElementLocator::recvSumFromLazySideW(std::vector< std::vector<double> >& globalSumRelToDistantIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    MPI_Status status;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        std::vector<double>& vec=globalSumRelToDistantIds[procId];
+        comm.recv(&vec[0],vec.size(),MPI_DOUBLE,*iter,1117,*_comm,&status);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvLocalIdsFromWorkingSideL
+   */
+  void ElementLocator::sendLocalIdsToLazyProcsW(const std::vector< std::vector<int> >& distantLocEltIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const vector<int>& eltIds=distantLocEltIds[procId];
+        int lgth=eltIds.size();
+        comm.send(&lgth,1,MPI_INT,*iter,1121,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&eltIds[0])),lgth,MPI_INT,*iter,1122,*_comm);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::sendGlobalIdsToWorkingSideL
+   */
+  void ElementLocator::recvGlobalIdsFromLazyProcsW(const std::vector< std::vector<int> >& distantLocEltIds, std::vector< std::vector<int> >& globalIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    MPI_Status status;
+    globalIds.resize(_distant_proc_ids.size());
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const std::vector<int>& vec=distantLocEltIds[procId];
+        std::vector<int>& global=globalIds[procId];
+        global.resize(vec.size());
+        comm.recv(&global[0],vec.size(),MPI_INT,*iter,1123,*_comm,&status);
+      }
+  }
+  
+  /*!
+   * connected with ElementLocator::sendCandidatesGlobalIdsToWorkingSideL
+   */
+  void ElementLocator::recvCandidatesGlobalIdsFromLazyProcsW(std::vector< std::vector<int> >& globalIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    MPI_Status status;
+    globalIds.resize(_distant_proc_ids.size());
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        std::vector<int>& global=globalIds[procId];
+        int lgth;
+        comm.recv(&lgth,1,MPI_INT,*iter,1132,*_comm,&status);
+        global.resize(lgth);
+        comm.recv(&global[0],lgth,MPI_INT,*iter,1133,*_comm,&status);
+      }
+  }
+  
+  /*!
+   * connected with ElementLocator::recvSumFromWorkingSideL
+   */
+  void ElementLocator::sendPartialSumToLazyProcsW(const std::vector<int>& distantGlobIds, const std::vector<double>& sum)
+  {
+    int procId=0;
+    CommInterface comm;
+    int lgth=distantGlobIds.size();
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        comm.send(&lgth,1,MPI_INT,*iter,1124,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&distantGlobIds[0])),lgth,MPI_INT,*iter,1125,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&sum[0])),lgth,MPI_DOUBLE,*iter,1126,*_comm);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvCandidatesForAddElementsL
+   */
+  void ElementLocator::sendCandidatesForAddElementsW(const std::vector<int>& distantGlobIds)
+  {
+    int procId=0;
+    CommInterface comm;
+    int lgth=distantGlobIds.size();
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&lgth)),1,MPI_INT,*iter,1128,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&distantGlobIds[0])),lgth,MPI_INT,*iter,1129,*_comm);
+      }
+  }
+  
+  /*!
+   * connected with ElementLocator::sendAddElementsToWorkingSideL
+   */
+  void ElementLocator::recvAddElementsFromLazyProcsW(std::vector<std::vector<int> >& elementsToAdd)
+  {
+    int procId=0;
+    CommInterface comm;
+    MPI_Status status;
+    int lgth=_distant_proc_ids.size();
+    elementsToAdd.resize(lgth);
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        int locLgth;
+        std::vector<int>& eltToFeed=elementsToAdd[procId];
+        comm.recv(&locLgth,1,MPI_INT,*iter,1130,*_comm,&status);
+        eltToFeed.resize(locLgth);
+        comm.recv(&eltToFeed[0],locLgth,MPI_INT,*iter,1131,*_comm,&status);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvPolicyFromLazySideW
+   */
+  int ElementLocator::sendPolicyToWorkingSideL()
+  {
+    CommInterface comm;
+    int toSend;
+    DataArrayInt *isCumulative=_local_para_field.returnCumulativeGlobalNumbering();
+    if(isCumulative)
+      {
+        toSend=CUMULATIVE_POLICY;
+        isCumulative->decrRef();
+      }
+    else
+      toSend=NO_POST_TREATMENT_POLICY;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++)
+      comm.send(&toSend,1,MPI_INT,*iter,1120,*_comm);
+    return toSend;
+  }
+
+  /*!
+   * connected with ElementLocator::sendSumToLazySideW
+   */
+  void ElementLocator::recvFromWorkingSideL()
+  {
+    _values_added.resize(_local_para_field.getField()->getNumberOfTuples());
+    int procId=0;
+    CommInterface comm;
+    _ids_per_working_proc.resize(_distant_proc_ids.size());
+    MPI_Status status;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        int lgth;
+        comm.recv(&lgth,1,MPI_INT,*iter,1114,*_comm,&status);
+        vector<int>& ids=_ids_per_working_proc[procId];
+        ids.resize(lgth);
+        vector<double> values(lgth);
+        comm.recv(&ids[0],lgth,MPI_INT,*iter,1115,*_comm,&status);
+        comm.recv(&values[0],lgth,MPI_DOUBLE,*iter,1116,*_comm,&status);
+        for(int i=0;i<lgth;i++)
+          _values_added[ids[i]]+=values[i];
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvSumFromLazySideW
+   */
+  void ElementLocator::sendToWorkingSideL()
+  {
+    int procId=0;
+    CommInterface comm;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        vector<int>& ids=_ids_per_working_proc[procId];
+        vector<double> valsToSend(ids.size());
+        vector<double>::iterator iter3=valsToSend.begin();
+        for(vector<int>::const_iterator iter2=ids.begin();iter2!=ids.end();iter2++,iter3++)
+          *iter3=_values_added[*iter2];
+        comm.send(&valsToSend[0],ids.size(),MPI_DOUBLE,*iter,1117,*_comm);
+        //ids.clear();
+      }
+    //_ids_per_working_proc.clear();
+  }
+
+  /*!
+   * connected with ElementLocator::sendLocalIdsToLazyProcsW
+   */
+  void ElementLocator::recvLocalIdsFromWorkingSideL()
+  {
+    int procId=0;
+    CommInterface comm;
+    _ids_per_working_proc.resize(_distant_proc_ids.size());
+    MPI_Status status;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        int lgth;
+        vector<int>& ids=_ids_per_working_proc[procId];
+        comm.recv(&lgth,1,MPI_INT,*iter,1121,*_comm,&status);
+        ids.resize(lgth);
+        comm.recv(&ids[0],lgth,MPI_INT,*iter,1122,*_comm,&status);
+      }
+  }
+
+  /*!
+   * connected with ElementLocator::recvGlobalIdsFromLazyProcsW
+   */
+  void ElementLocator::sendGlobalIdsToWorkingSideL()
+  {
+    int procId=0;
+    CommInterface comm;
+    DataArrayInt *globalIds=_local_para_field.returnGlobalNumbering();
+    const int *globalIdsC=globalIds->getConstPointer();
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const vector<int>& ids=_ids_per_working_proc[procId];
+        vector<int> valsToSend(ids.size());
+        vector<int>::iterator iter1=valsToSend.begin();
+        for(vector<int>::const_iterator iter2=ids.begin();iter2!=ids.end();iter2++,iter1++)
+          *iter1=globalIdsC[*iter2];
+        comm.send(&valsToSend[0],ids.size(),MPI_INT,*iter,1123,*_comm);
+      }
+    if(globalIds)
+      globalIds->decrRef();
+  }
+
+  /*!
+   * connected with ElementLocator::sendPartialSumToLazyProcsW
+   */
+  void ElementLocator::recvSumFromWorkingSideL()
+  {
+    int procId=0;
+    int wProcSize=_distant_proc_ids.size();
+    CommInterface comm;
+    _ids_per_working_proc.resize(wProcSize);
+    _values_per_working_proc.resize(wProcSize);
+    MPI_Status status;
+    std::map<int,double> sums;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        int lgth;
+        comm.recv(&lgth,1,MPI_INT,*iter,1124,*_comm,&status);
+        vector<int>& ids=_ids_per_working_proc[procId];
+        vector<double>& vals=_values_per_working_proc[procId];
+        ids.resize(lgth);
+        vals.resize(lgth);
+        comm.recv(&ids[0],lgth,MPI_INT,*iter,1125,*_comm,&status);
+        comm.recv(&vals[0],lgth,MPI_DOUBLE,*iter,1126,*_comm,&status);
+        vector<int>::const_iterator iter1=ids.begin();
+        vector<double>::const_iterator iter2=vals.begin();
+        for(;iter1!=ids.end();iter1++,iter2++)
+          sums[*iter1]+=*iter2;
+      }
+    //assign sum to prepare sending to working side
+    for(procId=0;procId<wProcSize;procId++)
+      {
+        vector<int>& ids=_ids_per_working_proc[procId];
+        vector<double>& vals=_values_per_working_proc[procId];
+        vector<int>::const_iterator iter1=ids.begin();
+        vector<double>::iterator iter2=vals.begin();
+        for(;iter1!=ids.end();iter1++,iter2++)
+          *iter2=sums[*iter1];
+        ids.clear();
+      }
+  }
+
+  /*!
+   * For each working proc Wi, this method computes and stores in _ids_per_working_proc3,
+   * when any exist, the local ids of the nodes that Wi interacts with on another lazy proc,
+   * that also exist on this proc, \b but that have no interaction with this proc.
+   * The computation is performed here; sendAddElementsToWorkingSideL is only in charge of sending
+   * the precomputed _ids_per_working_proc3 attribute.
+   * connected with ElementLocator::sendCandidatesForAddElementsW
+   */
+  void ElementLocator::recvCandidatesForAddElementsL()
+  {
+    int procId=0;
+    int wProcSize=_distant_proc_ids.size();
+    CommInterface comm;
+    _ids_per_working_proc3.resize(wProcSize);
+    MPI_Status status;
+    std::map<int,double> sums;
+    DataArrayInt *globalIds=_local_para_field.returnGlobalNumbering();
+    const int *globalIdsC=globalIds->getConstPointer();
+    int nbElts=globalIds->getNumberOfTuples();
+    std::set<int> globalIdsS(globalIdsC,globalIdsC+nbElts);
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const std::vector<int>& ids0=_ids_per_working_proc[procId];
+        int lgth0=ids0.size();
+        std::set<int> elts0;
+        for(int i=0;i<lgth0;i++)
+          elts0.insert(globalIdsC[ids0[i]]);
+        int lgth;
+        comm.recv(&lgth,1,MPI_INT,*iter,1128,*_comm,&status);
+        vector<int> ids(lgth);
+        comm.recv(&ids[0],lgth,MPI_INT,*iter,1129,*_comm,&status);
+        set<int> ids1(ids.begin(),ids.end());
+        ids.clear();
+        set<int> tmp5,tmp6;
+        set_intersection(globalIdsS.begin(),globalIdsS.end(),ids1.begin(),ids1.end(),inserter(tmp5,tmp5.begin()));
+        set_difference(tmp5.begin(),tmp5.end(),elts0.begin(),elts0.end(),inserter(tmp6,tmp6.begin()));
+        std::vector<int>& ids2=_ids_per_working_proc3[procId];
+        ids2.resize(tmp6.size());
+        std::copy(tmp6.begin(),tmp6.end(),ids2.begin());
+        //global->local
+        for(std::vector<int>::iterator iter2=ids2.begin();iter2!=ids2.end();iter2++)
+          *iter2=std::find(globalIdsC,globalIdsC+nbElts,*iter2)-globalIdsC;
+      }
+    if(globalIds)
+      globalIds->decrRef();
+  }
+
+  /*!
+   * connected with ElementLocator::recvAddElementsFromLazyProcsW
+   */
+  void ElementLocator::sendAddElementsToWorkingSideL()
+  {
+    int procId=0;
+    CommInterface comm;
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const std::vector<int>& vals=_ids_per_working_proc3[procId];
+        int size=vals.size();
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&size)),1,MPI_INT,*iter,1130,*_comm);
+        comm.send(const_cast<void *>(reinterpret_cast<const void *>(&vals[0])),size,MPI_INT,*iter,1131,*_comm);
+      }
+  }
+
+  /*!
+   * This method sends to working side Wi only the nodes that are in interaction with Wi \b and located on the boundary, in order to reduce the number of exchanged ids.
+   * connected with ElementLocator::recvCandidatesGlobalIdsFromLazyProcsW
+   */
+  void ElementLocator::sendCandidatesGlobalIdsToWorkingSideL()
+  { 
+    int procId=0;
+    CommInterface comm;
+    DataArrayInt *globalIds=_local_para_field.returnGlobalNumbering();
+    const int *globalIdsC=globalIds->getConstPointer();
+    MEDCouplingAutoRefCountObjectPtr<DataArrayInt> candidates=_local_para_field.getSupport()->getCellMesh()->findBoundaryNodes();
+    for(int *iter1=candidates->getPointer();iter1!=candidates->getPointer()+candidates->getNumberOfTuples();iter1++)
+      (*iter1)=globalIdsC[*iter1];
+    std::set<int> candidatesS(candidates->begin(),candidates->end());
+    for(vector<int>::const_iterator iter=_distant_proc_ids.begin();iter!=_distant_proc_ids.end();iter++,procId++)
+      {
+        const vector<int>& ids=_ids_per_working_proc[procId];
+        vector<int> valsToSend(ids.size());
+        vector<int>::iterator iter1=valsToSend.begin();
+        for(vector<int>::const_iterator iter2=ids.begin();iter2!=ids.end();iter2++,iter1++)
+          *iter1=globalIdsC[*iter2];
+        std::set<int> tmp2(valsToSend.begin(),valsToSend.end());
+        std::vector<int> tmp3;
+        set_intersection(candidatesS.begin(),candidatesS.end(),tmp2.begin(),tmp2.end(),std::back_insert_iterator< std::vector<int> >(tmp3));
+        int lgth=tmp3.size();
+        comm.send(&lgth,1,MPI_INT,*iter,1132,*_comm);
+        comm.send(&tmp3[0],lgth,MPI_INT,*iter,1133,*_comm);
+      }
+    if(globalIds)
+      globalIds->decrRef();
+  }
+}
diff --git a/src/ParaMEDMEM/ElementLocator.hxx b/src/ParaMEDMEM/ElementLocator.hxx
new file mode 100644 (file)
index 0000000..4853c97
--- /dev/null
@@ -0,0 +1,109 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __ELEMENTLOCATOR_HXX__
+#define __ELEMENTLOCATOR_HXX__
+
+#include "InterpolationOptions.hxx"
+#include "MEDCouplingNatureOfField.hxx"
+
+#include <mpi.h>
+#include <vector>
+#include <set>
+
+namespace ParaMEDMEM
+{
+  class ParaFIELD;
+  class ProcessorGroup;
+  class ParaSUPPORT;
+  class InterpolationMatrix;
+  class MEDCouplingPointSet;
+  class DataArrayInt;
+
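+  /*!
+   * \brief Exchanges, between the local processor and each processor of the distant group,
+   * the mesh parts whose bounding boxes intersect, together with the corresponding element ids.
+   * It also implements the working side / lazy side exchange protocol used to accumulate
+   * nodal sums across processors (see the W/L method pairs below).
+   */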
+  class ElementLocator : public INTERP_KERNEL::InterpolationOptions
+  {
+  public:
+    ElementLocator(const ParaFIELD& sourceField, const ProcessorGroup& distant_group, const ProcessorGroup& local_group);
+
+    virtual ~ElementLocator();
+    void exchangeMesh(int idistantrank,
+                      MEDCouplingPointSet*& target_mesh,
+                      int*& distant_ids);
+    void exchangeMethod(const std::string& sourceMeth, int idistantrank, std::string& targetMeth);
+    const std::vector<int>& getDistantProcIds() const { return _distant_proc_ids; }
+    const MPI_Comm *getCommunicator() const;
+    NatureOfField getLocalNature() const;
+    //! This method is used to inform whether there is a -1D mesh on the distant_group side or on the local_group side.
+    bool isM1DCorr() const { return _is_m1d_corr; }
+    //Working side methods
+    void recvPolicyFromLazySideW(std::vector<int>& policy);
+    void sendSumToLazySideW(const std::vector< std::vector<int> >& distantLocEltIds, const std::vector< std::vector<double> >& partialSumRelToDistantIds);
+    void recvSumFromLazySideW(std::vector< std::vector<double> >& globalSumRelToDistantIds);
+    void sendCandidatesForAddElementsW(const std::vector<int>& distantGlobIds);
+    void recvAddElementsFromLazyProcsW(std::vector<std::vector<int> >& elementsToAdd);
+    //
+    void sendLocalIdsToLazyProcsW(const std::vector< std::vector<int> >& distantLocEltIds);
+    void recvGlobalIdsFromLazyProcsW(const std::vector< std::vector<int> >& distantLocEltIds, std::vector< std::vector<int> >& globalIds);
+    void recvCandidatesGlobalIdsFromLazyProcsW(std::vector< std::vector<int> >& globalIds);
+    void sendPartialSumToLazyProcsW(const std::vector<int>& distantGlobIds, const std::vector<double>& sum);
+    //Lazy side methods
+    int sendPolicyToWorkingSideL();
+    void recvFromWorkingSideL();
+    void sendToWorkingSideL();
+    //
+    void recvLocalIdsFromWorkingSideL();
+    void sendGlobalIdsToWorkingSideL();
+    void sendCandidatesGlobalIdsToWorkingSideL();
+    //
+    void recvSumFromWorkingSideL();
+    void recvCandidatesForAddElementsL();
+    void sendAddElementsToWorkingSideL();
+  private:
+    void _computeBoundingBoxes();
+    bool _intersectsBoundingBox(int irank);
+    void _exchangeMesh(MEDCouplingPointSet* local_mesh, MEDCouplingPointSet*& distant_mesh,
+                       int iproc_distant, const DataArrayInt* distant_ids_send,
+                       int*& distant_ids_recv);
+  private:
+    const ParaFIELD&  _local_para_field ;
+    MEDCouplingPointSet* _local_cell_mesh;
+    int _local_cell_mesh_space_dim;
+    bool _is_m1d_corr;
+    MEDCouplingPointSet* _local_face_mesh;
+    std::vector<MEDCouplingPointSet*> _distant_cell_meshes;
+    std::vector<MEDCouplingPointSet*> _distant_face_meshes;
+    double* _domain_bounding_boxes;
+    const ProcessorGroup& _distant_group;
+    const ProcessorGroup& _local_group;
+    ProcessorGroup* _union_group;
+    std::vector<int> _distant_proc_ids;
+    const MPI_Comm *_comm;
+    //Attributes only used by lazy side
+    std::vector<double> _values_added;
+    std::vector< std::vector<int> > _ids_per_working_proc;
+    std::vector< std::vector<int> > _ids_per_working_proc3;
+    std::vector< std::vector<double> > _values_per_working_proc;
+  public:
+    static const int CUMULATIVE_POLICY=3;
+    static const int NO_POST_TREATMENT_POLICY=7;
+  };
+
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ExplicitCoincidentDEC.cxx b/src/ParaMEDMEM/ExplicitCoincidentDEC.cxx
new file mode 100644 (file)
index 0000000..5d30c60
--- /dev/null
@@ -0,0 +1,393 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <mpi.h>
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ExplicitCoincidentDEC.hxx"
+#include "ExplicitMapping.hxx"
+#include "InterpKernelUtilities.hxx"
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  /*!
+   * \anchor ExplicitCoincidentDEC-det
+   * \class ExplicitCoincidentDEC
+   *
+   * TODO: doc
+   */
+
+  /*! Constructor
+   */
+  ExplicitCoincidentDEC::ExplicitCoincidentDEC():_toposource(0),_topotarget(0)
+  {  
+  }
+
+  ExplicitCoincidentDEC::~ExplicitCoincidentDEC()
+  {
+  }
+
+  /*! Synchronization process for exchanging topologies
+   */
+  void ExplicitCoincidentDEC::synchronize()
+  {
+    if (_source_group->containsMyRank())
+      {
+        _toposource = dynamic_cast<ExplicitTopology*>(_local_field->getTopology());
+        _sourcegroup= _toposource->getProcGroup()->createProcGroup();
+        _targetgroup=_toposource->getProcGroup()->createComplementProcGroup();
+      }
+    if (_target_group->containsMyRank())
+      {
+        _topotarget = dynamic_cast<ExplicitTopology*>(_local_field->getTopology());
+        _sourcegroup= _topotarget->getProcGroup()->createComplementProcGroup();
+        _targetgroup=_topotarget->getProcGroup()->createProcGroup();
+      }
+  
+    // Exchanging
+  
+    // Transmitting source topology to target code 
+    broadcastTopology(_toposource,_topotarget,1000);
+    transferMappingToSource();
+  }
+
+  /*! Creates the arrays necessary for the data transfer
+   * and fills the send array with the values of the 
+   * source field
+   *  */
+  void ExplicitCoincidentDEC::prepareSourceDE()
+  {
+    ////////////////////////////////////
+    //Step 1 : buffer array creation 
+  
+    if (!_toposource->getProcGroup()->containsMyRank())
+      return;
+    MPIProcessorGroup* group=new MPIProcessorGroup(_sourcegroup->getCommInterface());
+  
+    // Warning : the size of the target side is implicitly deduced
+    //from the size of MPI_COMM_WORLD
+    int target_size = _toposource->getProcGroup()->getCommInterface().worldSize()- _toposource->getProcGroup()->size()  ;
+  
+    vector<int>* target_arrays=new vector<int>[target_size];
+  
+    int nb_local = _toposource-> getNbLocalElements();
+
+    int union_size=group->size();
+  
+    _sendcounts=new int[union_size];
+    _senddispls=new int[union_size];
+    _recvcounts=new int[union_size];
+    _recvdispls=new int[union_size];
+  
+    for (int i=0; i< union_size; i++)
+      {
+        _sendcounts[i]=0;
+        _recvcounts[i]=0;
+        _recvdispls[i]=0;
+      }
+    _senddispls[0]=0;
+    int* counts=_explicit_mapping.getCounts();
+    for (int i=0; i<group->size(); i++)
+      _sendcounts[i]=counts[i];
+  
+    for (int iproc=1; iproc<group->size();iproc++)
+      _senddispls[iproc]=_senddispls[iproc-1]+_sendcounts[iproc-1];
+  
+    _sendbuffer = new double [nb_local * _toposource->getNbComponents()];
+  
+    /////////////////////////////////////////////////////////////
+    //Step 2 : filling the buffers with the source field values 
+  
+    int* counter=new int [target_size];
+    counter[0]=0;  
+    for (int i=1; i<target_size; i++)
+      counter[i]=counter[i-1]+target_arrays[i-1].size();
+  
+  
+    const double* value = _local_field->getField()->getArray()->getPointer();
+  
+    int* bufferindex= _explicit_mapping.getBufferIndex();
+  
+    for (int ielem=0; ielem<nb_local; ielem++)
+      {
+        int ncomp = _toposource->getNbComponents();
+        for (int icomp=0; icomp<ncomp; icomp++)
+          {
+            _sendbuffer[ielem*ncomp+icomp]=value[bufferindex[ielem]*ncomp+icomp];
+          }  
+      }
+    delete[] target_arrays;
+    delete[] counter;
+  }
+
+  /*!
+   *  Creates the buffers for receiving the fields on the target side
+   */
+  void ExplicitCoincidentDEC::prepareTargetDE()
+  {
+    if (!_topotarget->getProcGroup()->containsMyRank())
+      return;
+    MPIProcessorGroup* group=new MPIProcessorGroup(_topotarget->getProcGroup()->getCommInterface());
+
+    vector < vector <int> > source_arrays(_sourcegroup->size());
+    int nb_local = _topotarget-> getNbLocalElements();
+    for (int ielem=0; ielem< nb_local ; ielem++)
+      {
+        //pair<int,int> source_local =_distant_elems[ielem];
+        pair <int,int> source_local=_explicit_mapping.getDistantNumbering(ielem);
+        source_arrays[source_local.first].push_back(source_local.second); 
+      }  
+    int union_size=group->size();
+    _recvcounts=new int[union_size];
+    _recvdispls=new int[union_size];
+    _sendcounts=new int[union_size];
+    _senddispls=new int[union_size];
+    
+    for (int i=0; i< union_size; i++)
+      {
+        _sendcounts[i]=0;
+        _recvcounts[i]=0;
+        _recvdispls[i]=0;
+      }
+    for (int iproc=0; iproc < _sourcegroup->size(); iproc++)
+      {
+        //converts the rank in target to the rank in union communicator
+        int unionrank=group->translateRank(_sourcegroup,iproc);
+        _recvcounts[unionrank]=source_arrays[iproc].size()*_topotarget->getNbComponents();
+      }
+    for (int i=1; i<union_size; i++)
+      _recvdispls[i]=_recvdispls[i-1]+_recvcounts[i-1];
+    _recvbuffer=new double[nb_local*_topotarget->getNbComponents()];
+    
+  }
+
+  /*!
+   * Synchronizes a topology so that the whole
+   * group possesses it.
+   * 
+   * \param toposend Topology that is transmitted. It is read on processes where it already exists, and it is created and filled on others.
+   * \param toporecv Topology which is received.
+   * \param tag Communication tag associated with this operation.
+   */
+  void ExplicitCoincidentDEC::broadcastTopology(const ExplicitTopology* toposend, ExplicitTopology* toporecv, int tag)
+  {
+    MPI_Status status;
+  
+    int* serializer=0;
+    int size;
+  
+    MPIProcessorGroup* group=new MPIProcessorGroup(*_comm_interface);
+  
+    // The send processors serialize the send topology
+    // and send the buffers to the recv procs
+    if (toposend !=0 && toposend->getProcGroup()->containsMyRank())
+      {
+        toposend->serialize(serializer, size);
+        for (int iproc=0; iproc< group->size(); iproc++)
+          {
+            int itarget=iproc;
+            if (!toposend->getProcGroup()->contains(itarget))
+              {
+                _comm_interface->send(&size,1,MPI_INT, itarget,tag+itarget,*(group->getComm()));
+                _comm_interface->send(serializer, size, MPI_INT, itarget, tag+itarget,*(group->getComm()));          
+              }
+          }
+      }
+    else
+      {
+        vector <int> size2(group->size());
+        int myworldrank=group->myRank();
+        for (int iproc=0; iproc<group->size();iproc++)
+          {
+            int isource = iproc;
+            if (!toporecv->getProcGroup()->contains(isource))
+              {
+                int nbelem;
+                _comm_interface->recv(&nbelem, 1, MPI_INT, isource, tag+myworldrank, *(group->getComm()), &status);
+                int* buffer = new int[nbelem];
+                _comm_interface->recv(buffer, nbelem, MPI_INT, isource,tag+myworldrank, *(group->getComm()), &status);        
+      
+                ExplicitTopology* topotemp=new ExplicitTopology();
+                topotemp->unserialize(buffer, *_comm_interface);
+                delete[] buffer;
+        
+                for (int ielem=0; ielem<toporecv->getNbLocalElements(); ielem++)
+                  {
+                    int global = toporecv->localToGlobal(ielem);
+                    int sendlocal=topotemp->globalToLocal(global);
+                    if (sendlocal!=-1)
+                      {
+                        size2[iproc]++;
+                        _explicit_mapping.pushBackElem(make_pair(iproc,sendlocal));
+                      }
+                  }
+                delete topotemp;
+              }
+          }  
+      }  
+    MESSAGE (" rank "<<group->myRank()<< " broadcastTopology is over");
+  }
+
+  void ExplicitCoincidentDEC::transferMappingToSource()
+  {
+
+    MPIProcessorGroup* group=new MPIProcessorGroup(*_comm_interface);
+  
+    // sending source->target mapping which is stored by target
+    //in _distant_elems from target to source
+    if (_topotarget!=0 && _topotarget->getProcGroup()->containsMyRank())
+      {
+        int world_size = _topotarget->getProcGroup()->getCommInterface().worldSize()  ;
+        int* nb_transfer_union=new int[world_size];
+        int* dummy_recv=new int[world_size];
+        for (int i=0; i<world_size; i++)
+          nb_transfer_union[i]=0;
+        //converts the rank in target to the rank in union communicator
+    
+        for (int i=0; i<  _explicit_mapping.nbDistantDomains(); i++)
+          {
+            int unionrank=group->translateRank(_sourcegroup,_explicit_mapping.getDistantDomain(i));
+            nb_transfer_union[unionrank]=_explicit_mapping.getNbDistantElems(i);
+          }
+        _comm_interface->allToAll(nb_transfer_union, 1, MPI_INT, dummy_recv, 1, MPI_INT, MPI_COMM_WORLD);
+      
+        int* sendbuffer= _explicit_mapping.serialize(_topotarget->getProcGroup()->myRank());
+      
+        int* sendcounts= new int [world_size];
+        int* senddispls = new int [world_size];
+        for (int i=0; i< world_size; i++)
+          {
+            sendcounts[i]=2*nb_transfer_union[i];
+            if (i==0)
+              senddispls[i]=0;
+            else
+              senddispls[i]=senddispls[i-1]+sendcounts[i-1];
+          }
+        int* recvcounts=new int[world_size];
+        int* recvdispls=new int[world_size];
+        int *dummyrecv=0;
+        for (int i=0; i <world_size; i++)
+          {
+            recvcounts[i]=0;
+            recvdispls[i]=0;
+          }
+        _comm_interface->allToAllV(sendbuffer, sendcounts, senddispls, MPI_INT, dummyrecv, recvcounts, recvdispls, MPI_INT, MPI_COMM_WORLD);
+      
+      }
+    //receiving in the source subdomains the mapping sent by targets
+    else
+      {
+        int world_size = _toposource->getProcGroup()->getCommInterface().worldSize()  ;
+        int* nb_transfer_union=new int[world_size];
+        int* dummy_send=new int[world_size];
+        for (int i=0; i<world_size; i++)
+          dummy_send[i]=0;
+        _comm_interface->allToAll(dummy_send, 1, MPI_INT, nb_transfer_union, 1, MPI_INT, MPI_COMM_WORLD);
+      
+        int total_size=0;
+        for (int i=0; i< world_size; i++)
+          total_size+=nb_transfer_union[i];
+        int nbtarget = _targetgroup->size();
+        int* targetranks = new int[ nbtarget];
+        for (int i=0; i<nbtarget; i++)
+          targetranks[i]=group->translateRank(_targetgroup,i);
+        int* mappingbuffer= new int [total_size*2];
+        int* sendcounts= new int [world_size];
+        int* senddispls = new int [world_size];
+        int* recvcounts=new int[world_size];
+        int* recvdispls=new int[world_size];
+        for (int i=0; i< world_size; i++)
+          {
+            recvcounts[i]=2*nb_transfer_union[i];
+            if (i==0)
+              recvdispls[i]=0;
+            else
+              recvdispls[i]=recvdispls[i-1]+recvcounts[i-1];
+          }
+
+        int *dummysend=0;
+        for (int i=0; i <world_size; i++)
+          {
+            sendcounts[i]=0;
+            senddispls[i]=0;
+          }
+        _comm_interface->allToAllV(dummysend, sendcounts, senddispls, MPI_INT, mappingbuffer, recvcounts, recvdispls, MPI_INT, MPI_COMM_WORLD);
+        _explicit_mapping.unserialize(world_size,nb_transfer_union,nbtarget, targetranks, mappingbuffer);
+      }
+  }
+
+  void ExplicitCoincidentDEC::recvData()
+  {
+    //MPI_COMM_WORLD is used instead of group because there is no
+    //mechanism for creating the union group yet
+    MESSAGE("recvData");
+
+    cout<<"start AllToAll"<<endl;
+    _comm_interface->allToAllV(_sendbuffer, _sendcounts, _senddispls, MPI_DOUBLE, 
+                               _recvbuffer, _recvcounts, _recvdispls, MPI_DOUBLE,MPI_COMM_WORLD);
+    cout<<"end AllToAll"<<endl;
+    int nb_local = _topotarget->getNbLocalElements();
+    double* value=new double[nb_local*_topotarget->getNbComponents()];
+
+    vector<int> counters(_sourcegroup->size());
+    counters[0]=0;
+    for (int i=0; i<_sourcegroup->size()-1; i++)
+      {
+        MPIProcessorGroup* group=new MPIProcessorGroup(*_comm_interface);
+        int worldrank=group->translateRank(_sourcegroup,i);
+        counters[i+1]=counters[i]+_recvcounts[worldrank];
+      }
+  
+    for (int ielem=0; ielem<nb_local ; ielem++)
+      {
+        pair<int,int> distant_numbering=_explicit_mapping.getDistantNumbering(ielem);
+        int iproc=distant_numbering.first; 
+        int ncomp =  _topotarget->getNbComponents();
+        for (int icomp=0; icomp< ncomp; icomp++)
+          value[ielem*ncomp+icomp]=_recvbuffer[counters[iproc]*ncomp+icomp];
+        counters[iproc]++;
+      }  
+    _local_field->getField()->getArray()->useArray(value,true,CPP_DEALLOC,nb_local,_topotarget->getNbComponents());
+  }
+
+  void ExplicitCoincidentDEC::sendData()
+  {
+    MESSAGE ("sendData");
+    for (int i=0; i< 4; i++)
+      cout << _sendcounts[i]<<" ";
+    cout <<endl;
+    for (int i=0; i< 4; i++)
+      cout << _senddispls[i]<<" ";
+    cout <<endl;
+    //MPI_COMM_WORLD is used instead of group because there is no
+    //mechanism for creating the union group yet
+    cout <<"start AllToAll"<<endl;
+    _comm_interface->allToAllV(_sendbuffer, _sendcounts, _senddispls, MPI_DOUBLE, 
+                               _recvbuffer, _recvcounts, _recvdispls, MPI_DOUBLE,MPI_COMM_WORLD);
+  }
+}
+
diff --git a/src/ParaMEDMEM/ExplicitCoincidentDEC.hxx b/src/ParaMEDMEM/ExplicitCoincidentDEC.hxx
new file mode 100644 (file)
index 0000000..6205e11
--- /dev/null
@@ -0,0 +1,62 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __EXPLICITCOINCIDENTDEC_HXX__
+#define __EXPLICITCOINCIDENTDEC_HXX__
+
+#include "DisjointDEC.hxx"
+#include "ExplicitMapping.hxx"
+#include "ExplicitTopology.hxx"
+
+#include <map>
+
+namespace ParaMEDMEM
+{
+  class BlockTopology;
+
+  class ExplicitCoincidentDEC : public DisjointDEC
+  {
+  public:
+    ExplicitCoincidentDEC();
+    virtual ~ExplicitCoincidentDEC();
+    void synchronize();
+    void broadcastTopology(BlockTopology*&, int tag);
+    void broadcastTopology(const ExplicitTopology* toposend, ExplicitTopology* toporecv, int tag);
+    void transferMappingToSource();
+    void prepareSourceDE();
+    void prepareTargetDE();
+    void recvData();
+    void sendData();
+  private:  
+    ExplicitTopology* _toposource;
+    ExplicitTopology* _topotarget;
+    ProcessorGroup* _targetgroup;
+    ProcessorGroup* _sourcegroup;
+    int* _sendcounts;
+    int* _recvcounts;
+    int* _senddispls;
+    int* _recvdispls;
+    double* _recvbuffer;
+    double* _sendbuffer;
+    std::map<int,std::pair<int,int> > _distant_elems;
+    ExplicitMapping _explicit_mapping;
+  }; 
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ExplicitMapping.hxx b/src/ParaMEDMEM/ExplicitMapping.hxx
new file mode 100644 (file)
index 0000000..e83d0dc
--- /dev/null
@@ -0,0 +1,176 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __EXPLICITMAPPING_HXX__
+#define __EXPLICITMAPPING_HXX__
+
+#include <vector>
+#include <map>
+#include <set>
+
+namespace ParaMEDMEM
+{
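+  /*!
+   * \brief Mapping used by ExplicitCoincidentDEC: for each local element it stores the
+   * (distant domain, distant local id) pair it corresponds to, and provides the
+   * serialize()/unserialize() helpers used to exchange this mapping between the
+   * processor groups.
+   */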
+  class ExplicitMapping
+  {
+  public:
+
+    ExplicitMapping():_numbers(0), _domains(0), _comm_buffer(0), _buffer_index(0), _send_counts(0) { }
+
+    ~ExplicitMapping()
+    {
+      if (_domains!=0) delete[] _domains;
+      if (_numbers!=0) delete[] _numbers;
+      if (_comm_buffer!=0) delete[] _comm_buffer;
+      if (_buffer_index!=0) delete[] _buffer_index;
+      if (_send_counts!=0) delete[] _send_counts;
+    }
+    
+    void pushBackElem(std::pair<int,int> idistant)
+    {
+      _mapping.push_back(idistant);
+    }
+
+    void  setDistantElem(int ilocal, std::pair<int,int> idistant)
+    {
+      _mapping[ilocal]=idistant;
+    }
+
+    int nbDistantDomains()
+    {
+      if (_distant_domains.empty())
+        {
+          for (std::vector <std::pair<int,int> >::const_iterator iter= _mapping.begin();
+               iter!=_mapping.end();
+               iter++)
+            _distant_domains.insert(iter->first);
+        }
+      return _distant_domains.size();
+    }
+    
+    std::pair <int,int> getDistantNumbering(int ielem)const
+    {
+      return _mapping[ielem];
+    }
+    
+    int getDistantDomain(int i)
+    {
+      if (_domains==0)
+        computeNumbers();
+
+      return _domains[i];
+    }
+
+    int getNbDistantElems(int i)
+    {
+      if (_numbers==0)
+        computeNumbers();
+      return _numbers[i];    
+    }
+
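+    //! Packs the mapping into a newly allocated integer buffer (two ints per element:
+    //! the given \a idproc and the distant local id), grouped by distant domain.
+    //! Used by ExplicitCoincidentDEC::transferMappingToSource().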
+    int* serialize(int idproc)
+    {
+      _comm_buffer=new int[_mapping.size()*2];
+      std::vector<int> offsets(_distant_domains.size());
+      offsets[0]=0;
+      for (int i=1; i<(int)_distant_domains.size();i++)
+        offsets[i]=offsets[i-1]+_numbers[i-1];
+      
+      for (int i=0; i<(int)_mapping.size(); i++)
+        {
+          int offset= offsets[_mapping[i].first];
+          _comm_buffer[offset*2]=idproc;
+          _comm_buffer[offset*2+1]=_mapping[i].second;
+          offsets[_mapping[i].first]++;
+        }
+      return _comm_buffer;
+    }
+
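+    //! Rebuilds the mapping from a buffer received in ExplicitCoincidentDEC::transferMappingToSource():
+    //! \a sizes gives, for each of the \a nbprocs processes, the number of elements received,
+    //! and \a targetrank translates indices in the target group into union ranks.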
+    void unserialize(int nbprocs, int* sizes,int nbtarget, int* targetrank, int* commbuffer)
+    {
+      int total_size=0;
+      for (int i=0; i< nbprocs; i++)
+        total_size+=sizes[i];
+      
+      _mapping.resize(total_size);
+      _buffer_index=new int[total_size];
+      int indmap=0;
+      for (int i=0; i<nbprocs; i++)
+        for (int ielem=0; ielem<sizes[i]; ielem++)
+          {
+            _mapping[indmap].first=i;
+            _mapping[indmap].second=commbuffer[indmap*2+1];
+            _buffer_index[indmap]=commbuffer[indmap*2+1];
+            indmap++;
+          }  
+      _numbers=new int [nbtarget];
+      _domains=new int [nbtarget];
+      
+      int index=0;      
+      for (int i=0; i<nbtarget; i++)
+        {
+          if (sizes[targetrank[i]]>0)
+            {
+              _numbers[index]=sizes[targetrank[i]];
+              _domains[index]=i;
+              index++;
+            }
+        }
+      _send_counts=new int[nbprocs];
+      for (int i=0; i<nbprocs; i++)
+        _send_counts[i]=sizes[i];
+    }
+
+    int* getBufferIndex() const { return _buffer_index; }
+    int* getCounts() const { return _send_counts; }
+  private:
+    std::vector <std::pair<int,int> > _mapping;
+    std::set<int> _distant_domains;
+    int* _numbers;
+    int* _domains;
+    int* _comm_buffer;
+    int* _buffer_index;
+    int* _send_counts;
+
+    void computeNumbers()
+    {
+      std::map <int,int> counts;
+      if (_numbers==0)
+        {
+          _numbers=new int[nbDistantDomains()];
+          _domains=new int[nbDistantDomains()];
+          for (int i=0; i<(int)_mapping.size(); i++)
+            {
+              if ( counts.find(_mapping[i].first) == counts.end())
+                counts.insert(std::make_pair(_mapping[i].first,1));
+              else
+                (counts[_mapping[i].first])++;
+            }
+          int counter=0;
+          for (std::map<int,int>::const_iterator iter=counts.begin(); 
+               iter!=counts.end(); 
+               iter++)
+            {
+              _numbers[counter]=iter->second;
+              _domains[counter]=iter->first;
+              counter++;
+            }
+        }
+    }
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ExplicitTopology.cxx b/src/ParaMEDMEM/ExplicitTopology.cxx
new file mode 100644 (file)
index 0000000..a624623
--- /dev/null
@@ -0,0 +1,109 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ParaMESH.hxx"
+#include "Topology.hxx"
+#include "ExplicitTopology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+
+#include <vector>
+#include <algorithm>
+
+using namespace std;
+namespace ParaMEDMEM
+{
+
+ExplicitTopology::ExplicitTopology(const ParaMESH& paramesh ):
+_proc_group(paramesh.getBlockTopology()->getProcGroup()),
+_nb_components(1)
+{
+  _nb_elems=paramesh.getCellMesh()->getNumberOfCells();
+  const int* global=paramesh.getGlobalNumberingCell();
+  _loc2glob=new int[_nb_elems]; 
+  
+  for (int i=0; i<_nb_elems; i++)
+    {
+      _loc2glob[i]=global[i];
+      _glob2loc[global[i]]=i;
+    }
+}
+
+ExplicitTopology::ExplicitTopology(const ExplicitTopology& topo, int nb_components)
+{
+  _proc_group = topo._proc_group;
+  _nb_elems = topo._nb_elems;
+  _nb_components = nb_components;
+  _loc2glob=new int[_nb_elems];
+  for (int i=0; i<_nb_elems; i++)
+    {
+      _loc2glob[i]=topo._loc2glob[i];
+    }
+  _glob2loc=topo._glob2loc;
+}
+
+
+ExplicitTopology::~ExplicitTopology()
+{
+  if (_loc2glob != 0) delete[] _loc2glob;
+}
+
+
+/*! Serializes the data contained in the ExplicitTopology
+ * for communication purposes: the buffer holds the number of elements followed by
+ * the global id of each local element. */
+void ExplicitTopology::serialize(int* & serializer, int& size) const 
+{
+  vector <int> buffer;
+  
+  buffer.push_back(_nb_elems);
+  for (int i=0; i<_nb_elems; i++)
+  {
+    buffer.push_back(_loc2glob[i]);
+  }
+    
+  serializer=new int[buffer.size()];
+  size=  buffer.size();
+  copy(buffer.begin(), buffer.end(), serializer);
+  
+}
+/*! Unserializes the data contained in the ExplicitTopology
+ * after communication. Uses the same buffer structure as serialize().
+ */
+void ExplicitTopology::unserialize(const int* serializer,const CommInterface& comm_interface)
+{
+  const int* ptr_serializer=serializer;
+  cout << "unserialize..."<<endl;
+  _nb_elems=*ptr_serializer++;
+  cout << "nbelems "<<_nb_elems<<endl;
+  _loc2glob=new int[_nb_elems];
+  for (int i=0; i<_nb_elems; i++)
+  {
+    _loc2glob[i]=*ptr_serializer;
+    _glob2loc[*ptr_serializer]=i;
+    ptr_serializer++;
+    
+  }
+
+}
+
+}
diff --git a/src/ParaMEDMEM/ExplicitTopology.hxx b/src/ParaMEDMEM/ExplicitTopology.hxx
new file mode 100644 (file)
index 0000000..a1f4cce
--- /dev/null
@@ -0,0 +1,92 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __EXPLICITTOPOLOGY_HXX__
+#define __EXPLICITTOPOLOGY_HXX__
+
+#include "ProcessorGroup.hxx"
+#include "InterpKernelHashMap.hxx"
+
+#include <vector>
+#include <utility>
+#include <iostream>
+
+namespace ParaMEDMEM
+{
+  class ParaMESH;
+  class Topology;
+  class ComponentTopology;
+
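+  /*!
+   * \brief Topology described by an explicit local-to-global numbering of the elements of a
+   * ParaMESH (as opposed to a block-wise description). It stores both the local->global and
+   * global->local maps and can be serialized for exchange between processes.
+   */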
+  class ExplicitTopology : public Topology
+  {
+  public:
+    ExplicitTopology():_proc_group(0),_nb_elems(0),_nb_components(1),_loc2glob(0) { }
+    ExplicitTopology( const ExplicitTopology& topo, int nbcomponents);
+    ExplicitTopology(const ParaMESH &mesh);
+    virtual ~ExplicitTopology();
+    
+    inline int getNbElements()const;
+    inline int getNbLocalElements() const;
+    const ProcessorGroup* getProcGroup()const { return _proc_group; }
+    int localToGlobal (const std::pair<int,int> local) const { return localToGlobal(local.second); }
+    inline int localToGlobal(int) const;
+    inline int globalToLocal(int) const;
+    void serialize(int* & serializer, int& size) const ;
+    void unserialize(const int* serializer, const CommInterface& comm_interface);
+    int getNbComponents() const { return _nb_components; }
+  private:
+    //Processor group
+    const ProcessorGroup* _proc_group;
+    //nb of elements
+    int _nb_elems;
+    //nb of components
+    int _nb_components;
+    //mapping local to global
+    int* _loc2glob;
+    //mapping global to local
+    INTERP_KERNEL::HashMap<int,int> _glob2loc;
+  };
+
+  //!converts a global number to a local number
+  inline int ExplicitTopology::globalToLocal(const int global) const
+  {
+    return (_glob2loc.find(global))->second;
+  }
+
+  //!converts a local number to a global number
+  inline int ExplicitTopology::localToGlobal(int local) const
+  {
+    return _loc2glob[local];
+  }
+  
+  //!Retrieves the number of elements for a given topology
+  inline int ExplicitTopology::getNbElements() const
+  {
+    return _nb_elems;
+  }
+
+  //!Retrieves the local number of elements
+  inline int ExplicitTopology::getNbLocalElements()const 
+  {
+    return _glob2loc.size();
+  }
+}
+
+
+#endif
diff --git a/src/ParaMEDMEM/ICoCoField.cxx b/src/ParaMEDMEM/ICoCoField.cxx
new file mode 100644 (file)
index 0000000..3925945
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+// ICoCo file common to several codes
+// ICoCoField.cxx
+// version 1.2 10/05/2010
+
+#include <ICoCoField.hxx>
+#include <string>
+
+using namespace ICoCo;
+using std::string;
+
+Field::Field() {
+  _name=new string;
+}
+
+Field::~Field() {
+  delete _name;
+}
+
+void Field::setName(const string& name) {
+  *_name=name;
+}
+
+const string& Field::getName() const {
+  return *_name;
+}
+
+const char* Field::getCharName() const {
+  return _name->c_str();
+}
diff --git a/src/ParaMEDMEM/ICoCoField.hxx b/src/ParaMEDMEM/ICoCoField.hxx
new file mode 100644 (file)
index 0000000..509dc68
--- /dev/null
@@ -0,0 +1,43 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+// ICoCo file common to several codes
+// ICoCoField.h
+// version 1.2 10/05/2010
+
+#ifndef _ICoCoField_included_
+#define _ICoCoField_included_
+#include <string>
+
+
+namespace ICoCo {
+
+  class Field {
+  public:
+    Field();
+    virtual ~Field();
+    void setName(const std::string& name);
+    const std::string& getName() const;
+    const char* getCharName() const;
+    
+  private:
+    std::string* _name;
+  };
+}
+#endif
diff --git a/src/ParaMEDMEM/ICoCoMEDField.cxx b/src/ParaMEDMEM/ICoCoMEDField.cxx
new file mode 100644 (file)
index 0000000..1bf60fc
--- /dev/null
@@ -0,0 +1,62 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ICoCoMEDField.hxx"
+#include "ProcessorGroup.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "NormalizedUnstructuredMesh.hxx"
+
+namespace ICoCo
+{
+
+  /*! Constructor directly attaching a MEDCouplingFieldDouble.
+    The object does not take exclusive control of the object pointed to by
+    \a field; it only increments its reference count.
+  */
+    
+  MEDField::MEDField(ParaMEDMEM::MEDCouplingFieldDouble *field):_field(field)
+  {
+    if(_field)
+      _field->incrRef();
+  }
+  MEDField::MEDField(const MEDField& field):_field(field.getField())
+  {
+    if(_field)
+      _field->incrRef();
+  }
+
+  MEDField::~MEDField()
+  {
+    if(_field)
+      _field->decrRef();
+  }
+
+
+  MEDField& MEDField::operator=(const MEDField& field)
+  {
+    if (&field == this)
+      return *this;
+    if (_field)
+      _field->decrRef();
+
+    _field=field.getField();
+    if(_field)
+      _field->incrRef();
+    return *this;
+  }
+}
diff --git a/src/ParaMEDMEM/ICoCoMEDField.hxx b/src/ParaMEDMEM/ICoCoMEDField.hxx
new file mode 100644 (file)
index 0000000..c5dbdbb
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __ICOCOMEDFIELD_HXX__
+#define __ICOCOMEDFIELD_HXX__
+
+#include "ICoCoField.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+
+#include <vector>
+
+namespace ICoCo
+{
+  class MEDField : public ICoCo::Field
+  {
+  public:
+    MEDField():_field(0) { }
+    MEDField(ParaMEDMEM::MEDCouplingFieldDouble* field);
+    MEDField(const MEDField& field);
+    MEDField& operator=(const MEDField& field);
+    virtual ~MEDField();
+    ParaMEDMEM::MEDCouplingFieldDouble *getField() const  { return _field; }
+    const ParaMEDMEM::MEDCouplingMesh *getMesh() const { return _field->getMesh(); }
+  private:
+    ParaMEDMEM::MEDCouplingFieldDouble *_field;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/InterpKernelDEC.cxx b/src/ParaMEDMEM/InterpKernelDEC.cxx
new file mode 100644 (file)
index 0000000..a7557e5
--- /dev/null
@@ -0,0 +1,292 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <mpi.h>
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ParaMESH.hxx"
+#include "DEC.hxx"
+#include "InterpolationMatrix.hxx"
+#include "InterpKernelDEC.hxx"
+#include "ElementLocator.hxx"
+
+namespace ParaMEDMEM
+{  
+
+  /*!
+    \anchor InterpKernelDEC-det
+    \class InterpKernelDEC
+
+    \section dec-over Overview
+
+    The InterpKernelDEC enables the \ref InterpKerRemapGlobal "remapping" of fields between two parallel codes.
+    This remapping is based on the computation of intersection volumes between elements from code A
+    and elements from code B. The computation is possible for 3D meshes, 2D meshes, and 3D-surface
+    meshes. The mesh dimension must be the same for code A and code B (for instance, though it could be
+    desirable, it is not yet possible to couple 3D surfaces with 2D surfaces).
+
+    In the present version, only fields lying on elements are considered.
+
+    \image html NonCoincident_small.png "Example showing the transfer from a field based on a
+    quadrangular mesh to a triangular mesh. In a P0-P0 interpolation, to obtain the value on a triangle,
+    the values on quadrangles are weighted by their intersection area and summed."
+
+    \image latex NonCoincident_small.eps "Example showing the transfer from a field based on a quadrangular
+     mesh to a triangular mesh. In a P0-P0 interpolation, to obtain the value on a triangle, the values
+     on quadrangles are weighted by their intersection area and summed."
+
+    A typical use of InterpKernelDEC encompasses two distinct phases :
+    - A setup phase during which the intersection volumes are computed and the communication structures are
+    setup. This corresponds to calling the InterpKernelDEC::synchronize() method.
+    - A use phase during which the remappings are actually performed. This corresponds to the calls to
+    sendData() and recvData(), which actually trigger the data exchange. The data exchanges are synchronous
+    in the current version of the library, so the recvData() and sendData() calls must be synchronized
+    on the code A and code B processor groups.
+
+    The following code excerpt illustrates a typical use of the InterpKernelDEC class.
+
+    \code
+    ...
+    InterpKernelDEC dec(groupA, groupB);
+    dec.attachLocalField(field);
+    dec.synchronize();
+    if (groupA.containsMyRank())
+    dec.recvData();
+    else if (groupB.containsMyRank())
+    dec.sendData();
+    ...
+    \endcode
+    The \ref InterpKerRemapGlobal "remapping" of the field from the source mesh to the target mesh is prepared by
+    the function synchronize(), which computes the interpolation matrix.
+
+    Computing the field on the receiving side can be expressed in terms of a matrix-vector product :
+    \f$ \phi_t=W.\phi_s\f$, with \f$ \phi_t \f$ the field on the target side and \f$ \phi_s \f$ the field
+    on the source side.
+    When remapping a 3D surface to another 3D surface, a projection phase is necessary to match elements
+    from both sides. Care must be taken when defining this projection to obtain a
+    \ref InterpKerRemapGlobal "conservative remapping".
+
+    In the P0-P0 case, this matrix is a plain rectangular matrix with coefficients equal to the
+    intersection areas between triangle and quadrangles. For instance, in the above figure, the matrix
+    is :
+
+    \f[
+    \begin{tabular}{|cccc|}
+    0.72 & 0 & 0.2 & 0 \\
+    0.46 & 0 & 0.51 & 0.03\\
+    0.42 & 0.53 & 0 & 0.05\\
+    0 & 0 & 0.92 & 0.05 \\
+    \end{tabular}
+    \f]
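+
+    For instance, with the matrix above, the value on the first target triangle is obtained as
+    \f$ \phi_{t,1} = 0.72\,\phi_{s,1} + 0.2\,\phi_{s,3} \f$ (first row of the matrix applied to \f$ \phi_s \f$).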
+
+
+
+    \section interpkerneldec_options Options
+    On top of \ref dec_options, options supported by %InterpKernelDEC objects are
+    related to the underlying Intersector class. 
+    All the options available in the intersector objects are
+    available for the %InterpKernelDEC object. The various options available for intersectors can
+    be reviewed in \ref InterpKerIntersectors.
+    For instance :
+    \verbatim
+    InterpKernelDEC dec(source_group, target_group);
+    dec.attachLocalField(field);
+    dec.setOptions("DoRotate",false);
+    dec.setOptions("Precision",1e-12);
+    dec.synchronize();
+    \endverbatim
+
+    \warning{  Options must be set before calling the synchronize method. }
+  */
+  
+  InterpKernelDEC::InterpKernelDEC():_interpolation_matrix(0)
+  {  
+  }
+
+  /*!
+    This constructor creates an InterpKernelDEC which has \a source_group as a working side 
+    and  \a target_group as an idle side. All the processors will actually participate, but intersection computations will be performed on the working side during the \a synchronize() phase.
+    The constructor must be called synchronously on all processors of both processor groups.
+
+    \param source_group working side ProcessorGroup
+    \param target_group lazy side ProcessorGroup
+
+  */
+  InterpKernelDEC::InterpKernelDEC(ProcessorGroup& source_group, ProcessorGroup& target_group):
+    DisjointDEC(source_group, target_group),_interpolation_matrix(0)
+  {
+
+  }
+
+  InterpKernelDEC::InterpKernelDEC(const std::set<int>& src_ids, const std::set<int>& trg_ids,
+                                   const MPI_Comm& world_comm):DisjointDEC(src_ids,trg_ids,world_comm),
+                                                               _interpolation_matrix(0)
+  {
+  }
+
+  InterpKernelDEC::~InterpKernelDEC()
+  {
+    if (_interpolation_matrix !=0)
+      delete _interpolation_matrix;
+  } 
+
+  /*! 
+    \brief Synchronization process for exchanging topologies.
+
+    This method prepares all the structures necessary for sending data from one processor group to the other. It uses the mesh underlying the fields that have been set with the attachLocalField() method.
+    It works in four steps :
+    -# Bounding boxes are computed for each subdomain,
+    -# The lazy side mesh parts that are likely to intersect the working side local processor are sent to the working side,
+    -# The working side calls the interpolation kernel to compute the intersection between local and imported mesh.
+    -# The lazy side is updated so that it knows the structure of the data that will be sent by
+    the working side during a \a sendData() call.
+
+  */
+  void InterpKernelDEC::synchronize()
+  {
+    if(!isInUnion())
+      return ;
+    delete _interpolation_matrix;
+    _interpolation_matrix = new InterpolationMatrix (_local_field, *_source_group,*_target_group,*this,*this); 
+
+    //setting up the communication DEC on both sides  
+    if (_source_group->containsMyRank())
+      {
+        //locate the distant meshes
+        ElementLocator locator(*_local_field, *_target_group, *_source_group);
+        //transferring options from InterpKernelDEC to ElementLocator
+        locator.copyOptions(*this);
+        MEDCouplingPointSet* distant_mesh=0; 
+        int* distant_ids=0;
+        std::string distantMeth;
+        for (int i=0; i<_target_group->size(); i++)
+          {
+            //        int idistant_proc = (i+_source_group->myRank())%_target_group->size();
+            int idistant_proc=i;
+
+            //gathers pieces of the target meshes that can intersect the local mesh
+            locator.exchangeMesh(idistant_proc,distant_mesh,distant_ids);
+            if (distant_mesh !=0)
+              {
+                locator.exchangeMethod(_method,idistant_proc,distantMeth);
+                //adds the contribution of the distant mesh on the local one
+                int idistant_proc_in_union=_union_group->translateRank(_target_group,idistant_proc);
+                //std::cout <<"add contribution from proc "<<idistant_proc_in_union<<" to proc "<<_union_group->myRank()<<std::endl;
+                _interpolation_matrix->addContribution(*distant_mesh,idistant_proc_in_union,distant_ids,_method,distantMeth);
+                distant_mesh->decrRef();
+                delete [] distant_ids;
+                distant_mesh=0;
+                distant_ids=0;
+              }
+          }
+       _interpolation_matrix->finishContributionW(locator);
+      }
+
+    if (_target_group->containsMyRank())
+      {
+        ElementLocator locator(*_local_field, *_source_group, *_target_group);
+        //transferring options from InterpKernelDEC to ElementLocator
+        locator.copyOptions(*this);
+        MEDCouplingPointSet* distant_mesh=0;
+        int* distant_ids=0;
+        for (int i=0; i<_source_group->size(); i++)
+          {
+            //        int idistant_proc = (i+_target_group->myRank())%_source_group->size();
+            int  idistant_proc=i;
+            //gathers pieces of the target meshes that can intersect the local mesh
+            locator.exchangeMesh(idistant_proc,distant_mesh,distant_ids);
+            //std::cout << " Data sent from "<<_union_group->myRank()<<" to source proc "<< idistant_proc<<std::endl;
+            if (distant_mesh!=0)
+              {
+                std::string distantMeth;
+                locator.exchangeMethod(_method,idistant_proc,distantMeth);
+                distant_mesh->decrRef();
+                delete [] distant_ids;
+                distant_mesh=0;
+                distant_ids=0;
+              }
+          }
+        _interpolation_matrix->finishContributionL(locator);
+      }
+    _interpolation_matrix->prepare();
+  }
+
+
+  /*!
+    Receives the data whether the processor is on the working side or on the lazy side. It must match a \a sendData() call on the other side.
+  */
+  void InterpKernelDEC::recvData()
+  {
+    if (_source_group->containsMyRank())
+      _interpolation_matrix->transposeMultiply(*_local_field->getField());
+    else if (_target_group->containsMyRank())
+      {
+        _interpolation_matrix->multiply(*_local_field->getField());
+        if (getForcedRenormalization())
+          renormalizeTargetField(getMeasureAbsStatus());
+      }
+  }
+
+
+  /*!
+    Receives the data at time \a time in asynchronous mode. The value of the field
+    will be time-interpolated from the field values received.
+    \param time time at which the value is desired
+  */
+  void InterpKernelDEC::recvData( double time )
+  {
+    _interpolation_matrix->getAccessDEC()->setTime(time);
+    recvData() ;
+  }
+
+  /*!
+    Sends the data whether the processor is on the working side or on the lazy side.
+    It must match a recvData() call on the other side.
+  */
+  void InterpKernelDEC::sendData()
+  {
+    if (_source_group->containsMyRank())
+      {
+    
+        _interpolation_matrix->multiply(*_local_field->getField());
+        if (getForcedRenormalization())
+          renormalizeTargetField(getMeasureAbsStatus());
+    
+      }
+    else if (_target_group->containsMyRank())
+      _interpolation_matrix->transposeMultiply(*_local_field->getField());
+  }
+
+  /*!
+    Sends the data available at time \a time in asynchronous mode. 
+    \param time time at which the value is available
+    \param deltatime time interval between the value presently sent and the next one. 
+  */
+  void InterpKernelDEC::sendData( double time , double deltatime )
+  {
+    _interpolation_matrix->getAccessDEC()->setTime(time,deltatime);
+    sendData() ;
+  }
+  
+}
diff --git a/src/ParaMEDMEM/InterpKernelDEC.hxx b/src/ParaMEDMEM/InterpKernelDEC.hxx
new file mode 100644 (file)
index 0000000..54b8819
--- /dev/null
@@ -0,0 +1,57 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __INTERPKERNELDEC_HXX__
+#define __INTERPKERNELDEC_HXX__
+
+#include "DisjointDEC.hxx"
+#include "MxN_Mapping.hxx"
+#include "InterpolationOptions.hxx"
+
+namespace ParaMEDMEM
+{
+  class InterpolationMatrix;
+
+  class InterpKernelDEC : public DisjointDEC, public INTERP_KERNEL::InterpolationOptions
+  {
+  public:  
+    InterpKernelDEC();
+    InterpKernelDEC(ProcessorGroup& source_group, ProcessorGroup& target_group);
+    InterpKernelDEC(const std::set<int>& src_ids, const std::set<int>& trg_ids,
+                    const MPI_Comm& world_comm=MPI_COMM_WORLD);
+    virtual ~InterpKernelDEC();
+    void synchronize();
+    void recvData();
+    void recvData(double time);
+    void sendData();
+    void sendData(double time , double deltatime);
+    void prepareSourceDE() { }
+    void prepareTargetDE() { }
+  private :
+    //Number of distant points to be located locally 
+    int _nb_distant_points;
+    //coordinates of distant points 
+    const double* _distant_coords;
+    //local element number containing the distant points  
+    const int* _distant_locations; 
+    InterpolationMatrix* _interpolation_matrix;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/InterpolationMatrix.cxx b/src/ParaMEDMEM/InterpolationMatrix.cxx
new file mode 100644 (file)
index 0000000..8b8c50f
--- /dev/null
@@ -0,0 +1,963 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ProcessorGroup.hxx"
+#include "MxN_Mapping.hxx"
+#include "InterpolationMatrix.hxx"
+#include "TranslationRotationMatrix.hxx"
+#include "Interpolation.hxx"
+#include "Interpolation1D.txx"
+#include "Interpolation2DCurve.hxx"
+#include "Interpolation2D.txx"
+#include "Interpolation3DSurf.hxx"
+#include "Interpolation3D.txx"
+#include "Interpolation3D2D.txx"
+#include "Interpolation2D1D.txx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingNormalizedUnstructuredMesh.txx"
+#include "InterpolationOptions.hxx"
+#include "NormalizedUnstructuredMesh.hxx"
+#include "ElementLocator.hxx"
+
+#include <algorithm>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+
+  /**!
+     Creates an empty matrix structure linking two distributed supports.
+     The method must be called by all processors belonging to source
+     and target groups.
+     \param source_support local support
+     \param source_group processor group containing the local processors
+     \param target_group processor group containing the distant processors
+     \param method interpolation method
+  */
+  InterpolationMatrix::InterpolationMatrix(const ParaMEDMEM::ParaFIELD *source_field, 
+                                           const ProcessorGroup& source_group,
+                                           const ProcessorGroup& target_group,
+                                           const DECOptions& dec_options,
+                                           const INTERP_KERNEL::InterpolationOptions& interp_options):
+    INTERP_KERNEL::InterpolationOptions(interp_options),
+    DECOptions(dec_options),
+    _source_field(source_field),
+    _source_support(source_field->getSupport()->getCellMesh()),
+    _mapping(source_group, target_group, dec_options),
+    _source_group(source_group),
+    _target_group(target_group)
+  {
+    int nbelems = source_field->getField()->getNumberOfTuples();
+    _row_offsets.resize(nbelems+1);
+    _coeffs.resize(nbelems);
+    _target_volume.resize(nbelems);
+  }
+
+  InterpolationMatrix::~InterpolationMatrix()
+  {
+  }
+
+
+  /*!
+     \brief Adds the contribution of a distant subdomain to the
+     interpolation matrix.
+
+     For each row of the matrix, elements are added as
+     (column, coeff) pairs in the _coeffs array. The column number refers
+     to an element on the target side via the _col_offsets array,
+     which is made of a series of (iproc, ielem) pairs.
+     The number of elements per row is stored in the _row_offsets array.
+
+     \param distant_support local representation of the distant subdomain
+     \param iproc_distant id of the distant subdomain (in the distant group)
+     \param distant_elems mapping between the local representation of
+     the subdomain and the actual elem ids on the distant subdomain
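+
+     An illustrative sketch of the storage (hypothetical values, not taken
+     from an actual run): if a source row intersects distant elements
+     (iproc=1, ielem=4) and (iproc=1, ielem=7) with coefficients 0.3 and 0.7,
+     then _col_offsets maps (1,4)->0 and (1,7)->1, the row of _coeffs holds
+     {(0,0.3),(1,0.7)} and the corresponding _row_offsets entry grows by 2.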
+   */
+  void InterpolationMatrix::addContribution ( MEDCouplingPointSet& distant_support,
+                                              int iproc_distant,
+                                              const int* distant_elems,
+                                              const std::string& srcMeth,
+                                              const std::string& targetMeth)
+  {
+    std::string interpMethod(targetMeth);
+    interpMethod+=srcMeth;
+    //creating the interpolator structure
+    vector<map<int,double> > surfaces;
+    //computation of the intersection volumes between source and target elements
+    MEDCouplingUMesh *distant_supportC=dynamic_cast<MEDCouplingUMesh *>(&distant_support);
+    MEDCouplingUMesh *source_supportC=dynamic_cast<MEDCouplingUMesh *>(_source_support);
+    if ( distant_support.getMeshDimension() == -1 )
+      {
+        if(source_supportC->getMeshDimension()==2 && source_supportC->getSpaceDimension()==2)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<2,2> source_mesh_wrapper(source_supportC);
+            INTERP_KERNEL::Interpolation2D interpolation(*this);
+            interpolation.fromIntegralUniform(source_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(source_supportC->getMeshDimension()==3 && source_supportC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,3> source_mesh_wrapper(source_supportC);
+            INTERP_KERNEL::Interpolation3D interpolation(*this);
+            interpolation.fromIntegralUniform(source_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(source_supportC->getMeshDimension()==2 && source_supportC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,2> source_mesh_wrapper(source_supportC);
+            INTERP_KERNEL::Interpolation3DSurf interpolation(*this);
+            interpolation.fromIntegralUniform(source_mesh_wrapper,surfaces,srcMeth);
+          }
+        else
+          throw INTERP_KERNEL::Exception("No para interpolation available for the given mesh and space dimension of source mesh to -1D targetMesh");
+      }
+    else if ( source_supportC->getMeshDimension() == -1 )
+      {
+        if(distant_supportC->getMeshDimension()==2 && distant_supportC->getSpaceDimension()==2)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<2,2> distant_mesh_wrapper(distant_supportC);
+            INTERP_KERNEL::Interpolation2D interpolation(*this);
+            interpolation.toIntegralUniform(distant_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(distant_supportC->getMeshDimension()==3 && distant_supportC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,3> distant_mesh_wrapper(distant_supportC);
+            INTERP_KERNEL::Interpolation3D interpolation(*this);
+            interpolation.toIntegralUniform(distant_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(distant_supportC->getMeshDimension()==2 && distant_supportC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,2> distant_mesh_wrapper(distant_supportC);
+            INTERP_KERNEL::Interpolation3DSurf interpolation(*this);
+            interpolation.toIntegralUniform(distant_mesh_wrapper,surfaces,srcMeth);
+          }
+        else
+          throw INTERP_KERNEL::Exception("No para interpolation available for the given mesh and space dimension of distant mesh to -1D sourceMesh");
+      }
+    else if ( distant_support.getMeshDimension() == 2
+              && _source_support->getMeshDimension() == 3
+              && distant_support.getSpaceDimension() == 3 && _source_support->getSpaceDimension() == 3)
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(source_supportC);
+        INTERP_KERNEL::Interpolation3D2D interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( distant_support.getMeshDimension() == 1
+              && _source_support->getMeshDimension() == 2
+              && distant_support.getSpaceDimension() == 2 && _source_support->getSpaceDimension() == 2)
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,2> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<2,2> source_wrapper(source_supportC);
+        INTERP_KERNEL::Interpolation2D1D interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( distant_support.getMeshDimension() == 3
+              && _source_support->getMeshDimension() == 1
+              && distant_support.getSpaceDimension() == 3 && _source_support->getSpaceDimension() == 3)
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(source_supportC);
+        INTERP_KERNEL::Interpolation3D interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if (distant_support.getMeshDimension() != _source_support->getMeshDimension())
+      {
+        throw INTERP_KERNEL::Exception("local and distant meshes do not have the same space and mesh dimensions");
+      }
+    else if( distant_support.getMeshDimension() == 1
+             && distant_support.getSpaceDimension() == 1 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<1,1> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<1,1> source_wrapper(source_supportC);
+
+        INTERP_KERNEL::Interpolation1D interpolation(*this);
+        interpolation.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if( distant_support.getMeshDimension() == 1
+             && distant_support.getSpaceDimension() == 2 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,1> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<2,1> source_wrapper(source_supportC);
+
+        INTERP_KERNEL::Interpolation2DCurve interpolation(*this);
+        interpolation.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( distant_support.getMeshDimension() == 2
+              && distant_support.getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,2> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<3,2> source_wrapper(source_supportC);
+
+        INTERP_KERNEL::Interpolation3DSurf interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( distant_support.getMeshDimension() == 2
+              && distant_support.getSpaceDimension() == 2)
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,2> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<2,2> source_wrapper(source_supportC);
+
+        INTERP_KERNEL::Interpolation2D interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( distant_support.getMeshDimension() == 3
+              && distant_support.getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(distant_supportC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(source_supportC);
+
+        INTERP_KERNEL::Interpolation3D interpolator (*this);
+        interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else
+      {
+        throw INTERP_KERNEL::Exception("no interpolator exists for these mesh and space dimensions ");
+      }
+    bool needTargetSurf=isSurfaceComputationNeeded(targetMeth);
+
+    MEDCouplingFieldDouble *target_triangle_surf=0;
+    if(needTargetSurf)
+      target_triangle_surf = distant_support.getMeasureField(getMeasureAbsStatus());
+    fillDSFromVM(iproc_distant,distant_elems,surfaces,target_triangle_surf);
+
+    if(needTargetSurf)
+      target_triangle_surf->decrRef();
+  }
+
+  void InterpolationMatrix::fillDSFromVM(int iproc_distant, const int* distant_elems, const std::vector< std::map<int,double> >& values, MEDCouplingFieldDouble *surf)
+  {
+    //loop over the elements to build the interpolation
+    //matrix structures
+    int source_size=values.size();
+    for (int ielem=0; ielem < source_size; ielem++) 
+      {
+        _row_offsets[ielem+1] += values[ielem].size();
+        for(map<int,double>::const_iterator iter=values[ielem].begin();iter!=values[ielem].end();iter++)
+          {
+            int localId;
+            if(distant_elems)
+              localId=distant_elems[iter->first];
+            else
+              localId=iter->first;
+            //locating the (iproc, itriangle) pair in the list of columns
+            map<pair<int,int>,int >::iterator iter2 = _col_offsets.find(make_pair(iproc_distant,localId));
+            int col_id;
+
+            if (iter2 == _col_offsets.end())
+              {
+                //(iproc, itriangle) is not registered in the list
+                //of distant elements
+                col_id =_col_offsets.size();
+                _col_offsets.insert(make_pair(make_pair(iproc_distant,localId),col_id));
+                _mapping.addElementFromSource(iproc_distant,localId);
+              }
+            else 
+              {
+                col_id = iter2->second;
+              }
+            //the non zero coefficient is stored 
+            //ielem is the row,
+            //col_id is the number of the column
+            //iter->second is the value of the coefficient
+            if(surf)
+              {
+                double surface = surf->getIJ(iter->first,0);
+                _target_volume[ielem].push_back(surface);
+              }
+            _coeffs[ielem].push_back(make_pair(col_id,iter->second));
+          }
+      }
+  }
+
+  void InterpolationMatrix::serializeMe(std::vector< std::vector< std::map<int,double> > >& data1, std::vector<int>& data2) const
+  {
+    data1.clear();
+    data2.clear();
+    const std::vector<std::pair<int,int> >& sendingIds=_mapping.getSendingIds();
+    std::set<int> procsS;
+    for(std::vector<std::pair<int,int> >::const_iterator iter1=sendingIds.begin();iter1!=sendingIds.end();iter1++)
+      procsS.insert((*iter1).first);
+    data1.resize(procsS.size());
+    data2.resize(procsS.size());
+    std::copy(procsS.begin(),procsS.end(),data2.begin());
+    std::map<int,int> fastProcAcc;
+    int id=0;
+    for(std::set<int>::const_iterator iter2=procsS.begin();iter2!=procsS.end();iter2++,id++)
+      fastProcAcc[*iter2]=id;
+    int nbOfSrcElt=_coeffs.size();
+    for(std::vector< std::vector< std::map<int,double> > >::iterator iter3=data1.begin();iter3!=data1.end();iter3++)
+      (*iter3).resize(nbOfSrcElt);
+    id=0;
+    for(std::vector< std::vector< std::pair<int,double> > >::const_iterator iter4=_coeffs.begin();iter4!=_coeffs.end();iter4++,id++)
+      {
+        for(std::vector< std::pair<int,double> >::const_iterator iter5=(*iter4).begin();iter5!=(*iter4).end();iter5++)
+          {
+            const std::pair<int,int>& elt=sendingIds[(*iter5).first];
+            data1[fastProcAcc[elt.first]][id][elt.second]=(*iter5).second;
+          }
+      }
+  }
+
+  void InterpolationMatrix::initialize()
+  {
+    int lgth=_coeffs.size();
+    _row_offsets.clear(); _row_offsets.resize(lgth+1);
+    _coeffs.clear(); _coeffs.resize(lgth);
+    _target_volume.clear(); _target_volume.resize(lgth);
+    _col_offsets.clear();
+    _mapping.initialize();
+  }
+
+  void InterpolationMatrix::finishContributionW(ElementLocator& elementLocator)
+  {
+    NatureOfField nature=elementLocator.getLocalNature();
+    switch(nature)
+      {
+      case ConservativeVolumic:
+        computeConservVolDenoW(elementLocator);
+        break;
+      case Integral:
+        {
+          if(!elementLocator.isM1DCorr())
+            computeIntegralDenoW(elementLocator);
+          else
+            computeGlobConstraintDenoW(elementLocator);
+          break;
+        }
+      case IntegralGlobConstraint:
+        computeGlobConstraintDenoW(elementLocator);
+        break;
+      case RevIntegral:
+        {
+          if(!elementLocator.isM1DCorr())
+            computeRevIntegralDenoW(elementLocator);
+          else
+            computeConservVolDenoW(elementLocator);
+          break;
+        }
+      default:
+        throw INTERP_KERNEL::Exception("Not recognized nature of field. Change nature of Field.");
+        break;
+      }
+  }
+
+  void InterpolationMatrix::finishContributionL(ElementLocator& elementLocator)
+  {
+    NatureOfField nature=elementLocator.getLocalNature();
+    switch(nature)
+      {
+      case ConservativeVolumic:
+        computeConservVolDenoL(elementLocator);
+        break;
+      case Integral:
+        {
+          if(!elementLocator.isM1DCorr())
+            computeIntegralDenoL(elementLocator);
+          else
+            computeConservVolDenoL(elementLocator);
+          break;
+        }
+      case IntegralGlobConstraint:
+        //this is not a bug doing like ConservativeVolumic
+        computeConservVolDenoL(elementLocator);
+        break;
+      case RevIntegral:
+        {
+          if(!elementLocator.isM1DCorr())
+            computeRevIntegralDenoL(elementLocator);
+          else
+            computeConservVolDenoL(elementLocator);
+          break;
+        }
+      default:
+        throw INTERP_KERNEL::Exception("Not recognized nature of field. Change nature of Field.");
+        break;
+      }
+  }
+  
+  void InterpolationMatrix::computeConservVolDenoW(ElementLocator& elementLocator)
+  {
+    computeGlobalColSum(_deno_reverse_multiply);
+    computeGlobalRowSum(elementLocator,_deno_multiply,_deno_reverse_multiply);
+  }
+  
+  void InterpolationMatrix::computeConservVolDenoL(ElementLocator& elementLocator)
+  {
+    int pol1=elementLocator.sendPolicyToWorkingSideL();
+    if(pol1==ElementLocator::NO_POST_TREATMENT_POLICY)
+      {
+        elementLocator.recvFromWorkingSideL();
+        elementLocator.sendToWorkingSideL();
+      }
+    else if(pol1==ElementLocator::CUMULATIVE_POLICY)
+      {
+        //ask the lazy side to deduce the ids possibly missing on the working side and to send them back.
+        elementLocator.recvLocalIdsFromWorkingSideL();
+        elementLocator.sendCandidatesGlobalIdsToWorkingSideL();
+        elementLocator.recvCandidatesForAddElementsL();
+        elementLocator.sendAddElementsToWorkingSideL();
+        //The working side has updated its possibly missing ids and now updates its global ids with the lazy side procs' contribution.
+        elementLocator.recvLocalIdsFromWorkingSideL();
+        elementLocator.sendGlobalIdsToWorkingSideL();
+        //like no post treatment
+        elementLocator.recvFromWorkingSideL();
+        elementLocator.sendToWorkingSideL();
+      }
+    else
+      throw INTERP_KERNEL::Exception("Not managed policy detected on lazy side : not implemented !");
+  }
+
+  void InterpolationMatrix::computeIntegralDenoW(ElementLocator& elementLocator)
+  {
+    MEDCouplingFieldDouble *source_triangle_surf = _source_support->getMeasureField(getMeasureAbsStatus());
+    _deno_multiply.resize(_coeffs.size());
+    vector<vector<double> >::iterator iter6=_deno_multiply.begin();
+    const double *values=source_triangle_surf->getArray()->getConstPointer();
+    for(vector<vector<pair<int,double> > >::const_iterator iter4=_coeffs.begin();iter4!=_coeffs.end();iter4++,iter6++,values++)
+      {
+        (*iter6).resize((*iter4).size());
+        std::fill((*iter6).begin(),(*iter6).end(),*values);
+      }
+    source_triangle_surf->decrRef();
+    _deno_reverse_multiply=_target_volume;
+  }
+
+  void InterpolationMatrix::computeRevIntegralDenoW(ElementLocator& elementLocator)
+  {
+    _deno_multiply=_target_volume;
+    MEDCouplingFieldDouble *source_triangle_surf = _source_support->getMeasureField(getMeasureAbsStatus());
+    _deno_reverse_multiply.resize(_coeffs.size());
+    vector<vector<double> >::iterator iter6=_deno_reverse_multiply.begin();
+    const double *values=source_triangle_surf->getArray()->getConstPointer();
+    for(vector<vector<pair<int,double> > >::const_iterator iter4=_coeffs.begin();iter4!=_coeffs.end();iter4++,iter6++,values++)
+      {
+        (*iter6).resize((*iter4).size());
+        std::fill((*iter6).begin(),(*iter6).end(),*values);
+      }
+    source_triangle_surf->decrRef();
+  }
+  
+  /*!
+   * Nothing to do because surface computation is on working side.
+   */
+  void InterpolationMatrix::computeIntegralDenoL(ElementLocator& elementLocator)
+  {
+  }
+
+  /*!
+   * Nothing to do because surface computation is on working side.
+   */
+  void InterpolationMatrix::computeRevIntegralDenoL(ElementLocator& elementLocator)
+  {
+  }
+
+
+  void InterpolationMatrix::computeGlobConstraintDenoW(ElementLocator& elementLocator)
+  {
+    computeGlobalColSum(_deno_multiply);
+    computeGlobalRowSum(elementLocator,_deno_reverse_multiply,_deno_multiply);
+  }
+
+  void InterpolationMatrix::computeGlobalRowSum(ElementLocator& elementLocator, std::vector<std::vector<double> >& denoStrorage, std::vector<std::vector<double> >& denoStrorageInv)
+  {
+    //stores the ids in distant procs, sorted by lazy procs connected with
+    vector< vector<int> > rowsPartialSumI;
+    //stores, for each lazy proc connected with, whether global info is available and, if so, the policy
+    vector<int> policyPartial;
+    //stores the corresponding values.
+    vector< vector<double> > rowsPartialSumD;
+    elementLocator.recvPolicyFromLazySideW(policyPartial);
+    int pol1=mergePolicies(policyPartial);
+    if(pol1==ElementLocator::NO_POST_TREATMENT_POLICY)
+      {
+        computeLocalRowSum(elementLocator.getDistantProcIds(),rowsPartialSumI,rowsPartialSumD);
+        elementLocator.sendSumToLazySideW(rowsPartialSumI,rowsPartialSumD);
+        elementLocator.recvSumFromLazySideW(rowsPartialSumD);
+      }
+    else if(pol1==ElementLocator::CUMULATIVE_POLICY)
+      {
+        //updateWithNewAdditionnalElements(addingElements);
+        //stores, for each lazy proc connected with, the ids in global mode if available (regarding policyPartial).
+        //This array has exactly the size of rowsPartialSumI when policyPartial holds CUMULATIVE_POLICY for each of them.
+        vector< vector<int> > globalIdsPartial;
+        computeLocalRowSum(elementLocator.getDistantProcIds(),rowsPartialSumI,rowsPartialSumD);
+        elementLocator.sendLocalIdsToLazyProcsW(rowsPartialSumI);
+        elementLocator.recvCandidatesGlobalIdsFromLazyProcsW(globalIdsPartial);
+        std::vector< std::vector<int> > addingElements;
+        findAdditionnalElements(elementLocator,addingElements,rowsPartialSumI,globalIdsPartial);
+        addGhostElements(elementLocator.getDistantProcIds(),addingElements);
+        rowsPartialSumI.clear();
+        globalIdsPartial.clear();
+        computeLocalRowSum(elementLocator.getDistantProcIds(),rowsPartialSumI,rowsPartialSumD);
+        elementLocator.sendLocalIdsToLazyProcsW(rowsPartialSumI);
+        elementLocator.recvGlobalIdsFromLazyProcsW(rowsPartialSumI,globalIdsPartial);
+        //
+        elementLocator.sendSumToLazySideW(rowsPartialSumI,rowsPartialSumD);
+        elementLocator.recvSumFromLazySideW(rowsPartialSumD);
+        mergeRowSum3(globalIdsPartial,rowsPartialSumD);
+        mergeCoeffs(elementLocator.getDistantProcIds(),rowsPartialSumI,globalIdsPartial,denoStrorageInv);
+      }
+    else
+      throw INTERP_KERNEL::Exception("Not managed policy detected : not implemented !");
+    divideByGlobalRowSum(elementLocator.getDistantProcIds(),rowsPartialSumI,rowsPartialSumD,denoStrorage);
+  }
+
+  /*!
+   * @param distantProcs input parameter that indicates which lazy procs are concerned.
+   * @param resPerProcI output parameter that must be cleared before calling this method. The size of the 1st dimension is equal to the size of 'distantProcs'.
+   *                    It contains the element ids (2nd dimension) of the corresponding lazy proc.
+   * @param resPerProcD output parameter with the same format as 'resPerProcI'. It contains the corresponding sum values.
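+   *
+   * Illustrative layout (hypothetical values, not taken from a run): with
+   * distantProcs={2,5}, resPerProcI could be { {0,3}, {1} } and resPerProcD
+   * { {0.4,0.6}, {1.0} }, i.e. element ids 0 and 3 of lazy proc 2 carry the
+   * partial sums 0.4 and 0.6.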
+   */
+  void InterpolationMatrix::computeLocalRowSum(const std::vector<int>& distantProcs, std::vector<std::vector<int> >& resPerProcI,
+                                               std::vector<std::vector<double> >& resPerProcD) const
+  {
+    resPerProcI.resize(distantProcs.size());
+    resPerProcD.resize(distantProcs.size());
+    std::vector<double> res(_col_offsets.size());
+    for(vector<vector<pair<int,double> > >::const_iterator iter=_coeffs.begin();iter!=_coeffs.end();iter++)
+      for(vector<pair<int,double> >::const_iterator iter3=(*iter).begin();iter3!=(*iter).end();iter3++)
+        res[(*iter3).first]+=(*iter3).second;
+    set<int> procsSet;
+    int id=-1;
+    const vector<std::pair<int,int> >& mapping=_mapping.getSendingIds();
+    for(vector<std::pair<int,int> >::const_iterator iter2=mapping.begin();iter2!=mapping.end();iter2++)
+      {
+        std::pair<set<int>::iterator,bool> isIns=procsSet.insert((*iter2).first);
+        if(isIns.second)
+          id=std::find(distantProcs.begin(),distantProcs.end(),(*iter2).first)-distantProcs.begin();
+        resPerProcI[id].push_back((*iter2).second);
+        resPerProcD[id].push_back(res[iter2-mapping.begin()]);
+      }
+  }
+
+  /*!
+   * This method is only usable when CUMULATIVE_POLICY is detected. It finds the element ids (typically nodes) on the lazy side that
+   * are not present in the columns of 'this' but that should be, regarding the cumulative merge of elements based on their global ids.
+   */
+  void InterpolationMatrix::findAdditionnalElements(ElementLocator& elementLocator, std::vector<std::vector<int> >& elementsToAdd,
+                                                    const std::vector<std::vector<int> >& resPerProcI, const std::vector<std::vector<int> >& globalIdsPartial)
+  {
+    std::set<int> globalIds;
+    int nbLazyProcs=globalIdsPartial.size();
+    for(int i=0;i<nbLazyProcs;i++)
+      globalIds.insert(globalIdsPartial[i].begin(),globalIdsPartial[i].end());
+    std::vector<int> tmp(globalIds.size());
+    std::copy(globalIds.begin(),globalIds.end(),tmp.begin());
+    globalIds.clear();
+    elementLocator.sendCandidatesForAddElementsW(tmp);
+    elementLocator.recvAddElementsFromLazyProcsW(elementsToAdd);
+  }
+
+  void InterpolationMatrix::addGhostElements(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& elementsToAdd)
+  {
+    std::vector< std::vector< std::map<int,double> > > data1;
+    std::vector<int> data2;
+    serializeMe(data1,data2);
+    initialize();
+    int nbOfDistProcs=distantProcs.size();
+    for(int i=0;i<nbOfDistProcs;i++)
+      {
+        int procId=distantProcs[i];
+        const std::vector<int>& eltsForThisProc=elementsToAdd[i];
+        if(!eltsForThisProc.empty())
+          {
+            std::vector<int>::iterator iter1=std::find(data2.begin(),data2.end(),procId);
+            std::map<int,double> *toFeed=0;
+            if(iter1!=data2.end())
+              {//to test
+                int rank=iter1-data2.begin();
+                toFeed=&(data1[rank].back());
+              }
+            else
+              {
+                iter1=std::lower_bound(data2.begin(),data2.end(),procId);
+                int rank=iter1-data2.begin();
+                data2.insert(iter1,procId);
+                std::vector< std::map<int,double> > tmp(data1.front().size());
+                data1.insert(data1.begin()+rank,tmp);
+                toFeed=&(data1[rank].back());
+              }
+            for(std::vector<int>::const_iterator iter2=eltsForThisProc.begin();iter2!=eltsForThisProc.end();iter2++)
+              (*toFeed)[*iter2]=0.;
+          }
+      }
+    //
+    nbOfDistProcs=data2.size();
+    for(int j=0;j<nbOfDistProcs;j++)
+      fillDSFromVM(data2[j],0,data1[j],0);
+  }
+
+  int InterpolationMatrix::mergePolicies(const std::vector<int>& policyPartial)
+  {
+    if(policyPartial.empty())
+      return ElementLocator::NO_POST_TREATMENT_POLICY;
+    int ref=policyPartial[0];
+     std::vector<int>::const_iterator iter1=std::find_if(policyPartial.begin(),policyPartial.end(),std::bind2nd(std::not_equal_to<int>(),ref));
+    if(iter1!=policyPartial.end())
+      {
+        std::ostringstream msg; msg << "Incompatible policies among lazy procs : proc # " << iter1-policyPartial.begin();
+        throw INTERP_KERNEL::Exception(msg.str().c_str());
+      }
+    return ref;
+  }
+
+  /*!
+   * This method introduces global ids aspects in the computed 'rowsPartialSumD'.
+   * As a precondition rowsPartialSumD.size()==policyPartial.size()==globalIdsPartial.size(). For each i in [0;rowsPartialSumD.size()) rowsPartialSumD[i].size()==globalIdsPartial[i].size()
+   * @param rowsPartialSumD : input parameter, partial row sum computed for each lazy proc connected with.
+   * @param globalIdsPartial : input parameter, the global numbering of elements connected with.
+   * @param globalIdsLazySideInteraction : output parameter, constituted from all global ids of lazy procs connected with.
+   * @param sumCorresponding : output parameter, relative to 'globalIdsLazySideInteraction'
+   */
+  void InterpolationMatrix::mergeRowSum(const std::vector< std::vector<double> >& rowsPartialSumD, const std::vector< std::vector<int> >& globalIdsPartial,
+                                        std::vector<int>& globalIdsLazySideInteraction, std::vector<double>& sumCorresponding)
+  {
+    std::map<int,double> sumToReturn;
+    int nbLazyProcs=rowsPartialSumD.size();
+    for(int i=0;i<nbLazyProcs;i++)
+      {
+        const std::vector<double>& rowSumOfP=rowsPartialSumD[i];
+        const std::vector<int>& globalIdsOfP=globalIdsPartial[i];
+        std::vector<double>::const_iterator iter1=rowSumOfP.begin();
+        std::vector<int>::const_iterator iter2=globalIdsOfP.begin();
+        for(;iter1!=rowSumOfP.end();iter1++,iter2++)
+          sumToReturn[*iter2]+=*iter1;
+      }
+    //
+    int lgth=sumToReturn.size();
+    globalIdsLazySideInteraction.resize(lgth);
+    sumCorresponding.resize(lgth);
+    std::vector<int>::iterator iter3=globalIdsLazySideInteraction.begin();
+    std::vector<double>::iterator iter4=sumCorresponding.begin();
+    for(std::map<int,double>::const_iterator iter5=sumToReturn.begin();iter5!=sumToReturn.end();iter5++,iter3++,iter4++)
+      {
+        *iter3=(*iter5).first;
+        *iter4=(*iter5).second;
+      }
+  }
+
+  /*!
+   * This method simply reorganizes the result contained in 'sumCorresponding', computed by the lazy side, into 'rowsPartialSumD' with the help of 'globalIdsPartial' and 'globalIdsLazySideInteraction'.
+   *
+   * @param globalIdsPartial : in parameter, global ids sorted by lazy procs
+   * @param rowsPartialSumD : in/out parameter, with exactly the same size as 'globalIdsPartial'
+   * @param globalIdsLazySideInteraction : in parameter that represents ALL the global ids of every lazy procs in interaction
+   * @param sumCorresponding : in parameter with same size as 'globalIdsLazySideInteraction' that stores the corresponding sum of 'globalIdsLazySideInteraction'
+   */
+  void InterpolationMatrix::mergeRowSum2(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD,
+                                         const std::vector<int>& globalIdsLazySideInteraction, const std::vector<double>& sumCorresponding)
+  {
+    std::map<int,double> acc;
+    std::vector<int>::const_iterator iter1=globalIdsLazySideInteraction.begin();
+    std::vector<double>::const_iterator iter2=sumCorresponding.begin();
+    for(;iter1!=globalIdsLazySideInteraction.end();iter1++,iter2++)
+      acc[*iter1]=*iter2;
+    //
+    int nbLazyProcs=globalIdsPartial.size();
+    for(int i=0;i<nbLazyProcs;i++)
+      {
+        const std::vector<int>& tmp1=globalIdsPartial[i];
+        std::vector<double>& tmp2=rowsPartialSumD[i];
+        std::vector<int>::const_iterator iter3=tmp1.begin();
+        std::vector<double>::iterator iter4=tmp2.begin();
+        for(;iter3!=tmp1.end();iter3++,iter4++)
+          *iter4=acc[*iter3];
+      }
+  }
+  
+  void InterpolationMatrix::mergeRowSum3(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD)
+  {
+    std::map<int,double> sum;
+    std::vector< std::vector<int> >::const_iterator iter1=globalIdsPartial.begin();
+    std::vector< std::vector<double> >::iterator iter2=rowsPartialSumD.begin();
+    for(;iter1!=globalIdsPartial.end();iter1++,iter2++)
+      {
+        std::vector<int>::const_iterator iter3=(*iter1).begin();
+        std::vector<double>::const_iterator iter4=(*iter2).begin();
+        for(;iter3!=(*iter1).end();iter3++,iter4++)
+          sum[*iter3]+=*iter4;
+      }
+    iter2=rowsPartialSumD.begin();
+    for(iter1=globalIdsPartial.begin();iter1!=globalIdsPartial.end();iter1++,iter2++)
+      {
+        std::vector<int>::const_iterator iter3=(*iter1).begin();
+        std::vector<double>::iterator iter4=(*iter2).begin();
+        for(;iter3!=(*iter1).end();iter3++,iter4++)
+          *iter4=sum[*iter3];
+      }
+  }
+
+  /*!
+   * This method updates the this->_coeffs attribute in order to take into account nodes that are hidden (because they have the same global number) in the _coeffs array.
+   * If two distant element ids in this->_coeffs have the same global id, the value of each is replaced by the sum of the two.
+   * @param procsInInteraction input parameter : specifies the absolute procIds of the distant lazy procs in interaction with
+   * @param rowsPartialSumI input parameter : local ids of distant lazy procs elements in interaction with
+   * @param globalIdsPartial input parameter : global ids of distant lazy procs elements in interaction with
+   */
+  void InterpolationMatrix::mergeCoeffs(const std::vector<int>& procsInInteraction, const std::vector< std::vector<int> >& rowsPartialSumI,
+                                        const std::vector<std::vector<int> >& globalIdsPartial, std::vector<std::vector<double> >& denoStrorageInv)
+  {
+    //preparing fast access structures
+    std::map<int,int> procT;
+    int localProcId=0;
+    for(std::vector<int>::const_iterator iter1=procsInInteraction.begin();iter1!=procsInInteraction.end();iter1++,localProcId++)
+      procT[*iter1]=localProcId;
+    int size=procsInInteraction.size();
+    std::vector<std::map<int,int> > localToGlobal(size);
+    for(int i=0;i<size;i++)
+      {
+        std::map<int,int>& myLocalToGlobal=localToGlobal[i];
+        const std::vector<int>& locals=rowsPartialSumI[i];
+        const std::vector<int>& globals=globalIdsPartial[i];
+        std::vector<int>::const_iterator iter3=locals.begin();
+        std::vector<int>::const_iterator iter4=globals.begin();
+        for(;iter3!=locals.end();iter3++,iter4++)
+          myLocalToGlobal[*iter3]=*iter4;
+      }
+    //
+    const vector<std::pair<int,int> >& mapping=_mapping.getSendingIds();
+    std::map<int,double> globalIdVal;
+    //accumulate for same global id on lazy part.
+    for(vector<vector<pair<int,double> > >::iterator iter1=_coeffs.begin();iter1!=_coeffs.end();iter1++)
+      for(vector<pair<int,double> >::iterator iter2=(*iter1).begin();iter2!=(*iter1).end();iter2++)
+        {
+          const std::pair<int,int>& distantLocalLazyId=mapping[(*iter2).first];
+          int localLazyProcId=procT[distantLocalLazyId.first];
+          int globalDistantLazyId=localToGlobal[localLazyProcId][distantLocalLazyId.second];
+          globalIdVal[globalDistantLazyId]+=(*iter2).second;
+        }
+    //perform merge
+    std::vector<std::vector<double> >::iterator iter3=denoStrorageInv.begin();
+    for(vector<vector<pair<int,double> > >::iterator iter1=_coeffs.begin();iter1!=_coeffs.end();iter1++,iter3++)
+      {
+        double val=(*iter3).back();
+        (*iter3).resize((*iter1).size());
+        std::vector<double>::iterator iter4=(*iter3).begin();
+        for(vector<pair<int,double> >::iterator iter2=(*iter1).begin();iter2!=(*iter1).end();iter2++,iter4++)
+          {
+            const std::pair<int,int>& distantLocalLazyId=mapping[(*iter2).first];
+            int localLazyProcId=procT[distantLocalLazyId.first];
+            int globalDistantLazyId=localToGlobal[localLazyProcId][distantLocalLazyId.second];
+            double newVal=globalIdVal[globalDistantLazyId];
+            if((*iter2).second!=0.)
+              (*iter4)=val*newVal/(*iter2).second;
+            else
+              (*iter4)=std::numeric_limits<double>::max();
+            (*iter2).second=newVal;
+          }
+      }
+  }
+
+  void InterpolationMatrix::divideByGlobalRowSum(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& resPerProcI,
+                                                 const std::vector<std::vector<double> >& resPerProcD, std::vector<std::vector<double> >& deno)
+  {
+    map<int,double> fastSums;
+    int procId=0;
+    for(vector<int>::const_iterator iter1=distantProcs.begin();iter1!=distantProcs.end();iter1++,procId++)
+      {
+        const std::vector<int>& currentProcI=resPerProcI[procId];
+        const std::vector<double>& currentProcD=resPerProcD[procId];
+        vector<double>::const_iterator iter3=currentProcD.begin();
+        for(vector<int>::const_iterator iter2=currentProcI.begin();iter2!=currentProcI.end();iter2++,iter3++)
+          fastSums[_col_offsets[std::make_pair(*iter1,*iter2)]]=*iter3;
+      }
+    deno.resize(_coeffs.size());
+    vector<vector<double> >::iterator iter6=deno.begin();
+    for(vector<vector<pair<int,double> > >::const_iterator iter4=_coeffs.begin();iter4!=_coeffs.end();iter4++,iter6++)
+      {
+        (*iter6).resize((*iter4).size());
+        vector<double>::iterator iter7=(*iter6).begin();
+        for(vector<pair<int,double> >::const_iterator iter5=(*iter4).begin();iter5!=(*iter4).end();iter5++,iter7++)
+          *iter7=fastSums[(*iter5).first];
+      }
+  }
+
+  void InterpolationMatrix::computeGlobalColSum(std::vector<std::vector<double> >& denoStrorage)
+  {
+    denoStrorage.resize(_coeffs.size());
+    vector<vector<double> >::iterator iter2=denoStrorage.begin();
+    for(vector<vector<pair<int,double> > >::const_iterator iter1=_coeffs.begin();iter1!=_coeffs.end();iter1++,iter2++)
+      {
+        (*iter2).resize((*iter1).size());
+        double sumOfCurrentRow=0.;
+        for(vector<pair<int,double> >::const_iterator iter3=(*iter1).begin();iter3!=(*iter1).end();iter3++)
+          sumOfCurrentRow+=(*iter3).second;
+        std::fill((*iter2).begin(),(*iter2).end(),sumOfCurrentRow);
+      }
+  }
+
+  void InterpolationMatrix::resizeGlobalColSum(std::vector<std::vector<double> >& denoStrorage)
+  {
+    vector<vector<double> >::iterator iter2=denoStrorage.begin();
+    for(vector<vector<pair<int,double> > >::const_iterator iter1=_coeffs.begin();iter1!=_coeffs.end();iter1++,iter2++)
+      {
+        double val=(*iter2).back();
+        (*iter2).resize((*iter1).size());
+        std::fill((*iter2).begin(),(*iter2).end(),val);
+      }
+  }
+
+
+  /**! The call to this method updates the arrays on the target side
+     so that they know what amount of data to expect from each processor.
+     That call makes actual interpolations via the multiply method
+     available.
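+
+     Internally the per-row element counts accumulated in _row_offsets are
+     turned into a prefix sum; e.g. hypothetical counts [0,2,1,3] become
+     offsets [0,2,3,6], so that row i owns _row_offsets[i+1]-_row_offsets[i]
+     coefficients in _coeffs[i].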
+     */
+  void InterpolationMatrix::prepare()
+  {
+    int nbelems = _source_field->getField()->getNumberOfTuples();
+    for (int ielem=0; ielem < nbelems; ielem++)
+      {
+        _row_offsets[ielem+1]+=_row_offsets[ielem];
+      }
+    _mapping.prepareSendRecv();
+  }
+
+
+
+  /*!
+     \brief performs t=Ws, where t is the target field, s is the source field
+
+     This method must be called both on the working side
+     and on the idle side. On the working side, the vector T=VT^(-1).(W.S)
+     is computed and sent. On the idle side, no computation is done, but the
+     result from the working side is received and the field is updated.
+
+     \param field source field on processors involved on the source side,
+     target field on processors on the target side
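+
+     A tiny worked illustration (hypothetical numbers, one component):
+     with two source rows, _coeffs = { {(0,0.5),(1,0.5)}, {(1,1.0)} },
+     all _deno_multiply entries equal to 1 and source values s={2,4},
+     the values sent are target_value[0]=0.5*2=1 and
+     target_value[1]=0.5*2+1.0*4=5.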
+   */
+  void InterpolationMatrix::multiply(MEDCouplingFieldDouble& field) const
+  {
+    int nbcomp = field.getArray()->getNumberOfComponents();
+    vector<double> target_value(_col_offsets.size()* nbcomp,0.0);
+
+    //computing the matrix multiply on source side
+    if (_source_group.containsMyRank())
+      {
+        int nbrows = _coeffs.size();
+
+        // performing W.S
+        // W is the intersection matrix
+        // S is the source vector
+
+        for (int irow=0; irow<nbrows; irow++)
+          {
+            for (int icomp=0; icomp< nbcomp; icomp++)
+              {
+                double coeff_row = field.getIJ(irow,icomp);
+                for (int icol=_row_offsets[irow]; icol< _row_offsets[irow+1];icol++)
+                  {
+                    int colid= _coeffs[irow][icol-_row_offsets[irow]].first;
+                    double value = _coeffs[irow][icol-_row_offsets[irow]].second;
+                    double deno = _deno_multiply[irow][icol-_row_offsets[irow]];
+                    target_value[colid*nbcomp+icomp]+=value*coeff_row/deno;
+                  }
+              }
+          }
+      }
+
+    if (_target_group.containsMyRank())
+      {
+        int nbelems = field.getArray()->getNumberOfTuples() ;
+        double* value = const_cast<double*> (field.getArray()->getPointer());
+        for (int i=0; i<nbelems*nbcomp; i++)
+          {
+            value[i]=0.0;
+          }
+      }
+
+    //on source side : sending  T=VT^(-1).(W.S)
+    //on target side :: receiving T and storing it in field
+    _mapping.sendRecv(&target_value[0],field);
+  }
+  
+
+  /**!
+   \brief performs s=WT.t, where t is the target field, s is the source field,
+   and WT is the transpose of W
+
+     This method must be called both on the working side
+     and on the idle side. On the working side, the target vector T is
+     received and the vector S=VS^(-1).(WT.T) is computed to update
+     the field.
+     On the idle side, no computation is done, but the field is sent.
+
+     \param field source field on processors involved on the source side,
+     target field on processors on the target side
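+
+     A tiny worked illustration (hypothetical numbers, one component):
+     with _coeffs = { {(0,0.5),(1,0.5)}, {(1,1.0)} }, all
+     _deno_reverse_multiply entries equal to 1 and received target values
+     t={1,5}, the updated source values are s[0]=0.5*1+0.5*5=3 and
+     s[1]=1.0*5=5.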
+     */
+  void InterpolationMatrix::transposeMultiply(MEDCouplingFieldDouble& field) const
+  {
+    int nbcomp = field.getArray()->getNumberOfComponents();
+    vector<double> source_value(_col_offsets.size()* nbcomp,0.0);
+    _mapping.reverseSendRecv(&source_value[0],field);
+
+    //treatment of the transpose matrix multiply on the source side
+    if (_source_group.containsMyRank())
+      {
+        int nbrows    = _coeffs.size();
+        double *array = field.getArray()->getPointer() ;
+
+        // Initialization
+        std::fill(array, array+nbrows*nbcomp, 0.0) ;
+
+        //performing WT.T
+        //WT is W transpose
+        //T is the target vector
+        for (int irow = 0; irow < nbrows; irow++)
+          {
+            for (int icol = _row_offsets[irow]; icol < _row_offsets[irow+1]; icol++)
+              {
+                int colid    = _coeffs[irow][icol-_row_offsets[irow]].first;
+                double value = _coeffs[irow][icol-_row_offsets[irow]].second;
+                double deno = _deno_reverse_multiply[irow][icol-_row_offsets[irow]];
+                for (int icomp=0; icomp<nbcomp; icomp++)
+                  {
+                    double coeff_row = source_value[colid*nbcomp+icomp];
+                    array[irow*nbcomp+icomp] += value*coeff_row/deno;
+                  }
+              }
+          }
+      }
+  }
+
+  bool InterpolationMatrix::isSurfaceComputationNeeded(const std::string& method) const
+  {
+    return method=="P0";
+  }
+}
diff --git a/src/ParaMEDMEM/InterpolationMatrix.hxx b/src/ParaMEDMEM/InterpolationMatrix.hxx
new file mode 100644 (file)
index 0000000..97fc2a3
--- /dev/null
@@ -0,0 +1,109 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __INTERPOLATIONMATRIX_HXX__
+#define __INTERPOLATIONMATRIX_HXX__
+
+#include "MPIAccessDEC.hxx"
+#include "MxN_Mapping.hxx"
+#include "InterpolationOptions.hxx"
+#include "DECOptions.hxx"
+
+namespace ParaMEDMEM
+{
+  class ElementLocator;
+
+  /**! class InterpolationMatrix
+   This class enables the storage of an interpolation matrix Wij mapping
+   source field Sj to target field Ti via Ti=Vi^(-1).Wij.Sj.
+   The matrix is built and stored on the processors belonging to the source
+   group.
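+
+   A hypothetical usage sketch on the source side (para_field, src_grp,
+   trg_grp, dec_opts and interp_opts are placeholder names, and the
+   addContribution/denominator steps are elided):
+   \verbatim
+   InterpolationMatrix W(para_field, src_grp, trg_grp, dec_opts, interp_opts);
+   // ... addContribution() for each distant subdomain received ...
+   W.prepare();
+   W.multiply(*para_field->getField()); // computes and sends t = W.s
+   \endverbatim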
+   */
+  class InterpolationMatrix : public INTERP_KERNEL::InterpolationOptions,
+                              public DECOptions
+  {
+  public:
+    
+    InterpolationMatrix(const ParaMEDMEM::ParaFIELD *source_field, 
+                        const ProcessorGroup& source_group,
+                        const ProcessorGroup& target_group,
+                        const DECOptions& dec_opt,
+                        const InterpolationOptions& i_opt);
+
+    
+    virtual ~InterpolationMatrix();
+    void addContribution(MEDCouplingPointSet& distant_support, int iproc_distant,
+                         const int* distant_elems, const std::string& srcMeth, const std::string& targetMeth);
+    void finishContributionW(ElementLocator& elementLocator);
+    void finishContributionL(ElementLocator& elementLocator);
+    void multiply(MEDCouplingFieldDouble& field) const;
+    void transposeMultiply(MEDCouplingFieldDouble& field)const;
+    void prepare();
+    int getNbRows() const { return _row_offsets.size(); }
+    MPIAccessDEC* getAccessDEC() { return _mapping.getAccessDEC(); }
+  private:
+    void computeConservVolDenoW(ElementLocator& elementLocator);
+    void computeIntegralDenoW(ElementLocator& elementLocator);
+    void computeRevIntegralDenoW(ElementLocator& elementLocator);
+    void computeGlobConstraintDenoW(ElementLocator& elementLocator);
+    void computeConservVolDenoL(ElementLocator& elementLocator);
+    void computeIntegralDenoL(ElementLocator& elementLocator);
+    void computeRevIntegralDenoL(ElementLocator& elementLocator);
+    
+    void computeLocalColSum(std::vector<double>& res) const;
+    void computeLocalRowSum(const std::vector<int>& distantProcs, std::vector<std::vector<int> >& resPerProcI,
+                            std::vector<std::vector<double> >& resPerProcD) const;
+    void computeGlobalRowSum(ElementLocator& elementLocator, std::vector<std::vector<double> >& denoStrorage, std::vector<std::vector<double> >& denoStrorageInv);
+    void computeGlobalColSum(std::vector<std::vector<double> >& denoStrorage);
+    void resizeGlobalColSum(std::vector<std::vector<double> >& denoStrorage);
+    void fillDSFromVM(int iproc_distant, const int* distant_elems, const std::vector< std::map<int,double> >& values, MEDCouplingFieldDouble *surf);
+    void serializeMe(std::vector< std::vector< std::map<int,double> > >& data1, std::vector<int>& data2) const;
+    void initialize();
+    void findAdditionnalElements(ElementLocator& elementLocator, std::vector<std::vector<int> >& elementsToAdd,
+                                 const std::vector<std::vector<int> >& resPerProcI, const std::vector<std::vector<int> >& globalIdsPartial);
+    void addGhostElements(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& elementsToAdd);
+    int mergePolicies(const std::vector<int>& policyPartial);
+    void mergeRowSum(const std::vector< std::vector<double> >& rowsPartialSumD, const std::vector< std::vector<int> >& globalIdsPartial,
+                     std::vector<int>& globalIdsLazySideInteraction, std::vector<double>& sumCorresponding);
+    void mergeRowSum2(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD,
+                      const std::vector<int>& globalIdsLazySideInteraction, const std::vector<double>& sumCorresponding);
+    void mergeRowSum3(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD);
+    void mergeCoeffs(const std::vector<int>& procsInInteraction, const std::vector< std::vector<int> >& rowsPartialSumI,
+                     const std::vector<std::vector<int> >& globalIdsPartial, std::vector<std::vector<double> >& denoStrorageInv);
+    void divideByGlobalRowSum(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& resPerProcI,
+                              const std::vector<std::vector<double> >& resPerProcD, std::vector<std::vector<double> >& deno);
+  private:
+    bool isSurfaceComputationNeeded(const std::string& method) const;
+  private:
+    const ParaMEDMEM::ParaFIELD *_source_field;
+    std::vector<int> _row_offsets;
+    std::map<std::pair<int,int>, int > _col_offsets;
+    MEDCouplingPointSet *_source_support;
+    MxN_Mapping _mapping;
+    const ProcessorGroup& _source_group;
+    const ProcessorGroup& _target_group;
+    std::vector< std::vector<double> > _target_volume;
+    std::vector<std::vector<std::pair<int,double> > > _coeffs;
+    std::vector<std::vector<double> > _deno_multiply;
+    std::vector<std::vector<double> > _deno_reverse_multiply;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/LinearTimeInterpolator.cxx b/src/ParaMEDMEM/LinearTimeInterpolator.cxx
new file mode 100644 (file)
index 0000000..7952498
--- /dev/null
@@ -0,0 +1,54 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "LinearTimeInterpolator.hxx"
+
+using namespace std;
+
+namespace ParaMEDMEM
+{    
+
+  LinearTimeInterpolator::LinearTimeInterpolator( double InterpPrecision, int nStepBefore,
+                                                  int nStepAfter ):
+    TimeInterpolator( InterpPrecision, nStepBefore, nStepAfter )
+  {
+  }
+  
+  LinearTimeInterpolator::~LinearTimeInterpolator()
+  {
+  } 
+  
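+  /*!
+    Linear interpolation between the two buffered values:
+      result[i] = ( recvbuff0[0][i]*(time1-time) + recvbuff1[0][i]*(time-time0) ) / (time1-time0)
+    In the integer overload below, _interp_precision is added before the cast
+    so that it acts as a rounding guard.
+  */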
+  void LinearTimeInterpolator::doInterp( double time0, double time1, double time,
+                                         int recvcount , int nbuff0, int nbuff1,
+                                         int **recvbuff0, int **recvbuff1, int *result )
+  {
+    for(int i = 0 ; i < recvcount ; i++ )
+        result[i] = (int) ((recvbuff0[0][i]*(time1 - time) + recvbuff1[0][i]*(time - time0))/(time1 - time0) + _interp_precision);
+  }
+
+  void LinearTimeInterpolator::doInterp( double time0, double time1, double time,
+                                         int recvcount , int nbuff0, int nbuff1,
+                                         double **recvbuff0, double **recvbuff1,
+                                         double *result )
+  {
+    for(int i = 0 ; i < recvcount ; i++ )
+      result[i] = (recvbuff0[0][i]*(time1 - time) + recvbuff1[0][i]*(time - time0))/(time1 - time0);
+  }
+  
+}
diff --git a/src/ParaMEDMEM/LinearTimeInterpolator.hxx b/src/ParaMEDMEM/LinearTimeInterpolator.hxx
new file mode 100644 (file)
index 0000000..0128e42
--- /dev/null
@@ -0,0 +1,47 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __LINEARTIMEINTERPOLATOR_HXX__
+#define __LINEARTIMEINTERPOLATOR_HXX__
+
+#include "TimeInterpolator.hxx"
+
+#include <map>
+#include <iostream>
+
+namespace ParaMEDMEM
+{
+  class DEC;
+  
+  class LinearTimeInterpolator : public TimeInterpolator
+  {
+    public:  
+      LinearTimeInterpolator( double InterpPrecision=0, int nStepBefore=1,
+                              int nStepAfter=1 ) ;
+      virtual ~LinearTimeInterpolator();
+      void doInterp( double time0, double time1, double time, int recvcount,
+                     int nbuff0, int nbuff1,
+                     int **recvbuff0, int **recvbuff1, int *result );
+      void doInterp( double time0, double time1, double time, int recvcount,
+                     int nbuff0, int nbuff1,
+                     double **recvbuff0, double **recvbuff1, double *result );
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/MPIAccess.cxx b/src/ParaMEDMEM/MPIAccess.cxx
new file mode 100644 (file)
index 0000000..b7fe803
--- /dev/null
@@ -0,0 +1,1091 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "MPIAccess.hxx"
+#include "InterpolationUtils.hxx"
+
+#include <iostream>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  /**!
+    \anchor MPIAccess-det
+    \class MPIAccess
+
+    The class \a MPIAccess is the gateway to the MPI library.
+    It is a helper class that gathers the calls to the MPI
+    library that are made in the ParaMEDMEM library. This centralization
+    makes it easier to collect information about the communication
+    in the library. With MPIAccess, tags are managed automatically
+    and asynchronous operations are easier.
+
+    It is typically instantiated after the MPI_Init() call in a program and is then passed as a parameter to the constructors of ParaMEDMEM objects, so that they access the MPI library via MPIAccess.
+
+    As an example, the following code initializes a processor group made of the zero processor.
+
+    \verbatim
+    #include "MPIAccess.hxx"
+    #include "ProcessorGroup.hxx"
+
+    int main(int argc, char** argv)
+    {
+    //initialization
+    MPI_Init(&argc, &argv);
+    ParaMEDMEM::CommInterface comm_interface;
+
+    //setting up a processor group with proc 0
+    set<int> procs;
+    procs.insert(0);
+    ParaMEDMEM::ProcessorGroup group(procs, comm_interface);
+
+    ParaMEDMEM::MPIAccess mpi_access(group);
+
+    //cleanup
+    MPI_Finalize();
+    }
+    \endverbatim
+  */
+
+
+  /*! Creates an MPIAccess that is based on the processors included in \a ProcessorGroup.
+    This class simplifies the use of the MPI API.
+
+    \param ProcessorGroup MPIProcessorGroup object giving access to group management
+    \param BaseTag and MaxTag define the range of tags to be used.
+    Tags are managed by MPIAccess and are cyclically incremented.
+    Each Send or Receive operation returns a new RequestId to the caller. That
+    RequestId may then be used to manage the operation (Wait, check of status, etc.).
+    The MPITag managed internally by MPIAccess is the one passed as the "tag"
+    argument of the MPI calls. A construction sketch is given after the
+    constructor below.
+  */
+
+  MPIAccess::MPIAccess(MPIProcessorGroup * ProcessorGroup, int BaseTag, int MaxTag) :
+    _comm_interface( ProcessorGroup->getCommInterface() ) ,
+    _intra_communicator( ProcessorGroup->getComm() )
+  {
+    void *v ;
+    int mpitagub ;
+    int flag ;
+    //MPI_Comm_get_attr does not run with _IntraCommunicator ???
+    //MPI_Comm_get_attr(*_IntraCommunicator,MPID_TAG_UB,&mpitagub,&flag) ;
+    MPI_Comm_get_attr(MPI_COMM_WORLD,MPI_TAG_UB,&v,&flag) ;
+    mpitagub=*(reinterpret_cast<int*>(v));
+    if ( BaseTag != 0 )
+      BaseTag = (BaseTag/MODULO_TAG)*MODULO_TAG ;
+    if ( MaxTag == 0 )
+      MaxTag = (mpitagub/MODULO_TAG-1)*MODULO_TAG ;
+    MPI_Comm_rank( *_intra_communicator, &_my_rank ) ;
+    if ( !flag | (BaseTag < 0) | (BaseTag >= MaxTag) | (MaxTag > mpitagub) )
+      throw INTERP_KERNEL::Exception("wrong call to MPIAccess constructor");
+
+    _processor_group = ProcessorGroup ;
+    _processor_group_size = _processor_group->size() ;
+    _trace = false ;
+
+    _base_request = -1 ;
+    _max_request = std::numeric_limits<int>::max() ;
+    _request = _base_request ;
+    
+    _base_MPI_tag = BaseTag ;
+    _max_MPI_tag = MaxTag ;
+    
+    _send_request = new int[ _processor_group_size ] ;
+    _recv_request = new int[ _processor_group_size ] ;
+
+    _send_requests.resize( _processor_group_size ) ;
+    _recv_requests.resize( _processor_group_size ) ;
+
+    _send_MPI_tag = new int[ _processor_group_size ] ;
+    _recv_MPI_Tag = new int[ _processor_group_size ] ;
+    int i ;
+    for (i = 0 ; i < _processor_group_size ; i++ )
+      {
+        _send_request[ i ] = _max_request ;
+        _recv_request[ i ] = _max_request ;
+        _send_requests[ i ].resize(0) ;
+        _recv_requests[ i ].resize(0) ;
+        _send_MPI_tag[ i ] = _max_MPI_tag ;
+        _recv_MPI_Tag[ i ] = _max_MPI_tag ;
+      }
+    MPI_Datatype array_of_types[3] ;
+    array_of_types[0] = MPI_DOUBLE ;
+    array_of_types[1] = MPI_DOUBLE ;
+    array_of_types[2] = MPI_INT ;
+    int array_of_blocklengths[3] ;
+    array_of_blocklengths[0] = 1 ;
+    array_of_blocklengths[1] = 1 ;
+    array_of_blocklengths[2] = 1 ;
+    MPI_Aint array_of_displacements[3] ;
+    array_of_displacements[0] = 0 ;
+    array_of_displacements[1] = sizeof(double) ;
+    array_of_displacements[2] = 2*sizeof(double) ;
+    MPI_Type_struct(3, array_of_blocklengths, array_of_displacements,
+                    array_of_types, &_MPI_TIME) ;
+    MPI_Type_commit(&_MPI_TIME) ;
+  }
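+
+  /*
+    Construction sketch for the constructor above (illustrative only; the
+    MPIProcessorGroup constructor arguments and the tag interval are
+    assumptions made for the example, not taken from the original
+    documentation) :
+
+    \verbatim
+    ParaMEDMEM::CommInterface comm_interface ;
+    std::set<int> procs ;
+    procs.insert(0) ;
+    procs.insert(1) ;
+    ParaMEDMEM::MPIProcessorGroup group( comm_interface, procs, MPI_COMM_WORLD ) ;
+    // restrict the tags used by this MPIAccess to [1000 , 1000000]
+    ParaMEDMEM::MPIAccess mpi_access( &group, 1000, 1000000 ) ;
+    \endverbatim
+  */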
+
+  MPIAccess::~MPIAccess()
+  {
+    delete [] _send_request ;
+    delete [] _recv_request ;
+    delete [] _send_MPI_tag ;
+    delete [] _recv_MPI_Tag ;
+    MPI_Type_free(&_MPI_TIME) ;
+  }
+
+  /*
+    MPIAccess and "RequestIds" :
+    ============================
+
+    . WARNING : In the specification document, the distinction
+    between "MPITags" and "RequestIds" is not clear. "MPITags"
+    are arguments of calls to MPI. "RequestIds" do not concern
+    calls to MPI : they are the in/out arguments named "tag" in
+    the MPIAccess API of the specification documentation.
+    In the implementation the proper name RequestId (or
+    RecvRequestId/SendRequestId) is used.
+
+    . Every MPI write/read request issued through MPIAccess returns
+    an identifier "RequestId".
+    That identifier is associated with a RequestStruct structure of
+    MPIAccess, which is reached through the map "_map_of_request_struct".
+    The RequestStruct structure manages the MPI_Request and MPI_Status *
+    structures of MPI and also gives access to information about the
+    request : target, send/recv, tag, [a]synchronous, type, outcount.
+
+    . That identifier is used to control an asynchronous request
+    via MPIAccess : Wait, Test, Probe, etc.
+
+    . In practice a "RequestId" is simply an integer in the interval
+    [0 , 2**31-1]. A single cyclic counter is shared by
+    [I]Sends and [I]Recvs.
+
+    . The "RequestIds" and their associated structures provide an easy
+    way to manage asynchronous communications.
+    For example we have mpi_access->wait( int RequestId ) instead of
+    MPI_Wait(MPI_Request *request, MPI_Status *status).
+
+    . The API of MPIAccess can return the "SendRequestIds" of a "target",
+    the "RecvRequestIds" from a "source", the "SendRequestIds" of
+    all "targets" or the "RecvRequestIds" of all "sources".
+    This avoids having to manage them in Presentation-ParaMEDMEM.
+  */
+
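+  /*
+    Illustrative sketch of the RequestId mechanism described above. It only
+    uses the MPIAccess methods defined in this file; the buffer names, sizes
+    and the peer rank are assumptions made for the example :
+
+    \verbatim
+    int target = 1 ;                // peer rank, assumed for the example
+    int send_id, recv_id ;
+    double sendbuf[10], recvbuf[10] ;
+    mpi_access.ISend( sendbuf, 10, MPI_DOUBLE, target, send_id ) ;
+    mpi_access.IRecv( recvbuf, 10, MPI_DOUBLE, target, recv_id ) ;
+    mpi_access.wait( send_id ) ;    // instead of MPI_Wait(&request,&status)
+    mpi_access.wait( recv_id ) ;
+    int outcount = mpi_access.MPIOutCount( recv_id ) ;  // number of received values
+    mpi_access.deleteRequest( send_id ) ;
+    mpi_access.deleteRequest( recv_id ) ;
+    \endverbatim
+  */
+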
+  int MPIAccess::newRequest( MPI_Datatype datatype, int tag , int destsourcerank ,
+                             bool fromsourcerank , bool asynchronous )
+  {
+    RequestStruct *mpiaccessstruct = new RequestStruct;
+    mpiaccessstruct->MPITag = tag ;
+    mpiaccessstruct->MPIDatatype = datatype ;
+    mpiaccessstruct->MPITarget = destsourcerank ;
+    mpiaccessstruct->MPIIsRecv = fromsourcerank ;
+    MPI_Status *aStatus = new MPI_Status ;
+    mpiaccessstruct->MPIStatus = aStatus ;
+    mpiaccessstruct->MPIAsynchronous = asynchronous ;
+    mpiaccessstruct->MPICompleted = !asynchronous ;
+    mpiaccessstruct->MPIOutCount = -1 ;
+    if ( !asynchronous )
+      {
+        mpiaccessstruct->MPIRequest = MPI_REQUEST_NULL ;
+        mpiaccessstruct->MPIStatus->MPI_SOURCE = destsourcerank ;
+        mpiaccessstruct->MPIStatus->MPI_TAG = tag ;
+        mpiaccessstruct->MPIStatus->MPI_ERROR = MPI_SUCCESS ;
+      }
+    if ( _request == _max_request )
+      _request = _base_request ;
+    _request += 1 ;
+    _map_of_request_struct[_request] = mpiaccessstruct ;
+    if ( fromsourcerank )
+      _recv_request[ destsourcerank ] = _request;
+    else
+      _send_request[ destsourcerank ] = _request;
+    if ( _trace )
+      cout << "NewRequest" << _my_rank << "( " << _request << " ) "
+           << mpiaccessstruct << endl ;
+    return _request ;
+  }
+
+  /*
+    MPIAccess and "tags" (or "MPITags") :
+    =====================================
+
+    . The constructor allows choosing an interval of tags to use :
+    [BaseTag , MaxTag].
+    The default is [ 0 , MPI_TAG_UB ], MPI_TAG_UB being the maximum
+    value allowed by the MPI implementation (at least 32767 = 2**15-1).
+    On the machine "awa" with the LAM implementation, the MPI_TAG_UB
+    value is 7353944. The MPI standard specifies that the value is the
+    same in all processes started by mpirun.
+    When the same IntraCommunicator is used in a process for several
+    distinct data flows (or when several IntraCommunicators share
+    processes), choosing disjoint tag intervals avoids ambiguity
+    and may help debugging.
+
+    . In MPIAccess the tags have two parts (MODULO_TAG is 10) :
+    + the last decimal digit corresponds to the MPI_Datatype ( 1 for
+    TimeMessages, 2 for MPI_INT and 3 for MPI_DOUBLE ) ;
+    + the value of the other digits is a circular number identifying each
+    message ;
+    + a TimeMessage and the associated DataMessage have the same number
+    (but the types, and therefore the tags, differ).
+
+    . For a Send of a message from a process "source" to a process
+    "target", _send_MPI_tag[target] in the process "source" contains
+    the last "tag" used for a Send to the process "target".
+    In the process "target" which receives that message,
+    _recv_MPI_Tag[source] contains the last "tag" used for a Recv
+    of messages from the process "source".
+    Naturally, per the MPI standard, the values of these tags must match.
+  */
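+
+  /*
+    Worked example of the tag layout described above (MODULO_TAG == 10).
+    The numeric values are illustrative, obtained by applying the private
+    helpers valTag/incrTag defined in MPIAccess.hxx :
+
+    \verbatim
+    valTag( 1230, _message_double )  -->  1233   // last digit 3 : MPI_DOUBLE message
+    incrTag( 1233 )                  -->  1240   // next circular message number
+    incrTag( 1231 )                  -->  1230   // after a TimeMessage (digit 1),
+                                                 // the data message keeps the same number
+    \endverbatim
+  */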
+  int MPIAccess::newSendTag( MPI_Datatype datatype, int destrank , int method ,
+                             bool asynchronous, int &RequestId )
+  {
+    int tag ;
+    tag = incrTag( _send_MPI_tag[destrank] ) ;
+    tag = valTag( tag, method ) ;
+    _send_MPI_tag[ destrank ] = tag ;
+    RequestId = newRequest( datatype, tag, destrank , false , asynchronous ) ;
+    _send_request[ destrank ] = RequestId ;
+    _send_requests[ destrank ].push_back( RequestId ) ;
+    return tag ;
+  }
+
+  int MPIAccess::newRecvTag( MPI_Datatype datatype, int sourcerank , int method ,
+                             bool asynchronous, int &RequestId )
+  {
+    int tag ;
+    tag = incrTag( _recv_MPI_Tag[sourcerank] ) ;
+    tag = valTag( tag, method ) ;
+    _recv_MPI_Tag[ sourcerank ] = tag ;
+    RequestId = newRequest( datatype, tag , sourcerank , true , asynchronous ) ;
+    _recv_request[ sourcerank ] = RequestId ;
+    _recv_requests[ sourcerank ].push_back( RequestId ) ;
+    return tag ;
+  }
+
+  // Returns the number of all SendRequestIds ; it may be used to allocate
+  // ArrayOfSendRequests for the call to sendRequestIds
+  int MPIAccess::sendRequestIdsSize()
+  {
+    int size = 0;
+    for (int i = 0 ; i < _processor_group_size ; i++ )
+      size += _send_requests[ i ].size() ;
+    return size ;
+  }
+
+  // Returns in ArrayOfSendRequests with the dimension "size" all the
+  // SendRequestIds
+  int MPIAccess::sendRequestIds(int size, int *ArrayOfSendRequests)
+  {
+    int destrank ;
+    int i = 0 ;
+    for ( destrank = 0 ; destrank < _processor_group_size ; destrank++ )
+      {
+        list< int >::const_iterator iter ;
+        for (iter = _send_requests[ destrank ].begin() ; iter != _send_requests[destrank].end() ; iter++ )
+          ArrayOfSendRequests[i++] = *iter ;
+      }
+    return i ;
+  }
+
+  // Returns the number of all RecvRequestIds ; it may be used to allocate
+  // ArrayOfRecvRequests for the call to recvRequestIds
+  int MPIAccess::recvRequestIdsSize()
+  {
+    int size = 0 ;
+    for (int i = 0 ; i < _processor_group_size ; i++ )
+      size += _recv_requests[ i ].size() ;
+    return size ;
+  }
+
+  // Returns in ArrayOfRecvRequests with the dimension "size" all the
+  // RecvRequestIds
+  int MPIAccess::recvRequestIds(int size, int *ArrayOfRecvRequests)
+  {
+    int sourcerank ;
+    int i = 0 ;
+    for ( sourcerank = 0 ; sourcerank < _processor_group_size ; sourcerank++ )
+      {
+        list< int >::const_iterator iter ;
+        for (iter = _recv_requests[ sourcerank ].begin() ; iter != _recv_requests[sourcerank].end() ; iter++ )
+          ArrayOfRecvRequests[i++] = *iter ;
+      }
+    return i ;
+  }
+
+  // Returns in ArrayOfSendRequests with the dimension "size" all the
+  // SendRequestIds to a destination rank
+  int MPIAccess::sendRequestIds(int destrank, int size, int *ArrayOfSendRequests)
+  {
+    if (size < (int)_send_requests[destrank].size() )
+      throw INTERP_KERNEL::Exception("wrong call to MPIAccess::SendRequestIds");
+    int i = 0 ;
+    list< int >::const_iterator iter ;
+    for (iter = _send_requests[ destrank ].begin() ; iter != _send_requests[destrank].end() ; iter++ )
+      ArrayOfSendRequests[i++] = *iter ;
+    return _send_requests[destrank].size() ;
+  }
+
+  // Returns in ArrayOfRecvRequests with the dimension "size" all the
+  // RecvRequestIds from a sourcerank
+  int MPIAccess::recvRequestIds(int sourcerank, int size, int *ArrayOfRecvRequests)
+  {
+    if (size < (int)_recv_requests[sourcerank].size() )
+      throw INTERP_KERNEL::Exception("wrong call to MPIAccess::RecvRequestIds");
+    int i = 0 ;
+    list< int >::const_iterator iter ;
+    for (iter = _recv_requests[ sourcerank ].begin() ; iter != _recv_requests[sourcerank].end() ; iter++ )
+      ArrayOfRecvRequests[i++] = *iter ;
+    return _recv_requests[sourcerank].size() ;
+  }
+
+  // Send in synchronous mode count values of type datatype from buffer to target
+  // (returns RequestId identifier even if the corresponding structure is deleted :
+  // it is only in order to have the same signature as the asynchronous mode)
+  int MPIAccess::send(void* buffer, int count, MPI_Datatype datatype, int target, int &RequestId)
+  {
+    int sts = MPI_SUCCESS ;
+    RequestId = -1 ;
+    if ( count )
+      {
+        _MessageIdent aMethodIdent = methodId( datatype ) ;
+        int MPItag = newSendTag( datatype, target , aMethodIdent , false , RequestId ) ;
+        if ( aMethodIdent == _message_time )
+          {
+            TimeMessage *aTimeMsg = (TimeMessage *) buffer ;
+            aTimeMsg->tag = MPItag ;
+          }
+        deleteRequest( RequestId ) ;
+        sts = _comm_interface.send(buffer, count, datatype, target, MPItag,
+                                  *_intra_communicator ) ;
+        if ( _trace )
+          cout << "MPIAccess::Send" << _my_rank << " SendRequestId "
+               << RequestId << " count " << count << " target " << target
+               << " MPItag " << MPItag << endl ;
+      }
+    return sts ;
+  }
+
+  // Receive (read) in synchronous mode count values of type datatype in buffer from source
+  // (returns RequestId identifier even if the corresponding structure is deleted :
+  // it is only in order to have the same signature as the asynchronous mode)
+  // The output argument OutCount is optional : *OutCount <= count
+  int MPIAccess::recv(void* buffer, int count, MPI_Datatype datatype, int source, int &RequestId, int *OutCount)
+  {
+    int sts = MPI_SUCCESS ;
+    RequestId = -1 ;
+    if ( OutCount != NULL )
+      *OutCount = -1 ;
+    if ( count )
+      {
+        _MessageIdent aMethodIdent = methodId( datatype ) ;
+        int MPItag = newRecvTag( datatype, source , aMethodIdent , false , RequestId ) ;
+        sts =  _comm_interface.recv(buffer, count, datatype, source, MPItag,
+                                   *_intra_communicator , MPIStatus( RequestId ) ) ;
+        int outcount = 0 ;
+        if ( sts == MPI_SUCCESS )
+          {
+            MPI_Datatype datatype = MPIDatatype( RequestId ) ;
+            _comm_interface.getCount(MPIStatus( RequestId ), datatype, &outcount ) ;
+            setMPIOutCount( RequestId , outcount ) ;
+            setMPICompleted( RequestId , true ) ;
+            deleteStatus( RequestId ) ;
+          }
+        if ( OutCount != NULL )
+          *OutCount = outcount ;
+        if ( _trace )
+          cout << "MPIAccess::Recv" << _my_rank << " RecvRequestId "
+               << RequestId << " count " << count << " source " << source
+               << " MPItag " << MPItag << endl ;
+        deleteRequest( RequestId ) ;
+      }
+    return sts ;
+  }
+
+  // Send in asynchronous mode count values of type datatype from buffer to target
+  // Returns RequestId identifier.
+  int MPIAccess::ISend(void* buffer, int count, MPI_Datatype datatype, int target, int &RequestId)
+  {
+    int sts = MPI_SUCCESS ;
+    RequestId = -1 ;
+    if ( count )
+      {
+        _MessageIdent aMethodIdent = methodId( datatype ) ;
+        int MPItag = newSendTag( datatype, target , aMethodIdent , true , RequestId ) ;
+        if ( aMethodIdent == _message_time )
+          {
+            TimeMessage *aTimeMsg = (TimeMessage *) buffer ;
+            aTimeMsg->tag = MPItag ;
+          }
+        MPI_Request *aSendRequest = MPIRequest( RequestId ) ;
+        if ( _trace )
+          {
+            cout << "MPIAccess::ISend" << _my_rank << " ISendRequestId "
+                 << RequestId << " count " << count << " target " << target
+                 << " MPItag " << MPItag << endl ;
+            if ( MPItag == 1 )
+              cout << "MPIAccess::ISend" << _my_rank << " time "
+                   << ((TimeMessage *)buffer)->time << " " << ((TimeMessage *)buffer)->deltatime
+                   << endl ;
+          }
+        sts = _comm_interface.Isend(buffer, count, datatype, target, MPItag,
+                                   *_intra_communicator , aSendRequest) ;
+      }
+    return sts ;
+  }
+
+  // Receive (read) in asynchronous mode count values of type datatype in buffer from source
+  // returns RequestId identifier.
+  int MPIAccess::IRecv(void* buffer, int count, MPI_Datatype datatype, int source, int &RequestId)
+  {
+    int sts = MPI_SUCCESS ;
+    RequestId = -1 ;
+    if ( count )
+      {
+        _MessageIdent aMethodIdent = methodId( datatype ) ;
+        int MPItag = newRecvTag( datatype, source , aMethodIdent , true , RequestId ) ;
+        MPI_Request *aRecvRequest = MPIRequest( RequestId ) ;
+        if ( _trace )
+          {
+            cout << "MPIAccess::IRecv" << _my_rank << " IRecvRequestId "
+                 << RequestId << " count " << count << " source " << source
+                 << " MPItag " << MPItag << endl ;
+            if ( MPItag == 1 )
+              cout << "MPIAccess::ISend" << _my_rank << " time "
+                   << ((TimeMessage *)buffer)->time << " " << ((TimeMessage *)buffer)->deltatime
+                   << endl ;
+          }
+        sts = _comm_interface.Irecv(buffer, count, datatype, source, MPItag,
+                                   *_intra_communicator , aRecvRequest) ;
+      }
+    return sts ;
+  }
+
+  // Perform a Send and a Recv in synchronous mode
+  int MPIAccess::sendRecv(void* sendbuf, int sendcount, MPI_Datatype sendtype,
+                          int dest, int &SendRequestId,
+                          void* recvbuf, int recvcount, MPI_Datatype recvtype,
+                          int source, int &RecvRequestId, int *OutCount)
+  {
+    int sts = MPI_SUCCESS ;
+    SendRequestId = -1 ;
+    RecvRequestId = -1 ;
+    if ( recvcount )
+      sts = IRecv(recvbuf, recvcount, recvtype, source, RecvRequestId) ;
+    int outcount = -1 ;
+    if ( _trace )
+      cout << "MPIAccess::SendRecv" << _my_rank << " IRecv RecvRequestId "
+           << RecvRequestId << endl ;
+    if ( sts == MPI_SUCCESS )
+      {
+        if ( sendcount )
+          sts = send(sendbuf, sendcount, sendtype, dest, SendRequestId) ;
+        if ( _trace )
+          cout << "MPIAccess::SendRecv" << _my_rank << " Send SendRequestId "
+               << SendRequestId << endl ;
+        if ( sts == MPI_SUCCESS && recvcount )
+          {
+            sts = wait( RecvRequestId ) ;
+            outcount = MPIOutCount( RecvRequestId ) ;
+            if ( _trace )
+              cout << "MPIAccess::SendRecv" << _my_rank << " IRecv RecvRequestId "
+                   << RecvRequestId << " outcount " << outcount << endl ;
+          }
+      }
+    if ( OutCount != NULL )
+      {
+        *OutCount = outcount ;
+        if ( _trace )
+          cout << "MPIAccess::SendRecv" << _my_rank << " *OutCount = " << *OutCount
+               << endl ;
+      }
+    deleteRequest( RecvRequestId ) ;
+    return sts ;
+  }
+
+  // Perform a Send and a Recv in asynchronous mode
+  int MPIAccess::ISendRecv(void* sendbuf, int sendcount, MPI_Datatype sendtype,
+                           int dest, int &SendRequestId,
+                           void* recvbuf, int recvcount, MPI_Datatype recvtype,
+                           int source, int &RecvRequestId)
+  {
+    int sts = MPI_SUCCESS ;
+    SendRequestId = -1 ;
+    RecvRequestId = -1 ;
+    if ( recvcount )
+      sts = IRecv(recvbuf, recvcount, recvtype, source, RecvRequestId) ;
+    if ( sts == MPI_SUCCESS )
+      if ( sendcount )
+        sts = ISend(sendbuf, sendcount, sendtype, dest, SendRequestId) ;
+    return sts ;
+  }
+
+  // Perform a wait of a Send or Recv asynchronous Request
+  // Do nothing for a synchronous Request
+  // Manage MPI_Request * and MPI_Status * structure
+  int MPIAccess::wait( int RequestId )
+  {
+    int status = MPI_SUCCESS ;
+    if ( !MPICompleted( RequestId ) )
+      {
+        if ( *MPIRequest( RequestId ) != MPI_REQUEST_NULL )
+          {
+            if ( _trace )
+              cout << "MPIAccess::Wait" << _my_rank << " -> wait( " << RequestId
+                   << " ) MPIRequest " << MPIRequest( RequestId ) << " MPIStatus "
+                   << MPIStatus( RequestId ) << " MPITag " << MPITag( RequestId )
+                   << " MPIIsRecv " << MPIIsRecv( RequestId ) << endl ;
+            status = _comm_interface.wait(MPIRequest( RequestId ), MPIStatus( RequestId )) ;
+          }
+        else
+          {
+            if ( _trace )
+              cout << "MPIAccess::Wait" << _my_rank << " MPIRequest == MPI_REQUEST_NULL"
+                   << endl ;
+          }
+        setMPICompleted( RequestId , true ) ;
+        if ( MPIIsRecv( RequestId ) && MPIStatus( RequestId ) )
+          {
+            MPI_Datatype datatype = MPIDatatype( RequestId ) ;
+            int outcount ;
+            status = _comm_interface.getCount(MPIStatus( RequestId ), datatype,
+                                             &outcount ) ;
+            if ( status == MPI_SUCCESS )
+              {
+                setMPIOutCount( RequestId , outcount ) ;
+                deleteStatus( RequestId ) ;
+                if ( _trace )
+                  cout << "MPIAccess::Wait" << _my_rank << " RequestId " << RequestId
+                       << "MPIIsRecv " << MPIIsRecv( RequestId ) << " outcount " << outcount
+                       << endl ;
+              }
+            else
+              {
+                if ( _trace )
+                  cout << "MPIAccess::Wait" << _my_rank << " MPIIsRecv "
+                       << MPIIsRecv( RequestId ) << " outcount " << outcount << endl ;
+              }
+          }
+        else
+          {
+            if ( _trace )
+              cout << "MPIAccess::Wait" << _my_rank << " MPIIsRecv " << MPIIsRecv( RequestId )
+                   << " MPIOutCount " << MPIOutCount( RequestId ) << endl ;
+          }
+      }
+    if ( _trace )
+      cout << "MPIAccess::Wait" << _my_rank << " RequestId " << RequestId
+           << " Request " << MPIRequest( RequestId )
+           << " Status " << MPIStatus( RequestId ) << " MPICompleted "
+           << MPICompleted( RequestId ) << " MPIOutCount " << MPIOutCount( RequestId )
+           << endl ;
+    return status ;
+  }
+
+  // Perform a "test" of a Send or Recv asynchronous Request
+  // If the request is done, returns true in the flag argument
+  // If the request is not finished, returns false in the flag argument
+  // Do nothing for a synchronous Request
+  // Manage MPI_Request * and MPI_Status * structure
+  int MPIAccess::test(int RequestId, int &flag)
+  {
+    int status = MPI_SUCCESS ;
+    flag = MPICompleted( RequestId ) ;
+    if ( _trace )
+      cout << "MPIAccess::Test" << _my_rank << " flag " << flag ;
+    if ( MPIIsRecv( RequestId ) )
+      {
+        if ( _trace )
+          cout << " Recv" ;
+      }
+    else
+      {
+        if ( _trace )
+          cout << " Send" ;
+      }
+    if( _trace )
+      cout << "Request" << RequestId << " " << MPIRequest( RequestId )
+           << " Status " << MPIStatus( RequestId ) << endl ;
+    if ( !flag )
+      {
+        if ( *MPIRequest( RequestId ) != MPI_REQUEST_NULL )
+          {
+            if ( _trace )
+              cout << "MPIAccess::Test" << _my_rank << " -> test( " << RequestId
+                   << " ) MPIRequest " << MPIRequest( RequestId )
+                   << " MPIStatus " << MPIStatus( RequestId )
+                   << " MPITag " << MPITag( RequestId )
+                   << " MPIIsRecv " << MPIIsRecv( RequestId ) << endl ;
+            status = _comm_interface.test(MPIRequest( RequestId ), &flag,
+                                         MPIStatus( RequestId )) ;
+          }
+        else
+          {
+            if ( _trace )
+              cout << "MPIAccess::Test" << _my_rank << " MPIRequest == MPI_REQUEST_NULL"
+                   << endl ;
+          }
+        if ( flag )
+          {
+            setMPICompleted( RequestId , true ) ;
+            if ( MPIIsRecv( RequestId ) && MPIStatus( RequestId ) )
+              {
+                int outcount ;
+                MPI_Datatype datatype = MPIDatatype( RequestId ) ;
+                status = _comm_interface.getCount( MPIStatus( RequestId ), datatype,
+                                                  &outcount ) ;
+                if ( status == MPI_SUCCESS )
+                  {
+                    setMPIOutCount( RequestId , outcount ) ;
+                    deleteStatus( RequestId ) ;
+                    if ( _trace )
+                      cout << "MPIAccess::Test" << _my_rank << " MPIIsRecv "
+                           << MPIIsRecv( RequestId ) << " outcount " << outcount << endl ;
+                  }
+                else
+                  {
+                    if ( _trace )
+                      cout << "MPIAccess::Test" << _my_rank << " MPIIsRecv "
+                           << MPIIsRecv( RequestId ) << " outcount " << outcount << endl ;
+                  }
+              }
+            else
+              {
+                if ( _trace )
+                  cout << "MPIAccess::Test" << _my_rank << " MPIIsRecv "
+                       << MPIIsRecv( RequestId ) << " MPIOutCount "
+                       << MPIOutCount( RequestId ) << endl ;
+              }
+          }
+      }
+    if ( _trace )
+      cout << "MPIAccess::Test" << _my_rank << " RequestId " << RequestId
+           << " flag " << flag << " MPICompleted " << MPICompleted( RequestId )
+           << " MPIOutCount " << MPIOutCount( RequestId ) << endl ;
+    return status ;
+  }
+
+  int MPIAccess::waitAny(int count, int *array_of_RequestIds, int &RequestId)
+  {
+    int status = MPI_ERR_OTHER ;
+    RequestId = -1 ;
+    cout << "MPIAccess::WaitAny not yet implemented" << endl ;
+    return status ;
+  }
+
+  int MPIAccess::testAny(int count, int *array_of_RequestIds, int &RequestId, int &flag)
+  {
+    int status = MPI_ERR_OTHER ;
+    RequestId = -1 ;
+    flag = 0 ;
+    cout << "MPIAccess::TestAny not yet implemented" << endl ;
+    return status ;
+  }
+  
+  // Perform a wait of each Send or Recv asynchronous Request of the array
+  // array_of_RequestIds of size "count".
+  // That array may be filled with a call to sendRequestIds or recvRequestIds
+  // (sendRequestIdsSize/recvRequestIdsSize give the size to allocate);
+  // see the usage sketch after this method.
+  // Do nothing for a synchronous Request
+  // Manage MPI_Request * and MPI_Status * structure
+  int MPIAccess::waitAll(int count, int *array_of_RequestIds)
+  {
+    if ( _trace )
+      cout << "WaitAll" << _my_rank << " : count " << count << endl ;
+    int status ;
+    int retstatus = MPI_SUCCESS ;
+    int i ;
+    for ( i = 0 ; i < count ; i++ )
+      {
+        if ( _trace )
+          cout << "WaitAll" << _my_rank << " " << i << " -> Wait( "
+               << array_of_RequestIds[i] << " )" << endl ;
+        status = wait( array_of_RequestIds[i] ) ;
+        if ( status != MPI_SUCCESS )
+          retstatus = status ;
+      }
+    if ( _trace )
+      cout << "EndWaitAll" << _my_rank << endl ;
+    return retstatus ;
+  }
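+
+  /*
+    Usage sketch for waitAll, as referenced in the comment above. It combines
+    sendRequestIdsSize/sendRequestIds with waitAll; the vector name is an
+    assumption made for the example :
+
+    \verbatim
+    int nreq = mpi_access.sendRequestIdsSize() ;
+    std::vector<int> ids( nreq ) ;
+    nreq = mpi_access.sendRequestIds( nreq, &ids[0] ) ;
+    mpi_access.waitAll( nreq, &ids[0] ) ;
+    mpi_access.deleteRequests( nreq, &ids[0] ) ;
+    \endverbatim
+  */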
+
+  // Perform a "test" of each Send or Recv asynchronous Request of the array 
+  // array_of_RequestIds of size "count".
+  // That array may be filled with a call to SendRequestIdsSize or RecvRequestIdsSize
+  // If all requests are done, returns true in the flag argument
+  // If all requests are not finished, returns false in the flag argument
+  // Do nothing for a synchronous Request
+  // Manage MPI_Request * and MPI_Status * structure
+  int MPIAccess::testAll(int count, int *array_of_RequestIds, int &flag)
+  {
+    if ( _trace )
+      cout << "TestAll" << _my_rank << " : count " << count << endl ;
+    int status ;
+    int retstatus = MPI_SUCCESS ;
+    bool retflag = true ;
+    int i ;
+    for ( i = 0 ; i < count ; i++ )
+      {
+        status = test( array_of_RequestIds[i] , flag ) ;
+        retflag = retflag && (flag != 0) ;
+        if ( status != MPI_SUCCESS )
+          retstatus = status ;
+      }
+    flag = retflag ;
+    if ( _trace )
+      cout << "EndTestAll" << _my_rank << endl ;
+    return retstatus ;
+  }
+
+  int MPIAccess::waitSome(int count, int *array_of_RequestIds, int outcount,
+                          int *outarray_of_RequestIds)
+  {
+    int status = MPI_ERR_OTHER ;
+    cout << "MPIAccess::WaitSome not yet implemented" << endl ;
+    return status ;
+  }
+
+  int MPIAccess::testSome(int count, int *array_of_RequestIds, int outcounts,
+                          int *outarray_of_RequestIds)
+  {
+    int status = MPI_ERR_OTHER ;
+    cout << "MPIAccess::TestSome not yet implemented" << endl ;
+    return status ;
+  }
+  
+  // Probe checks if a message is available for read from the FromSource rank.
+  // Returns the corresponding source, MPITag, datatype and outcount.
+  // Probe is a blocking call which waits until a message is available
+  // (a usage sketch is given after this method).
+  int MPIAccess::probe(int FromSource, int &source, int &MPITag,
+                       MPI_Datatype &myDatatype, int &outcount)
+  {
+    MPI_Status aMPIStatus ;
+    int sts =  _comm_interface.probe( FromSource, MPI_ANY_TAG,
+                                     *_intra_communicator , &aMPIStatus ) ;
+    if ( sts == MPI_SUCCESS )
+      {
+        source = aMPIStatus.MPI_SOURCE ;
+        MPITag = aMPIStatus.MPI_TAG ;
+        int MethodId = (MPITag % MODULO_TAG) ;
+        myDatatype = datatype( (ParaMEDMEM::_MessageIdent) MethodId ) ;
+        _comm_interface.getCount(&aMPIStatus, myDatatype, &outcount ) ;
+        if ( _trace )
+          cout << "MPIAccess::Probe" << _my_rank << " FromSource " << FromSource
+               << " source " << source << " MPITag " << MPITag << " MethodId "
+               << MethodId << " datatype " << myDatatype << " outcount " << outcount
+               << endl ;
+      }
+    else
+      {
+        source = -1 ;
+        MPITag = -1 ;
+        myDatatype = 0 ;
+        outcount = -1 ;
+      }
+    return sts ;
+  }
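+
+  /*
+    Usage sketch for probe, as referenced in the comment above. It assumes the
+    incoming message is known to carry MPI_DOUBLE data and that messages from
+    rank 0 are expected; both are assumptions made for the example :
+
+    \verbatim
+    int source, tag, outcount, id ;
+    MPI_Datatype datatype ;
+    mpi_access.probe( 0, source, tag, datatype, outcount ) ;
+    std::vector<double> buffer( outcount ) ;
+    mpi_access.recv( &buffer[0], outcount, datatype, source, id ) ;
+    \endverbatim
+  */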
+
+  // IProbe checks if a message is available for read from FromSource rank.
+  // If there is a message available, returns the corresponding source,
+  // MPITag, datatype and outcount with flag = true
+  // If not, returns flag = false
+  int MPIAccess::IProbe(int FromSource, int &source, int &MPITag,
+                        MPI_Datatype &myDataType, int &outcount, int &flag)
+  {
+    MPI_Status aMPIStatus ;
+    int sts =  _comm_interface.Iprobe( FromSource, MPI_ANY_TAG,
+                                      *_intra_communicator , &flag,
+                                      &aMPIStatus ) ;
+    if ( sts == MPI_SUCCESS && flag )
+      {
+        source = aMPIStatus.MPI_SOURCE ;
+        MPITag = aMPIStatus.MPI_TAG ;
+        int MethodId = (MPITag % MODULO_TAG) ;
+        myDataType = datatype( (ParaMEDMEM::_MessageIdent) MethodId ) ;
+        _comm_interface.getCount(&aMPIStatus, myDataType, &outcount ) ;
+        if ( _trace )
+          cout << "MPIAccess::IProbe" << _my_rank << " FromSource " << FromSource
+               << " source " << source << " MPITag " << MPITag << " MethodId "
+               << MethodId << " datatype " << myDataType << " outcount " << outcount
+               << " flag " << flag << endl ;
+      }
+    else
+      {
+        source = -1 ;
+        MPITag = -1 ;
+        myDataType = 0 ;
+        outcount = -1 ;
+      }
+    return sts ;
+  }
+
+  // Cancel concerns a "posted" asynchronous IRecv
+  // Returns flag = true if the receiving request was successfully canceled
+  // Returns flag = false if the receiving request was finished but not canceled
+  // Use cancel, wait and test_cancelled of the MPI API
+  int MPIAccess::cancel( int RecvRequestId, int &flag )
+  {
+    flag = 0 ;
+    int sts = _comm_interface.cancel( MPIRequest( RecvRequestId ) ) ;
+    if ( sts == MPI_SUCCESS )
+      {
+        sts = _comm_interface.wait( MPIRequest( RecvRequestId ) ,
+                                   MPIStatus( RecvRequestId ) ) ;
+        if ( sts == MPI_SUCCESS )
+          sts = _comm_interface.testCancelled( MPIStatus( RecvRequestId ) , &flag ) ;
+      }
+    return sts ;
+  }
+
+  // Cancel concerns a "pending" receiving message (without IRecv "posted")
+  // Returns flag = true if the message was successfully canceled
+  // Returns flag = false if the receiving request was finished but not canceled
+  // Use Irecv, cancel, wait and test_cancelled of the MPI API
+  int MPIAccess::cancel( int source, int theMPITag, MPI_Datatype datatype, int outcount, int &flag )
+  {
+    int sts ;
+    MPI_Aint extent ;
+    flag = 0 ;
+    sts =  MPI_Type_extent( datatype , &extent ) ;
+    if ( sts == MPI_SUCCESS )
+      {
+        void * recvbuf = malloc( extent*outcount ) ;
+        MPI_Request aRecvRequest ;
+        if ( _trace )
+          cout << "MPIAccess::Cancel" << _my_rank << " Irecv extent " << extent
+               << " datatype " << datatype << " source " << source << " theMPITag "
+               << theMPITag << endl ;
+        sts = _comm_interface.Irecv( recvbuf, outcount, datatype, source, theMPITag,
+                                    *_intra_communicator , &aRecvRequest ) ;
+        if ( sts == MPI_SUCCESS )
+          {
+            sts = _comm_interface.cancel( &aRecvRequest ) ;
+            if ( _trace )
+              cout << "MPIAccess::Cancel" << _my_rank << " theMPITag " << theMPITag
+                   << " cancel done" << endl ;
+            if ( sts == MPI_SUCCESS )
+              {
+                MPI_Status aStatus ;
+                if ( _trace )
+                  cout << "MPIAccess::Cancel" << _my_rank << " wait" << endl ;
+                sts = _comm_interface.wait( &aRecvRequest , &aStatus ) ;
+                if ( sts == MPI_SUCCESS )
+                  {
+                    if ( _trace )
+                      cout << "MPIAccess::Cancel" << _my_rank << " test_cancelled" << endl ;
+                    sts = _comm_interface.testCancelled( &aStatus , &flag ) ;
+                  }
+              }
+          }
+        if ( _trace && datatype == timeType() )
+          cout << "MPIAccess::Cancel" << _my_rank << " time "
+               << ((TimeMessage *) recvbuf)->time << " "
+               << ((TimeMessage *) recvbuf)->deltatime << endl ;
+        free( recvbuf ) ;
+      }
+    if ( _trace )
+      cout << "MPIAccess::Cancel" << _my_rank << " flag " << flag << endl ;
+    return sts ;
+  }
+
+
+  // CancelAll concerns all "pending" receiving messages (without IRecv "posted")
+  // CancelAll uses IProbe and Cancel (see above)
+  int MPIAccess::cancelAll()
+  {
+    int sts = MPI_SUCCESS ;
+    int target ;
+    int source ;
+    int MPITag ;
+    MPI_Datatype datatype ;
+    int outcount ;
+    int flag ;
+    for ( target = 0 ; target < _processor_group_size ; target++ )
+      {
+        sts = IProbe(target, source, MPITag, datatype, outcount, flag) ;
+        if ( sts == MPI_SUCCESS && flag )
+          {
+            sts = cancel(source, MPITag, datatype, outcount, flag) ;
+            if ( _trace )
+              cout << "MPIAccess::CancelAll" << _my_rank << " source " << source
+                   << " MPITag " << MPITag << " datatype " << datatype
+                   << " outcount " << outcount << " Cancel flag " << flag << endl ;
+            if ( sts != MPI_SUCCESS )
+              break ;
+          }
+        else if ( sts != MPI_SUCCESS )
+          break ;
+      }
+    return sts ;
+  }
+
+  // Same as barrier of MPI API
+  int MPIAccess::barrier()
+  {
+    int status = _comm_interface.barrier( *_intra_communicator ) ;
+    return status ;
+  }
+
+  // Same as Error_string of MPI API
+  int MPIAccess::errorString(int errorcode, char *string, int *resultlen) const
+  {
+    return _comm_interface.errorString( errorcode, string, resultlen) ;
+  }
+  
+  // Returns source, tag, error and outcount corresponding to receiving RequestId
+  // By default the corresponding structure of RequestId is deleted
+  int MPIAccess::status(int RequestId, int &source, int &tag, int &error,
+                        int &outcount, bool keepRequestStruct)
+  {
+    MPI_Status *myStatus = MPIStatus( RequestId ) ;
+    if ( _trace )
+      cout << "MPIAccess::status" << _my_rank << " RequestId " << RequestId
+           << " status " << myStatus << endl ;
+    if ( myStatus != NULL && MPIAsynchronous( RequestId ) &&
+         MPICompleted( RequestId ) )
+      {
+        if ( MPIIsRecv( RequestId ) )
+          {
+            source = myStatus->MPI_SOURCE ;
+            tag = myStatus->MPI_TAG ;
+            error = myStatus->MPI_ERROR ;
+            MPI_Datatype datatype = MPIDatatype( RequestId ) ;
+            _comm_interface.getCount(myStatus, datatype, &outcount ) ;
+            if ( _trace )
+              cout << "MPIAccess::status" << _my_rank << " RequestId " << RequestId
+                   << " status " << myStatus << " outcount " << outcount << endl ;
+            setMPIOutCount( RequestId , outcount ) ;
+          }
+        else
+          {
+            source = MPITarget( RequestId ) ;
+            tag = MPITag( RequestId ) ;
+            error = 0 ;
+            outcount = MPIOutCount( RequestId ) ;
+          }
+        if ( !keepRequestStruct )
+          deleteRequest( RequestId ) ;
+        return MPI_SUCCESS ;
+      }
+    else
+      {
+        source = MPITarget( RequestId ) ;
+        tag = MPITag( RequestId ) ;
+        error = 0 ;
+        outcount = MPIOutCount( RequestId ) ;
+      }
+    return MPI_SUCCESS ;
+  }
+  
+  int MPIAccess::requestFree( MPI_Request *request )
+  {
+    return _comm_interface.requestFree( request ) ;
+  }
+  
+  // Prints all information about all known requests, for debugging purposes
+  void MPIAccess::check() const
+  {
+    int i = 0 ;
+    map< int , RequestStruct * >::const_iterator MapOfRequestStructiterator ;
+    cout << "MPIAccess::Check" << _my_rank << "_map_of_request_struct_size "
+         << _map_of_request_struct.size() << endl ;
+    for ( MapOfRequestStructiterator = _map_of_request_struct.begin() ;
+          MapOfRequestStructiterator != _map_of_request_struct.end() ;
+          MapOfRequestStructiterator++ )
+      {
+        if ( MapOfRequestStructiterator->second != NULL )
+          {
+            cout << "    Check" << _my_rank << " " << i << ". Request"
+                 << MapOfRequestStructiterator->first << "-->" ;
+            if ( (MapOfRequestStructiterator->second)->MPIAsynchronous )
+              cout << "I" ;
+            if ( (MapOfRequestStructiterator->second)->MPIIsRecv )
+              cout << "Recv from " ;
+            else
+              cout << "Send to " ;
+            cout << (MapOfRequestStructiterator->second)->MPITarget
+                 << " MPITag " << (MapOfRequestStructiterator->second)->MPITag
+                 << " DataType " << (MapOfRequestStructiterator->second)->MPIDatatype
+                 << " Request " << (MapOfRequestStructiterator->second)->MPIRequest
+                 << " Status " << (MapOfRequestStructiterator->second)->MPIStatus
+                 << " Completed " << (MapOfRequestStructiterator->second)->MPICompleted
+                 << endl ;
+          }
+        i++ ;
+      }
+  }
+
+  // Returns the MPI size of a TimeMessage
+  MPI_Aint MPIAccess::timeExtent() const
+  {
+    MPI_Aint aextent ;
+    MPI_Type_extent( _MPI_TIME , &aextent ) ;
+    return aextent ;
+  }
+
+  // Returns the MPI size of a MPI_INT
+  MPI_Aint MPIAccess::intExtent() const
+  {
+    MPI_Aint aextent ;
+    MPI_Type_extent( MPI_INT , &aextent ) ;
+    return aextent ;
+  }
+
+  // Returns the MPI size of a MPI_DOUBLE
+  MPI_Aint MPIAccess::doubleExtent() const
+  {
+    MPI_Aint aextent ;
+    MPI_Type_extent( MPI_DOUBLE , &aextent ) ;
+    return aextent ;
+  }
+
+  // Outputs fields of a TimeMessage structure
+  ostream & operator<< (ostream & f ,const TimeMessage & aTimeMsg )
+  {
+    f << " time " << aTimeMsg.time << " deltatime " << aTimeMsg.deltatime
+      << " tag " << aTimeMsg.tag ;
+    return f;
+  }
+
+  // Outputs the DataType coded in a Tag
+  ostream & operator<< (ostream & f ,const _MessageIdent & methodtype )
+  {
+    switch (methodtype)
+      {
+      case _message_time :
+        f << " MethodTime ";
+        break;
+      case _message_int :
+        f << " MPI_INT ";
+        break;
+      case _message_double :
+        f << " MPI_DOUBLE ";
+        break;
+      default :
+        f << " UnknownMethodType ";
+        break;
+      }
+    return f;
+  }
+}
diff --git a/src/ParaMEDMEM/MPIAccess.hxx b/src/ParaMEDMEM/MPIAccess.hxx
new file mode 100644 (file)
index 0000000..d438c8c
--- /dev/null
@@ -0,0 +1,471 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __MPIACCESS_HXX__
+#define __MPIACCESS_HXX__
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+
+#include <map>
+#include <list>
+#include <vector>
+#include <iostream>
+
+namespace ParaMEDMEM
+{
+  typedef struct
+  {
+    double time ;
+    double deltatime ;
+    int tag ;
+  } TimeMessage;
+  
+  static MPI_Request mpirequestnull = MPI_REQUEST_NULL ;
+  enum _MessageIdent { _message_unknown, _message_time, _message_int, _message_double } ;
+
+  class MPIAccess
+  {
+  private:
+    struct RequestStruct
+    {
+      int MPITarget ;
+      bool MPIIsRecv ;
+      int MPITag ;
+      bool MPIAsynchronous ;
+      bool MPICompleted ;
+      MPI_Datatype MPIDatatype ;
+      MPI_Request MPIRequest ;
+      MPI_Status *MPIStatus ;
+      int MPIOutCount ;
+    };
+  public:
+    MPIAccess(MPIProcessorGroup * ProcessorGroup, int BaseTag=0, int MaxTag=0) ;
+    virtual ~MPIAccess() ;
+
+    void trace( bool trace = true ) ;
+
+    void deleteRequest( int RequestId ) ;
+    void deleteRequests(int size , int *ArrayOfSendRequests ) ;
+
+    int sendMPITag(int destrank) ;
+    int recvMPITag(int sourcerank) ;
+
+    int sendRequestIdsSize() ;
+    int sendRequestIds(int size, int *ArrayOfSendRequests) ;
+    int recvRequestIdsSize() ;
+    int recvRequestIds(int size, int *ArrayOfRecvRequests) ;
+
+    int sendRequestIdsSize(int destrank) ;
+    int sendRequestIds(int destrank, int size, int *ArrayOfSendRequests) ;
+    int recvRequestIdsSize(int sourcerank) ;
+    int recvRequestIds(int sourcerank, int size, int *ArrayOfRecvRequests) ;
+
+    int send(void* buffer, int count, MPI_Datatype datatype, int target,
+             int &RequestId) ;
+    int ISend(void* buffer, int count, MPI_Datatype datatype, int target,
+              int &RequestId) ;
+    int recv(void* buffer, int count, MPI_Datatype datatype, int source,
+             int &RequestId, int *OutCount=NULL) ;
+    int IRecv(void* buffer, int count, MPI_Datatype datatype, int source,
+              int &RequestId) ;
+    int sendRecv(void* sendbuf, int sendcount, MPI_Datatype sendtype, int dest,
+                 int &SendRequestId, void* recvbuf, int recvcount,
+                 MPI_Datatype recvtype, int source,
+                 int &RecvRequestId, int *OutCount=NULL) ;
+    int ISendRecv(void* sendbuf, int sendcount, MPI_Datatype sendtype, int dest,
+                  int &SendRequestId, void* recvbuf, int recvcount,
+                  MPI_Datatype recvtype, int source, int &RecvRequestId) ;
+
+    int wait(int RequestId) ;
+    int test(int RequestId, int &flag) ;
+    int waitAny(int count, int *array_of_RequestIds, int &RequestId) ;
+    int testAny(int count, int *array_of_RequestIds, int &RequestId, int &flag)  ;
+    int waitAll(int count, int *array_of_RequestIds) ;
+    int testAll(int count, int *array_of_RequestIds, int &flag)  ;
+    int waitSome(int count, int *array_of_RequestIds, int outcount,
+                 int *outarray_of_RequestIds) ;
+    int testSome(int count, int *array_of_RequestIds, int outcounts,
+                 int *outarray_of_RequestIds) ;
+    int probe(int FromSource, int &source, int &MPITag, MPI_Datatype &datatype,
+              int &outcount) ;
+    int IProbe(int FromSource, int &source, int &MPITag, MPI_Datatype &datatype,
+               int &outcount, int &flag) ;
+    int cancel( int RecvRequestId, int &flag ) ; 
+    int cancel( int source, int MPITag, MPI_Datatype datatype, int outcount,
+                int &flag ) ;
+    int cancelAll() ;
+    int barrier() ;
+    int errorString(int errorcode, char *string, int *resultlen) const ;
+    int status(int RequestId, int &source, int &tag, int &error, int &outcount,
+               bool keepRequestStruct=false) ;
+    int requestFree( MPI_Request *request ) ;
+
+    void check() const ;
+
+    MPI_Datatype timeType() const ;
+    bool isTimeMessage( int MPITag ) const ;
+    MPI_Aint timeExtent() const ;
+    MPI_Aint intExtent() const ;
+    MPI_Aint doubleExtent() const ;
+    MPI_Aint extent( MPI_Datatype datatype ) const ;
+
+    int MPITag( int RequestId ) ;
+    int MPITarget( int RequestId ) ;
+    bool MPIIsRecv( int RequestId ) ;
+    bool MPIAsynchronous( int RequestId ) ;
+    bool MPICompleted( int RequestId ) ;
+    MPI_Datatype MPIDatatype( int RequestId ) ;
+    int MPIOutCount( int RequestId ) ;
+
+  private:
+    int newRequest( MPI_Datatype datatype, int tag , int destsourcerank ,
+                    bool fromsourcerank , bool asynchronous ) ;
+    int newSendTag( MPI_Datatype datatype, int destrank , int method ,
+                    bool asynchronous, int &RequestId ) ;
+    int newRecvTag( MPI_Datatype datatype, int sourcerank , int method ,
+                    bool asynchronous, int &RequestId ) ;
+    int incrTag( int prevtag ) ;
+    int valTag( int tag, int method ) ;
+
+    void deleteSendRecvRequest( int RequestId ) ;
+
+    void deleteStatus( int RequestId ) ;
+
+    MPI_Request *MPIRequest( int RequestId ) ;
+    MPI_Status *MPIStatus( int RequestId ) ;
+    void setMPICompleted( int RequestId , bool completed ) ;
+    void setMPIOutCount( int RequestId , int outcount ) ;
+    void clearMPIStatus( int RequestId ) ;
+
+    _MessageIdent methodId( MPI_Datatype datatype ) const ;
+    MPI_Datatype datatype( _MessageIdent aMethodIdent ) const ;
+  private:
+    const CommInterface &_comm_interface ;
+    const MPI_Comm* _intra_communicator ;
+    MPIProcessorGroup * _processor_group ;
+    int _processor_group_size ;
+    int _my_rank ;
+    bool _trace ;
+    int _base_request ;
+    int _max_request ;
+    int _request ;
+    int * _send_request ;
+    int * _recv_request ;
+    std::vector< std::list< int > > _send_requests ;
+    std::vector< std::list< int > > _recv_requests ;
+    int _base_MPI_tag ;
+    int _max_MPI_tag ;
+    int * _send_MPI_tag ;
+    int * _recv_MPI_Tag ;
+    MPI_Datatype _MPI_TIME ;
+    static const int MODULO_TAG=10;
+    std::map< int , RequestStruct * > _map_of_request_struct ;
+
+  };
+
+  inline void MPIAccess::trace( bool atrace )
+  {
+    _trace = atrace ;
+  }
+
+  // Deletes the Request structure corresponding to the RequestId identifier:
+  // first deletes the MPI_Request * and MPI_Status * structures,
+  // then erases the entry from _map_of_request_struct
+  inline void MPIAccess::deleteRequest( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      {
+        if ( _trace )
+          std::cout << "MPIAccess::DeleteRequest" << _my_rank << "( " << RequestId << " ) "
+                    << aRequestStruct << " MPIRequest " << aRequestStruct->MPIRequest
+                    << " MPIIsRecv " << aRequestStruct->MPIIsRecv << std::endl ;
+        if ( _map_of_request_struct[RequestId]->MPIRequest != MPI_REQUEST_NULL )
+          requestFree( &_map_of_request_struct[RequestId]->MPIRequest ) ;
+        deleteSendRecvRequest( RequestId ) ;
+        deleteStatus( RequestId ) ;
+        _map_of_request_struct.erase( RequestId ) ;
+        delete aRequestStruct ;
+      }
+    else
+      {
+        if ( _trace )
+          std::cout << "MPIAccess::DeleteRequest" << _my_rank << "( " << RequestId
+                    << " ) Request not found" << std::endl ;
+      }
+  }
+
+  // Delete all requests of the array ArrayOfSendRequests
+  inline void MPIAccess::deleteRequests(int size , int *ArrayOfSendRequests )
+  {
+    for (int i = 0 ; i < size ; i++ )
+      deleteRequest( ArrayOfSendRequests[i] ) ;
+  }
+
+  // Returns the last MPITag of the destination rank destrank
+  inline int MPIAccess::sendMPITag(int destrank)
+  {
+    return _send_MPI_tag[destrank] ;
+  }
+
+  // Returns the last MPITag of the source rank sourcerank
+  inline int MPIAccess::recvMPITag(int sourcerank)
+  {
+    return _recv_MPI_Tag[sourcerank] ;
+  }
+
+  // Returns the number of all SendRequestIds matching a destination rank. It may be
+  // used to allocate ArrayOfSendRequests for the call to SendRequestIds
+  inline int MPIAccess::sendRequestIdsSize(int destrank)
+  {
+    return _send_requests[destrank].size() ;
+  }
+
+  // Returns the number of all RecvRequestIds matching a source rank. It may be
+  // used to allocate ArrayOfRecvRequests for the call to RecvRequestIds
+  inline int MPIAccess::recvRequestIdsSize(int sourcerank)
+  {
+    return _recv_requests[sourcerank].size() ;
+  }
+
+  // Returns the MPI_Datatype (registered in MPI in the constructor with
+  // MPI_Type_struct and MPI_Type_commit) for TimeMessages
+  inline MPI_Datatype MPIAccess::timeType() const
+  {
+    return _MPI_TIME ;
+  }
+  
+  // Returns true if the tag MPITag corresponds to a TimeMessage
+  inline bool MPIAccess::isTimeMessage( int aMPITag ) const
+  {
+    return ((aMPITag%MODULO_TAG) == _message_time) ;
+  }
+
+  // Returns the MPI size of the MPI_Datatype datatype
+  inline MPI_Aint MPIAccess::extent( MPI_Datatype adatatype ) const
+  {
+    if ( adatatype == _MPI_TIME )
+      return timeExtent() ;
+    if ( adatatype == MPI_INT )
+      return intExtent() ;
+    if ( adatatype == MPI_DOUBLE )
+      return doubleExtent() ;
+    return 0 ;
+  }
+  
+  // Returns the MPITag of the request corresponding to RequestId identifier
+  inline int MPIAccess::MPITag( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPITag ;
+    return -1 ;
+  }
+  
+  // Returns the MPITarget of the request corresponding to RequestId identifier
+  inline int MPIAccess::MPITarget( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPITarget ;
+    return -1 ;
+  }
+
+  // Returns true if the request corresponding to RequestId identifier was [I]Recv
+  inline bool MPIAccess::MPIIsRecv( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPIIsRecv ;
+    return false ;
+  }
+
+  // Returns true if the request corresponding to RequestId identifier was asynchronous
+  inline bool MPIAccess::MPIAsynchronous( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPIAsynchronous ;
+    return false ;
+  }
+  
+  // Returns true if the request corresponding to RequestId identifier was completed
+  inline bool MPIAccess::MPICompleted( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPICompleted;
+    return true ;
+  }
+
+  // Returns the MPI_Datatype of the request corresponding to RequestId identifier
+  inline MPI_Datatype MPIAccess::MPIDatatype( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPIDatatype;
+    return MPI_DATATYPE_NULL;
+  }
+
+  // Returns the size of the receiving message of the request corresponding to
+  // RequestId identifier
+  inline int MPIAccess::MPIOutCount( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return aRequestStruct->MPIOutCount;
+    return 0 ;
+  }
+
+  // Increments the previous tag value (cyclically)
+  // Look at MPIAccess::NewSendTag/NewRecvTag in MPIAccess.cxx
+  inline int MPIAccess::incrTag( int prevtag )
+  {
+    int tag;
+    if ( (prevtag % MODULO_TAG) == _message_time )
+      tag = ((prevtag/MODULO_TAG)*MODULO_TAG);
+    else
+      tag = ((prevtag/MODULO_TAG + 1)*MODULO_TAG);
+    if ( tag > _max_MPI_tag )
+      tag = _base_MPI_tag ;
+    return tag ;
+  }
+
+  // Returns the MPITag with the method-type field
+  // Look at MPIAccess::NewSendTag/NewRecvTag in MPIAccess.cxx
+  inline int MPIAccess::valTag( int tag, int method )
+  {
+    return ((tag/MODULO_TAG)*MODULO_TAG) + method;
+  }
+  
+  // Remove a Request identifier from the list _recv_requests/_send_requests of
+  // the corresponding target.
+  inline void MPIAccess::deleteSendRecvRequest( int RequestId )
+  {
+    if ( _trace )
+      std::cout << "MPIAccess::DeleteSendRecvRequest" << _my_rank
+                << "( " << RequestId << " ) " << std::endl ;
+    if ( MPIIsRecv( RequestId ) )
+      _recv_requests[ MPITarget( RequestId ) ].remove( RequestId );
+    else
+      _send_requests[ MPITarget( RequestId ) ].remove( RequestId );
+  }
+
+  // Delete the MPI structure MPI_Status * of a RequestId
+  inline void MPIAccess::deleteStatus( int RequestId )
+  {
+    if ( _map_of_request_struct[RequestId]->MPIStatus != NULL )
+      {
+        delete _map_of_request_struct[RequestId]->MPIStatus ;
+        clearMPIStatus( RequestId ) ;
+      }
+  }
+
+  // Returns the MPI structure MPI_Request * of a RequestId
+  inline MPI_Request * MPIAccess::MPIRequest( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      return &aRequestStruct->MPIRequest;
+    return &mpirequestnull ;
+  }
+  
+  // Returns the MPI structure MPI_Status * of a RequestId
+  inline MPI_Status * MPIAccess::MPIStatus( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ];
+    if ( aRequestStruct )
+      return aRequestStruct->MPIStatus;
+    return NULL ;
+  }
+
+  // Set the MPICompleted field of the structure Request corresponding to RequestId
+  // identifier with the value completed
+  inline void MPIAccess::setMPICompleted( int RequestId , bool completed )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      aRequestStruct->MPICompleted = completed;
+  }
+
+  // Set the MPIOutCount field of the structure Request corresponding to RequestId
+  // identifier with the value outcount
+  inline void MPIAccess::setMPIOutCount( int RequestId , int outcount )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      aRequestStruct->MPIOutCount = outcount;
+  }
+
+  // Nullify the MPIStatus field of the Request structure corresponding to the
+  // RequestId identifier
+  inline void MPIAccess::clearMPIStatus( int RequestId )
+  {
+    struct RequestStruct *aRequestStruct = _map_of_request_struct[ RequestId ] ;
+    if ( aRequestStruct )
+      aRequestStruct->MPIStatus = NULL ;
+  }
+
+  // Returns the _MessageIdent enum value corresponding to the MPI_Datatype datatype
+  // Look at MPIAccess::NewSendTag/NewRecvTag in MPIAccess.cxx
+  inline _MessageIdent MPIAccess::methodId( MPI_Datatype adatatype ) const
+  {
+    _MessageIdent aMethodIdent ;
+    if ( adatatype == _MPI_TIME )
+      aMethodIdent = _message_time;
+    else if ( adatatype == MPI_INT )
+      aMethodIdent = _message_int ;
+    else if ( adatatype == MPI_DOUBLE )
+      aMethodIdent = _message_double ;
+    else
+      aMethodIdent = _message_unknown ;
+    return aMethodIdent ;
+  }
+  
+  // Returns the MPI_Datatype corresponding to the _MessageIdent enum aMethodIdent
+  inline MPI_Datatype MPIAccess::datatype( _MessageIdent aMethodIdent ) const
+  {
+    MPI_Datatype aDataType ;
+    switch( aMethodIdent )
+      {
+      case _message_time :
+        aDataType = _MPI_TIME ;
+        break ;
+      case _message_int :
+        aDataType = MPI_INT ;
+        break ;
+      case _message_double :
+        aDataType = MPI_DOUBLE ;
+        break ;
+      default :
+        aDataType = (MPI_Datatype) -1 ;
+        break ;
+      }
+    return aDataType ;
+  }
+
+  std::ostream & operator<< (std::ostream &,const _MessageIdent &);
+
+  std::ostream & operator<< (std::ostream &,const TimeMessage &);
+
+}
+
+#endif
diff --git a/src/ParaMEDMEM/MPIAccessDEC.cxx b/src/ParaMEDMEM/MPIAccessDEC.cxx
new file mode 100644 (file)
index 0000000..f8a0e10
--- /dev/null
@@ -0,0 +1,1057 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "MPIAccessDEC.hxx"
+
+#include <cstring>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{    
+
+  /*!
+    This constructor creates an MPIAccessDEC which has \a source_group as a working side 
+    and  \a target_group as an idle side. 
+    The constructor must be called synchronously on all processors of both processor groups.
+
+    \param source_group working side ProcessorGroup
+    \param target_group lazy side ProcessorGroup
+    \param Asynchronous communication mode (default is asynchronous)
+    \param nStepBefore number of time steps needed for the interpolation before the current time
+    \param nStepAfter number of time steps needed for the interpolation after the current time
+
+  */
+
+  MPIAccessDEC::MPIAccessDEC( const ProcessorGroup& source_group,
+                              const ProcessorGroup& target_group,
+                              bool Asynchronous )
+  {
+
+    ProcessorGroup * union_group = source_group.fuse(target_group) ;  
+    int i ;
+    std::set<int> procs;
+    for ( i = 0 ; i < union_group->size() ; i++ )
+      {
+        procs.insert(i) ;
+      }
+    MPIProcessorGroup *mpilg = static_cast<MPIProcessorGroup *>(const_cast<ProcessorGroup *>(&source_group));
+    _MPI_union_group = new ParaMEDMEM::MPIProcessorGroup( union_group->getCommInterface(),procs,mpilg->getWorldComm());
+    delete union_group ;
+    _my_rank = _MPI_union_group->myRank() ;
+    _group_size = _MPI_union_group->size() ;
+    _MPI_access = new MPIAccess( _MPI_union_group ) ;
+    _asynchronous = Asynchronous ;
+    _time_messages = new vector< vector< TimeMessage > > ;
+    _time_messages->resize( _group_size ) ;
+    _out_of_time = new vector< bool > ;
+    _out_of_time->resize( _group_size ) ;
+    _data_messages_recv_count = new vector< int > ;
+    _data_messages_recv_count->resize( _group_size ) ;
+    for ( i = 0 ; i < _group_size ; i++ )
+      {
+        (*_out_of_time)[i] = false ;
+        (*_data_messages_recv_count)[i] = 0 ;
+      }
+    _data_messages_type = new vector< MPI_Datatype > ;
+    _data_messages_type->resize( _group_size ) ;
+    _data_messages = new vector< vector< void * > > ;
+    _data_messages->resize( _group_size ) ;
+    _time_interpolator = NULL ;
+    _map_of_send_buffers = new map< int , SendBuffStruct * > ;
+  }
+
+  MPIAccessDEC::~MPIAccessDEC()
+  {
+    checkFinalSent() ;
+    checkFinalRecv() ;
+    delete _MPI_union_group ;
+    delete _MPI_access ;
+    if ( _time_interpolator )
+      delete _time_interpolator ;
+    if ( _time_messages )
+      delete _time_messages ;
+    if ( _out_of_time )
+      delete _out_of_time ;
+    if ( _data_messages_recv_count )
+      delete _data_messages_recv_count ;
+    if ( _data_messages_type )
+      delete _data_messages_type ;
+    if ( _data_messages )
+      delete _data_messages ;
+    if ( _map_of_send_buffers )
+      delete _map_of_send_buffers ;
+  } 
+
+  void MPIAccessDEC::setTimeInterpolator( TimeInterpolationMethod aTimeInterp ,
+                                          double InterpPrecision, int nStepBefore,
+                                          int nStepAfter )
+  {
+    if ( _time_interpolator )
+      delete _time_interpolator ;
+    switch ( aTimeInterp )
+      {
+      case WithoutTimeInterp :
+        _time_interpolator = NULL ;
+        _n_step_before = 0 ;
+        _n_step_after = 0 ;
+        break ;
+      case LinearTimeInterp :
+        _time_interpolator = new LinearTimeInterpolator( InterpPrecision , nStepBefore ,
+                                                         nStepAfter ) ;
+        _n_step_before = nStepBefore ;
+        _n_step_after = nStepAfter ;
+        int i ;
+        for ( i = 0 ; i < _group_size ; i++ )
+          {
+            (*_time_messages)[ i ].resize( _n_step_before + _n_step_after ) ;
+            (*_data_messages)[ i ].resize( _n_step_before + _n_step_after ) ;
+            int j ;
+            for ( j = 0 ; j < _n_step_before + _n_step_after ; j++ )
+              {
+                (*_time_messages)[ i ][ j ].time = -1 ;
+                (*_time_messages)[ i ][ j ].deltatime = -1 ;
+                (*_data_messages)[ i ][ j ] = NULL ;
+              }
+          }
+        break ;
+      }
+  }
+
+  /*!
+    Sends sendcount data values from sendbuf[offset] with type sendtype to the target of the IntraCommunicator
+    (Internal Protected method)
+
+    Returns the request identifier SendRequestId
+
+  */
+  int MPIAccessDEC::send( void* sendbuf, int sendcount , int offset ,
+                          MPI_Datatype sendtype , int target , int &SendRequestId )
+  {
+    int sts ;
+    if ( _asynchronous )
+      {
+        if ( sendtype == MPI_INT )
+          {
+            sts = _MPI_access->ISend( &((int *) sendbuf)[offset] , sendcount , sendtype ,
+                                      target , SendRequestId ) ;
+          }
+        else
+          {
+            sts = _MPI_access->ISend( &((double *) sendbuf)[offset] , sendcount , sendtype ,
+                                      target , SendRequestId ) ;
+          }
+      }
+    else
+      {
+        if ( sendtype == MPI_INT )
+          {
+            sts = _MPI_access->send( &((int *) sendbuf)[offset] , sendcount , sendtype ,
+                                     target , SendRequestId ) ;
+          }
+        else
+          {
+            sts = _MPI_access->send( &((double *) sendbuf)[offset] , sendcount , sendtype ,
+                                     target , SendRequestId ) ;
+          }
+      }
+    return sts ;
+  }
+
+  /*!
+    Receives recvcount data values into recvbuf[offset] with type recvtype from the target of the IntraCommunicator
+    (Internal Protected method)
+
+    Returns the request identifier RecvRequestId
+
+  */
+  int MPIAccessDEC::recv( void* recvbuf, int recvcount , int offset ,
+                          MPI_Datatype recvtype , int target , int &RecvRequestId )
+  {
+    int sts ;
+    if ( _asynchronous )
+      {
+        if ( recvtype == MPI_INT )
+          {
+            sts = _MPI_access->IRecv( &((int *) recvbuf)[offset] , recvcount , recvtype ,
+                                      target , RecvRequestId ) ;
+          }
+        else
+          {
+            sts = _MPI_access->IRecv( &((double *) recvbuf)[offset] , recvcount , recvtype ,
+                                      target , RecvRequestId ) ;
+          }
+      }
+    else
+      {
+        if ( recvtype == MPI_INT )
+          {
+            sts = _MPI_access->recv( &((int *) recvbuf)[offset] , recvcount , recvtype ,
+                                     target , RecvRequestId ) ;
+          }
+        else
+          {
+            sts = _MPI_access->recv( &((double *) recvbuf)[offset] , recvcount , recvtype ,
+                                     target , RecvRequestId ) ;
+          }
+      }
+    return sts ;
+  }
+
+  /*!
+    Sends sendcount data values from sendbuf[offset] with type sendtype to the target of the IntraCommunicator
+    Receives recvcount data values into recvbuf[offset] with type recvtype from the target of the IntraCommunicator
+    (Internal Protected method)
+
+    Returns the request identifier SendRequestId
+    Returns the request identifier RecvRequestId
+
+  */
+  int MPIAccessDEC::sendRecv( void* sendbuf, int sendcount , int sendoffset ,
+                              MPI_Datatype sendtype ,
+                              void* recvbuf, int recvcount , int recvoffset ,
+                              MPI_Datatype recvtype , int target ,
+                              int &SendRequestId , int &RecvRequestId )
+  {
+    int sts ;
+    if ( _asynchronous )
+      {
+        if ( sendtype == MPI_INT )
+          {
+            if ( recvtype == MPI_INT )
+              {
+                sts = _MPI_access->ISendRecv( &((int *) sendbuf)[sendoffset] , sendcount ,
+                                              sendtype , target , SendRequestId ,
+                                              &((int *) recvbuf)[recvoffset] , recvcount ,
+                                              recvtype , target , RecvRequestId ) ;
+              }
+            else
+              {
+                sts = _MPI_access->ISendRecv( &((int *) sendbuf)[sendoffset] , sendcount ,
+                                              sendtype , target , SendRequestId ,
+                                              &((double *) recvbuf)[recvoffset] ,
+                                              recvcount , recvtype , target , RecvRequestId ) ;
+              }
+          }
+        else
+          {
+            if ( recvtype == MPI_INT )
+              {
+                sts = _MPI_access->ISendRecv( &((double *) sendbuf)[sendoffset] , sendcount ,
+                                              sendtype , target , SendRequestId ,
+                                              &((int *) recvbuf)[recvoffset] ,
+                                              recvcount , recvtype , target , RecvRequestId ) ;
+              }
+            else
+              {
+                sts = _MPI_access->ISendRecv( &((double *) sendbuf)[sendoffset] , sendcount ,
+                                              sendtype , target , SendRequestId ,
+                                              &((double *) recvbuf)[recvoffset] ,
+                                              recvcount , recvtype , target , RecvRequestId ) ;
+              }
+          }
+      }
+    else
+      {
+        if ( sendtype == MPI_INT )
+          {
+            if ( recvtype == MPI_INT )
+              {
+                sts = _MPI_access->sendRecv( &((int *) sendbuf)[sendoffset] , sendcount ,
+                                             sendtype , target , SendRequestId ,
+                                             &((int *) recvbuf)[recvoffset] , recvcount ,
+                                             recvtype , target , RecvRequestId ) ;
+              }
+            else
+              {
+                sts = _MPI_access->sendRecv( &((int *) sendbuf)[sendoffset] , sendcount ,
+                                             sendtype , target , SendRequestId ,
+                                             &((double *) recvbuf)[recvoffset] ,
+                                             recvcount , recvtype , target , RecvRequestId ) ;
+              }
+          }
+        else
+          {
+            if ( recvtype == MPI_INT )
+              {
+                sts = _MPI_access->sendRecv( &((double *) sendbuf)[sendoffset] , sendcount ,
+                                             sendtype , target , SendRequestId ,
+                                             &((int *) recvbuf)[recvoffset] ,
+                                             recvcount , recvtype , target , RecvRequestId ) ;
+              }
+            else
+              {
+                sts = _MPI_access->sendRecv( &((double *) sendbuf)[sendoffset] , sendcount ,
+                                             sendtype , target , SendRequestId ,
+                                             &((double *) recvbuf)[recvoffset] ,
+                                             recvcount , recvtype , target , RecvRequestId ) ;
+              }
+          }
+      }
+    return sts ;
+  }
+
+  /*!
+    Sends sendcount data values from sendbuf[offset] with type sendtype to all targets of the IntraCommunicator
+    Receives recvcount data values into recvbuf[offset] with type recvtype from all targets of the IntraCommunicator
+
+  */
+  int MPIAccessDEC::allToAll( void* sendbuf, int sendcount, MPI_Datatype sendtype ,
+                              void* recvbuf, int recvcount, MPI_Datatype recvtype )
+  {
+    if ( _time_interpolator )
+      {
+        return allToAllTime( sendbuf, sendcount, sendtype , recvbuf, recvcount, recvtype ) ;
+      }
+    int sts ;
+    int target ;
+    int sendoffset = 0 ;
+    int recvoffset = 0 ;
+    int SendRequestId ;
+    int RecvRequestId ;
+
+    //Free of SendBuffers 
+    if ( _asynchronous )
+      checkSent() ;
+
+    //DoSend + DoRecv : SendRecv
+    SendBuffStruct * aSendDataStruct = NULL ;
+    if ( _asynchronous && sendbuf )
+      {
+        aSendDataStruct = new SendBuffStruct ;
+        aSendDataStruct->SendBuffer = sendbuf ;
+        aSendDataStruct->Counter = 0 ;
+        aSendDataStruct->DataType = sendtype ;
+      }
+    for ( target = 0 ; target < _group_size ; target++ )
+      {
+        sts = sendRecv( sendbuf , sendcount , sendoffset , sendtype ,
+                        recvbuf , recvcount , recvoffset , recvtype ,
+                        target , SendRequestId , RecvRequestId ) ;
+        if ( _asynchronous && sendbuf && sendcount )
+          {
+            aSendDataStruct->Counter += 1 ;
+            (*_map_of_send_buffers)[ SendRequestId ] = aSendDataStruct ;
+          }
+        sendoffset += sendcount ;
+        recvoffset += recvcount ;
+      }
+    if ( !_asynchronous && sendbuf )
+      {
+        if ( sendtype == MPI_INT )
+          {
+            delete [] (int *) sendbuf ;
+          }
+        else
+          {
+            delete [] (double *) sendbuf ;
+          }
+      }
+    return sts ;
+  }
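+
+  // For illustration : with _group_size == 3 and sendcount == recvcount == 2,
+  // the loop above sends sendbuf[0..1] to target 0, sendbuf[2..3] to target 1
+  // and sendbuf[4..5] to target 2, and fills recvbuf[0..1], recvbuf[2..3] and
+  // recvbuf[4..5] with the values received from the corresponding targets.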
+
+  /*!
+    Sends sendcounts[target] data values from sendbuf[sdispls[target]] with type sendtype to all targets of the IntraCommunicator
+    Receives recvcounts[target] data values into recvbuf[rdispls[target]] with type recvtype from all targets of the IntraCommunicator
+
+  */
+  int MPIAccessDEC::allToAllv( void* sendbuf, int* sendcounts, int* sdispls,
+                               MPI_Datatype sendtype ,
+                               void* recvbuf, int* recvcounts, int* rdispls,
+                               MPI_Datatype recvtype )
+  {
+    if ( _time_interpolator )
+      {
+        return allToAllvTime( sendbuf, sendcounts, sdispls, sendtype ,
+                              recvbuf, recvcounts, rdispls, recvtype ) ;
+      }
+    int sts ;
+    int target ;
+    int SendRequestId ;
+    int RecvRequestId ;
+
+    //Free of SendBuffers 
+    if ( _asynchronous )
+      {
+        checkSent() ;
+      }
+
+    //DoSend + DoRecv : SendRecv
+    SendBuffStruct * aSendDataStruct = NULL ;
+    if ( _asynchronous && sendbuf )
+      {
+        aSendDataStruct = new SendBuffStruct ;
+        aSendDataStruct->SendBuffer = sendbuf ;
+        aSendDataStruct->Counter = 0 ;
+        aSendDataStruct->DataType = sendtype ;
+      }
+    for ( target = 0 ; target < _group_size ; target++ )
+      {
+        if ( sendcounts[target] || recvcounts[target] )
+          {
+            sts = sendRecv( sendbuf , sendcounts[target] , sdispls[target] , sendtype ,
+                            recvbuf , recvcounts[target] , rdispls[target] , recvtype ,
+                            target , SendRequestId , RecvRequestId ) ;
+            if ( _asynchronous && sendbuf && sendcounts[target])
+              {
+                aSendDataStruct->Counter += 1 ;
+                (*_map_of_send_buffers)[ SendRequestId ] = aSendDataStruct ;
+              }
+          }
+      }
+    if ( !_asynchronous && sendbuf )
+      {
+        if ( sendtype == MPI_INT )
+          {
+            delete [] (int *) sendbuf ;
+          }
+        else
+          {
+            delete [] (double *) sendbuf ;
+          }
+      }
+    return sts ;
+  }
+
+  /*
+    MPIAccessDEC and the management of SendBuffers :
+    =================================================
+
+    . In the collective communications we send only parts of the same buffer
+    to each "target". So in asynchronous mode all the parts must be free
+    before the buffer can be deleted/freed.
+
+    . We assume that buffers are allocated with a new double[], so a
+    delete [] is done.
+
+    . The structure SendBuffStruct keeps the address of the buffer
+    and manages a reference counter for that buffer. It also contains
+    the MPI_Datatype needed for the delete [] (double *) ... when the counter
+    reaches zero.
+
+    . The map _MapOfSendBuffers establishes the correspondence between each
+    RequestId given by a MPI_Access->ISend(...) and a SendBuffStruct
+    for each "target" of a part of the buffer.
+
+    . All that concerns only asynchronous Send. In synchronous mode,
+    we delete sendbuf just after the Send.
+  */
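+
+  /*
+    For illustration, a minimal sketch of that life cycle in asynchronous mode
+    (it mirrors what allToAll/allToAllv do above; error handling is omitted) :
+
+      double * sendbuf = new double[ _group_size*sendcount ] ;   // one part per target
+      SendBuffStruct * aSendDataStruct = new SendBuffStruct ;
+      aSendDataStruct->SendBuffer = sendbuf ;
+      aSendDataStruct->Counter = 0 ;
+      aSendDataStruct->DataType = MPI_DOUBLE ;
+      for ( int target = 0 ; target < _group_size ; target++ )
+        {
+          int SendRequestId ;
+          send( sendbuf , sendcount , target*sendcount , MPI_DOUBLE , target , SendRequestId ) ;
+          aSendDataStruct->Counter += 1 ;
+          (*_map_of_send_buffers)[ SendRequestId ] = aSendDataStruct ;
+        }
+      // Later, checkSent() decrements Counter for each completed request and
+      // does the delete [] (double *) of SendBuffer when Counter reaches zero.
+  */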
+
+  /*
+    MPIAccessDEC and the management of RecvBuffers :
+    =================================================
+
+    If there is no interpolation, no special action is done.
+
+    With interpolation for each target :
+    ------------------------------------
+    . We have _time_messages[target] which is a vector of TimeMessages.
+    We have 2 TimeMessages in our case with a linear interpolation.
+    They contain the previous time(t0)/deltatime and the last
+    time(t1)/deltatime.
+
+    . We have _data_messages[target] which is a vector of DataMessages.
+    We have 2 DataMessages in our case with a linear interpolation.
+    They contain the previous data at time(t0)/deltatime and the data at
+    the last time(t1)/deltatime.
+
+    . At the time _t(t*) of the current process we do the interpolation of
+    the values of the 2 DataMessages, which are returned in the part of
+    recvbuf corresponding to the target, with t0 < t* <= t1.
+
+    . Because the "deltatimes" differ between processes, we may have
+    t0 < t1 < t*, in which case an extrapolation is done.
+
+    . The vectors _out_of_time, _DataMessagesRecvCount and _DataMessagesType
+    contain, for each target, true if t* > the last t1, the recvcount and the
+    MPI_Datatype used to finalize the messages at the end.
+  */
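+
+  /*
+    For illustration, with the linear interpolator the value written in the part
+    of recvbuf of a target is, component by component (a sketch of the intent,
+    not the exact LinearTimeInterpolator code) :
+
+      recv(t*) = data(t0) + ( data(t1) - data(t0) ) * ( t* - t0 ) / ( t1 - t0 )
+
+    with t0/t1 read from (*_time_messages)[target][0]/[1] and data(t0)/data(t1)
+    read from (*_data_messages)[target][0]/[1]. The same formula extrapolates
+    when t0 < t1 < t*.
+  */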
+
+  /*!
+    Sends a TimeMessage to all targets of the IntraCommunicator
+    Receives the TimeMessages from the targets of the IntraCommunicator if necessary.
+
+    Sends sendcount data values from sendbuf[offset] with type sendtype to all targets of the IntraCommunicator
+    Returns recvcount data values in recvbuf[offset] with type recvtype after an interpolation
+    with the data received from all targets of the IntraCommunicator.
+
+  */
+  int MPIAccessDEC::allToAllTime( void* sendbuf, int sendcount , MPI_Datatype sendtype ,
+                                  void* recvbuf, int recvcount , MPI_Datatype recvtype )
+  {
+    int sts ;
+    int target ;
+    int sendoffset = 0 ;
+    int SendTimeRequestId ;
+    int SendDataRequestId ;
+
+    if ( _time_interpolator == NULL )
+      {
+        return MPI_ERR_OTHER ;
+      }
+
+    //Free of SendBuffers 
+    if ( _asynchronous )
+      {
+        checkSent() ;
+      }
+
+    //DoSend : Time + SendBuff
+    SendBuffStruct * aSendTimeStruct = NULL ;
+    SendBuffStruct * aSendDataStruct = NULL ;
+    if ( sendbuf && sendcount )
+      {
+        TimeMessage * aSendTimeMessage = new TimeMessage ;
+        if ( _asynchronous )
+          {
+            aSendTimeStruct = new SendBuffStruct ;
+            aSendTimeStruct->SendBuffer = aSendTimeMessage ;
+            aSendTimeStruct->Counter = 0 ;
+            aSendTimeStruct->DataType = _MPI_access->timeType() ;
+            aSendDataStruct = new SendBuffStruct ;
+            aSendDataStruct->SendBuffer = sendbuf ;
+            aSendDataStruct->Counter = 0 ;
+            aSendDataStruct->DataType = sendtype ;
+          }
+        aSendTimeMessage->time = _t ;
+        aSendTimeMessage->deltatime = _dt ;
+        for ( target = 0 ; target < _group_size ; target++ )
+          {
+            sts = send( aSendTimeMessage , 1 , 0 , _MPI_access->timeType() , target ,
+                        SendTimeRequestId ) ;
+            sts = send( sendbuf , sendcount , sendoffset , sendtype , target , SendDataRequestId ) ;
+            if ( _asynchronous )
+              {
+                aSendTimeStruct->Counter += 1 ;
+                (*_map_of_send_buffers)[ SendTimeRequestId ] = aSendTimeStruct ;
+                aSendDataStruct->Counter += 1 ;
+                (*_map_of_send_buffers)[ SendDataRequestId ] = aSendDataStruct ;
+              }
+            sendoffset += sendcount ;
+          }
+        if ( !_asynchronous )
+          {
+            delete aSendTimeMessage ;
+            if ( sendtype == MPI_INT )
+              {
+                delete [] (int *) sendbuf ;
+              }
+            else
+              {
+                delete [] (double *) sendbuf ;
+              }
+          }
+      }
+
+    //CheckTime + DoRecv + DoInterp
+    if ( recvbuf && recvcount )
+      {
+        for ( target = 0 ; target < _group_size ; target++ )
+          {
+            int recvsize = recvcount*_MPI_access->extent( recvtype ) ;
+            checkTime( recvcount , recvtype , target , false ) ;
+            //===========================================================================
+            //TODO : it is currently assumed that we have only 1 timestep before and after
+            //===========================================================================
+            if ( _time_interpolator && (*_time_messages)[target][0].time != -1 )
+              {
+                if ( (*_out_of_time)[target] )
+                  {
+                    cout << " =====================================================" << endl
+                         << "Recv" << _my_rank << " <-- target " << target << " t0 "
+                         << (*_time_messages)[target][0].time << " < t1 "
+                         << (*_time_messages)[target][1].time << " < t* " << _t << endl
+                         << " =====================================================" << endl ;
+                  }
+                if ( recvtype == MPI_INT )
+                  {
+                    _time_interpolator->doInterp( (*_time_messages)[target][0].time,
+                                                  (*_time_messages)[target][1].time, _t, recvcount ,
+                                                  _n_step_before, _n_step_after,
+                                                  (int **) &(*_data_messages)[target][0],
+                                                  (int **) &(*_data_messages)[target][1],
+                                                  &((int *)recvbuf)[target*recvcount] ) ;
+                  }
+                else
+                  {
+                    _time_interpolator->doInterp( (*_time_messages)[target][0].time,
+                                                  (*_time_messages)[target][1].time, _t, recvcount ,
+                                                  _n_step_before, _n_step_after,
+                                                  (double **) &(*_data_messages)[target][0],
+                                                  (double **) &(*_data_messages)[target][1],
+                                                  &((double *)recvbuf)[target*recvcount] ) ;
+                  }
+              }
+            else
+              {
+                char * buffdest = (char *) recvbuf ;
+                char * buffsrc = (char *) (*_data_messages)[target][1] ;
+                memcpy( &buffdest[target*recvsize] , buffsrc , recvsize ) ;
+              }
+          }
+      }
+
+    return sts ;
+  }
+
+  int MPIAccessDEC::allToAllvTime( void* sendbuf, int* sendcounts, int* sdispls,
+                                   MPI_Datatype sendtype ,
+                                   void* recvbuf, int* recvcounts, int* rdispls,
+                                   MPI_Datatype recvtype )
+  {
+    int sts ;
+    int target ;
+    int SendTimeRequestId ;
+    int SendDataRequestId ;
+
+    if ( _time_interpolator == NULL )
+      {
+        return MPI_ERR_OTHER ;
+      }
+
+    //Free of SendBuffers 
+    if ( _asynchronous )
+      {
+        checkSent() ;
+      }
+
+    /*
+      . DoSend :
+      + We create a TimeMessage (look at that structure in MPI_Access).
+      + If we are in asynchronous mode, we create two structures SendBuffStruct
+      aSendTimeStruct and aSendDataStruct that we fill.
+      + We fill the structure aSendTimeMessage with time/deltatime of
+      the current process. "deltatime" must be zero if it is the last time
+      step.
+      + After that, for each "target", we Send the TimeMessage and the part
+      of sendbuf corresponding to that target.
+      + If we are in asynchronous mode, we increment the counter and we add
+      aSendTimeStruct and aSendDataStruct to _MapOfSendBuffers with the
+      identifiers SendTimeRequestId and SendDataRequestId returned by
+      MPI_Access->Send(...).
+      + And if we are in synchronous mode we delete the SendMessages.
+    */
+    //DoSend : Time + SendBuff
+    SendBuffStruct * aSendTimeStruct = NULL ;
+    SendBuffStruct * aSendDataStruct = NULL ;
+    if ( sendbuf )
+      {
+        TimeMessage * aSendTimeMessage = new TimeMessage ;
+        if ( _asynchronous )
+          {
+            aSendTimeStruct = new SendBuffStruct ;
+            aSendTimeStruct->SendBuffer = aSendTimeMessage ;
+            aSendTimeStruct->Counter = 0 ;
+            aSendTimeStruct->DataType = _MPI_access->timeType() ;
+            aSendDataStruct = new SendBuffStruct ;
+            aSendDataStruct->SendBuffer = sendbuf ;
+            aSendDataStruct->Counter = 0 ;
+            aSendDataStruct->DataType = sendtype ;
+          }
+        aSendTimeMessage->time = _t ;
+        aSendTimeMessage->deltatime = _dt ;
+        for ( target = 0 ; target < _group_size ; target++ )
+          {
+            if ( sendcounts[target] )
+              {
+                sts = send( aSendTimeMessage , 1 , 0 , _MPI_access->timeType() , target ,
+                            SendTimeRequestId ) ;
+                sts = send( sendbuf , sendcounts[target] , sdispls[target] , sendtype , target ,
+                            SendDataRequestId ) ;
+                if ( _asynchronous )
+                  {
+                    aSendTimeStruct->Counter += 1 ;
+                    (*_map_of_send_buffers)[ SendTimeRequestId ] = aSendTimeStruct ;
+                    aSendDataStruct->Counter += 1 ;
+                    (*_map_of_send_buffers)[ SendDataRequestId ] = aSendDataStruct ;
+                  }
+              }
+          }
+        if ( !_asynchronous )
+          {
+            delete aSendTimeMessage ;
+            if ( sendtype == MPI_INT )
+              {
+                delete [] (int *) sendbuf ;
+              }
+            else
+              {
+                delete [] (double *) sendbuf ;
+              }
+          }
+      }
+
+    /*
+      . CheckTime + DoRecv + DoInterp
+      + For each target we call CheckTime
+      + If there is a TimeInterpolator and if the TimeMessage of the target
+      is not the first one, we call the interpolator, which returns its
+      results in the part of the recv buffer corresponding to the "target".
+      + If not, the data received for that first time step are copied
+      into the part of the recv buffer corresponding to the "target".
+    */
+    //CheckTime + DoRecv + DoInterp
+    if ( recvbuf )
+      {
+        for ( target = 0 ; target < _group_size ; target++ )
+          {
+            if ( recvcounts[target] )
+              {
+                int recvsize = recvcounts[target]*_MPI_access->extent( recvtype ) ;
+                checkTime( recvcounts[target] , recvtype , target , false ) ;
+                //===========================================================================
+                //TODO : it is currently assumed that we have only 1 timestep before and after
+                //===========================================================================
+                if ( _time_interpolator && (*_time_messages)[target][0].time != -1 )
+                  {
+                    if ( (*_out_of_time)[target] )
+                      {
+                        cout << " =====================================================" << endl
+                             << "Recv" << _my_rank << " <-- target " << target << " t0 "
+                             << (*_time_messages)[target][0].time << " < t1 "
+                             << (*_time_messages)[target][1].time << " < t* " << _t << endl
+                             << " =====================================================" << endl ;
+                      }
+                    if ( recvtype == MPI_INT )
+                      {
+                        _time_interpolator->doInterp( (*_time_messages)[target][0].time,
+                                                      (*_time_messages)[target][1].time, _t,
+                                                      recvcounts[target] , _n_step_before, _n_step_after,
+                                                      (int **) &(*_data_messages)[target][0],
+                                                      (int **) &(*_data_messages)[target][1],
+                                                      &((int *)recvbuf)[rdispls[target]] ) ;
+                      }
+                    else
+                      {
+                        _time_interpolator->doInterp( (*_time_messages)[target][0].time,
+                                                      (*_time_messages)[target][1].time, _t,
+                                                      recvcounts[target] , _n_step_before, _n_step_after,
+                                                      (double **) &(*_data_messages)[target][0],
+                                                      (double **) &(*_data_messages)[target][1],
+                                                      &((double *)recvbuf)[rdispls[target]] ) ;
+                      }
+                  }
+                else
+                  {
+                    char * buffdest = (char *) recvbuf ;
+                    char * buffsrc = (char *) (*_data_messages)[target][1] ;
+                    memcpy( &buffdest[rdispls[target]*_MPI_access->extent( recvtype )] , buffsrc ,
+                            recvsize ) ;
+                  }
+              }
+          }
+      }
+
+    return sts ;
+  }
+
+  /*
+    . CheckTime(recvcount , recvtype , target , UntilEnd)
+    + At the beginning, we read the first TimeMessage in
+    &(*_TimeMessages)[target][1] and the first DataMessage
+    in the allocated buffer (*_DataMessages)[target][1].
+    + The deltatime of a TimeMessage must be zero if it is the last one.
+    + While : _t(t*) is the current time of the process.
+    "while _t(t*) is greater than the time of the "target"
+    (*_TimeMessages)[target][1].time and
+    (*_TimeMessages)[target][1].deltatime is not zero",
+    so at the end of the while we have :
+    _t(t*) <= (*_TimeMessages)[target][1].time with
+    _t(t*) > (*_TimeMessages)[target][0].time
+    or we have the last TimeMessage of the "target".
+    + If it is the finalization of the recv of TimeMessages and
+    DataMessages (UntilEnd value is true), we execute the while
+    until (*_TimeMessages)[target][1].deltatime is zero.
+    + In the while :
+    We copy the last TimeMessage into the previous TimeMessage and
+    we read a new TimeMessage.
+    We delete the previous DataMessage.
+    We copy the last DataMessage pointer into the previous one.
+    We allocate a new last DataMessage buffer
+    (*_DataMessages)[target][1] and we read the corresponding
+    data into that buffer.
+    + If the current time of the current process is greater than the
+    last time (*_TimeMessages)[target][1].time of the target, we set
+    (*_OutOfTime)[target] to true.
+    (*_TimeMessages)[target][1].deltatime is then zero.
+  */
+  int MPIAccessDEC::checkTime( int recvcount , MPI_Datatype recvtype , int target ,
+                               bool UntilEnd )
+  {
+    int sts = MPI_SUCCESS ;
+    int RecvTimeRequestId ;
+    int RecvDataRequestId ;
+    //For now we look for _time_messages[target][0] < _t <= _time_messages[target][1]
+    //===========================================================================
+    //TODO : it is currently assumed that we have only 1 timestep before and after
+    //       instead of _n_step_before and _n_step_after ...
+    //===========================================================================
+    (*_data_messages_recv_count)[target] = recvcount ;
+    (*_data_messages_type)[target] = recvtype ;
+    if ( (*_time_messages)[target][1].time == -1 )
+      {
+        (*_time_messages)[target][0] = (*_time_messages)[target][1] ;
+        sts = recv( &(*_time_messages)[target][1] , 1 , _MPI_access->timeType() ,
+                    target , RecvTimeRequestId ) ;
+        (*_data_messages)[target][0] = (*_data_messages)[target][1] ;
+        if ( recvtype == MPI_INT )
+          {
+            (*_data_messages)[target][1] = new int[recvcount] ;
+          }
+        else
+          {
+            (*_data_messages)[target][1] = new double[recvcount] ;
+          }
+        sts = recv( (*_data_messages)[target][1] , recvcount , recvtype , target ,
+                    RecvDataRequestId ) ;
+      }
+    else
+      {
+        while ( ( _t > (*_time_messages)[target][1].time || UntilEnd ) &&
+                (*_time_messages)[target][1].deltatime != 0 )
+          {
+            (*_time_messages)[target][0] = (*_time_messages)[target][1] ;
+            sts = recv( &(*_time_messages)[target][1] , 1 , _MPI_access->timeType() ,
+                        target , RecvTimeRequestId ) ;
+            if ( UntilEnd )
+              {
+                cout << "CheckTime" << _my_rank << " TimeMessage target " << target
+                     << " RecvTimeRequestId " << RecvTimeRequestId << " MPITag "
+                     << _MPI_access->recvMPITag(target) << endl ;
+              }
+            if ( recvtype == MPI_INT )
+              {
+                delete [] (int *) (*_data_messages)[target][0] ;
+              }
+            else
+              {
+                delete [] (double *) (*_data_messages)[target][0] ;
+              }
+            (*_data_messages)[target][0] = (*_data_messages)[target][1] ;
+            if ( recvtype == MPI_INT )
+              {
+                (*_data_messages)[target][1] = new int[recvcount] ;
+              }
+            else
+              {
+                (*_data_messages)[target][1] = new double[recvcount] ;
+              }
+            sts = recv( (*_data_messages)[target][1] , recvcount , recvtype , target ,
+                        RecvDataRequestId ) ;
+            if ( UntilEnd )
+              {
+                cout << "CheckTime" << _my_rank << " DataMessage target " << target
+                     << " RecvDataRequestId " << RecvDataRequestId << " MPITag "
+                     << _MPI_access->recvMPITag(target) << endl ;
+              }
+          }
+
+        if ( _t > (*_time_messages)[target][0].time &&
+             _t <= (*_time_messages)[target][1].time )
+          {
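+            // t* lies in ]t0, t1] : the target is still "in time", nothing to do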
+          }
+        else
+          {
+            (*_out_of_time)[target] = true ;
+          }
+      }
+    return sts ;
+  }
+
+  /*
+    . CheckSent() :
+    + calls SendRequestIds of MPI_Access in order to get all
+    RequestIds of SendMessages of all "targets".
+    + For each RequestId, CheckSent calls "Test" of MPI_Access in order
+    to know if the buffer is "free" (flag = true). If it is the
+    FinalCheckSent (WithWait = true), we call Wait instead of Test.
+    + If the buffer is "free", the counter of the structure SendBuffStruct
+    (from _MapOfSendBuffers) is decremented.
+    + If that counter is zero, we delete the TimeMessage or the
+    SendBuffer according to the DataType.
+    + And we delete the structure SendBuffStruct before erasing
+    that item from _MapOfSendBuffers.
+  */
+  int MPIAccessDEC::checkSent(bool WithWait)
+  {
+    int sts = MPI_SUCCESS ;
+    int flag = WithWait ;
+    int size = _MPI_access->sendRequestIdsSize() ;
+    int * ArrayOfSendRequests = new int[ size ] ;
+    int nSendRequest = _MPI_access->sendRequestIds( size , ArrayOfSendRequests ) ;
+    bool SendTrace = false ;
+    int i ;
+    for ( i = 0 ; i < nSendRequest ; i++ )
+      {
+        if ( WithWait )
+          {
+            if (SendTrace)
+              {
+                cout << "CheckSent" << _my_rank << " " << i << "./" << nSendRequest
+                    << " SendRequestId " << ArrayOfSendRequests[i] << " MPITarget "
+                    << _MPI_access->MPITarget(ArrayOfSendRequests[i]) << " MPITag "
+                    << _MPI_access->MPITag(ArrayOfSendRequests[i]) << " Wait :" << endl ;
+              }
+            sts = _MPI_access->wait( ArrayOfSendRequests[i] ) ;
+          }
+        else
+          {
+            sts = _MPI_access->test( ArrayOfSendRequests[i] , flag ) ;
+          }
+        if ( flag )
+          {
+            _MPI_access->deleteRequest( ArrayOfSendRequests[i] ) ;
+            if ( SendTrace )
+              {
+                cout << "CheckSent" << _my_rank << " " << i << "./" << nSendRequest
+                     << " SendRequestId " << ArrayOfSendRequests[i]
+                     << " flag " << flag
+                     << " Counter " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter
+                     << " DataType " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->DataType
+                     << endl ;
+              }
+            (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter -= 1 ;
+            if ( SendTrace )
+              {
+                if ( (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->DataType == 
+                     _MPI_access->timeType() )
+                  {
+                    cout << "CheckTimeSent" << _my_rank << " Request " ;
+                  }
+                else
+                  {
+                    cout << "CheckDataSent" << _my_rank << " Request " ;
+                  }
+                cout << ArrayOfSendRequests[i]
+                     << " _map_of_send_buffers->SendBuffer "
+                     << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->SendBuffer
+                     << " Counter " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter
+                     << endl ;
+              }
+            if ( (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter  == 0 )
+              {
+                if ( SendTrace )
+                  {
+                    cout << "CheckSent" << _my_rank << " SendRequestId " << ArrayOfSendRequests[i]
+                         << " Counter " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter
+                         << " flag " << flag << " SendBuffer "
+                         << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->SendBuffer
+                         << " deleted. Erase in _map_of_send_buffers :" << endl ;
+                  }
+                if ( (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->DataType ==
+                     _MPI_access->timeType() )
+                  {
+                    delete (TimeMessage * ) (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->SendBuffer ;
+                  }
+                else
+                  {
+                    if ( (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->DataType == MPI_INT )
+                      {
+                        delete [] (int *) (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->SendBuffer ;
+                      }
+                    else
+                      {
+                        delete [] (double *) (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->SendBuffer ;
+                      }
+                  }
+                delete (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ] ;
+              }
+            if ( SendTrace )
+              {
+                cout << "CheckSent" << _my_rank << " Erase in _map_of_send_buffers SendRequestId "
+                     << ArrayOfSendRequests[i] << endl ;
+              }
+            (*_map_of_send_buffers).erase( ArrayOfSendRequests[i] ) ;
+          }
+        else if ( SendTrace )
+          {
+            cout << "CheckSent" << _my_rank << " " << i << "./" << nSendRequest
+                 << " SendRequestId " << ArrayOfSendRequests[i]
+                 << " flag " << flag
+                 << " Counter " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->Counter
+                 << " DataType " << (*_map_of_send_buffers)[ ArrayOfSendRequests[i] ]->DataType
+                 << endl ;
+          }
+      }
+    if ( SendTrace )
+      {
+        _MPI_access->check() ;
+      }
+    delete [] ArrayOfSendRequests ;
+    return sts ;
+  }
+
+  int MPIAccessDEC::checkFinalRecv()
+  {
+    int sts = MPI_SUCCESS ;
+    if ( _time_interpolator )
+      {
+        int target ;
+        for ( target = 0 ; target < _group_size ; target++ )
+          {
+            if ( (*_data_messages)[target][0] != NULL )
+              {
+                sts = checkTime( (*_data_messages_recv_count)[target] , (*_data_messages_type)[target] ,
+                                 target , true ) ;
+                if ( (*_data_messages_type)[target] == MPI_INT )
+                  {
+                    delete [] (int *) (*_data_messages)[target][0] ;
+                  }
+                else
+                  {
+                    delete [] (double *) (*_data_messages)[target][0] ;
+                  }
+                (*_data_messages)[target][0] = NULL ;
+                if ( (*_data_messages)[target][1] != NULL )
+                  {
+                    if ( (*_data_messages_type)[target] == MPI_INT )
+                      {
+                        delete [] (int *) (*_data_messages)[target][1] ;
+                      }
+                    else
+                      {
+                        delete [] (double *) (*_data_messages)[target][1] ;
+                      }
+                    (*_data_messages)[target][1] = NULL ;
+                  }
+              }
+          }
+      }
+    return sts ;
+  }
+
+  ostream & operator<< (ostream & f ,const TimeInterpolationMethod & interpolationmethod )
+  {
+    switch (interpolationmethod)
+      {
+      case WithoutTimeInterp :
+        f << " WithoutTimeInterpolation ";
+        break;
+      case LinearTimeInterp :
+        f << " LinearTimeInterpolation ";
+        break;
+      default :
+        f << " UnknownTimeInterpolation ";
+        break;
+      }
+
+    return f;
+  }
+}
diff --git a/src/ParaMEDMEM/MPIAccessDEC.hxx b/src/ParaMEDMEM/MPIAccessDEC.hxx
new file mode 100644 (file)
index 0000000..e381ff6
--- /dev/null
@@ -0,0 +1,179 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __MPIACCESSDEC_HXX__
+#define __MPIACCESSDEC_HXX__
+
+#include "MPIAccess.hxx"
+#include "DEC.hxx"
+#include "LinearTimeInterpolator.hxx"
+
+#include <map>
+#include <iostream>
+
+namespace ParaMEDMEM
+{
+  class MPIAccessDEC
+  {
+  public:  
+    MPIAccessDEC( const ProcessorGroup& local_group, const ProcessorGroup& distant_group,
+                  bool Asynchronous = true );
+    virtual ~MPIAccessDEC();
+    MPIAccess * getMPIAccess() { return _MPI_access; }
+    const MPI_Comm* getComm() { return _MPI_union_group->getComm(); }
+    void asynchronous( bool Asynchronous = true ) { _asynchronous = Asynchronous; }
+    void setTimeInterpolator( TimeInterpolationMethod anInterp , double InterpPrecision=0 ,
+                              int n_step_before=1, int nStepAfter=1 );
+
+    void setTime( double t ) { _t = t; _dt = -1; }
+    void setTime( double t , double dt ) { _t = t; _dt = dt; }
+    bool outOfTime( int target ) { return (*_out_of_time)[target]; }
+
+    int send( void* sendbuf, int sendcount , MPI_Datatype sendtype , int target );
+    int recv( void* recvbuf, int recvcount , MPI_Datatype recvtype , int target );
+    int recv( void* recvbuf, int recvcount , MPI_Datatype recvtype , int target ,
+              int &RecvRequestId , bool Asynchronous=false );
+    int sendRecv( void* sendbuf, int sendcount , MPI_Datatype sendtype ,
+                  void* recvbuf, int recvcount , MPI_Datatype recvtype , int target );
+
+    int allToAll( void* sendbuf, int sendcount, MPI_Datatype sendtype ,
+                  void* recvbuf, int recvcount, MPI_Datatype recvtype );
+    int allToAllv( void* sendbuf, int* sendcounts, int* sdispls, MPI_Datatype sendtype ,
+                   void* recvbuf, int* recvcounts, int* rdispls, MPI_Datatype recvtype );
+
+    int allToAllTime( void* sendbuf, int sendcount , MPI_Datatype sendtype ,
+                      void* recvbuf, int recvcount , MPI_Datatype recvtype );
+    int allToAllvTime( void* sendbuf, int* sendcounts, int* sdispls,
+                       MPI_Datatype sendtype ,
+                       void* recvbuf, int* recvcounts, int* rdispls,
+                       MPI_Datatype recvtype );
+    int checkTime( int recvcount , MPI_Datatype recvtype , int target , bool UntilEnd );
+    int checkSent(bool WithWait=false);
+    int checkFinalSent() { return checkSent( true ); }
+    int checkFinalRecv();
+  protected:
+    int send( void* sendbuf, int sendcount , int sendoffset , MPI_Datatype sendtype ,
+              int target, int &SendRequestId );
+    int recv( void* recvbuf, int recvcount , int recvoffset , MPI_Datatype recvtype ,
+              int target, int &RecvRequestId );
+    int sendRecv( void* sendbuf, int sendcount , int sendoffset ,
+                  MPI_Datatype sendtype , 
+                  void* recvbuf, int recvcount , int recvoffset ,
+                  MPI_Datatype recvtype , int target ,
+                  int &SendRequestId ,int &RecvRequestId );
+  private :
+    bool _asynchronous;
+    MPIProcessorGroup* _MPI_union_group;
+
+    TimeInterpolator* _time_interpolator;
+    int _n_step_before;
+    int _n_step_after;
+
+    int _my_rank;
+    int _group_size;
+    MPIAccess* _MPI_access;
+
+    // Current time and deltatime of current process
+    double _t;
+    double _dt;
+
+    // TimeMessages from each target _TimeMessages[target][Step] : TimeMessage
+    std::vector< std::vector< TimeMessage > > *_time_messages;
+    // Corresponding DataMessages from each target _DataMessages[target][~TimeStep]
+    std::vector< bool >* _out_of_time;
+    std::vector< int >* _data_messages_recv_count;
+    std::vector< MPI_Datatype >* _data_messages_type;
+    std::vector< std::vector< void * > >* _data_messages;
+
+    typedef struct
+    {
+      void * SendBuffer;
+      int Counter;
+      MPI_Datatype DataType;
+    } SendBuffStruct;
+    std::map< int ,  SendBuffStruct * > *_map_of_send_buffers;
+  };
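+
+  // For illustration only, a minimal sketch of a typical use (it assumes two
+  // disjoint ProcessorGroups "sources" and "targets" built beforehand and a
+  // time loop driven by the caller; sendbuf must be allocated with new[]
+  // because MPIAccessDEC deletes it once the sends are completed) :
+  //
+  //   MPIAccessDEC aDEC( sources , targets , /*Asynchronous=*/true ) ;
+  //   aDEC.setTimeInterpolator( LinearTimeInterp ) ;
+  //   aDEC.setTime( t , dt ) ;
+  //   aDEC.allToAllTime( sendbuf , sendcount , MPI_DOUBLE ,
+  //                      recvbuf , recvcount , MPI_DOUBLE ) ;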
+
+  inline int MPIAccessDEC::send( void* sendbuf, int sendcount , MPI_Datatype sendtype , int target )
+  {
+    int SendRequestId;
+    int sts;
+    if ( _asynchronous )
+      {
+        sts = _MPI_access->ISend( sendbuf , sendcount , sendtype , target ,
+                                  SendRequestId );
+      }
+    else
+      {
+        sts = _MPI_access->send( sendbuf , sendcount , sendtype , target ,
+                                 SendRequestId );
+        if ( sts == MPI_SUCCESS )
+          free( sendbuf );
+      }
+    return sts;
+  }
+
+  inline int MPIAccessDEC::recv( void* recvbuf, int recvcount , MPI_Datatype recvtype , int target )
+  {
+    int RecvRequestId;
+    int sts;
+    if ( _asynchronous )
+      sts = _MPI_access->IRecv( recvbuf , recvcount , recvtype , target , RecvRequestId );
+    else
+      sts = _MPI_access->recv( recvbuf , recvcount , recvtype , target ,  RecvRequestId );
+    return sts;
+  }
+
+  inline int MPIAccessDEC::recv( void* recvbuf, int recvcount , MPI_Datatype recvtype ,
+                                 int target ,  int &RecvRequestId , bool Asynchronous )
+  {
+    int sts;
+    if ( Asynchronous )
+      sts = _MPI_access->IRecv( recvbuf , recvcount , recvtype , target ,
+                                RecvRequestId );
+    else
+      sts = _MPI_access->recv( recvbuf , recvcount , recvtype , target ,
+                               RecvRequestId );
+    return sts;
+  }
+  
+  inline int MPIAccessDEC::sendRecv( void* sendbuf, int sendcount , MPI_Datatype sendtype ,
+                                     void* recvbuf, int recvcount , MPI_Datatype recvtype ,
+                                     int target )
+  {
+    int SendRequestId;
+    int RecvRequestId;
+    int sts;
+    if ( _asynchronous )
+      sts = _MPI_access->ISendRecv( sendbuf , sendcount , sendtype , target ,
+                                    SendRequestId ,
+                                    recvbuf , recvcount , recvtype , target ,
+                                    RecvRequestId );
+    else
+      sts = _MPI_access->sendRecv( sendbuf , sendcount , sendtype , target ,
+                                   SendRequestId ,
+                                   recvbuf , recvcount , recvtype , target ,
+                                   RecvRequestId );
+    return sts;
+  }
+
+  std::ostream & operator<< (std::ostream &,const TimeInterpolationMethod &);
+}
+
+#endif
diff --git a/src/ParaMEDMEM/MPIProcessorGroup.cxx b/src/ParaMEDMEM/MPIProcessorGroup.cxx
new file mode 100644 (file)
index 0000000..922f209
--- /dev/null
@@ -0,0 +1,254 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "CommInterface.hxx"
+#include "InterpolationUtils.hxx"
+
+#include <iostream>
+#include <set>
+#include <algorithm>
+#include "mpi.h"
+
+using namespace std;
+
+
+namespace ParaMEDMEM
+{
+  /*!
+   * \anchor MPIProcessorGroup-det
+   * \class MPIProcessorGroup
+   *
+   * \section processor_group_overview Overview
+   * The MPIProcessorGroup class is used to set up processor groups that help to define
+   * the MPI topology of the couplings. They can be set up in various ways, the most common being
+   * the use of the \c MPIProcessorGroup(Comminterface, int pfirst, int plast)
+   * constructor.
+   *
+   * The following code excerpt creates two processor groups on respectively 3 and 2 processors.
+   \verbatim
+   int main()
+   {
+   MPI_Init(&argc,&argv);
+   CommInterface comm_interface;
+   MPIProcessorGroup codeA_group(comm_interface, 0, 2);  // groups processors 0, 1 and 2
+   MPIProcessorGroup codeB_group(comm_interface, 3, 4);  // groups processors 3 and 4
+
+   ...
+   }
+   \endverbatim
+  */
+
+
+  /*! 
+   * Creates a processor group that is based on all the
+   MPI_COMM_WORLD processors. This routine must be called by all processors in MPI_COMM_WORLD.
+   \param interface CommInterface object giving access to the MPI
+   communication layer
+  */
+  MPIProcessorGroup::MPIProcessorGroup(const CommInterface& interface):
+    ProcessorGroup(interface),_world_comm(MPI_COMM_WORLD)
+  {
+    _comm=_world_comm;
+    _comm_interface.commGroup(_world_comm, &_group);
+    int size;
+    _comm_interface.commSize(_world_comm,&size);
+    for (int i=0; i<size; i++)
+      _proc_ids.insert(i);
+
+  }
+
+  /*! Creates a processor group that is based on the processors included in \a proc_ids.
+    This routine must be called by all processors in MPI_COMM_WORLD.
+
+    \param interface CommInterface object giving access to the MPI
+    communication layer
+    \param proc_ids set of ids that are to be integrated in the group. The id numbers are
+    to be understood as MPI_COMM_WORLD ranks.
+  */
+
+  MPIProcessorGroup::MPIProcessorGroup(const CommInterface& interface, set<int> proc_ids, const MPI_Comm& world_comm):
+    ProcessorGroup(interface, proc_ids), _world_comm(world_comm)
+  {
+    updateMPISpecificAttributes();
+  }
+
+
+  void MPIProcessorGroup::updateMPISpecificAttributes()
+  {
+    //Creation of a communicator 
+    MPI_Group group_world;
+  
+    int size_world;
+    _comm_interface.commSize(_world_comm,&size_world);
+    int rank_world;
+    _comm_interface.commRank(_world_comm,&rank_world);
+    _comm_interface.commGroup(_world_comm, &group_world);
+
+    int* ranks=new int[_proc_ids.size()];
+   
+    // copying proc_ids in ranks
+    copy<set<int>::const_iterator,int*> (_proc_ids.begin(), _proc_ids.end(), ranks);
+    for (int i=0; i< (int)_proc_ids.size();i++)
+      if (ranks[i]>size_world-1)
+        {
+          delete[] ranks;
+          _comm_interface.groupFree(&group_world);  // MPI_Group is a C structure and won't get de-allocated automatically?
+          throw INTERP_KERNEL::Exception("invalid rank in set<int> argument of MPIProcessorGroup constructor");
+        }
+      
+    _comm_interface.groupIncl(group_world, _proc_ids.size(), ranks, &_group);
+  
+    _comm_interface.commCreate(_world_comm, _group, &_comm);
+
+    // clean-up
+    delete[] ranks;
+    _comm_interface.groupFree(&group_world);  // MPI_Group is a C structure and won't get de-allocated automatically?
+  }
+
+  /*! Creates a processor group that is based on the processors between \a pstart and \a pend.
+    This routine must be called by all processors in MPI_COMM_WORLD.
+
+    \param comm_interface CommInterface object giving access to the MPI
+    communication layer
+    \param pstart id in MPI_COMM_WORLD of the first processor in the group
+    \param pend id in MPI_COMM_WORLD of the last processor in the group
+  */
+  MPIProcessorGroup::MPIProcessorGroup (const CommInterface& comm_interface, int pstart, int pend, const MPI_Comm& world_comm): ProcessorGroup(comm_interface,pstart,pend),_world_comm(world_comm)
+  {
+    //Creation of a communicator 
+    MPI_Group group_world;
+  
+    int size_world;
+    _comm_interface.commSize(_world_comm,&size_world);
+    int rank_world;
+    _comm_interface.commRank(_world_comm,&rank_world);
+    _comm_interface.commGroup(_world_comm, &group_world);
+
+    if (pend>size_world-1 || pend <pstart || pstart<0)
+      {
+        _comm_interface.groupFree(&group_world);
+        throw INTERP_KERNEL::Exception("invalid argument in MPIProcessorGroup constructor (comm,pfirst,plast)");
+      }
+    int nprocs=pend-pstart+1;
+    int* ranks=new int[nprocs];
+    for (int i=pstart; i<=pend;i++)
+      {
+        ranks[i-pstart]=i;
+      }
+
+    _comm_interface.groupIncl(group_world, nprocs, ranks, &_group);
+  
+    _comm_interface.commCreate(_world_comm, _group, &_comm);
+
+    // clean-up
+    delete[] ranks;
+    _comm_interface.groupFree(&group_world);  // MPI_Group is a C structure and won't get de-allocated automatically?
+  }
+
+  MPIProcessorGroup::MPIProcessorGroup (const ProcessorGroup& proc_group, set<int> proc_ids) :
+    ProcessorGroup(proc_group.getCommInterface()),_world_comm(MPI_COMM_WORLD)
+  {
+    cout << "MPIProcessorGroup (const ProcessorGroup& proc_group, set<int> proc_ids)" <<endl;
+    cout << "Not implemented yet !"<<endl;
+    exit(1);
+  }
+
+  MPIProcessorGroup::MPIProcessorGroup(const MPIProcessorGroup& other):ProcessorGroup(other),_world_comm(other._world_comm)
+  {
+    updateMPISpecificAttributes();
+  }
+
+  MPIProcessorGroup::~MPIProcessorGroup()
+  {
+    _comm_interface.groupFree(&_group);
+    if (_comm!=_world_comm && _comm !=MPI_COMM_NULL)
+      _comm_interface.commFree(&_comm);
+  
+  }
+
+  /*! Translation of a rank id between two processor groups. This method takes rank \a rank,
+    defined on the group pointed to by \a group, and translates it to the corresponding rank
+    on the current (local) processor group.
+    \param group group on which the input rank is defined
+    \param rank rank, on group \a group, of the processor which is to be translated
+    \return rank on the local group
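+
+    For instance, reusing the two groups of the overview example (an illustrative, hedged sketch
+    assuming the default MPI_COMM_WORLD rank ordering) :
+    \verbatim
+    MPIProcessorGroup world_group(comm_interface);         // all ranks of MPI_COMM_WORLD
+    int r = world_group.translateRank(&codeB_group, 0);    // r == 3 : rank 0 of codeB_group is world rank 3
+    \endverbatim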
+  */
+  int MPIProcessorGroup::translateRank(const ProcessorGroup* group, int rank) const
+  {
+    const MPIProcessorGroup* targetgroup=dynamic_cast<const MPIProcessorGroup*>(group);
+    int local_rank;
+    MPI_Group_translate_ranks(targetgroup->_group, 1, &rank, _group, &local_rank);
+    return local_rank;
+  }
+  
+  /*!Creates a processor group that is the complement of the current group 
+    inside MPI_COMM_WORLD
+    \return pointer to the new ProcessorGroup structure.
+  */
+  ProcessorGroup* MPIProcessorGroup::createComplementProcGroup() const
+  {
+    set <int> procs;
+    int world_size=_comm_interface.worldSize();
+    for (int i=0; i<world_size; i++)
+      procs.insert(i);
+    for (set<int>::const_iterator iter=_proc_ids.begin(); iter!= _proc_ids.end(); iter++)
+      procs.erase(*iter);
+    
+    return new MPIProcessorGroup(_comm_interface, procs, _world_comm);
+    
+  }
+
+  ProcessorGroup *MPIProcessorGroup::deepCpy() const
+  {
+    return new MPIProcessorGroup(*this);
+  }
+
+  /*! Adds the processors of group \a group to the local group.
+    \param group group that is to be fused with current group
+    \return new group formed by the fusion of local group and \a group.
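+
+    For instance, fusing the two groups of the overview example yields a group spanning
+    ranks 0 to 4 (illustrative sketch; the caller is expected to delete the returned group) :
+    \verbatim
+    ProcessorGroup* unionAB = codeA_group.fuse(codeB_group);   // ranks 0, 1, 2, 3 and 4
+    \endverbatim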
+  */
+  ProcessorGroup*  MPIProcessorGroup::fuse (const ProcessorGroup& group) const
+  {
+    set <int> procs = _proc_ids;
+    const set<int>& distant_proc_ids = group.getProcIDs();
+    for (set<int>::const_iterator iter=distant_proc_ids.begin(); iter!=distant_proc_ids.end(); iter++)
+      {
+        procs.insert(*iter);
+      }
+    return new MPIProcessorGroup(_comm_interface, procs, _world_comm);
+  }
+
+  int MPIProcessorGroup::myRank() const
+  { 
+    int rank;
+    MPI_Comm_rank(_comm,&rank);
+    return rank;
+  }
+  
+  ProcessorGroup* MPIProcessorGroup::createProcGroup() const
+  {
+    set <int> procs;
+    for (set<int>::const_iterator iter=_proc_ids.begin(); iter!= _proc_ids.end(); iter++)
+      procs.insert(*iter);
+  
+    return new MPIProcessorGroup(_comm_interface, procs, _world_comm);
+
+  }
+}
diff --git a/src/ParaMEDMEM/MPIProcessorGroup.hxx b/src/ParaMEDMEM/MPIProcessorGroup.hxx
new file mode 100644 (file)
index 0000000..7c39ed2
--- /dev/null
@@ -0,0 +1,60 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __MPIPROCESSORGROUP_HXX__
+#define __MPIPROCESSORGROUP_HXX__
+
+#include "ProcessorGroup.hxx"
+
+#include <set>
+#include <mpi.h>
+
+namespace ParaMEDMEM
+{
+  class CommInterface;
+
+  class MPIProcessorGroup : public ProcessorGroup
+  {
+  public:
+    MPIProcessorGroup(const CommInterface& interface);
+    MPIProcessorGroup(const CommInterface& interface, std::set<int> proc_ids, const MPI_Comm& world_comm=MPI_COMM_WORLD);
+    MPIProcessorGroup (const ProcessorGroup& proc_group, std::set<int> proc_ids);
+    MPIProcessorGroup(const CommInterface& interface,int pstart, int pend, const MPI_Comm& world_comm=MPI_COMM_WORLD);
+    MPIProcessorGroup(const MPIProcessorGroup& other);
+    virtual ~MPIProcessorGroup();
+    virtual ProcessorGroup *deepCpy() const;
+    virtual ProcessorGroup* fuse (const ProcessorGroup&) const;
+    void intersect (ProcessorGroup&) { }
+    int myRank() const;
+    bool containsMyRank() const { int rank; MPI_Group_rank(_group, &rank); return (rank!=MPI_UNDEFINED); }
+    int translateRank(const ProcessorGroup* group, int rank) const;
+    const MPI_Comm* getComm() const { return &_comm; }
+    ProcessorGroup* createComplementProcGroup() const;
+    ProcessorGroup* createProcGroup() const;
+    MPI_Comm getWorldComm() { return _world_comm; }
+  private:
+    void updateMPISpecificAttributes();
+  private:
+    const MPI_Comm _world_comm;  // just an observer - current instance is not responsible for the management of this comm
+    MPI_Group _group;
+    MPI_Comm _comm;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/MxN_Mapping.cxx b/src/ParaMEDMEM/MxN_Mapping.cxx
new file mode 100644 (file)
index 0000000..05ca099
--- /dev/null
@@ -0,0 +1,317 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "CommInterface.hxx" 
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "MPIAccessDEC.hxx"
+#include "MxN_Mapping.hxx"
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  MxN_Mapping::MxN_Mapping()
+  {
+  }
+
+
+  MxN_Mapping::MxN_Mapping(const ProcessorGroup& source_group, const ProcessorGroup& target_group,const DECOptions& dec_options)
+    : DECOptions(dec_options),_union_group(source_group.fuse(target_group))
+  {
+    _access_DEC = new MPIAccessDEC(source_group,target_group,getAsynchronous());
+    _access_DEC->setTimeInterpolator(getTimeInterpolationMethod());
+    _send_proc_offsets.resize(_union_group->size()+1,0);
+    _recv_proc_offsets.resize(_union_group->size()+1,0);
+  
+  }
+
+  MxN_Mapping::~MxN_Mapping()
+  {
+    delete _union_group;
+    delete _access_DEC;
+  }
+
+
+  /*!
+    Method registering a new element for correspondence with a distant element
+    \param distant_proc proc rank of the distant processor (in terms of the union group)
+    \param distant_element id of the element on the distant processor
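+
+    A minimal usage sketch (hedged; the procs, element ids, \c sendvalues and \c field are
+    placeholders) : each processor registers the distant elements it will feed, prepares the
+    communication pattern, then exchanges the values with sendRecv :
+    \verbatim
+    MxN_Mapping mapping(source_group, target_group, dec_options);
+    mapping.addElementFromSource(2, 45);   // a value destined for element 45 on proc 2 (union-group rank)
+    mapping.addElementFromSource(3, 12);   // a value destined for element 12 on proc 3
+    mapping.prepareSendRecv();
+    mapping.sendRecv(sendvalues, field);   // sendvalues : nbcomp values per registered element, in registration order
+    \endverbatim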
+  */
+  void MxN_Mapping::addElementFromSource(int distant_proc, int distant_element)
+  {
+    _sending_ids.push_back(make_pair(distant_proc,distant_element));
+    for (int i=distant_proc; i<_union_group->size(); i++)
+      _send_proc_offsets[i+1]++;
+  }
+
+  void MxN_Mapping::initialize()
+  {
+    _sending_ids.clear();
+    std::fill(_send_proc_offsets.begin(),_send_proc_offsets.end(),0);
+  }
+
+  void MxN_Mapping::prepareSendRecv()
+  {
+    CommInterface comm_interface=_union_group->getCommInterface();
+    // sending count pattern
+    int* nbsend=new int[_union_group->size()];
+    int* nbrecv=new int[_union_group->size()];
+    for (int i=0; i<_union_group->size(); i++)
+      {
+        nbsend[i]=_send_proc_offsets[i+1]-_send_proc_offsets[i];
+      }
+  
+    MPIProcessorGroup* group = static_cast<MPIProcessorGroup*>(_union_group);
+    const MPI_Comm* comm=group->getComm();
+    comm_interface.allToAll(nbsend, 1, MPI_INT,
+                            nbrecv, 1, MPI_INT,
+                            *comm);
+         
+    for (int i=0; i<_union_group->size(); i++)
+      {
+        for (int j=i+1;j<_union_group->size()+1; j++)
+          _recv_proc_offsets[j]+=nbrecv[i];
+    
+      } 
+
+    delete[] nbsend;
+    delete[] nbrecv;
+
+    _recv_ids.resize(_recv_proc_offsets[_union_group->size()]);
+    int* isendbuf=0;
+    int* irecvbuf=0;
+    if (_sending_ids.size()>0)
+      isendbuf = new int[_sending_ids.size()];
+    if (_recv_ids.size()>0)  
+      irecvbuf = new int[_recv_ids.size()];
+    int* sendcounts = new int[_union_group->size()];
+    int* senddispls=new int[_union_group->size()];
+    int* recvcounts=new int[_union_group->size()];
+    int* recvdispls=new int[_union_group->size()];
+    for (int i=0; i< _union_group->size(); i++)
+      {
+        sendcounts[i]=_send_proc_offsets[i+1]-_send_proc_offsets[i];
+        senddispls[i]=_send_proc_offsets[i];
+        recvcounts[i]=_recv_proc_offsets[i+1]-_recv_proc_offsets[i];
+        recvdispls[i]=_recv_proc_offsets[i];
+      }
+    vector<int> offsets = _send_proc_offsets;
+    for (int i=0; i<(int)_sending_ids.size();i++)
+      {
+        int iproc = _sending_ids[i].first;
+        isendbuf[offsets[iproc]]=_sending_ids[i].second;
+        offsets[iproc]++;
+      }
+    comm_interface.allToAllV(isendbuf, sendcounts, senddispls, MPI_INT,
+                             irecvbuf, recvcounts, recvdispls, MPI_INT,
+                             *comm);
+                           
+    for (int i=0; i< _recv_proc_offsets[_union_group->size()]; i++)
+      _recv_ids[i]=irecvbuf[i];                           
+    if (_sending_ids.size()>0)
+      delete[] isendbuf;
+    if (_recv_ids.size()>0)  
+      delete[] irecvbuf;
+    delete[] sendcounts;
+    delete[]recvcounts;
+    delete[]senddispls;
+    delete[] recvdispls;
+  }
+
+  /*! Exchanges field data between the two groups of processes.
+   *
+   * \param sendfield array of values to be sent, ordered as the (distant_proc, distant_element)
+   * pairs registered by addElementFromSource (nbcomp values per registered element)
+   * \param field MEDCoupling field into which the received values are accumulated
+   *
+   * The values attached to the ids that were registered by the addElementFromSource
+   * method are exchanged.
+   */ 
+  void MxN_Mapping::sendRecv(double* sendfield, MEDCouplingFieldDouble& field) const 
+  {
+    CommInterface comm_interface=_union_group->getCommInterface();
+    const MPIProcessorGroup* group = static_cast<const MPIProcessorGroup*>(_union_group);
+    int nbcomp=field.getArray()->getNumberOfComponents();
+    double* sendbuf=0;
+    double* recvbuf=0;
+    if (_sending_ids.size() >0)
+      sendbuf = new double[_sending_ids.size()*nbcomp];
+    if (_recv_ids.size()>0)
+      recvbuf = new double[_recv_ids.size()*nbcomp];
+    
+    int* sendcounts = new int[_union_group->size()];
+    int* senddispls=new int[_union_group->size()];
+    int* recvcounts=new int[_union_group->size()];
+    int* recvdispls=new int[_union_group->size()];
+  
+    for (int i=0; i< _union_group->size(); i++)
+      {
+        sendcounts[i]=nbcomp*(_send_proc_offsets[i+1]-_send_proc_offsets[i]);
+        senddispls[i]=nbcomp*(_send_proc_offsets[i]);
+        recvcounts[i]=nbcomp*(_recv_proc_offsets[i+1]-_recv_proc_offsets[i]);
+        recvdispls[i]=nbcomp*(_recv_proc_offsets[i]);
+      }
+    //building the buffer of the elements to be sent
+    vector<int> offsets = _send_proc_offsets;
+
+    for (int i=0; i<(int)_sending_ids.size();i++)
+      { 
+        int iproc = _sending_ids[i].first;
+        for (int icomp=0; icomp<nbcomp; icomp++)
+          sendbuf[offsets[iproc]*nbcomp+icomp]=sendfield[i*nbcomp+icomp];
+        offsets[iproc]++;
+      }
+  
+    //communication phase
+    switch (getAllToAllMethod())
+      {
+      case Native:
+        {
+          const MPI_Comm* comm = group->getComm();
+          comm_interface.allToAllV(sendbuf, sendcounts, senddispls, MPI_DOUBLE,
+                                   recvbuf, recvcounts, recvdispls, MPI_DOUBLE,
+                                   *comm);
+        }
+        break;
+      case PointToPoint:
+        _access_DEC->allToAllv(sendbuf, sendcounts, senddispls, MPI_DOUBLE,
+                              recvbuf, recvcounts, recvdispls, MPI_DOUBLE);
+        break;
+      }
+  
+    //setting the received values in the field
+    DataArrayDouble *fieldArr=field.getArray();
+    double* recvptr=recvbuf;                         
+    for (int i=0; i< _recv_proc_offsets[_union_group->size()]; i++)
+      {
+        for (int icomp=0; icomp<nbcomp; icomp++)
+          {
+            double temp = fieldArr->getIJ(_recv_ids[i],icomp);
+            fieldArr->setIJ(_recv_ids[i],icomp,temp+*recvptr);
+            recvptr++;
+          }
+      }   
+    if (sendbuf!=0 && getAllToAllMethod()== Native)
+      delete[] sendbuf;
+    if (recvbuf !=0)
+      delete[] recvbuf;
+    delete[] sendcounts;
+    delete[] recvcounts;
+    delete[] senddispls; 
+    delete[] recvdispls;
+  
+  }
+
+  /*! Exchanges field data between the two groups of processes, in the reverse direction.
+   *
+   * \param recvfield array into which the received values are written
+   * \param field MEDCoupling field containing the values to be sent
+   *
+   * The values attached to the ids registered by the addElementFromSource method are sent
+   * back from the procs that received them to the procs that registered them.
+   */ 
+  void MxN_Mapping::reverseSendRecv(double* recvfield, MEDCouplingFieldDouble& field) const 
+  {
+    CommInterface comm_interface=_union_group->getCommInterface();
+    const MPIProcessorGroup* group = static_cast<const MPIProcessorGroup*>(_union_group);
+
+    int nbcomp=field.getArray()->getNumberOfComponents();
+    double* sendbuf=0;
+    double* recvbuf=0;
+    if (_recv_ids.size() >0)
+      sendbuf = new double[_recv_ids.size()*nbcomp];
+    if (_sending_ids.size()>0)
+      recvbuf = new double[_sending_ids.size()*nbcomp];
+
+    int* sendcounts = new int[_union_group->size()];
+    int* senddispls=new int[_union_group->size()];
+    int* recvcounts=new int[_union_group->size()];
+    int* recvdispls=new int[_union_group->size()];
+
+    for (int i=0; i< _union_group->size(); i++)
+      {
+        sendcounts[i]=nbcomp*(_recv_proc_offsets[i+1]-_recv_proc_offsets[i]);
+        senddispls[i]=nbcomp*(_recv_proc_offsets[i]);
+        recvcounts[i]=nbcomp*(_send_proc_offsets[i+1]-_send_proc_offsets[i]);
+        recvdispls[i]=nbcomp*(_send_proc_offsets[i]);
+      }
+    //building the buffer of the elements to be sent
+    vector<int> offsets = _recv_proc_offsets;
+    DataArrayDouble *fieldArr=field.getArray();
+    for (int iproc=0; iproc<_union_group->size();iproc++)
+      for (int i=_recv_proc_offsets[iproc]; i<_recv_proc_offsets[iproc+1]; i++)
+        {
+          for (int icomp=0; icomp<nbcomp; icomp++)
+            sendbuf[i*nbcomp+icomp]=fieldArr->getIJ(_recv_ids[i],icomp);
+        }
+
+    //communication phase
+    switch (getAllToAllMethod())
+      {
+      case Native:
+        {
+          const MPI_Comm* comm = group->getComm();
+          comm_interface.allToAllV(sendbuf, sendcounts, senddispls, MPI_DOUBLE,
+                                   recvbuf, recvcounts, recvdispls, MPI_DOUBLE,
+                                   *comm);
+        }
+        break;
+      case PointToPoint:
+        _access_DEC->allToAllv(sendbuf, sendcounts, senddispls, MPI_DOUBLE,
+                               recvbuf, recvcounts, recvdispls, MPI_DOUBLE);
+        break;
+      }
+
+    //setting the received values in the field
+    double* recvptr=recvbuf;                         
+    for (int i=0; i< _send_proc_offsets[_union_group->size()]; i++)
+      {
+        for (int icomp=0; icomp<nbcomp; icomp++)
+          {
+            recvfield[i*nbcomp+icomp]=*recvptr;
+            recvptr++;
+          }
+      }
+    if (sendbuf!=0 && getAllToAllMethod() == Native)
+      delete[] sendbuf;
+    if (recvbuf!=0)
+      delete[] recvbuf;
+    delete[] sendcounts;
+    delete[] recvcounts;
+    delete[] senddispls; 
+    delete[] recvdispls;
+  }
+
+  ostream & operator<< (ostream & f ,const AllToAllMethod & alltoallmethod )
+  {
+    switch (alltoallmethod)
+      {
+      case Native :
+        f << " Native ";
+        break;
+      case PointToPoint :
+        f << " PointToPoint ";
+        break;
+      default :
+        f << " UnknownAllToAllMethod ";
+        break;
+      }
+    return f;
+  }
+}
diff --git a/src/ParaMEDMEM/MxN_Mapping.hxx b/src/ParaMEDMEM/MxN_Mapping.hxx
new file mode 100644 (file)
index 0000000..5aa3ce7
--- /dev/null
@@ -0,0 +1,66 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __MXN_MAPPING_HXX__
+#define __MXN_MAPPING_HXX__
+
+#include "MEDCouplingFieldDouble.hxx"
+#include "MPIAccessDEC.hxx"
+#include "DECOptions.hxx"
+
+#include <vector>
+
+namespace ParaMEDMEM
+{
+
+  class ProcessorGroup;
+
+  class MxN_Mapping : public DECOptions
+  {
+  public:
+    MxN_Mapping();
+    MxN_Mapping(const ProcessorGroup& source_group, const ProcessorGroup& target_group, const DECOptions& dec_options);
+    virtual ~MxN_Mapping();
+    void addElementFromSource(int distant_proc, int distant_elem);
+    void prepareSendRecv();
+    void sendRecv(MEDCouplingFieldDouble& field);
+    void sendRecv(double* sendfield, MEDCouplingFieldDouble& field) const ;
+    void reverseSendRecv(double* recvfield, MEDCouplingFieldDouble& field) const ;
+    //
+    const std::vector<std::pair<int,int> >& getSendingIds() const { return _sending_ids; }
+    const std::vector<int>& getSendProcsOffsets() const { return _send_proc_offsets; }
+    void initialize();
+
+    MPIAccessDEC* getAccessDEC(){ return _access_DEC; }
+  private :
+    ProcessorGroup* _union_group;
+    MPIAccessDEC * _access_DEC;
+    int _nb_comps;
+    std::vector<std::pair<int,int> > _sending_ids;
+    std::vector<int> _recv_ids;
+    std::vector<int> _send_proc_offsets;
+    std::vector<int> _recv_proc_offsets;
+  };
+
+  std::ostream & operator<< (std::ostream &,const AllToAllMethod &);
+
+}
+
+#endif
diff --git a/src/ParaMEDMEM/NonCoincidentDEC.cxx b/src/ParaMEDMEM/NonCoincidentDEC.cxx
new file mode 100644 (file)
index 0000000..4f44a7f
--- /dev/null
@@ -0,0 +1,390 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <mpi.h>
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "DEC.hxx"
+#include "NonCoincidentDEC.hxx"
+
+extern "C" {
+#include <fvm_parall.h>
+#include <fvm_nodal.h>
+#include <fvm_nodal_append.h>
+#include <fvm_locator.h>
+}
+
+namespace ParaMEDMEM
+{
+
+  /*!
+    \anchor NonCoincidentDEC-det
+    \class NonCoincidentDEC
+
+    \c NonCoincidentDEC enables nonconservative remapping of fields 
+    between two parallel codes. 
+    The computation is possible for 3D meshes and 2D meshes.
+    It is not available for 3D surfaces. The computation enables fast parallel localization; it is based on a point-in-element search, followed
+    by a field evaluation at the point location. Thus, it is typically
+    faster than the \ref InterpKernelDEC-det "InterpKernelDEC", which gives a
+    \ref InterpKerRemapGlobal "conservative remapping".
+    This is particularly true for the initialisation phase (synchronize),
+    which is very computationally intensive in \ref InterpKernelDEC-det.
+
+    In the present version, only fields lying on elements are considered. 
+    The value is estimated by locating the barycenter of the target
+    side cell in a source cell and sending the value of this source cell 
+    as the value of the target cell.
+
+    \image html NonCoincident_small.png "Example showing the transfer from a field based on a quadrangular mesh to a triangular mesh. The triangle barycenters are computed and located in the quadrangles. In a P0-P0 interpolation, the value on the quadrangle is then applied to the triangles whose barycenter lies within."
+
+    \image latex NonCoincident_small.eps "Example showing the transfer from a field based on a quadrangular mesh to a triangular mesh. The triangle barycenters are computed and located in the quadrangles. In a P0-P0 interpolation, the value on the quadrangle is then applied to the triangles whose barycenter lies within."
+
+    A typical use of NonCoincidentDEC encompasses two distinct phases :
+    - A setup phase during which the intersection volumes are computed and the communication structures are setup. This corresponds to calling the NonCoincidentDEC::synchronize() method.
+    - A use phase during which the remappings are actually performed. This corresponds to the calls to sendData() and recvData() which actually trigger the data exchange. The data exchanges are synchronous in the current version of the library, so that recvData() and sendData() calls must be synchronized on code A and code B processor groups. 
+
+    The following code excerpt illustrates a typical use of the NonCoincidentDEC class.
+
+    \code
+    ...
+    NonCoincidentDEC dec(groupA, groupB);
+    dec.attachLocalField(field);
+    dec.synchronize();
+    if (groupA.containsMyRank())
+    dec.recvData();
+    else if (groupB.containsMyRank())
+    dec.sendData();
+    ...
+    \endcode
+
+    Computing the field on the receiving side can be expressed in terms 
+    of a matrix-vector product : \f$ \phi_t=W.\phi_s\f$, with \f$ \phi_t
+    \f$ the field on the target side and \f$ \phi_s \f$ the field on 
+    the source side.
+    In the P0-P0 case, this matrix is a plain rectangular matrix with one 
+    non-zero element per row (with value 1). For instance, in the above figure, the matrix is :
+    \f[
+
+    \begin{tabular}{|cccc|}
+    1 & 0 & 0 & 0\\
+    0 & 0 & 1 & 0\\
+    1 & 0 & 0 & 0\\
+    0 & 0 & 1 & 0\\
+    \end{tabular}
+    \f]
+  */
+
+  fvm_nodal_t*  medmemMeshToFVMMesh(const MEDMEM::MESH* mesh)
+  {
+    // create an FVM structure from the paramesh structure
+    std::string meshName(mesh->getName()); // keep a local copy so the temporary returned by mesh->getName() stays alive while fvm_nodal_create reads the const char*
+    fvm_nodal_t * fvm_nodal = fvm_nodal_create(meshName.c_str(),mesh->getMeshDimension());
+      
+    //loop on cell types
+    int nbtypes = mesh->getNumberOfTypes(MED_EN::MED_CELL);
+    const MED_EN::medGeometryElement* types = mesh->getTypes(MED_EN::MED_CELL);
+    for (int itype=0; itype<nbtypes; itype++)
+      {
+        fvm_element_t fvm_type;
+        switch (types[itype]) 
+          {
+          case MED_EN::MED_TRIA3 :
+            fvm_type=FVM_FACE_TRIA;
+            break;
+          case MED_EN::MED_QUAD4 :
+            fvm_type=FVM_FACE_QUAD;
+            break;
+          case MED_EN::MED_TETRA4 :
+            fvm_type=FVM_CELL_TETRA;
+            break;
+          case MED_EN::MED_HEXA8 :
+            fvm_type=FVM_CELL_HEXA;
+            break;
+          default:
+            throw MEDEXCEPTION(" MED type  conversion to fvm is not handled yet.");
+            break;
+
+          }
+
+        fvm_lnum_t nbelems = mesh->getNumberOfElements(MED_EN::MED_CELL, types[itype]);
+        fvm_lnum_t* conn = new fvm_lnum_t[nbelems*(types[itype]%100)];
+        const int* mesh_conn =mesh->getConnectivity(MED_EN::MED_FULL_INTERLACE,MED_EN::MED_NODAL, MED_EN::MED_CELL, types[itype]);
+        for (int i=0; i<nbelems*(types[itype]%100); i++)
+          conn[i]=mesh_conn[i]; 
+        //swapping trias
+        if (types[itype]==MED_EN::MED_TRIA3)
+          {
+            for (int i=0; i<nbelems;i++)
+              {
+                int tmp=conn[3*i];
+                conn[3*i]=mesh_conn[3*i+1];
+                conn[3*i+1]=tmp;
+              }
+          }
+        //swapping tetras
+        if (types[itype]==MED_EN::MED_TETRA4)
+          {
+            for (int i=0; i<nbelems;i++)
+              {
+                int tmp=conn[4*i];
+                conn[4*i]=mesh_conn[4*i+1];
+                conn[4*i+1]=tmp;
+              }
+          }
+        fvm_nodal_append_by_transfer(fvm_nodal, nbelems, fvm_type,0,0,0,conn,0);
+         
+        int nbnodes= mesh->getNumberOfNodes();
+        int spacedim=mesh->getSpaceDimension();
+        fvm_coord_t* coords = new fvm_coord_t[nbnodes*spacedim];
+        const double* mesh_coords=mesh->getCoordinates(MED_EN::MED_FULL_INTERLACE);
+        for (int i=0; i<nbnodes*spacedim; i++)
+          coords[i]=mesh_coords[i];                  
+        fvm_nodal_transfer_vertices(fvm_nodal,coords);
+      }
+    return fvm_nodal;
+  }
+  
+  fvm_nodal_t*  medmemSupportToFVMMesh(const MEDMEM::SUPPORT* support)
+  {
+
+    // create an FVM structure from the paramesh structure
+    std::string supportName(support->getName()); // keep a local copy so the temporary returned by support->getName() stays alive while fvm_nodal_create reads the const char*
+    fvm_nodal_t * fvm_nodal = fvm_nodal_create(supportName.c_str(),1);
+      
+    const MEDMEM::MESH* mesh= support->getMesh();
+      
+    //loop on cell types
+    MED_EN::medEntityMesh entity = support->getEntity();
+      
+    int nbtypes = support->getNumberOfTypes();
+    const MED_EN::medGeometryElement* types = support->getTypes();
+    int ioffset=0;
+    const int* type_offset = support->getNumberIndex();
+      
+    //browsing through all types
+    for (int itype=0; itype<nbtypes; itype++)
+      {
+        fvm_element_t fvm_type;
+        switch (types[itype]) 
+          {
+          case MED_EN::MED_TRIA3 :
+            fvm_type=FVM_FACE_TRIA;
+            break;
+          case MED_EN::MED_QUAD4 :
+            fvm_type=FVM_FACE_QUAD;
+            break;
+          case MED_EN::MED_TETRA4 :
+            fvm_type=FVM_CELL_TETRA;
+            break;
+          case MED_EN::MED_HEXA8 :
+            fvm_type=FVM_CELL_HEXA;
+            break;
+          default:
+            throw MEDEXCEPTION(" MED type  conversion to fvm is not handled yet.");
+            break;
+
+          }
+        fvm_lnum_t nbelems = support->getNumberOfElements(types[itype]);
+         
+        //for a partial support, defining the element numbers that are taken into
+        //account in the support
+        fvm_lnum_t* elem_numbers=0;
+        if (!support->isOnAllElements())
+          {
+            elem_numbers = const_cast<fvm_lnum_t*> (support->getNumber(types[itype]));
+           
+            //creating work arrays to store the list of elements for partial supports
+            if (itype>0)
+              {
+                fvm_lnum_t* temp = new int[nbelems];
+                for (int i=0; i< nbelems; i++)
+                  temp[i] = elem_numbers [i]-ioffset;
+                ioffset+=type_offset[itype];
+                elem_numbers = temp;
+              }
+          }
+        //retrieving original mesh connectivity
+        fvm_lnum_t* conn = const_cast<fvm_lnum_t*> (mesh->getConnectivity(MED_EN::MED_FULL_INTERLACE,MED_EN::MED_NODAL,entity, types[itype]));
+       
+        // adding the elements to the FVM structure 
+        fvm_nodal_append_by_transfer(fvm_nodal, nbelems, fvm_type,0,0,0,conn,elem_numbers);
+         
+        //cleaning work arrays (for partial supports)
+        if (!support->isOnAllElements() && itype>0)
+          delete[] elem_numbers;
+      
+      }
+    return fvm_nodal;
+  }
+  
+  NonCoincidentDEC::NonCoincidentDEC()
+  {  
+  }
+
+  /*! Constructor of a non-coincident \ref para-dec "DEC" with
+   * a source group, on which a field lying on a mesh is defined, and a
+   * target group, on which a mesh is defined.
+   * 
+   * \param source_group ProcessorGroup on the source side
+   * \param target_group ProcessorGroup on the target side 
+   */
+  
+  NonCoincidentDEC::NonCoincidentDEC(ProcessorGroup& source_group,
+                                     ProcessorGroup& target_group)
+    :DEC(source_group, target_group)
+  {}
+                                   
+  NonCoincidentDEC::~NonCoincidentDEC()
+  {
+  }
+
+  /*! Synchronization process. Calling this method 
+   * synchronizes the topologies so that the target side
+   * gets the information which enables it to fetch the field values
+   * from the source side.
+   * A typical call is : 
+   \verbatim
+   NonCoincidentDEC dec(source_group,target_group);
+   dec.attachLocalField(field);
+   dec.synchronize();
+   \endverbatim
+  */
+  void NonCoincidentDEC::synchronize()
+  {
+  
+    //initializing FVM parallel environment
+    const MPI_Comm* comm=dynamic_cast<const MPIProcessorGroup*> (_union_group)->getComm();
+    fvm_parall_set_mpi_comm(*const_cast<MPI_Comm*> (comm));
+  
+  
+    //setting up the communication DEC on both sides
+  
+    if (_source_group->containsMyRank())
+      {
+        MEDMEM::MESH* mesh = _local_field->getField()->getSupport()->getMesh();
+        fvm_nodal_t* source_nodal = ParaMEDMEM::medmemMeshToFVMMesh(mesh);
+      
+        int target_size = _target_group->size()  ;
+        int start_rank=  _source_group->size();
+        const MPI_Comm* comm = (dynamic_cast<const MPIProcessorGroup*> (_union_group))->getComm();
+      
+        _locator =  fvm_locator_create(1e-6,
+                                       *comm,
+                                       target_size,
+                                       start_rank);
+      
+        fvm_locator_set_nodal(_locator,
+                              source_nodal,
+                              mesh->getSpaceDimension(),
+                              0,
+                              NULL,
+                              0);
+
+      
+        _nb_distant_points = fvm_locator_get_n_dist_points(_locator);
+        _distant_coords = fvm_locator_get_dist_coords(_locator);
+        _distant_locations = fvm_locator_get_dist_locations(_locator);
+           
+      }
+    if (_target_group->containsMyRank())
+      {
+        MEDMEM::MESH* mesh = _local_field->getField()->getSupport()->getMesh();
+      
+        fvm_nodal_t* target_nodal = ParaMEDMEM::medmemMeshToFVMMesh(mesh);
+        int source_size = _source_group->size();
+        int start_rank=  0 ;
+        const MPI_Comm* comm = (dynamic_cast<const MPIProcessorGroup*> (_union_group))->getComm();
+      
+        _locator = fvm_locator_create(1e-6,
+                                      *comm,
+                                      source_size,
+                                      start_rank);
+        int nbcells = mesh->getNumberOfElements(MED_EN::MED_CELL,MED_EN::MED_ALL_ELEMENTS);
+        const MEDMEM::SUPPORT* support=_local_field->getField()->getSupport();
+        MEDMEM::FIELD<double>* barycenter_coords = mesh->getBarycenter(support);
+        const double* coords = barycenter_coords->getValue();
+        fvm_locator_set_nodal(_locator,
+                              target_nodal,
+                              mesh->getSpaceDimension(),
+                              nbcells,
+                              NULL,
+                              coords);  
+        delete barycenter_coords;
+      }
+  }
+
+
+  /*! This method is called on the target group in order to 
+   * trigger the retrieval of field data. It must 
+   * be called synchronously with a sendData() call on 
+   * the source group.
+   */
+  void NonCoincidentDEC::recvData()
+  {
+    int nbelems = _local_field->getField()->getSupport()->getMesh()->getNumberOfElements(MED_EN::MED_CELL, MED_EN::MED_ALL_ELEMENTS);
+    int nbcomp =  _local_field->getField()->getNumberOfComponents();
+    double* values = new double [nbelems*nbcomp];
+    fvm_locator_exchange_point_var(_locator,
+                                   0,
+                                   values,
+                                   0,
+                                   sizeof(double),
+                                   nbcomp,
+                                   0);
+    _local_field->getField()->setValue(values);
+    if (_forced_renormalization_flag)
+      renormalizeTargetField();
+    delete[]values;
+  }
+
+  /*! This method is called on the source group in order to 
+   * send field data. It must be called synchronously with 
+   * a recvData() call on 
+   * the target group.
+   */
+  void NonCoincidentDEC::sendData()
+  {
+    const double* values=_local_field->getField()->getValue();
+    int nbcomp = _local_field->getField()->getNumberOfComponents();
+    double* distant_values = new double [_nb_distant_points*nbcomp];
+
+    //cheap interpolation :  the value of the cell is transfered to the point
+    for (int i=0; i<_nb_distant_points; i++)
+      for (int j=0; j <nbcomp; j++)
+        distant_values[i*nbcomp+j]=values[(_distant_locations[i]-1)*nbcomp+j];
+  
+    fvm_locator_exchange_point_var(_locator,
+                                   distant_values,
+                                   0,
+                                   0,
+                                   sizeof(double),
+                                   nbcomp,
+                                   0);
+
+    delete [] distant_values;
+    if (_forced_renormalization_flag)
+      renormalizeTargetField();
+
+  }
+}
diff --git a/src/ParaMEDMEM/NonCoincidentDEC.hxx b/src/ParaMEDMEM/NonCoincidentDEC.hxx
new file mode 100644 (file)
index 0000000..6691336
--- /dev/null
@@ -0,0 +1,70 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __NONCOINCIDENTDEC_HXX__
+#define __NONCOINCIDENTDEC_HXX__
+
+#include "DEC.hxx"
+
+struct _fvm_locator_t;
+
+typedef enum {NN} InterpolationMethod;
+
+namespace ParaMEDMEM
+{   
+  class NonCoincidentDEC : public DEC
+  {
+    public:  
+    NonCoincidentDEC();
+    NonCoincidentDEC(ProcessorGroup& , ProcessorGroup&);
+
+    virtual ~NonCoincidentDEC();
+
+    void synchronize();
+
+    void recvData();
+
+    void sendData();
+    
+    void prepareSourceDE() { }
+    void prepareTargetDE() { }
+    
+    void setInterpolationMethod(InterpolationMethod method) { _method=method; }
+    
+    private :
+    // Structure for computing the localization
+    // of remote nodes on local mesh
+    _fvm_locator_t* _locator;
+    
+    //Number of distant points to be located locally 
+    int _nb_distant_points;
+    
+    //coordinates of distant points 
+    const double* _distant_coords;
+    
+    //local element number containing the distant points  
+    const int* _distant_locations; 
+   
+    //interpolation method
+    InterpolationMethod _method;
+  };
+}
+
+
+#endif
diff --git a/src/ParaMEDMEM/OverlapDEC.cxx b/src/ParaMEDMEM/OverlapDEC.cxx
new file mode 100644 (file)
index 0000000..0c02028
--- /dev/null
@@ -0,0 +1,318 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#include "OverlapDEC.hxx"
+#include "CommInterface.hxx"
+#include "ParaFIELD.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "OverlapElementLocator.hxx"
+#include "OverlapInterpolationMatrix.hxx"
+
+namespace ParaMEDMEM
+{
+/*!
+    \anchor OverlapDEC-det
+    \class OverlapDEC
+
+    The \c OverlapDEC enables the \ref InterpKerRemapGlobal "conservative remapping" of fields between
+    two parallel codes. This remapping is based on the computation of intersection volumes on
+    a \b same \b processor \b group. On this processor group are defined two field-templates called A
+    and B. The computation is possible for 3D meshes, 2D meshes, 3D-surface meshes, 1D meshes and
+    2D-curve meshes. Dimensions must be similar for the distribution templates A and B.
+    The main difference with \ref InterpKernelDEC-det is that this \ref para-dec "DEC" manages 2 field templates
+    on each processor of the processor group (A and B) called source and target.
+    Furthermore all processors in processor group cooperates in global interpolation matrix
+    computation. In this respect \ref InterpKernelDEC is a specialization of \c OverlapDEC.
+
+    \section ParaMEDMEMOverlapDECAlgorithmDescription Algorithm Description
+
+    Let's consider the following use case, which is run in ParaMEDMEMTest_OverlapDEC.cxx to describe
+    the different steps of the computation. The processor group contains 3 processors.
+    \anchor ParaMEDMEMOverlapDECImgTest1
+    \image html OverlapDEC1.png "Example showing the use case in order to explain the different steps."
+
+    \subsection ParaMEDMEMOverlapDECAlgoStep1 Step 1 : Bounding box exchange and computation of global interactions between procs
+
+    In order to reduce as much as possible the amount of communication between distant processors,
+    every processor computes a bounding box for A and B. Then an AllToAll communication is performed
+    so that every processor can compute the \b global interactions between processors.
+    This computation leads every processor to compute the same global TODO list, expressed as a list
+    of pairs. A pair (x,y) means that fieldtemplate A of proc \b x can interact with fieldtemplate B of
+    proc \b y because the two bounding boxes intersect.
+    In the \ref ParaMEDMEMOverlapDECImgTest1 "example above" this computation leads to the following
+    \b global TODO list :
+
+    \b (0,0),(0,1),(1,0),(1,2),(2,0),(2,1),(2,2)
+
+    Here the pair (0,2) does not appear because the bounding box of fieldtemplate A of proc#0 does
+    not intersect that of fieldtemplate B on proc#2.
+
+    Stage performed by ParaMEDMEM::OverlapElementLocator::computeBoundingBoxes.
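+
+    A sketch of the interaction test run on every proc once the bounding boxes have been gathered
+    (an illustrative, hedged sketch only; \c boxesIntersect, \c gathered_boxes and \c group_size are
+    placeholder names, the actual test lives in OverlapElementLocator::intersectsBoundingBox) :
+
+    \code
+    std::vector< std::pair<int,int> > globalTodo;
+    for (int i=0; i<group_size; i++)                  // i : proc holding fieldtemplate A
+      for (int j=0; j<group_size; j++)                // j : proc holding fieldtemplate B
+        if (boxesIntersect(gathered_boxes, i, j))     // axis-aligned bounding-box test
+          globalTodo.push_back(std::make_pair(i,j));
+    \endcode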
+
+    \subsection ParaMEDMEMOverlapDECAlgoStep2 Step 2 : Computation of local TODO list
+
+    Starting from the global interactions previously computed in \ref ParaMEDMEMOverlapDECAlgoStep1
+    "Step 1", each proc computes the TODO list per proc.
+    The following rule is chosen : a pair (x,y) can be treated by either proc \#x or proc \#y,
+    in order to reduce the amount of data transfer among
+    processors. The algorithm chosen for load balancing is the following : each processor starts
+    with an empty \b local TODO list. Then, for each pair (k,m) in the \b global TODO list,
+    if the temporary local TODO list of proc\#k is shorter than that of proc\#m, the pair (k,m) is added
+    to the temporary local TODO list of proc\#k.
+    If the temporary local TODO list of proc\#m is shorter than that of proc\#k, the pair (k,m) is added
+    to the temporary local TODO list of proc\#m.
+    If proc\#k and proc\#m have temporary local TODO lists of the same length, the pair (k,m) is added to
+    the temporary local TODO list of proc\#k (a short sketch of this balancing loop is given at the
+    end of this subsection).
+
+    In the \ref ParaMEDMEMOverlapDECImgTest1 "example above" this computation leads to the following
+    local TODO list :
+
+    - proc\#0 : (0,0)
+    - proc\#1 : (0,1),(1,0)
+    - proc\#2 : (1,2),(2,0),(2,1),(2,2)
+    
+    The algorithm described here is not perfect for this use case; we hope to enhance it soon.
+
+    At this stage each proc knows precisely its \b local TODO list (with regard to interpolation).
+    The \b local TODO lists of the other procs
+    are also kept for future computations.
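+
+    A sketch of this balancing loop (a hedged illustration mirroring what
+    ParaMEDMEM::OverlapElementLocator::computeBoundingBoxes does; \c proc_pairs, \c todo and
+    \c group_size are placeholder names) :
+
+    \code
+    // proc_pairs[k] : procs m whose fieldtemplate B interacts with fieldtemplate A of proc k
+    std::vector< std::vector< std::pair<int,int> > > todo(group_size);
+    for (int k=0; k<group_size; k++)
+      for (std::size_t j=0; j<proc_pairs[k].size(); j++)
+        {
+          int m = proc_pairs[k][j];
+          if (todo[k].size() <= todo[m].size())   // ties go to proc k
+            todo[k].push_back(std::make_pair(k,m));
+          else
+            todo[m].push_back(std::make_pair(k,m));
+        }
+    \endcode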
+
+    \subsection ParaMEDMEMOverlapDECAlgoStep3 Step 3 : Matrix exchange between procs
+
+    Knowing the \b local TODO list, the aim is now to exchange field-templates between procs.
+    From the per-proc TODO lists computed in \ref ParaMEDMEMOverlapDECAlgoStep2 "Step 2",
+    each proc computes the exchange TODO list :
+
+    In the \ref ParaMEDMEMOverlapDECImgTest1 "example above" the exchange TODO list gives the
+    following results :
+
+    Sending TODO list per proc :
+
+    - proc \#0 : Send fieldtemplate A to Proc\#1, Send fieldtemplate B to Proc\#1, Send fieldtemplate
+    B to Proc\#2
+    - Proc \#1 : Send fieldtemplate A to Proc\#2, Send fieldtemplate B to Proc\#2
+    - Proc \#2 : No send.
+
+    Receiving TODO list per proc :
+
+    - proc \#0 : No receiving
+    - proc \#1 : receiving fieldtemplate A from Proc\#0,  receiving fieldtemplate B from Proc\#0
+    - proc \#2 : receiving fieldtemplate B from Proc\#0, receiving fieldtemplate A from Proc\#1,
+    receiving fieldtemplate B from Proc\#1
+
+    To avoid as much as possible large volumes of transfers between procs, only the relevant parts of
+    the meshes are sent. In order for proc\#k to send fieldtemplate A to fieldtemplate B
+    of proc \#m, proc\#k computes the part of mesh A contained in the bounding box of B of proc\#m. This
+    implies that the cellIds or nodeIds of the
+    corresponding part are sent to proc \#m too.
+
+    Let's consider the couple (k,m) in the TODO list. This couple is treated by either k or m as
+    seen in \ref ParaMEDMEMOverlapDECAlgoStep2 "here in Step2".
+
+    As will be dealt with in Step 6, for the final matrix-vector computations, the resulting matrix of the
+    couple (k,m), wherever it is computed (proc \#k or proc \#m),
+    will be stored in \b proc\#m.
+
+    - If proc \#k is in charge (performs the matrix computation) for this couple (k,m), the target ids
+    (cells or nodes) of the mesh in proc \#m are renumbered, because proc \#m has selected a sub mesh
+    of the target mesh to avoid transferring large amounts of data. In this case, as proc \#m is ultimately
+    in charge of the matrix, proc \#k must carefully keep the
+    source ids needed to be sent to proc\#m. No problem will appear for matrix assembling in proc m
+    for source ids because no restriction was done.
+    Concerning the source ids to be sent for the matrix-vector computation, proc k will know precisely
+    which source field values to send to proc \#m.
+    This is embodied by OverlapMapping::keepTracksOfTargetIds in proc m.
+
+    - If proc \#m is in charge (performs the matrix computation) for this couple (k,m), the source ids (cells
+    or nodes) of the mesh in proc \#k are renumbered, because proc \#k has selected a sub mesh of the
+    source mesh to avoid transferring large amounts of data. In this case, as proc \#k is ultimately
+    in charge of the matrix, proc \#m receives the source ids
+    from the remote proc \#k, and thus the matrix is directly correct; there is no need for renumbering as
+    in \ref ParaMEDMEMOverlapDECAlgoStep5 "Step 5". However proc \#k must
+    keep track of the ids sent to proc \#m for the matrix-vector computation.
+    This is incarnated by OverlapMapping::keepTracksOfSourceIds in proc k.
+
+    This step is performed in ParaMEDMEM::OverlapElementLocator::exchangeMeshes method.
+
+    \subsection ParaMEDMEMOverlapDECAlgoStep4 Step 4 : Computation of the interpolation matrix
+
+    After mesh exchange in \ref ParaMEDMEMOverlapDECAlgoStep3 "Step3" each processor has all the
+    required information to treat its \b local TODO list computed in
+    \ref ParaMEDMEMOverlapDECAlgoStep2 "Step2". This step is potentially CPU costly, which is why
+    the \b local TODO list per proc is expected to
+    be as well balanced as possible.
+
+    The interpolation is performed as \ref ParaMEDMEM::MEDCouplingRemapper "Remapper" does.
+
+    This operation is performed by OverlapInterpolationMatrix::addContribution method.
+
+    \subsection ParaMEDMEMOverlapDECAlgoStep5 Step 5 : Global matrix construction.
+    
+    After having processed the TODO list at the end of \ref ParaMEDMEMOverlapDECAlgoStep4 "Step4",
+    the final matrix needs to be assembled.
+    
+    The final aim is to have a distributed matrix \f$ M_k \f$ on each proc\#k. In order to reduce
+    data exchange during the matrix product process,
+    \f$ M_k \f$ is built using sizeof(Proc group) \c std::vector< \c std::map<int,double> \c >.
+
+    For a proc\#k, it is necessary to fetch info of all matrices built in
+    \ref ParaMEDMEMOverlapDECAlgoStep4 "Step4" where the first element in pair (i,j)
+    is equal to k.
+
+    After this step, the matrix repartition is the following after a call to
+    ParaMEDMEM::OverlapMapping::prepare :
+
+    - proc\#0 : (0,0),(1,0),(2,0)
+    - proc\#1 : (0,1),(2,1)
+    - proc\#2 : (1,2),(2,2)
+
+    Tuple (2,1) computed on proc 2 is stored in proc 1 after execution of the function
+    "prepare". This is an example of item 0 in \ref ParaMEDMEMOverlapDECAlgoStep2 "Step2".
+    Tuple (0,1) computed on proc 1 is stored in proc 1 too. This is an example of item 1 in \ref ParaMEDMEMOverlapDECAlgoStep2 "Step2".
+
+    In the end ParaMEDMEM::OverlapMapping::_proc_ids_to_send_vector_st will contain :
+
+    - Proc\#0 : 0,1
+    - Proc\#1 : 0,2
+    - Proc\#2 : 0,1,2
+
+    In the end ParaMEDMEM::OverlapMapping::_proc_ids_to_recv_vector_st will contain :
+
+    - Proc\#0 : 0,1,2
+    - Proc\#1 : 0,2
+    - Proc\#2 : 1,2
+
+    The method in charge of performing this is : ParaMEDMEM::OverlapMapping::prepare.
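+
+    A minimal usage sketch (hedged; the ParaFIELD objects \c srcField and \c trgField are
+    placeholders, and their construction is omitted) :
+
+    \code
+    OverlapDEC dec(procs);                     // procs : std::set<int> of MPI_COMM_WORLD ranks
+    if (dec.isInGroup())
+      {
+        dec.attachSourceLocalField(srcField);  // field-template A
+        dec.attachTargetLocalField(trgField);  // field-template B
+        dec.synchronize();                     // steps 1 to 5 described above
+        dec.sendData();                        // matrix-vector product filling the target field
+      }
+    \endcode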
+*/
+  OverlapDEC::OverlapDEC(const std::set<int>& procIds, const MPI_Comm& world_comm):
+      _own_group(true),_interpolation_matrix(0),
+      _source_field(0),_own_source_field(false),
+      _target_field(0),_own_target_field(false),
+      _comm(MPI_COMM_NULL)
+  {
+    ParaMEDMEM::CommInterface comm;
+    int *ranks_world=new int[procIds.size()]; // ranks of sources and targets in world_comm
+    std::copy(procIds.begin(),procIds.end(),ranks_world);
+    MPI_Group group,world_group;
+    comm.commGroup(world_comm,&world_group);
+    comm.groupIncl(world_group,procIds.size(),ranks_world,&group);
+    delete [] ranks_world;
+    comm.commCreate(world_comm,group,&_comm);
+    comm.groupFree(&group);
+    comm.groupFree(&world_group);
+    if(_comm==MPI_COMM_NULL)
+      {
+        _group=0;
+        return ;
+      }
+    std::set<int> idsUnion;
+    for(std::size_t i=0;i<procIds.size();i++)
+      idsUnion.insert(i);
+    _group=new MPIProcessorGroup(comm,idsUnion,_comm);
+  }
+
+  OverlapDEC::~OverlapDEC()
+  {
+    if(_own_group)
+      delete _group;
+    if(_own_source_field)
+      delete _source_field;
+    if(_own_target_field)
+      delete _target_field;
+    delete _interpolation_matrix;
+    if (_comm != MPI_COMM_NULL)
+      {
+        ParaMEDMEM::CommInterface comm;
+        comm.commFree(&_comm);
+      }
+  }
+
+  void OverlapDEC::sendRecvData(bool way)
+  {
+    if(way)
+      sendData();
+    else
+      recvData();
+  }
+
+  void OverlapDEC::sendData()
+  {
+    _interpolation_matrix->multiply();
+  }
+
+  void OverlapDEC::recvData()
+  {
+    throw INTERP_KERNEL::Exception("Not implemented yet !!!!");
+    //_interpolation_matrix->transposeMultiply();
+  }
+  
+  void OverlapDEC::synchronize()
+  {
+    if(!isInGroup())
+      return ;
+    delete _interpolation_matrix;
+    _interpolation_matrix=new OverlapInterpolationMatrix(_source_field,_target_field,*_group,*this,*this);
+    OverlapElementLocator locator(_source_field,_target_field,*_group);
+    locator.copyOptions(*this);
+    locator.exchangeMeshes(*_interpolation_matrix);
+    std::vector< std::pair<int,int> > jobs=locator.getToDoList();
+    std::string srcMeth=locator.getSourceMethod();
+    std::string trgMeth=locator.getTargetMethod();
+    for(std::vector< std::pair<int,int> >::const_iterator it=jobs.begin();it!=jobs.end();it++)
+      {
+        const MEDCouplingPointSet *src=locator.getSourceMesh((*it).first);
+        const DataArrayInt *srcIds=locator.getSourceIds((*it).first);
+        const MEDCouplingPointSet *trg=locator.getTargetMesh((*it).second);
+        const DataArrayInt *trgIds=locator.getTargetIds((*it).second);
+        _interpolation_matrix->addContribution(src,srcIds,srcMeth,(*it).first,trg,trgIds,trgMeth,(*it).second);
+      }
+    _interpolation_matrix->prepare(locator.getProcsInInteraction());
+    _interpolation_matrix->computeDeno();
+  }
+
+  void OverlapDEC::attachSourceLocalField(ParaFIELD *field, bool ownPt)
+  {
+    if(!isInGroup())
+      return ;
+    if(_own_source_field)
+      delete _source_field;
+    _source_field=field;
+    _own_source_field=ownPt;
+  }
+
+  void OverlapDEC::attachTargetLocalField(ParaFIELD *field, bool ownPt)
+  {
+    if(!isInGroup())
+      return ;
+    if(_own_target_field)
+      delete _target_field;
+    _target_field=field;
+    _own_target_field=ownPt;
+  }
+
+  bool OverlapDEC::isInGroup() const
+  {
+    if(!_group)
+      return false;
+    return _group->containsMyRank();
+  }
+}
diff --git a/src/ParaMEDMEM/OverlapDEC.hxx b/src/ParaMEDMEM/OverlapDEC.hxx
new file mode 100644 (file)
index 0000000..48b853c
--- /dev/null
@@ -0,0 +1,61 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#ifndef __OVERLAPDEC_HXX__
+#define __OVERLAPDEC_HXX__
+
+#include "DEC.hxx"
+#include "InterpolationOptions.hxx"
+
+#include <mpi.h>
+
+namespace ParaMEDMEM
+{
+  class OverlapInterpolationMatrix;
+  class ProcessorGroup;
+  class ParaFIELD;
+
+  class OverlapDEC : public DEC, public INTERP_KERNEL::InterpolationOptions
+  {
+  public:
+    OverlapDEC(const std::set<int>& procIds,const MPI_Comm& world_comm=MPI_COMM_WORLD);
+    virtual ~OverlapDEC();
+    void sendRecvData(bool way=true);
+    void sendData();
+    void recvData();
+    void synchronize();
+    void attachSourceLocalField(ParaFIELD *field, bool ownPt=false);
+    void attachTargetLocalField(ParaFIELD *field, bool ownPt=false);
+    ProcessorGroup *getGrp() { return _group; }
+    bool isInGroup() const;
+  private:
+    bool _own_group;
+    OverlapInterpolationMatrix* _interpolation_matrix;
+    ProcessorGroup *_group;
+  private:
+    ParaFIELD *_source_field;
+    bool _own_source_field;
+    ParaFIELD *_target_field;
+    bool _own_target_field;
+    MPI_Comm _comm;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/OverlapElementLocator.cxx b/src/ParaMEDMEM/OverlapElementLocator.cxx
new file mode 100644 (file)
index 0000000..51560e1
--- /dev/null
@@ -0,0 +1,369 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#include "OverlapElementLocator.hxx"
+
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "ParaMESH.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "OverlapInterpolationMatrix.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "MEDCouplingFieldDiscretization.hxx"
+#include "DirectedBoundingBox.hxx"
+#include "InterpKernelAutoPtr.hxx"
+
+#include <limits>
+
+using namespace std;
+
+namespace ParaMEDMEM 
+{ 
+  OverlapElementLocator::OverlapElementLocator(const ParaFIELD *sourceField, const ParaFIELD *targetField, const ProcessorGroup& group)
+    : _local_source_field(sourceField),
+      _local_target_field(targetField),
+      _local_source_mesh(0),
+      _local_target_mesh(0),
+      _domain_bounding_boxes(0),
+      _group(group)
+  { 
+    if(_local_source_field)
+      _local_source_mesh=_local_source_field->getSupport()->getCellMesh();
+    if(_local_target_field)
+      _local_target_mesh=_local_target_field->getSupport()->getCellMesh();
+    _comm=getCommunicator();
+    computeBoundingBoxes();
+  }
+
+  OverlapElementLocator::~OverlapElementLocator()
+  {
+    delete [] _domain_bounding_boxes;
+  }
+
+  const MPI_Comm *OverlapElementLocator::getCommunicator() const
+  {
+    const MPIProcessorGroup* group=static_cast<const MPIProcessorGroup*>(&_group);
+    return group->getComm();
+  }
+
+  void OverlapElementLocator::computeBoundingBoxes()
+  {
+    CommInterface comm_interface=_group.getCommInterface();
+    const MPIProcessorGroup* group=static_cast<const MPIProcessorGroup*> (&_group);
+    _local_space_dim=0;
+    if(_local_source_mesh)
+      _local_space_dim=_local_source_mesh->getSpaceDimension();
+    else
+      _local_space_dim=_local_target_mesh->getSpaceDimension();
+    //
+    const MPI_Comm* comm = group->getComm();
+    int bbSize=2*2*_local_space_dim;//2 (for source/target) 2 (min/max)
+    _domain_bounding_boxes=new double[bbSize*_group.size()];
+    INTERP_KERNEL::AutoPtr<double> minmax=new double[bbSize];
+    //Format minmax : Xmin_src,Xmax_src,Ymin_src,Ymax_src,Zmin_src,Zmax_src,Xmin_trg,Xmax_trg,Ymin_trg,Ymax_trg,Zmin_trg,Zmax_trg
+    if(_local_source_mesh)
+      _local_source_mesh->getBoundingBox(minmax);
+    else
+      {
+        for(int i=0;i<_local_space_dim;i++)
+          {
+            minmax[i*2]=std::numeric_limits<double>::max();
+            minmax[i*2+1]=-std::numeric_limits<double>::max();
+          }
+      }
+    if(_local_target_mesh)
+      _local_target_mesh->getBoundingBox(minmax+2*_local_space_dim);
+    else
+      {
+        for(int i=0;i<_local_space_dim;i++)
+          {
+            minmax[i*2+2*_local_space_dim]=std::numeric_limits<double>::max();
+            minmax[i*2+1+2*_local_space_dim]=-std::numeric_limits<double>::max();
+          }
+      }
+    comm_interface.allGather(minmax, bbSize, MPI_DOUBLE,
+                             _domain_bounding_boxes,bbSize, MPI_DOUBLE, 
+                             *comm);
+  
+    // Computation of all pairs needing an interpolation. Pairs are duplicated at this point!
+    
+    _proc_pairs.clear();//first is source second is target
+    _proc_pairs.resize(_group.size());
+    for(int i=0;i<_group.size();i++)
+      for(int j=0;j<_group.size();j++)
+        {
+          if(intersectsBoundingBox(i,j))
+            _proc_pairs[i].push_back(j);
+        }
+
+    // OK, now let's assign the jobs to each proc of the group as evenly as possible
+    std::vector< std::vector< std::pair<int,int> > > pairsToBeDonePerProc(_group.size());
+    int i=0;
+    for(std::vector< std::vector< int > >::const_iterator it1=_proc_pairs.begin();it1!=_proc_pairs.end();it1++,i++)
+      for(std::vector< int >::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+        {
+          if(pairsToBeDonePerProc[i].size()<=pairsToBeDonePerProc[*it2].size())//it includes the fact that i==*it2
+            pairsToBeDonePerProc[i].push_back(std::pair<int,int>(i,*it2));
+          else
+            pairsToBeDonePerProc[*it2].push_back(std::pair<int,int>(i,*it2));
+        }
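+    // Illustrative note (added comment): each pair (i,j) is assigned to proc i when its todo list is
+    // currently no longer than proc j's, and to proc j otherwise, which roughly balances the
+    // interpolation work across the group.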
+    //Keeping the todo list of the current proc. _to_do_list contains the set of pairs in which _group.myRank() appears at least once.
+    //This proc will be in charge of performing the interpolation of every element of '_to_do_list'.
+    //If _group.myRank()==myPair.first, the current proc should fetch the target mesh of myPair.second (if different from _group.myRank()).
+    //If _group.myRank()==myPair.second, the current proc should fetch the source mesh of myPair.first.
+    
+    int myProcId=_group.myRank();
+    _to_do_list=pairsToBeDonePerProc[myProcId];
+
+    //Now filling '_procs_to_send'. The same id can appear twice. The second member of the pair tells what to send: true=source, false=target
+    _procs_to_send.clear();
+    for(int i=_group.size()-1;i>=0;i--)
+      if(i!=myProcId)
+        {
+          const std::vector< std::pair<int,int> >& anRemoteProcToDoList=pairsToBeDonePerProc[i];
+          for(std::vector< std::pair<int,int> >::const_iterator it=anRemoteProcToDoList.begin();it!=anRemoteProcToDoList.end();it++)
+            {
+              if((*it).first==myProcId)
+                _procs_to_send.push_back(std::pair<int,bool>(i,true));
+              if((*it).second==myProcId)
+                _procs_to_send.push_back(std::pair<int,bool>(i,false));
+            }
+        }
+  }
+
+  /*!
+   * The aim of this method is to perform the communication needed to get the data corresponding to the '_to_do_list' attribute.
+   * The principle is the following : if procs n1 and n2 need to perform a cross sending with n1<n2, then n1 sends first and receives afterwards.
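+   * (Illustrative example, added for clarity: if ranks 1 and 3 need such a cross exchange, rank 1
+   * sends its mesh first and then receives, while rank 3 receives first and then sends, so the two
+   * sends cannot block each other.)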
+   */
+  void OverlapElementLocator::exchangeMeshes(OverlapInterpolationMatrix& matrix)
+  {
+    int myProcId=_group.myRank();
+    //starting to receive every procs whose id is lower than myProcId.
+    std::vector< std::pair<int,int> > toDoListForFetchRemaining;
+    for(std::vector< std::pair<int,int> >::const_iterator it=_to_do_list.begin();it!=_to_do_list.end();it++)
+      {
+        if((*it).first!=(*it).second)
+          {
+            if((*it).first==myProcId)
+              {
+                if((*it).second<myProcId)
+                  receiveRemoteMesh((*it).second,false);
+                else
+                  toDoListForFetchRemaining.push_back(std::pair<int,int>((*it).first,(*it).second));
+              }
+            else
+              {//(*it).second==myProcId
+                if((*it).first<myProcId)
+                  receiveRemoteMesh((*it).first,true);
+                else
+                  toDoListForFetchRemaining.push_back(std::pair<int,int>((*it).first,(*it).second));
+              }
+          }
+      }
+    //sending source or target mesh to remote procs
+    for(std::vector< std::pair<int,bool> >::const_iterator it2=_procs_to_send.begin();it2!=_procs_to_send.end();it2++)
+      sendLocalMeshTo((*it2).first,(*it2).second,matrix);
+    //fetching remaining meshes
+    for(std::vector< std::pair<int,int> >::const_iterator it=toDoListForFetchRemaining.begin();it!=toDoListForFetchRemaining.end();it++)
+      {
+        if((*it).first!=(*it).second)
+          {
+            if((*it).first==myProcId)
+              receiveRemoteMesh((*it).second,false);
+            else//(*it).second==myProcId
+              receiveRemoteMesh((*it).first,true);
+          }
+      }
+  }
+  
+  std::string OverlapElementLocator::getSourceMethod() const
+  {
+    return _local_source_field->getField()->getDiscretization()->getStringRepr();
+  }
+
+  std::string OverlapElementLocator::getTargetMethod() const
+  {
+    return _local_target_field->getField()->getDiscretization()->getStringRepr();
+  }
+
+  const MEDCouplingPointSet *OverlapElementLocator::getSourceMesh(int procId) const
+  {
+    int myProcId=_group.myRank();
+    if(myProcId==procId)
+      return _local_source_mesh;
+    std::pair<int,bool> p(procId,true);
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< MEDCouplingPointSet > >::const_iterator it=_remote_meshes.find(p);
+    return (*it).second;
+  }
+
+  const DataArrayInt *OverlapElementLocator::getSourceIds(int procId) const
+  {
+    int myProcId=_group.myRank();
+    if(myProcId==procId)
+      return 0;
+    std::pair<int,bool> p(procId,true);
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< DataArrayInt > >::const_iterator it=_remote_elems.find(p);
+    return (*it).second;
+  }
+
+  const MEDCouplingPointSet *OverlapElementLocator::getTargetMesh(int procId) const
+  {
+    int myProcId=_group.myRank();
+    if(myProcId==procId)
+      return _local_target_mesh;
+    std::pair<int,bool> p(procId,false);
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< MEDCouplingPointSet > >::const_iterator it=_remote_meshes.find(p);
+    return (*it).second;
+  }
+
+  const DataArrayInt *OverlapElementLocator::getTargetIds(int procId) const
+  {
+    int myProcId=_group.myRank();
+    if(myProcId==procId)
+      return 0;
+    std::pair<int,bool> p(procId,false);
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< DataArrayInt > >::const_iterator it=_remote_elems.find(p);
+    return (*it).second;
+  }
+
+  bool OverlapElementLocator::intersectsBoundingBox(int isource, int itarget) const
+  {
+    const double *source_bb=_domain_bounding_boxes+isource*2*2*_local_space_dim;
+    const double *target_bb=_domain_bounding_boxes+itarget*2*2*_local_space_dim+2*_local_space_dim;
+
+    for (int idim=0; idim < _local_space_dim; idim++)
+      {
+        const double eps = -1e-12;//tony to change
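+        // Added note: eps is negative, so the boxes must overlap by more than 1e-12 in every
+        // dimension to be considered intersecting; boxes that merely touch are rejected.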
+        bool intersects = (target_bb[idim*2]<source_bb[idim*2+1]+eps)
+          && (source_bb[idim*2]<target_bb[idim*2+1]+eps);
+        if (!intersects)
+          return false; 
+      }
+    return true;
+  }
+
+  /*!
+   * This method sends the local source mesh to proc 'procId' if 'sourceOrTarget'==true.
+   * This method sends the local target mesh to proc 'procId' if 'sourceOrTarget'==false.
+   *
+   * It also prepares the matrix for assembly and for the future matrix-vector computation.
+   */
+  void OverlapElementLocator::sendLocalMeshTo(int procId, bool sourceOrTarget, OverlapInterpolationMatrix& matrix) const
+  {
+   //int myProcId=_group.myRank();
+   const double *distant_bb=0;
+   MEDCouplingPointSet *local_mesh=0;
+   const ParaFIELD *field=0;
+   if(sourceOrTarget)//source for local but target for distant
+     {
+       distant_bb=_domain_bounding_boxes+procId*2*2*_local_space_dim+2*_local_space_dim;
+       local_mesh=_local_source_mesh;
+       field=_local_source_field;
+     }
+   else//target for local but source for distant
+     {
+       distant_bb=_domain_bounding_boxes+procId*2*2*_local_space_dim;
+       local_mesh=_local_target_mesh;
+       field=_local_target_field;
+     }
+   MEDCouplingAutoRefCountObjectPtr<DataArrayInt> elems=local_mesh->getCellsInBoundingBox(distant_bb,getBoundingBoxAdjustment());
+   DataArrayInt *idsToSend;
+   MEDCouplingPointSet *send_mesh=static_cast<MEDCouplingPointSet *>(field->getField()->buildSubMeshData(elems->begin(),elems->end(),idsToSend));
+   if(sourceOrTarget)
+     matrix.keepTracksOfSourceIds(procId,idsToSend);//Case#1 in Step2 of main algorithm.
+   else
+     matrix.keepTracksOfTargetIds(procId,idsToSend);//Case#0 in Step2 of main algorithm.
+   sendMesh(procId,send_mesh,idsToSend);
+   send_mesh->decrRef();
+   idsToSend->decrRef();
+  }
+
+  /*!
+   * This method receives the remote source mesh from proc 'procId' if sourceOrTarget==true.
+   * This method receives the remote target mesh from proc 'procId' if sourceOrTarget==false.
+   */
+  void OverlapElementLocator::receiveRemoteMesh(int procId, bool sourceOrTarget)
+  {
+    DataArrayInt *da=0;
+    MEDCouplingPointSet *m=0;
+    receiveMesh(procId,m,da);
+    std::pair<int,bool> p(procId,sourceOrTarget);
+    _remote_meshes[p]=m;
+    _remote_elems[p]=da;
+  }
+
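+  // Message layout used by sendMesh()/receiveMesh() (added descriptive comment, tags as in the code below):
+  //   tag 1140 : two ints [size of the tiny serialization info, number of ids]
+  //   tag 1141 : tiny serialization info (ints) describing the mesh
+  //   tag 1142 : int array produced by MEDCouplingPointSet::serialize()
+  //   tag 1143 : double array (coordinates) produced by MEDCouplingPointSet::serialize()
+  //   tag 1144 : ids of the cells extracted and sent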
+  void OverlapElementLocator::sendMesh(int procId, const MEDCouplingPointSet *mesh, const DataArrayInt *idsToSend) const
+  {
+    CommInterface comInterface=_group.getCommInterface();
+    // First stage : exchanging sizes
+    vector<double> tinyInfoLocalD;//tinyInfoLocalD not used for the moment
+    vector<int> tinyInfoLocal;
+    vector<string> tinyInfoLocalS;
+    mesh->getTinySerializationInformation(tinyInfoLocalD,tinyInfoLocal,tinyInfoLocalS);
+    const MPI_Comm *comm=getCommunicator();
+    //
+    int lgth[2];
+    lgth[0]=tinyInfoLocal.size();
+    lgth[1]=idsToSend->getNbOfElems();
+    comInterface.send(&lgth,2,MPI_INT,procId,1140,*_comm);
+    comInterface.send(&tinyInfoLocal[0],tinyInfoLocal.size(),MPI_INT,procId,1141,*comm);
+    //
+    DataArrayInt *v1Local=0;
+    DataArrayDouble *v2Local=0;
+    mesh->serialize(v1Local,v2Local);
+    comInterface.send(v1Local->getPointer(),v1Local->getNbOfElems(),MPI_INT,procId,1142,*comm);
+    comInterface.send(v2Local->getPointer(),v2Local->getNbOfElems(),MPI_DOUBLE,procId,1143,*comm);
+    //finished for mesh, ids now
+    comInterface.send(const_cast<int *>(idsToSend->getConstPointer()),lgth[1],MPI_INT,procId,1144,*comm);
+    //
+    v1Local->decrRef();
+    v2Local->decrRef();
+  }
+
+  void OverlapElementLocator::receiveMesh(int procId, MEDCouplingPointSet* &mesh, DataArrayInt *&ids) const
+  {
+    int lgth[2];
+    MPI_Status status;
+    const MPI_Comm *comm=getCommunicator();
+    CommInterface comInterface=_group.getCommInterface();
+    comInterface.recv(lgth,2,MPI_INT,procId,1140,*_comm,&status);
+    std::vector<int> tinyInfoDistant(lgth[0]);
+    ids=DataArrayInt::New();
+    ids->alloc(lgth[1],1);
+    comInterface.recv(&tinyInfoDistant[0],lgth[0],MPI_INT,procId,1141,*comm,&status);
+    mesh=MEDCouplingPointSet::BuildInstanceFromMeshType((MEDCouplingMeshType)tinyInfoDistant[0]);
+    std::vector<std::string> unusedTinyDistantSts;
+    vector<double> tinyInfoDistantD(1);//tinyInfoDistantD not used for the moment
+    DataArrayInt *v1Distant=DataArrayInt::New();
+    DataArrayDouble *v2Distant=DataArrayDouble::New();
+    mesh->resizeForUnserialization(tinyInfoDistant,v1Distant,v2Distant,unusedTinyDistantSts);
+    comInterface.recv(v1Distant->getPointer(),v1Distant->getNbOfElems(),MPI_INT,procId,1142,*comm,&status);
+    comInterface.recv(v2Distant->getPointer(),v2Distant->getNbOfElems(),MPI_DOUBLE,procId,1143,*comm,&status);
+    mesh->unserialization(tinyInfoDistantD,tinyInfoDistant,v1Distant,v2Distant,unusedTinyDistantSts);
+    //finished for mesh, ids now
+    comInterface.recv(ids->getPointer(),lgth[1],MPI_INT,procId,1144,*comm,&status);
+    //
+    v1Distant->decrRef();
+    v2Distant->decrRef();
+  }
+}
diff --git a/src/ParaMEDMEM/OverlapElementLocator.hxx b/src/ParaMEDMEM/OverlapElementLocator.hxx
new file mode 100644 (file)
index 0000000..13a94c8
--- /dev/null
@@ -0,0 +1,92 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#ifndef __OVERLAPELEMENTLOCATOR_HXX__
+#define __OVERLAPELEMENTLOCATOR_HXX__
+
+#include "InterpolationOptions.hxx"
+#include "MEDCouplingNatureOfField.hxx"
+#include "MEDCouplingPointSet.hxx"
+#include "MEDCouplingMemArray.hxx"
+#include "MEDCouplingAutoRefCountObjectPtr.hxx"
+
+#include <mpi.h>
+#include <vector>
+#include <map>
+#include <set>
+
+namespace ParaMEDMEM
+{
+  class ParaFIELD;
+  class ProcessorGroup;
+  class ParaSUPPORT;
+  class OverlapInterpolationMatrix;
+  
+  class OverlapElementLocator : public INTERP_KERNEL::InterpolationOptions
+  {
+  public:
+    OverlapElementLocator(const ParaFIELD *sourceField, const ParaFIELD *targetField, const ProcessorGroup& group);
+    virtual ~OverlapElementLocator();
+    const MPI_Comm *getCommunicator() const;
+    void exchangeMeshes(OverlapInterpolationMatrix& matrix);
+    std::vector< std::pair<int,int> > getToDoList() const { return _to_do_list; }
+    std::vector< std::vector< int > > getProcsInInteraction() const { return _proc_pairs; }
+    std::string getSourceMethod() const;
+    std::string getTargetMethod() const;
+    const MEDCouplingPointSet *getSourceMesh(int procId) const;
+    const DataArrayInt *getSourceIds(int procId) const;
+    const MEDCouplingPointSet *getTargetMesh(int procId) const;
+    const DataArrayInt *getTargetIds(int procId) const;
+  private:
+    void computeBoundingBoxes();
+    bool intersectsBoundingBox(int i, int j) const;
+    void sendLocalMeshTo(int procId, bool sourceOrTarget, OverlapInterpolationMatrix& matrix) const;
+    void receiveRemoteMesh(int procId, bool sourceOrTarget);
+    void sendMesh(int procId, const MEDCouplingPointSet *mesh, const DataArrayInt *idsToSend) const;
+    void receiveMesh(int procId, MEDCouplingPointSet* &mesh, DataArrayInt *&ids) const;
+  private:
+    const ParaFIELD *_local_source_field;
+    const ParaFIELD *_local_target_field;
+    int _local_space_dim;
+    MEDCouplingPointSet *_local_source_mesh;
+    MEDCouplingPointSet *_local_target_mesh;
+    std::vector<MEDCouplingPointSet*> _distant_cell_meshes;
+    std::vector<MEDCouplingPointSet*> _distant_face_meshes;
+    //! of size _group.size(). Contains, for each source proc i, the ids of the target procs j it interacts with. This vector is common to all procs in _group.
+    std::vector< std::vector< int > > _proc_pairs;
+    //! list of interpolation couples to be done
+    std::vector< std::pair<int,int> > _to_do_list;
+    std::vector< std::pair<int,bool> > _procs_to_send;
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< MEDCouplingPointSet > > _remote_meshes;
+    std::map<std::pair<int,bool>, MEDCouplingAutoRefCountObjectPtr< DataArrayInt > > _remote_elems;
+    double* _domain_bounding_boxes;
+    const ProcessorGroup& _group;
+    std::vector<int> _distant_proc_ids;
+    const MPI_Comm *_comm;
+    //Attributes only used by lazy side
+    //std::vector<double> _values_added;
+    //std::vector< std::vector<int> > _ids_per_working_proc;
+    //std::vector< std::vector<int> > _ids_per_working_proc3;
+    //std::vector< std::vector<double> > _values_per_working_proc;
+  };
+
+}
+
+#endif
diff --git a/src/ParaMEDMEM/OverlapInterpolationMatrix.cxx b/src/ParaMEDMEM/OverlapInterpolationMatrix.cxx
new file mode 100644 (file)
index 0000000..b57541b
--- /dev/null
@@ -0,0 +1,315 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#include "OverlapInterpolationMatrix.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ProcessorGroup.hxx"
+#include "TranslationRotationMatrix.hxx"
+#include "Interpolation.hxx"
+#include "Interpolation1D.txx"
+#include "Interpolation2DCurve.hxx"
+#include "Interpolation2D.txx"
+#include "Interpolation3DSurf.hxx"
+#include "Interpolation3D.txx"
+#include "Interpolation3D2D.txx"
+#include "Interpolation2D1D.txx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingNormalizedUnstructuredMesh.txx"
+#include "InterpolationOptions.hxx"
+#include "NormalizedUnstructuredMesh.hxx"
+#include "ElementLocator.hxx"
+#include "InterpKernelAutoPtr.hxx"
+
+#include <algorithm>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  OverlapInterpolationMatrix::OverlapInterpolationMatrix(ParaFIELD *source_field,
+                                                         ParaFIELD *target_field,
+                                                         const ProcessorGroup& group,
+                                                         const DECOptions& dec_options,
+                                                         const INTERP_KERNEL::InterpolationOptions& i_opt):
+    INTERP_KERNEL::InterpolationOptions(i_opt),
+    DECOptions(dec_options),
+    _source_field(source_field),
+    _target_field(target_field),
+    _source_support(source_field->getSupport()->getCellMesh()),
+    _target_support(target_field->getSupport()->getCellMesh()),
+    _mapping(group),
+    _group(group)
+  {
+    int nbelems = source_field->getField()->getNumberOfTuples();
+    _row_offsets.resize(nbelems+1);
+    _coeffs.resize(nbelems);
+    _target_volume.resize(nbelems);
+  }
+
+  void OverlapInterpolationMatrix::keepTracksOfSourceIds(int procId, DataArrayInt *ids)
+  {
+    _mapping.keepTracksOfSourceIds(procId,ids);
+  }
+
+  void OverlapInterpolationMatrix::keepTracksOfTargetIds(int procId, DataArrayInt *ids)
+  {
+    _mapping.keepTracksOfTargetIds(procId,ids);
+  }
+
+  OverlapInterpolationMatrix::~OverlapInterpolationMatrix()
+  {
+  }
+
+  void OverlapInterpolationMatrix::addContribution(const MEDCouplingPointSet *src, const DataArrayInt *srcIds, const std::string& srcMeth, int srcProcId,
+                                                   const MEDCouplingPointSet *trg, const DataArrayInt *trgIds, const std::string& trgMeth, int trgProcId)
+  {
+    std::string interpMethod(srcMeth);
+    interpMethod+=trgMeth;
+    //creating the interpolator structure
+    vector<map<int,double> > surfaces;
+    int colSize=0;
+    //computation of the intersection volumes between source and target elements
+    const MEDCouplingUMesh *trgC=dynamic_cast<const MEDCouplingUMesh *>(trg);
+    const MEDCouplingUMesh *srcC=dynamic_cast<const MEDCouplingUMesh *>(src);
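+    // Dispatch summary (added comment): the (mesh dimension, space dimension) of source and target
+    // select the INTERP_KERNEL interpolator below (Interpolation1D, Interpolation2DCurve,
+    // Interpolation2D, Interpolation3DSurf, Interpolation3D, and the mixed Interpolation3D2D /
+    // Interpolation2D1D cases); meshes of dimension -1 go through from/toIntegralUniform instead.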
+    if ( src->getMeshDimension() == -1 )
+      {
+        if(trgC->getMeshDimension()==2 && trgC->getSpaceDimension()==2)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<2,2> target_mesh_wrapper(trgC);
+            INTERP_KERNEL::Interpolation2D interpolation(*this);
+            colSize=interpolation.fromIntegralUniform(target_mesh_wrapper,surfaces,trgMeth);
+          }
+        else if(trgC->getMeshDimension()==3 && trgC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,3> target_mesh_wrapper(trgC);
+            INTERP_KERNEL::Interpolation3D interpolation(*this);
+            colSize=interpolation.fromIntegralUniform(target_mesh_wrapper,surfaces,trgMeth);
+          }
+        else if(trgC->getMeshDimension()==2 && trgC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,2> target_mesh_wrapper(trgC);
+            INTERP_KERNEL::Interpolation3DSurf interpolation(*this);
+            colSize=interpolation.fromIntegralUniform(target_mesh_wrapper,surfaces,trgMeth);
+          }
+        else
+          throw INTERP_KERNEL::Exception("No para interpolation available for the given mesh and space dimension of source mesh to -1D targetMesh");
+      }
+    else if ( trg->getMeshDimension() == -1 )
+      {
+        if(srcC->getMeshDimension()==2 && srcC->getSpaceDimension()==2)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<2,2> local_mesh_wrapper(srcC);
+            INTERP_KERNEL::Interpolation2D interpolation(*this);
+            colSize=interpolation.toIntegralUniform(local_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(srcC->getMeshDimension()==3 && srcC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,3> local_mesh_wrapper(srcC);
+            INTERP_KERNEL::Interpolation3D interpolation(*this);
+            colSize=interpolation.toIntegralUniform(local_mesh_wrapper,surfaces,srcMeth);
+          }
+        else if(srcC->getMeshDimension()==2 && srcC->getSpaceDimension()==3)
+          {
+            MEDCouplingNormalizedUnstructuredMesh<3,2> local_mesh_wrapper(srcC);
+            INTERP_KERNEL::Interpolation3DSurf interpolation(*this);
+            colSize=interpolation.toIntegralUniform(local_mesh_wrapper,surfaces,srcMeth);
+          }
+        else
+          throw INTERP_KERNEL::Exception("No para interpolation available for the given mesh and space dimension of distant mesh to -1D sourceMesh");
+      }
+    else if ( src->getMeshDimension() == 2 && trg->getMeshDimension() == 3
+              && trg->getSpaceDimension() == 3 && src->getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(srcC);
+        
+        INTERP_KERNEL::Interpolation3D2D interpolator (*this);
+        colSize=interpolator.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( src->getMeshDimension() == 3 && trg->getMeshDimension() == 2
+              && trg->getSpaceDimension() == 3 && src->getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(srcC);
+        
+        INTERP_KERNEL::Interpolation3D2D interpolator (*this);
+        vector<map<int,double> > surfacesTranspose;
+        colSize=interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfaces,interpMethod);//not a bug target in source.
+        TransposeMatrix(surfacesTranspose,colSize,surfaces);
+        colSize=surfacesTranspose.size();
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( src->getMeshDimension() == 1 && trg->getMeshDimension() == 2
+              && trg->getSpaceDimension() == 2 && src->getSpaceDimension() == 2 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,2> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<2,2> source_wrapper(srcC);
+        
+        INTERP_KERNEL::Interpolation2D1D interpolator (*this);
+        colSize=interpolator.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( src->getMeshDimension() == 2 && trg->getMeshDimension() == 1
+              && trg->getSpaceDimension() == 2 && src->getSpaceDimension() == 2 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,2> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<2,2> source_wrapper(srcC);
+        
+        INTERP_KERNEL::Interpolation2D1D interpolator (*this);
+        vector<map<int,double> > surfacesTranspose;
+        colSize=interpolator.interpolateMeshes(target_wrapper,source_wrapper,surfacesTranspose,interpMethod);//not a bug target in source.
+        TransposeMatrix(surfacesTranspose,colSize,surfaces);
+        colSize=surfacesTranspose.size();
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if (trg->getMeshDimension() != _source_support->getMeshDimension())
+      {
+        throw INTERP_KERNEL::Exception("local and distant meshes do not have the same space and mesh dimensions");
+      }
+    else if( src->getMeshDimension() == 1
+             && src->getSpaceDimension() == 1 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<1,1> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<1,1> source_wrapper(srcC);
+
+        INTERP_KERNEL::Interpolation1D interpolation(*this);
+        colSize=interpolation.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if( trg->getMeshDimension() == 1
+             && trg->getSpaceDimension() == 2 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,1> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<2,1> source_wrapper(srcC);
+
+        INTERP_KERNEL::Interpolation2DCurve interpolation(*this);
+        colSize=interpolation.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( trg->getMeshDimension() == 2
+              && trg->getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,2> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<3,2> source_wrapper(srcC);
+
+        INTERP_KERNEL::Interpolation3DSurf interpolator (*this);
+        colSize=interpolator.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( trg->getMeshDimension() == 2
+              && trg->getSpaceDimension() == 2)
+      {
+        MEDCouplingNormalizedUnstructuredMesh<2,2> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<2,2> source_wrapper(srcC);
+
+        INTERP_KERNEL::Interpolation2D interpolator (*this);
+        colSize=interpolator.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else if ( trg->getMeshDimension() == 3
+              && trg->getSpaceDimension() == 3 )
+      {
+        MEDCouplingNormalizedUnstructuredMesh<3,3> target_wrapper(trgC);
+        MEDCouplingNormalizedUnstructuredMesh<3,3> source_wrapper(srcC);
+
+        INTERP_KERNEL::Interpolation3D interpolator (*this);
+        colSize=interpolator.interpolateMeshes(source_wrapper,target_wrapper,surfaces,interpMethod);
+        target_wrapper.releaseTempArrays();
+        source_wrapper.releaseTempArrays();
+      }
+    else
+      {
+        throw INTERP_KERNEL::Exception("no interpolator exists for these mesh and space dimensions ");
+      }
+    bool needSourceSurf=isSurfaceComputationNeeded(srcMeth);
+    MEDCouplingFieldDouble *source_triangle_surf=0;
+    if(needSourceSurf)
+      source_triangle_surf=src->getMeasureField(getMeasureAbsStatus());
+    //
+    fillDistributedMatrix(surfaces,srcIds,srcProcId,trgIds,trgProcId);
+    //
+    if(needSourceSurf)
+      source_triangle_surf->decrRef();
+  }
+
+  /*!
+   * In \b res, rows refer to target elements and columns (the first param of each map) to source elements.
+   */
+  void OverlapInterpolationMatrix::fillDistributedMatrix(const std::vector< std::map<int,double> >& res,
+                                                         const DataArrayInt *srcIds, int srcProc,
+                                                         const DataArrayInt *trgIds, int trgProc)
+  {
+    _mapping.addContributionST(res,srcIds,srcProc,trgIds,trgProc);
+  }
+
+  /*!
+   * 'procsInInteraction' gives the global view of interaction between procs.
+   * A proc with id i is in interaction with the procs listed in procsInInteraction[i].
+   */
+  void OverlapInterpolationMatrix::prepare(const std::vector< std::vector<int> >& procsInInteraction)
+  {
+    if(_source_support)
+      _mapping.prepare(procsInInteraction,_target_field->getField()->getNumberOfTuplesExpected());
+    else
+      _mapping.prepare(procsInInteraction,0);
+  }
+
+  void OverlapInterpolationMatrix::computeDeno()
+  {
+    if(_target_field->getField()->getNature()==ConservativeVolumic)
+      _mapping.computeDenoConservativeVolumic(_target_field->getField()->getNumberOfTuplesExpected());
+    else
+      throw INTERP_KERNEL::Exception("Policy Not implemented yet : only ConservativeVolumic defined !");
+  }
+
+  void OverlapInterpolationMatrix::multiply()
+  {
+    _mapping.multiply(_source_field->getField(),_target_field->getField());
+  }
+
+  void OverlapInterpolationMatrix::transposeMultiply()
+  {
+    _mapping.transposeMultiply(_target_field->getField(),_source_field->getField());
+  }
+  
+  bool OverlapInterpolationMatrix::isSurfaceComputationNeeded(const std::string& method) const
+  {
+    return method=="P0";
+  }
+
+  void OverlapInterpolationMatrix::TransposeMatrix(const std::vector<std::map<int,double> >& matIn, int nbColsMatIn, std::vector<std::map<int,double> >& matOut)
+  {
+    matOut.resize(nbColsMatIn);
+    int id=0;
+    for(std::vector<std::map<int,double> >::const_iterator iter1=matIn.begin();iter1!=matIn.end();iter1++,id++)
+      for(std::map<int,double>::const_iterator iter2=(*iter1).begin();iter2!=(*iter1).end();iter2++)
+        matOut[(*iter2).first][id]=(*iter2).second;
+  }
+}
diff --git a/src/ParaMEDMEM/OverlapInterpolationMatrix.hxx b/src/ParaMEDMEM/OverlapInterpolationMatrix.hxx
new file mode 100644 (file)
index 0000000..514deb8
--- /dev/null
@@ -0,0 +1,126 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#ifndef __OVERLAPINTERPOLATIONMATRIX_HXX__
+#define __OVERLAPINTERPOLATIONMATRIX_HXX__
+
+#include "MPIAccessDEC.hxx"
+#include "OverlapMapping.hxx"
+#include "InterpolationOptions.hxx"
+#include "DECOptions.hxx"
+
+namespace ParaMEDMEM
+{
+  class ParaFIELD;
+  class MEDCouplingPointSet;
+
+  class OverlapInterpolationMatrix : public INTERP_KERNEL::InterpolationOptions,
+                                     public DECOptions
+  {
+  public:
+    
+    OverlapInterpolationMatrix(ParaFIELD *source_field,
+                               ParaFIELD *target_field,
+                               const ProcessorGroup& group,
+                               const DECOptions& dec_opt,
+                               const InterpolationOptions& i_opt);
+
+    void keepTracksOfSourceIds(int procId, DataArrayInt *ids);
+
+    void keepTracksOfTargetIds(int procId, DataArrayInt *ids);
+
+    void addContribution(const MEDCouplingPointSet *src, const DataArrayInt *srcIds, const std::string& srcMeth, int srcProcId,
+                         const MEDCouplingPointSet *trg, const DataArrayInt *trgIds, const std::string& trgMeth, int trgProcId);
+
+    void prepare(const std::vector< std::vector<int> >& procsInInteraction);
+    
+    void computeDeno();
+
+    void multiply();
+
+    void transposeMultiply();
+    
+    virtual ~OverlapInterpolationMatrix();
+#if 0
+    void addContribution(MEDCouplingPointSet& distant_support, int iproc_distant,
+                         const int* distant_elems, const std::string& srcMeth, const std::string& targetMeth);
+    void finishContributionW(ElementLocator& elementLocator);
+    void finishContributionL(ElementLocator& elementLocator);
+    void multiply(MEDCouplingFieldDouble& field) const;
+    void transposeMultiply(MEDCouplingFieldDouble& field)const;
+    void prepare();
+    int getNbRows() const { return _row_offsets.size(); }
+    MPIAccessDEC* getAccessDEC() { return _mapping.getAccessDEC(); }
+  private:
+    void computeConservVolDenoW(ElementLocator& elementLocator);
+    void computeIntegralDenoW(ElementLocator& elementLocator);
+    void computeRevIntegralDenoW(ElementLocator& elementLocator);
+    void computeGlobConstraintDenoW(ElementLocator& elementLocator);
+    void computeConservVolDenoL(ElementLocator& elementLocator);
+    void computeIntegralDenoL(ElementLocator& elementLocator);
+    void computeRevIntegralDenoL(ElementLocator& elementLocator);
+    
+    void computeLocalColSum(std::vector<double>& res) const;
+    void computeLocalRowSum(const std::vector<int>& distantProcs, std::vector<std::vector<int> >& resPerProcI,
+                            std::vector<std::vector<double> >& resPerProcD) const;
+    void computeGlobalRowSum(ElementLocator& elementLocator, std::vector<std::vector<double> >& denoStrorage, std::vector<std::vector<double> >& denoStrorageInv);
+    void computeGlobalColSum(std::vector<std::vector<double> >& denoStrorage);
+    void resizeGlobalColSum(std::vector<std::vector<double> >& denoStrorage);
+    void fillDSFromVM(int iproc_distant, const int* distant_elems, const std::vector< std::map<int,double> >& values, MEDCouplingFieldDouble *surf);
+    void serializeMe(std::vector< std::vector< std::map<int,double> > >& data1, std::vector<int>& data2) const;
+    void initialize();
+    void findAdditionnalElements(ElementLocator& elementLocator, std::vector<std::vector<int> >& elementsToAdd,
+                                 const std::vector<std::vector<int> >& resPerProcI, const std::vector<std::vector<int> >& globalIdsPartial);
+    void addGhostElements(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& elementsToAdd);
+    int mergePolicies(const std::vector<int>& policyPartial);
+    void mergeRowSum(const std::vector< std::vector<double> >& rowsPartialSumD, const std::vector< std::vector<int> >& globalIdsPartial,
+                     std::vector<int>& globalIdsLazySideInteraction, std::vector<double>& sumCorresponding);
+    void mergeRowSum2(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD,
+                      const std::vector<int>& globalIdsLazySideInteraction, const std::vector<double>& sumCorresponding);
+    void mergeRowSum3(const std::vector< std::vector<int> >& globalIdsPartial, std::vector< std::vector<double> >& rowsPartialSumD);
+    void mergeCoeffs(const std::vector<int>& procsInInteraction, const std::vector< std::vector<int> >& rowsPartialSumI,
+                     const std::vector<std::vector<int> >& globalIdsPartial, std::vector<std::vector<double> >& denoStrorageInv);
+    void divideByGlobalRowSum(const std::vector<int>& distantProcs, const std::vector<std::vector<int> >& resPerProcI,
+                              const std::vector<std::vector<double> >& resPerProcD, std::vector<std::vector<double> >& deno);
+#endif
+  private:
+    bool isSurfaceComputationNeeded(const std::string& method) const;
+    void fillDistributedMatrix(const std::vector< std::map<int,double> >& res,
+                               const DataArrayInt *srcIds, int srcProc,
+                               const DataArrayInt *trgIds, int trgProc);
+    static void TransposeMatrix(const std::vector<std::map<int,double> >& matIn, int nbColsMatIn, std::vector<std::map<int,double> >& matOut);
+  private:
+    ParaMEDMEM::ParaFIELD *_source_field;
+    ParaMEDMEM::ParaFIELD *_target_field;
+    std::vector<int> _row_offsets;
+    std::map<std::pair<int,int>, int > _col_offsets;
+    MEDCouplingPointSet *_source_support;
+    MEDCouplingPointSet *_target_support;
+    OverlapMapping _mapping;
+    const ProcessorGroup& _group;
+    std::vector< std::vector<double> > _target_volume;
+    std::vector<std::vector<std::pair<int,double> > > _coeffs;
+    std::vector<std::vector<double> > _deno_multiply;
+    std::vector<std::vector<double> > _deno_reverse_multiply;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/OverlapMapping.cxx b/src/ParaMEDMEM/OverlapMapping.cxx
new file mode 100644 (file)
index 0000000..abb7a1d
--- /dev/null
@@ -0,0 +1,673 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#include "OverlapMapping.hxx"
+#include "MPIProcessorGroup.hxx"
+
+#include "MEDCouplingFieldDouble.hxx"
+#include "MEDCouplingAutoRefCountObjectPtr.hxx"
+
+#include "InterpKernelAutoPtr.hxx"
+
+#include <numeric>
+#include <algorithm>
+
+using namespace ParaMEDMEM;
+
+OverlapMapping::OverlapMapping(const ProcessorGroup& group):_group(group)
+{
+}
+
+/*!
+ * This method keeps track of source ids, so that step 6 of the main algorithm knows which tuple ids to send away.
+ * This method implements item#1 of the step2 algorithm.
+ */
+void OverlapMapping::keepTracksOfSourceIds(int procId, DataArrayInt *ids)
+{
+  ids->incrRef();
+  _src_ids_st2.push_back(ids);
+  _src_proc_st2.push_back(procId);
+}
+
+/*!
+ * This method keeps track of target ids for step 6 of the main algorithm.
+ * This method implements item#0 of the step2 algorithm.
+ */
+void OverlapMapping::keepTracksOfTargetIds(int procId, DataArrayInt *ids)
+{
+  ids->incrRef();
+  _trg_ids_st2.push_back(ids);
+  _trg_proc_st2.push_back(procId);
+}
+
+/*!
+ * This method stores a matrix given in Target(rows)/Source(cols) format, for a source procId 'srcProcId' and a target procId 'trgProcId'.
+ * All ids (source and target) are expressed as local ids.
+ */
+void OverlapMapping::addContributionST(const std::vector< std::map<int,double> >& matrixST, const DataArrayInt *srcIds, int srcProcId, const DataArrayInt *trgIds, int trgProcId)
+{
+  _matrixes_st.push_back(matrixST);
+  _source_proc_id_st.push_back(srcProcId);
+  _target_proc_id_st.push_back(trgProcId);
+  if(srcIds)
+    {//item#1 of step2 algorithm in proc m. Only to know in advance the nb of recv ids [ (0,1) computed on proc1 and Matrix-Vector on proc1 ]
+      _nb_of_src_ids_proc_st2.push_back(srcIds->getNumberOfTuples());
+      _src_ids_proc_st2.push_back(srcProcId);
+    }
+  else
+    {//item#0 of step2 algorithm in proc k
+      std::set<int> s;
+      for(std::vector< std::map<int,double> >::const_iterator it1=matrixST.begin();it1!=matrixST.end();it1++)
+        for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+          s.insert((*it2).first);
+      _src_ids_zip_st2.resize(_src_ids_zip_st2.size()+1);
+      _src_ids_zip_st2.back().insert(_src_ids_zip_st2.back().end(),s.begin(),s.end());
+      _src_ids_zip_proc_st2.push_back(trgProcId);
+    }
+}
+
+/*!
+ * 'procsInInteraction' gives the global view of the interactions between procs.
+ * A proc with id i is in interaction with the procs listed in procsInInteraction[i].
+ *
+ * This method is in charge of sending the matrices in AllToAll mode.
+ * After the call of this method, 'this' contains the matrixST for all source elements of the current proc.
+ */
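+// Exchange phases (added comment, matching the calls below):
+//   1) allToAll of per-proc row counts,
+//   2) allToAllV of the cumulated row sizes prepared by serializeMatrixStep0ST,
+//   3) allToAllV of column ids and allToAllV of coefficients prepared by serializeMatrixStep1ST,
+//   4) unserializationST, updateZipSourceIdsForFuture and finishToFillFinalMatrixST to assemble
+//      _the_matrix_st locally.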
+void OverlapMapping::prepare(const std::vector< std::vector<int> >& procsInInteraction, int nbOfTrgElems)
+{
+  CommInterface commInterface=_group.getCommInterface();
+  const MPIProcessorGroup *group=static_cast<const MPIProcessorGroup*>(&_group);
+  const MPI_Comm *comm=group->getComm();
+  int grpSize=_group.size();
+  INTERP_KERNEL::AutoPtr<int> nbsend=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbsend2=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbsend3=new int[grpSize];
+  std::fill<int *>(nbsend,nbsend+grpSize,0);
+  int myProcId=_group.myRank();
+  _proc_ids_to_recv_vector_st.clear();
+  int curProc=0;
+  for(std::vector< std::vector<int> >::const_iterator it1=procsInInteraction.begin();it1!=procsInInteraction.end();it1++,curProc++)
+    if(std::find((*it1).begin(),(*it1).end(),myProcId)!=(*it1).end())
+      _proc_ids_to_recv_vector_st.push_back(curProc);
+  _proc_ids_to_send_vector_st=procsInInteraction[myProcId];
+  for(std::size_t i=0;i<_matrixes_st.size();i++)
+    if(_source_proc_id_st[i]==myProcId)
+      nbsend[_target_proc_id_st[i]]=_matrixes_st[i].size();
+  INTERP_KERNEL::AutoPtr<int> nbrecv=new int[grpSize];
+  commInterface.allToAll(nbsend,1,MPI_INT,nbrecv,1,MPI_INT,*comm);
+  //exchanging matrix
+  //first exchanging offsets+ids_source
+  INTERP_KERNEL::AutoPtr<int> nbrecv1=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbrecv2=new int[grpSize];
+  //
+  int *tmp=0;
+  serializeMatrixStep0ST(nbrecv,
+                         tmp,nbsend2,nbsend3,
+                         nbrecv1,nbrecv2);
+  INTERP_KERNEL::AutoPtr<int> bigArr=tmp;
+  INTERP_KERNEL::AutoPtr<int> bigArrRecv=new int[nbrecv2[grpSize-1]+nbrecv1[grpSize-1]];
+  commInterface.allToAllV(bigArr,nbsend2,nbsend3,MPI_INT,
+                          bigArrRecv,nbrecv1,nbrecv2,MPI_INT,
+                          *comm);// sending ids of sparse matrix (n+1 elems)
+  //second phase : exchange target ids
+  std::fill<int *>(nbsend2,nbsend2+grpSize,0);
+  INTERP_KERNEL::AutoPtr<int> nbrecv3=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbrecv4=new int[grpSize];
+  double *tmp2=0;
+  int lgthOfArr=serializeMatrixStep1ST(nbrecv,bigArrRecv,nbrecv1,nbrecv2,
+                                       tmp,tmp2,
+                                       nbsend2,nbsend3,nbrecv3,nbrecv4);
+  INTERP_KERNEL::AutoPtr<int> bigArr2=tmp;
+  INTERP_KERNEL::AutoPtr<double> bigArrD2=tmp2;
+  INTERP_KERNEL::AutoPtr<int> bigArrRecv2=new int[lgthOfArr];
+  INTERP_KERNEL::AutoPtr<double> bigArrDRecv2=new double[lgthOfArr];
+  commInterface.allToAllV(bigArr2,nbsend2,nbsend3,MPI_INT,
+                          bigArrRecv2,nbrecv3,nbrecv4,MPI_INT,
+                          *comm);
+  commInterface.allToAllV(bigArrD2,nbsend2,nbsend3,MPI_DOUBLE,
+                          bigArrDRecv2,nbrecv3,nbrecv4,MPI_DOUBLE,
+                          *comm);
+  //finishing
+  unserializationST(nbOfTrgElems,nbrecv,bigArrRecv,nbrecv1,nbrecv2,
+                    bigArrRecv2,bigArrDRecv2,nbrecv3,nbrecv4);
+  //updating _src_ids_zip_st2 and _src_ids_zip_proc_st2 with the received matrix.
+  updateZipSourceIdsForFuture();
+  //finish to fill _the_matrix_st with already in place matrix in _matrixes_st
+  finishToFillFinalMatrixST();
+  //printTheMatrix();
+}
+
+/*!
+ * Compute denominators (global constraint policy): for each row of each stored matrix, every non-zero entry receives the row sum as its denominator.
+ */
+void OverlapMapping::computeDenoGlobConstraint()
+{
+  _the_deno_st.clear();
+  std::size_t sz1=_the_matrix_st.size();
+  _the_deno_st.resize(sz1);
+  for(std::size_t i=0;i<sz1;i++)
+    {
+      std::size_t sz2=_the_matrix_st[i].size();
+      _the_deno_st[i].resize(sz2);
+      for(std::size_t j=0;j<sz2;j++)
+        {
+          double sum=0;
+          std::map<int,double>& mToFill=_the_deno_st[i][j];
+          const std::map<int,double>& m=_the_matrix_st[i][j];
+          for(std::map<int,double>::const_iterator it=m.begin();it!=m.end();it++)
+            sum+=(*it).second;
+          for(std::map<int,double>::const_iterator it=m.begin();it!=m.end();it++)
+            mToFill[(*it).first]=sum;
+        }
+    }
+}
+
+/*!
+ * Compute denominators (conservative volumic policy): contributions to a given target tuple are accumulated across all stored matrices before being used as denominators.
+ */
+void OverlapMapping::computeDenoConservativeVolumic(int nbOfTuplesTrg)
+{
+  CommInterface commInterface=_group.getCommInterface();
+  int myProcId=_group.myRank();
+  //
+  _the_deno_st.clear();
+  std::size_t sz1=_the_matrix_st.size();
+  _the_deno_st.resize(sz1);
+  std::vector<double> deno(nbOfTuplesTrg);
+  for(std::size_t i=0;i<sz1;i++)
+    {
+      const std::vector< std::map<int,double> >& mat=_the_matrix_st[i];
+      int curSrcId=_the_matrix_st_source_proc_id[i];
+      std::vector<int>::iterator isItem1=std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),curSrcId);
+      int rowId=0;
+      if(isItem1==_trg_proc_st2.end() || curSrcId==myProcId)//item1 of step2 main algo. Simple, because rowId of mat are directly target ids.
+        {
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++,rowId++)
+            for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+              deno[rowId]+=(*it2).second;
+        }
+      else
+        {//item0 of step2 main algo. More complicated.
+          std::vector<int>::iterator fnd=isItem1;//std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),curSrcId);
+          int locId=std::distance(_trg_proc_st2.begin(),fnd);
+          const DataArrayInt *trgIds=_trg_ids_st2[locId];
+          const int *trgIds2=trgIds->getConstPointer();
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++,rowId++)
+            for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+              deno[trgIds2[rowId]]+=(*it2).second;
+        }
+    }
+  //
+  for(std::size_t i=0;i<sz1;i++)
+    {
+      int rowId=0;
+      const std::vector< std::map<int,double> >& mat=_the_matrix_st[i];
+      int curSrcId=_the_matrix_st_source_proc_id[i];
+      std::vector<int>::iterator isItem1=std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),curSrcId);
+      std::vector< std::map<int,double> >& denoM=_the_deno_st[i];
+      denoM.resize(mat.size());
+      if(isItem1==_trg_proc_st2.end() || curSrcId==myProcId)//item1 of step2 main algo. Simple, because rowId of mat are directly target ids.
+        {
+          int rowId=0;
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++,rowId++)
+            for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+              denoM[rowId][(*it2).first]=deno[rowId];
+        }
+      else
+        {
+          std::vector<int>::iterator fnd=isItem1;
+          int locId=std::distance(_trg_proc_st2.begin(),fnd);
+          const DataArrayInt *trgIds=_trg_ids_st2[locId];
+          const int *trgIds2=trgIds->getConstPointer();
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++,rowId++)
+            for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+              denoM[rowId][(*it2).first]=deno[trgIds2[rowId]];
+        }
+    }
+}
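+
+// Added note, under the assumption that the denominators computed above divide the matrix
+// coefficients during the multiply step: every contribution to a given target tuple is accumulated
+// in 'deno', so the weights of each target row sum to 1 and a constant source field is reproduced
+// exactly on the target side.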
+
+/*!
+ * This method performs step #0/3 of the serialization process.
+ * \param count specifies the nb of elems to send to each corresponding proc id. Size equal to _group.size().
+ * \param offsets tells, for a proc i, where its serialize#0 matrix starts. Size equal to _group.size().
+ * \param nbOfElemsSrc of size _group.size(). Comes from the previous all2all call. Tells how many srcIds per proc the matrix for the current proc contains.
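+ * (Illustrative layout, added for clarity, assuming a 3-proc group: if this proc holds matrices of
+ * 4 and 2 rows destined to target procs 1 and 2, then count={0,5,3}, offsets={0,0,5}, and bigArr
+ * holds the two cumulated row-size arrays of lengths 5 and 3, each starting at 0.)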
+ */
+void OverlapMapping::serializeMatrixStep0ST(const int *nbOfElemsSrc, int *&bigArr, int *count, int *offsets,
+                                            int *countForRecv, int *offsetsForRecv) const
+{
+  int grpSize=_group.size();
+  std::fill<int *>(count,count+grpSize,0);
+  int szz=0;
+  int myProcId=_group.myRank();
+  for(std::size_t i=0;i<_matrixes_st.size();i++)
+    {
+      if(_source_proc_id_st[i]==myProcId)// && _target_proc_id_st[i]!=myProcId
+        {
+          count[_target_proc_id_st[i]]=_matrixes_st[i].size()+1;
+          szz+=_matrixes_st[i].size()+1;
+        }
+    }
+  bigArr=new int[szz];
+  offsets[0]=0;
+  for(int i=1;i<grpSize;i++)
+    offsets[i]=offsets[i-1]+count[i-1];
+  for(std::size_t i=0;i<_matrixes_st.size();i++)
+    {
+      if(_source_proc_id_st[i]==myProcId)
+        {
+          int start=offsets[_target_proc_id_st[i]];
+          int *work=bigArr+start;
+          *work=0;
+          const std::vector< std::map<int,double> >& mat=_matrixes_st[i];
+          for(std::vector< std::map<int,double> >::const_iterator it=mat.begin();it!=mat.end();it++,work++)
+            work[1]=work[0]+(*it).size();
+        }
+    }
+  //
+  offsetsForRecv[0]=0;
+  for(int i=0;i<grpSize;i++)
+    {
+      if(nbOfElemsSrc[i]>0)
+        countForRecv[i]=nbOfElemsSrc[i]+1;
+      else
+        countForRecv[i]=0;
+      if(i>0)
+        offsetsForRecv[i]=offsetsForRecv[i-1]+countForRecv[i-1];
+    }
+}
+
+/*!
+ * This method performs steps #1 and #2/3. It returns the size of the array expected from the allToAllV calls.
+ */
+int OverlapMapping::serializeMatrixStep1ST(const int *nbOfElemsSrc, const int *recvStep0, const int *countStep0, const int *offsStep0,
+                                           int *&bigArrI, double *&bigArrD, int *count, int *offsets,
+                                           int *countForRecv, int *offsForRecv) const
+{
+  int grpSize=_group.size();
+  int myProcId=_group.myRank();
+  offsForRecv[0]=0;
+  int szz=0;
+  for(int i=0;i<grpSize;i++)
+    {
+      if(nbOfElemsSrc[i]!=0)
+        countForRecv[i]=recvStep0[offsStep0[i]+nbOfElemsSrc[i]];
+      else
+        countForRecv[i]=0;
+      szz+=countForRecv[i];
+      if(i>0)
+        offsForRecv[i]=offsForRecv[i-1]+countForRecv[i-1];
+    }
+  //
+  std::fill(count,count+grpSize,0);
+  offsets[0]=0;
+  int fullLgth=0;
+  for(std::size_t i=0;i<_matrixes_st.size();i++)
+    {
+      if(_source_proc_id_st[i]==myProcId)
+        {
+          const std::vector< std::map<int,double> >& mat=_matrixes_st[i];
+          int lgthToSend=0;
+          for(std::vector< std::map<int,double> >::const_iterator it=mat.begin();it!=mat.end();it++)
+            lgthToSend+=(*it).size();
+          count[_target_proc_id_st[i]]=lgthToSend;
+          fullLgth+=lgthToSend;
+        }
+    }
+  for(int i=1;i<grpSize;i++)
+    offsets[i]=offsets[i-1]+count[i-1];
+  //
+  bigArrI=new int[fullLgth];
+  bigArrD=new double[fullLgth];
+  // feeding arrays
+  fullLgth=0;
+  for(std::size_t i=0;i<_matrixes_st.size();i++)
+    {
+      if(_source_proc_id_st[i]==myProcId)
+        {
+          const std::vector< std::map<int,double> >& mat=_matrixes_st[i];
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++)
+            {
+              int j=0;
+              for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++,j++)
+                {
+                  bigArrI[fullLgth+j]=(*it2).first;
+                  bigArrD[fullLgth+j]=(*it2).second;
+                }
+              fullLgth+=(*it1).size();
+            }
+        }
+    }
+  return szz;
+}
+
+/*!
+ * This is the last step after the all2alls for the matrix exchange.
+ * _the_matrix_st is the final matrix :
+ *      - The first entry is the srcId in the current proc.
+ *      - The second is the pseudo id of the source proc (the correspondence with the true id is in attributes _the_matrix_st_source_proc_id and _the_matrix_st_source_ids).
+ *      - The third is the srcId in the pseudo source proc.
+ */
+void OverlapMapping::unserializationST(int nbOfTrgElems,
+                                       const int *nbOfElemsSrcPerProc,//first all2all
+                                       const int *bigArrRecv, const int *bigArrRecvCounts, const int *bigArrRecvOffs,//2nd all2all
+                                       const int *bigArrRecv2, const double *bigArrDRecv2, const int *bigArrRecv2Count, const int *bigArrRecv2Offs)//3rd and 4th all2alls
+{
+  _the_matrix_st.clear();
+  _the_matrix_st_source_proc_id.clear();
+  //
+  int grpSize=_group.size();
+  for(int i=0;i<grpSize;i++)
+    if(nbOfElemsSrcPerProc[i]!=0)
+      _the_matrix_st_source_proc_id.push_back(i);
+  int nbOfPseudoProcs=_the_matrix_st_source_proc_id.size();//_the_matrix_st_target_proc_id.size() contains number of matrix fetched remotely whose sourceProcId==myProcId
+  _the_matrix_st.resize(nbOfPseudoProcs);
+  //
+  int j=0;
+  for(int i=0;i<grpSize;i++)
+    if(nbOfElemsSrcPerProc[i]!=0)
+      {
+        _the_matrix_st[j].resize(nbOfElemsSrcPerProc[i]);
+        for(int k=0;k<nbOfElemsSrcPerProc[i];k++)
+          {
+            int offs=bigArrRecv[bigArrRecvOffs[i]+k];
+            int lgthOfMap=bigArrRecv[bigArrRecvOffs[i]+k+1]-offs;
+            for(int l=0;l<lgthOfMap;l++)
+              _the_matrix_st[j][k][bigArrRecv2[bigArrRecv2Offs[i]+offs+l]]=bigArrDRecv2[bigArrRecv2Offs[i]+offs+l];
+          }
+        j++;
+      }
+}
+
+/*!
+ * This method should be called when all remote matrices with sourceProcId==thisProcId have been retrieved and are in 'this->_the_matrix_st' and 'this->_the_matrix_st_target_proc_id'
+ * and 'this->_the_matrix_st_target_ids'.
+ * This method finishes the job of filling 'this->_the_matrix_st' and 'this->_the_matrix_st_target_proc_id' by moving the candidates stored in 'this->_matrixes_st' into them.
+ */
+void OverlapMapping::finishToFillFinalMatrixST()
+{
+  int myProcId=_group.myRank();
+  int sz=_matrixes_st.size();
+  int nbOfEntryToAdd=0;
+  for(int i=0;i<sz;i++)
+    if(_source_proc_id_st[i]!=myProcId)
+      nbOfEntryToAdd++;
+  if(nbOfEntryToAdd==0)
+    return ;
+  int oldNbOfEntry=_the_matrix_st.size();
+  int newNbOfEntry=oldNbOfEntry+nbOfEntryToAdd;
+  _the_matrix_st.resize(newNbOfEntry);
+  int j=oldNbOfEntry;
+  for(int i=0;i<sz;i++)
+    if(_source_proc_id_st[i]!=myProcId)
+      {
+        const std::vector<std::map<int,double> >& mat=_matrixes_st[i];
+        _the_matrix_st[j]=mat;
+        _the_matrix_st_source_proc_id.push_back(_source_proc_id_st[i]);
+        j++;
+      }
+  _matrixes_st.clear();
+}
+
+/*!
+ * This method performs the broadcast of the target ids.
+ */
+void OverlapMapping::prepareIdsToSendST()
+{
+  CommInterface commInterface=_group.getCommInterface();
+  const MPIProcessorGroup *group=static_cast<const MPIProcessorGroup*>(&_group);
+  const MPI_Comm *comm=group->getComm();
+  int grpSize=_group.size();
+  _source_ids_to_send_st.clear();
+  _source_ids_to_send_st.resize(grpSize);
+  INTERP_KERNEL::AutoPtr<int> nbsend=new int[grpSize];
+  std::fill<int *>(nbsend,nbsend+grpSize,0);
+  for(std::size_t i=0;i<_the_matrix_st_source_proc_id.size();i++)
+    nbsend[_the_matrix_st_source_proc_id[i]]=_the_matrix_st_source_ids[i].size();
+  INTERP_KERNEL::AutoPtr<int> nbrecv=new int[grpSize];
+  commInterface.allToAll(nbsend,1,MPI_INT,nbrecv,1,MPI_INT,*comm);
+  //
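+  // Second exchange: pack the source ids themselves into one flat array, one
+  // contiguous chunk per destination proc, and swap them with an all-to-all-v.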
+  INTERP_KERNEL::AutoPtr<int> nbsend2=new int[grpSize];
+  std::copy((int *)nbsend,((int *)nbsend)+grpSize,(int *)nbsend2);
+  INTERP_KERNEL::AutoPtr<int> nbsend3=new int[grpSize];
+  nbsend3[0]=0;
+  for(int i=1;i<grpSize;i++)
+    nbsend3[i]=nbsend3[i-1]+nbsend2[i-1];
+  int sendSz=nbsend3[grpSize-1]+nbsend2[grpSize-1];
+  INTERP_KERNEL::AutoPtr<int> bigDataSend=new int[sendSz];
+  for(std::size_t i=0;i<_the_matrix_st_source_proc_id.size();i++)
+    {
+      int offset=nbsend3[_the_matrix_st_source_proc_id[i]];
+      std::copy(_the_matrix_st_source_ids[i].begin(),_the_matrix_st_source_ids[i].end(),((int *)bigDataSend)+offset);
+    }
+  INTERP_KERNEL::AutoPtr<int> nbrecv2=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbrecv3=new int[grpSize];
+  std::copy((int *)nbrecv,((int *)nbrecv)+grpSize,(int *)nbrecv2);
+  nbrecv3[0]=0;
+  for(int i=1;i<grpSize;i++)
+    nbrecv3[i]=nbrecv3[i-1]+nbrecv2[i-1];
+  int recvSz=nbrecv3[grpSize-1]+nbrecv2[grpSize-1];
+  INTERP_KERNEL::AutoPtr<int> bigDataRecv=new int[recvSz];
+  //
+  commInterface.allToAllV(bigDataSend,nbsend2,nbsend3,MPI_INT,
+                          bigDataRecv,nbrecv2,nbrecv3,MPI_INT,
+                          *comm);
+  for(int i=0;i<grpSize;i++)
+    {
+      if(nbrecv2[i]>0)
+        {
+          _source_ids_to_send_st[i].insert(_source_ids_to_send_st[i].end(),((int *)bigDataRecv)+nbrecv3[i],((int *)bigDataRecv)+nbrecv3[i]+nbrecv2[i]);
+        }
+    }
+}
+
+/*!
+ * This method performs a transpose multiply of 'fieldInput' and puts the result into 'fieldOutput'.
+ * 'fieldInput' is expected to be the source field and 'fieldOutput' the target field.
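+ *
+ * As a reading aid (not the exact indexing used below), the loops compute, with 'deno'
+ * the denominator matrix prepared by the computeDeno* methods and 'recv' the values
+ * received from the matching proc:
+ *   for each local target tuple j and each component c:
+ *     fieldOutput[j][c] += sum over k of ( mat[j][k] / deno[j][k] ) * recv[k][c]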
+ */
+void OverlapMapping::multiply(const MEDCouplingFieldDouble *fieldInput, MEDCouplingFieldDouble *fieldOutput) const
+{
+  int nbOfCompo=fieldInput->getNumberOfComponents();//to improve: check that fieldInput and fieldOutput have the same number of components
+  CommInterface commInterface=_group.getCommInterface();
+  const MPIProcessorGroup *group=static_cast<const MPIProcessorGroup*>(&_group);
+  const MPI_Comm *comm=group->getComm();
+  int grpSize=_group.size();
+  int myProcId=_group.myRank();
+  //
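+  // First pass: for every proc of the group, compute how many values will be sent
+  // (nbsend) and received (nbrecv) in the all-to-all-v below, and gather the values
+  // to send into 'valsToSend'.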
+  INTERP_KERNEL::AutoPtr<int> nbsend=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbsend2=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbrecv=new int[grpSize];
+  INTERP_KERNEL::AutoPtr<int> nbrecv2=new int[grpSize];
+  std::fill<int *>(nbsend,nbsend+grpSize,0);
+  std::fill<int *>(nbrecv,nbrecv+grpSize,0);
+  nbsend2[0]=0;
+  nbrecv2[0]=0;
+  std::vector<double> valsToSend;
+  for(int i=0;i<grpSize;i++)
+    {
+      if(std::find(_proc_ids_to_send_vector_st.begin(),_proc_ids_to_send_vector_st.end(),i)!=_proc_ids_to_send_vector_st.end())
+        {
+          std::vector<int>::const_iterator isItem1=std::find(_src_proc_st2.begin(),_src_proc_st2.end(),i);
+          MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> vals;
+          if(isItem1!=_src_proc_st2.end())//item1 of step2 main algo
+            {
+              int id=std::distance(_src_proc_st2.begin(),isItem1);
+              vals=fieldInput->getArray()->selectByTupleId(_src_ids_st2[id]->getConstPointer(),_src_ids_st2[id]->getConstPointer()+_src_ids_st2[id]->getNumberOfTuples());
+            }
+          else
+            {//item0 of step2 main algo
+              int id=std::distance(_src_ids_zip_proc_st2.begin(),std::find(_src_ids_zip_proc_st2.begin(),_src_ids_zip_proc_st2.end(),i));
+              vals=fieldInput->getArray()->selectByTupleId(&(_src_ids_zip_st2[id])[0],&(_src_ids_zip_st2[id])[0]+_src_ids_zip_st2[id].size());
+            }
+          nbsend[i]=vals->getNbOfElems();
+          valsToSend.insert(valsToSend.end(),vals->getConstPointer(),vals->getConstPointer()+nbsend[i]);
+        }
+      if(std::find(_proc_ids_to_recv_vector_st.begin(),_proc_ids_to_recv_vector_st.end(),i)!=_proc_ids_to_recv_vector_st.end())
+        {
+          std::vector<int>::const_iterator isItem0=std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),i);
+          if(isItem0==_trg_proc_st2.end())//item1 of step2 main algo [ (0,1) computed on proc1 and Matrix-Vector on proc1 ]
+            {
+              std::vector<int>::const_iterator it1=std::find(_src_ids_proc_st2.begin(),_src_ids_proc_st2.end(),i);
+              if(it1!=_src_ids_proc_st2.end())
+                {
+                  int id=std::distance(_src_ids_proc_st2.begin(),it1);
+                  nbrecv[i]=_nb_of_src_ids_proc_st2[id]*nbOfCompo;
+                }
+              else if(i==myProcId)
+                {
+                  nbrecv[i]=fieldInput->getNumberOfTuplesExpected()*nbOfCompo;
+                }
+              else
+                throw INTERP_KERNEL::Exception("Plouff ! send email to anthony.geay@cea.fr ! ");
+            }
+          else
+            {//item0 of step2 main algo [ (2,1) computed on proc2 but Matrix-Vector on proc1 ] [(1,0) computed on proc1 but Matrix-Vector on proc0]
+              int id=std::distance(_src_ids_zip_proc_st2.begin(),std::find(_src_ids_zip_proc_st2.begin(),_src_ids_zip_proc_st2.end(),i));
+              nbrecv[i]=_src_ids_zip_st2[id].size()*nbOfCompo;
+            }
+        }
+    }
+  for(int i=1;i<grpSize;i++)
+    {
+      nbsend2[i]=nbsend2[i-1]+nbsend[i-1];
+      nbrecv2[i]=nbrecv2[i-1]+nbrecv[i-1];
+    }
+  INTERP_KERNEL::AutoPtr<double> bigArr=new double[nbrecv2[grpSize-1]+nbrecv[grpSize-1]];
+  commInterface.allToAllV(&valsToSend[0],nbsend,nbsend2,MPI_DOUBLE,
+                          bigArr,nbrecv,nbrecv2,MPI_DOUBLE,*comm);
+  fieldOutput->getArray()->fillWithZero();
+  INTERP_KERNEL::AutoPtr<double> tmp=new double[nbOfCompo];
+  for(int i=0;i<grpSize;i++)
+    {
+      if(nbrecv[i]>0)
+        {
+          double *pt=fieldOutput->getArray()->getPointer();
+          std::vector<int>::const_iterator it=std::find(_the_matrix_st_source_proc_id.begin(),_the_matrix_st_source_proc_id.end(),i);
+          if(it==_the_matrix_st_source_proc_id.end())
+            throw INTERP_KERNEL::Exception("Big problem !");
+          int id=std::distance(_the_matrix_st_source_proc_id.begin(),it);
+          const std::vector< std::map<int,double> >& mat=_the_matrix_st[id];
+          const std::vector< std::map<int,double> >& deno=_the_deno_st[id];
+          std::vector<int>::const_iterator isItem0=std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),i);
+          if(isItem0==_trg_proc_st2.end())//item1 of step2 main algo [ (0,1) computed on proc1 and Matrix-Vector on proc1 ]
+            {
+              int nbOfTrgTuples=mat.size();
+              for(int j=0;j<nbOfTrgTuples;j++,pt+=nbOfCompo)
+                {
+                  const std::map<int,double>& mat1=mat[j];
+                  const std::map<int,double>& deno1=deno[j];
+                  std::map<int,double>::const_iterator it4=deno1.begin();
+                  for(std::map<int,double>::const_iterator it3=mat1.begin();it3!=mat1.end();it3++,it4++)
+                    {
+                      std::transform(bigArr+nbrecv2[i]+((*it3).first)*nbOfCompo,bigArr+nbrecv2[i]+((*it3).first+1)*(nbOfCompo),(double *)tmp,std::bind2nd(std::multiplies<double>(),(*it3).second/(*it4).second));
+                      std::transform((double *)tmp,(double *)tmp+nbOfCompo,pt,pt,std::plus<double>());
+                    }
+                }
+            }
+          else
+            {//item0 of step2 main algo [ (2,1) computed on proc2 but Matrix-Vector on proc1 ]
+              double *pt=fieldOutput->getArray()->getPointer();
+              std::map<int,int> zipCor;
+              int id=std::distance(_src_ids_zip_proc_st2.begin(),std::find(_src_ids_zip_proc_st2.begin(),_src_ids_zip_proc_st2.end(),i));
+              const std::vector<int> zipIds=_src_ids_zip_st2[id];
+              int newId=0;
+              for(std::vector<int>::const_iterator it=zipIds.begin();it!=zipIds.end();it++,newId++)
+                zipCor[*it]=newId;
+              int id2=std::distance(_trg_proc_st2.begin(),std::find(_trg_proc_st2.begin(),_trg_proc_st2.end(),i));
+              const DataArrayInt *tgrIds=_trg_ids_st2[id2];
+              const int *tgrIds2=tgrIds->getConstPointer();
+              int nbOfTrgTuples=mat.size();
+              for(int j=0;j<nbOfTrgTuples;j++)
+                {
+                  const std::map<int,double>& mat1=mat[j];
+                  const std::map<int,double>& deno1=deno[j];
+                  std::map<int,double>::const_iterator it5=deno1.begin();
+                  for(std::map<int,double>::const_iterator it3=mat1.begin();it3!=mat1.end();it3++,it5++)
+                    {
+                      std::map<int,int>::const_iterator it4=zipCor.find((*it3).first);
+                      if(it4==zipCor.end())
+                        throw INTERP_KERNEL::Exception("Hmmmmm send e mail to anthony.geay@cea.fr !");
+                      std::transform(bigArr+nbrecv2[i]+((*it4).second)*nbOfCompo,bigArr+nbrecv2[i]+((*it4).second+1)*(nbOfCompo),(double *)tmp,std::bind2nd(std::multiplies<double>(),(*it3).second/(*it5).second));
+                      std::transform((double *)tmp,(double *)tmp+nbOfCompo,pt+tgrIds2[j]*nbOfCompo,pt+tgrIds2[j]*nbOfCompo,std::plus<double>());
+                    }
+                }
+            }
+        }
+    }
+}
+
+/*!
+ * This method performs a transpose multiply of 'fieldInput' and puts the result into 'fieldOutput'.
+ * 'fieldInput' is expected to be the target field and 'fieldOutput' the source field.
+ */
+void OverlapMapping::transposeMultiply(const MEDCouplingFieldDouble *fieldInput, MEDCouplingFieldDouble *fieldOutput)
+{
+}
+
+/*!
+ * This method should be called immediately after _the_matrix_st has been filled with the remotely computed matrices stored on this proc for the Matrix-Vector product.
+ * This method computes, for each of these matrices, the minimal set of source ids corresponding to its source proc id.
+ */
+void OverlapMapping::updateZipSourceIdsForFuture()
+{
+  CommInterface commInterface=_group.getCommInterface();
+  int myProcId=_group.myRank();
+  int nbOfMatrixRecveived=_the_matrix_st_source_proc_id.size();
+  for(int i=0;i<nbOfMatrixRecveived;i++)
+    {
+      int curSrcProcId=_the_matrix_st_source_proc_id[i];
+      if(curSrcProcId!=myProcId)
+        {
+          const std::vector< std::map<int,double> >& mat=_the_matrix_st[i];
+          _src_ids_zip_proc_st2.push_back(curSrcProcId);
+          _src_ids_zip_st2.resize(_src_ids_zip_st2.size()+1);
+          std::set<int> s;
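+          // collect the distinct source (column) ids actually referenced by this remote matrix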
+          for(std::vector< std::map<int,double> >::const_iterator it1=mat.begin();it1!=mat.end();it1++)
+            for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+              s.insert((*it2).first);
+          _src_ids_zip_st2.back().insert(_src_ids_zip_st2.back().end(),s.begin(),s.end());
+        }
+    }
+}
+
+// #include <iostream>
+
+// void OverlapMapping::printTheMatrix() const
+// {
+//   CommInterface commInterface=_group.getCommInterface();
+//   const MPIProcessorGroup *group=static_cast<const MPIProcessorGroup*>(&_group);
+//   const MPI_Comm *comm=group->getComm();
+//   int grpSize=_group.size();
+//   int myProcId=_group.myRank();
+//   std::cerr << "I am proc #" << myProcId << std::endl;
+//   int nbOfMat=_the_matrix_st.size();
+//   std::cerr << "I do manage " << nbOfMat << "matrix : "<< std::endl;
+//   for(int i=0;i<nbOfMat;i++)
+//     {
+//       std::cerr << "   - Matrix #" << i << " on source proc #" << _the_matrix_st_source_proc_id[i];
+//       const std::vector< std::map<int,double> >& locMat=_the_matrix_st[i];
+//       for(std::vector< std::map<int,double> >::const_iterator it1=locMat.begin();it1!=locMat.end();it1++)
+//         {
+//           for(std::map<int,double>::const_iterator it2=(*it1).begin();it2!=(*it1).end();it2++)
+//             std::cerr << "(" << (*it2).first << "," << (*it2).second << "), ";
+//           std::cerr << std::endl;
+//         }
+//     }
+//   std::cerr << "*********" << std::endl;
+// }
diff --git a/src/ParaMEDMEM/OverlapMapping.hxx b/src/ParaMEDMEM/OverlapMapping.hxx
new file mode 100644 (file)
index 0000000..9525247
--- /dev/null
@@ -0,0 +1,90 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+// Author : Anthony Geay (CEA/DEN)
+
+#ifndef __OVERLAPMAPPING_HXX__
+#define __OVERLAPMAPPING_HXX__
+
+#include "MEDCouplingAutoRefCountObjectPtr.hxx"
+
+#include <vector>
+#include <map>
+
+namespace ParaMEDMEM
+{
+  class ProcessorGroup;
+  class DataArrayInt;
+  class MEDCouplingFieldDouble;
+
+  class OverlapMapping
+  {
+  public:
+    OverlapMapping(const ProcessorGroup& group);
+    void keepTracksOfSourceIds(int procId, DataArrayInt *ids);
+    void keepTracksOfTargetIds(int procId, DataArrayInt *ids);
+    void addContributionST(const std::vector< std::map<int,double> >& matrixST, const DataArrayInt *srcIds, int srcProcId, const DataArrayInt *trgIds, int trgProcId);
+    void prepare(const std::vector< std::vector<int> >& procsInInteraction, int nbOfTrgElems);
+    void computeDenoConservativeVolumic(int nbOfTuplesTrg);
+    void computeDenoGlobConstraint();
+    //
+    void multiply(const MEDCouplingFieldDouble *fieldInput, MEDCouplingFieldDouble *fieldOutput) const;
+    void transposeMultiply(const MEDCouplingFieldDouble *fieldInput, MEDCouplingFieldDouble *fieldOutput);
+  private:
+    void serializeMatrixStep0ST(const int *nbOfElemsSrc, int *&bigArr, int *count, int *offsets,
+                                int *countForRecv, int *offsetsForRecv) const;
+    int serializeMatrixStep1ST(const int *nbOfElemsSrc, const int *recvStep0, const int *countStep0, const int *offsStep0,
+                               int *&bigArrI, double *&bigArrD, int *count, int *offsets,
+                               int *countForRecv, int *offsForRecv) const;
+    void unserializationST(int nbOfTrgElems, const int *nbOfElemsSrcPerProc, const int *bigArrRecv, const int *bigArrRecvCounts, const int *bigArrRecvOffs,
+                           const int *bigArrRecv2, const double *bigArrDRecv2, const int *bigArrRecv2Count, const int *bigArrRecv2Offs);
+    void finishToFillFinalMatrixST();
+    void prepareIdsToSendST();
+    void updateZipSourceIdsForFuture();
+    //void printTheMatrix() const;
+  private:
+    const ProcessorGroup &_group;
+    //! vector of ids
+    std::vector< MEDCouplingAutoRefCountObjectPtr<DataArrayInt> > _src_ids_st2;//item #1
+    std::vector< int > _src_proc_st2;//item #1
+    std::vector< MEDCouplingAutoRefCountObjectPtr<DataArrayInt> > _trg_ids_st2;//item #0
+    std::vector< int > _trg_proc_st2;//item #0
+    std::vector< int > _nb_of_src_ids_proc_st2;//item #1
+    std::vector< int > _src_ids_proc_st2;//item #1
+    std::vector< std::vector<int> > _src_ids_zip_st2;//same size as _src_ids_zip_proc_st2. Sorted. Specifies, for each id, the corresponding ids to send. This is for item #0 of Step 2 of the main algorithm.
+    std::vector< int > _src_ids_zip_proc_st2;
+    //! vector of matrices; the first entry corresponds to the source proc id in _source_ids_st
+    std::vector< std::vector< std::map<int,double> > > _matrixes_st;
+    std::vector< std::vector<int> > _source_ids_st;
+    std::vector< int > _source_proc_id_st;
+    std::vector< std::vector<int> > _target_ids_st;
+    std::vector< int > _target_proc_id_st;
+    //! the matrix for the matrix-vector product. The first dimension is the set of target procs that interact with the local source mesh. The second dimension corresponds to the number of local source ids.
+    std::vector< std::vector< std::map<int,double> > > _the_matrix_st;
+    std::vector< int > _the_matrix_st_source_proc_id;
+    std::vector< std::vector<int> > _the_matrix_st_source_ids;
+    std::vector< std::vector< std::map<int,double> > > _the_deno_st;
+    //! this attribute stores the proc ids that wait for data from this proc for the matrix-vector computation
+    std::vector< int > _proc_ids_to_send_vector_st;
+    std::vector< int > _proc_ids_to_recv_vector_st;
+    //! this attribute is of size _group.size(); for each procId in _group, _source_ids_to_send_st[procId] contains the tuple ids to send abroad
+    std::vector< std::vector<int> > _source_ids_to_send_st;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ParaFIELD.cxx b/src/ParaMEDMEM/ParaFIELD.cxx
new file mode 100644 (file)
index 0000000..9995cca
--- /dev/null
@@ -0,0 +1,228 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ExplicitCoincidentDEC.hxx"
+#include "StructuredCoincidentDEC.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ParaFIELD.hxx"
+#include "ParaMESH.hxx"
+#include "InterpKernelUtilities.hxx"
+#include "InterpolationMatrix.hxx"
+
+#include <numeric>
+
+namespace ParaMEDMEM
+{
+  /*!
+    \anchor ParaFIELD-det
+    \class ParaFIELD
+
+    This class encapsulates parallel fields.
+
+    It basically encapsulates
+    a MEDCouplingField with extra information related to parallel 
+    topology.
+
+    It is most conveniently created by giving a pointer to a MEDCouplingField
+    object and a \c ProcessorGroup.
+    By default, a ParaFIELD object will be constructed with all field components
+    located on the same processors. In some specific cases, it might be necessary
+    to scatter components over several processors. In this case, the constructor
+    using a ComponentTopology is required.
+
+    */
+
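+  /*
+    A minimal usage sketch, for illustration only: the construction of the local mesh
+    is elided, MPIProcessorGroup is assumed to offer a constructor spanning the whole
+    communicator, and ONE_TIME is assumed to be the desired time discretization.
+
+      CommInterface comm;
+      MPIProcessorGroup group(comm);                       // all procs
+      MEDCouplingUMesh *local_mesh = ...;                  // local piece of the mesh, one per proc
+      ParaMESH para_mesh(local_mesh, group, "source mesh");
+      ComponentTopology comp_topo;                         // one component
+      ParaFIELD field(ON_CELLS, ONE_TIME, &para_mesh, comp_topo);
+      field.getField()->setName("Example field");
+  */
+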
+  /*!
+
+    \brief  Constructing a \c ParaFIELD from a \c ParaSUPPORT and a \c ComponentTopology.
+
+    This constructor creates an empty field based on the ParaSUPPORT description 
+    and the partitioning of components described in \a component_topology.
+    It takes ownership over the \c _field object that it creates.
+
+    Here are the three ComponentTopology constructors:
+    \verbatim
+    ComponentTopology c; // one component in the field
+    ComponentTopology c(6); //six components, all of them on the same processor
+    ComponentTopology c(6, proc_group); // six components, evenly distributed over the processors of procgroup
+    \endverbatim
+
+  */
+  ParaFIELD::ParaFIELD(TypeOfField type, TypeOfTimeDiscretization td, ParaMESH* para_support, const ComponentTopology& component_topology)
+    :_field(0),
+     _component_topology(component_topology),_topology(0),_own_support(false),
+     _support(para_support)
+  {
+    if (para_support->isStructured() || (para_support->getTopology()->getProcGroup()->size()==1 && component_topology.nbBlocks()!=1))
+      {
+        const BlockTopology* source_topo = dynamic_cast<const BlockTopology*>(para_support->getTopology());
+        _topology=new BlockTopology(*source_topo,component_topology);
+      }
+    else
+      {
+        if (component_topology.nbBlocks()!=1 &&  para_support->getTopology()->getProcGroup()->size()!=1)
+          throw INTERP_KERNEL::Exception(LOCALIZED("ParaFIELD constructor : Unstructured Support not taken into account with component topology yet"));
+        else 
+          {
+            const BlockTopology* source_topo=dynamic_cast<const BlockTopology*> (para_support->getTopology());
+            int nb_local_comp=component_topology.nbLocalComponents();
+            _topology=new BlockTopology(*source_topo,nb_local_comp);
+          }
+      }
+    int nb_components = component_topology.nbLocalComponents();
+    if (nb_components!=0)
+      {
+        _field=MEDCouplingFieldDouble::New(type,td);
+        _field->setMesh(_support->getCellMesh());
+        DataArrayDouble *array=DataArrayDouble::New();
+        array->alloc(_field->getNumberOfTuples(),nb_components);
+        _field->setArray(array);
+        array->decrRef();
+      }
+    else return;
+  
+    _field->setName("Default ParaFIELD name");
+    _field->setDescription("Default ParaFIELD description");
+  } 
+
+  /*! \brief Constructor creating the ParaFIELD
+    from a given FIELD and a processor group. 
+
+    This constructor assumes that the support underlying \a subdomain_field has no ParaSUPPORT
+    attached and therefore recreates one. It takes ownership of _support. The component topology
+    associated with the field is a basic one (all components on the same processor).
+  */
+  ParaFIELD::ParaFIELD(MEDCouplingFieldDouble* subdomain_field, ParaMESH *sup, const ProcessorGroup& proc_group):
+    _field(subdomain_field),
+    _component_topology(ComponentTopology(_field->getNumberOfComponents())),_topology(0),_own_support(false),
+    _support(sup)
+  {
+    if(_field)
+      _field->incrRef();
+    const BlockTopology* source_topo=dynamic_cast<const BlockTopology*> (_support->getTopology());
+    _topology=new BlockTopology(*source_topo,_component_topology.nbLocalComponents());
+  }
+
+  ParaFIELD::~ParaFIELD()
+  {
+    if(_field)
+      _field->decrRef();
+    if(_own_support)
+      delete _support;
+    delete _topology;
+  }
+
+  void ParaFIELD::synchronizeTarget(ParaFIELD* source_field)
+  {
+    DisjointDEC* data_channel;
+    if (dynamic_cast<BlockTopology*>(_topology)!=0)
+      {
+        data_channel=new StructuredCoincidentDEC;
+      }
+    else
+      {
+        data_channel=new ExplicitCoincidentDEC;
+      }
+    data_channel->attachLocalField(this);
+    data_channel->synchronize();
+    data_channel->prepareTargetDE();
+    data_channel->recvData();
+  
+    delete data_channel;
+  }
+
+  void ParaFIELD::synchronizeSource(ParaFIELD* target_field)
+  {
+    DisjointDEC* data_channel;
+    if (dynamic_cast<BlockTopology*>(_topology)!=0)
+      {
+        data_channel=new StructuredCoincidentDEC;
+      }
+    else
+      {
+        data_channel=new ExplicitCoincidentDEC;
+      }
+    data_channel->attachLocalField(this);
+    data_channel->synchronize();
+    data_channel->prepareSourceDE();
+    data_channel->sendData();
+  
+    delete data_channel;
+  }
+
+  /*!
+   * This method returns, if it exists, an array with only one component and as many as tuples as _field has.
+   * This array gives for every element on which this->_field lies, its global number, if this->_field is nodal.
+   * For example if _field is a nodal field : returned array will be the nodal global numbers.
+   * The content of this method is used to inform Working side to accumulate data recieved by lazy side.
+   */
+  DataArrayInt* ParaFIELD::returnCumulativeGlobalNumbering() const
+  {
+    if(!_field)
+      return 0;
+    TypeOfField type=_field->getTypeOfField();
+    switch(type)
+      {
+      case ON_CELLS:
+        return 0;
+      case ON_NODES:
+        return _support->getGlobalNumberingNodeDA();
+      default:
+        return 0;
+      }
+  }
+
+  DataArrayInt* ParaFIELD::returnGlobalNumbering() const
+  {
+    if(!_field)
+      return 0;
+    TypeOfField type=_field->getTypeOfField();
+    switch(type)
+      {
+      case ON_CELLS:
+        return _support->getGlobalNumberingCellDA();
+      case ON_NODES:
+        return _support->getGlobalNumberingNodeDA();
+      default:
+        return 0;
+      }
+  }
+  
+  int ParaFIELD::nbComponents() const
+  {
+    return _component_topology.nbComponents();
+  }
+
+
+  /*! This method retrieves the integral of component \a icomp
+    over the whole domain. */
+  double ParaFIELD::getVolumeIntegral(int icomp, bool isWAbs) const
+  {
+    CommInterface comm_interface = _topology->getProcGroup()->getCommInterface();
+    double integral=_field->integral(icomp,isWAbs);
+    double total=0.;
+    const MPI_Comm* comm = (dynamic_cast<const MPIProcessorGroup*>(_topology->getProcGroup()))->getComm();
+    comm_interface.allReduce(&integral, &total, 1, MPI_DOUBLE, MPI_SUM, *comm);
+  
+    return total;
+  }
+}
diff --git a/src/ParaMEDMEM/ParaFIELD.hxx b/src/ParaMEDMEM/ParaFIELD.hxx
new file mode 100644 (file)
index 0000000..2f5f893
--- /dev/null
@@ -0,0 +1,66 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __PARAFIELD_HXX__
+#define __PARAFIELD_HXX__
+
+#include "MEDCouplingRefCountObject.hxx"
+#include "ComponentTopology.hxx"
+
+namespace ParaMEDMEM
+{
+  class DataArrayInt;
+  class ParaMESH;
+  class ProcessorGroup;
+  class MEDCouplingFieldDouble;
+  class ComponentTopology;
+  class Topology;
+
+  class ParaFIELD
+  {
+  public:
+
+    ParaFIELD(TypeOfField type, TypeOfTimeDiscretization td, ParaMESH* mesh, const ComponentTopology& component_topology); 
+
+
+    ParaFIELD(MEDCouplingFieldDouble* field, ParaMESH *sup, const ProcessorGroup& group);
+  
+    virtual ~ParaFIELD();
+    void synchronizeTarget( ParaMEDMEM::ParaFIELD* source_field);
+    void synchronizeSource( ParaMEDMEM::ParaFIELD* target_field);
+    MEDCouplingFieldDouble* getField() const { return _field; }
+    void setOwnSupport(bool v) const { _own_support=v; }
+    DataArrayInt* returnCumulativeGlobalNumbering() const;
+    DataArrayInt* returnGlobalNumbering() const;
+    Topology* getTopology() const { return _topology; }
+    ParaMESH* getSupport() const  { return _support; }
+    int nbComponents() const;
+    double getVolumeIntegral(int icomp, bool isWAbs) const;
+    double getL2Norm()const { return -1; }
+  private:
+    MEDCouplingFieldDouble* _field;
+    ParaMEDMEM::ComponentTopology _component_topology;
+    Topology* _topology; 
+    mutable bool _own_support;
+    ParaMESH* _support;
+  };
+
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ParaGRID.cxx b/src/ParaMEDMEM/ParaGRID.cxx
new file mode 100644 (file)
index 0000000..f45c1e7
--- /dev/null
@@ -0,0 +1,74 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaGRID.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "MEDCouplingMemArray.hxx"
+#include "MEDCouplingCMesh.hxx"
+#include "InterpKernelUtilities.hxx"
+
+#include <iostream>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  
+  ParaGRID::ParaGRID(MEDCouplingCMesh* global_grid, Topology* topology) throw(INTERP_KERNEL::Exception)
+  {
+  
+    _block_topology = dynamic_cast<BlockTopology*>(topology);
+    if(_block_topology==0)
+      throw INTERP_KERNEL::Exception(LOCALIZED("ParaGRID::ParaGRID topology must be block topology"));
+    
+    if (!_block_topology->getProcGroup()->containsMyRank())
+      return;
+    
+    int dimension=_block_topology->getDimension() ;
+    if (dimension != global_grid->getSpaceDimension())
+      throw INTERP_KERNEL::Exception(LOCALIZED("ParaGrid::ParaGrid incompatible topology"));
+    _grid=global_grid;
+    _grid->incrRef();
+    /*vector<vector<double> > xyz_array(dimension);
+      vector<pair<int,int> > local_indices = _block_topology->getLocalArrayMinMax();
+      vector <string> coordinates_names;
+      vector <string> coordinates_units;
+      for (int idim=0; idim<dimension ; idim++)
+      {
+      DataArrayDouble *array=global_grid->getCoordsAt(idim);
+      double *arrayC=array->getPointer();
+      cout << " Indices "<< local_indices[idim].first <<" "<<local_indices[idim].second<<endl;
+      for (int i=(local_indices)[idim].first; i<(local_indices)[idim].second; i++)
+      xyz_array[idim].push_back(arrayC[i]);
+      coordinates_names.push_back(array->getName());
+      coordinates_units.push_back(array->getInfoOnComponentAt(0));
+      }
+      _grid=MEDCouplingCMesh::New();
+      _grid->set(xyz_array, coordinates_names,coordinates_units);
+      _grid->setName(global_grid->getName());
+      _grid->setDescription(global_grid->getDescription());*/
+  }
+
+  ParaGRID::~ParaGRID()
+  {
+    if(_grid)
+      _grid->decrRef();
+  }
+}
diff --git a/src/ParaMEDMEM/ParaGRID.hxx b/src/ParaMEDMEM/ParaGRID.hxx
new file mode 100644 (file)
index 0000000..2335b9d
--- /dev/null
@@ -0,0 +1,51 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __PARAGRID_HXX__
+#define __PARAGRID_HXX__
+
+#include "InterpolationUtils.hxx"
+
+#include <vector>
+
+namespace ParaMEDMEM
+{
+  class Topology;
+  class BlockTopology;
+  class MEDCouplingCMesh;
+
+  class ParaGRID
+  {
+  public:
+    ParaGRID(MEDCouplingCMesh* global_grid, Topology* topology) throw(INTERP_KERNEL::Exception);
+    BlockTopology * getBlockTopology() const { return _block_topology; }
+    virtual ~ParaGRID();
+    MEDCouplingCMesh* getGrid() const { return _grid; }
+  private:
+    MEDCouplingCMesh* _grid;
+    // structured grid topology
+    ParaMEDMEM::BlockTopology* _block_topology;
+    // stores the x,y,z axes on the global grid
+    std::vector<std::vector<double> > _global_axis;
+    //id of the local grid
+    int _my_domain_id;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ParaMESH.cxx b/src/ParaMEDMEM/ParaMESH.cxx
new file mode 100644 (file)
index 0000000..a6482a5
--- /dev/null
@@ -0,0 +1,122 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMESH.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "MEDCouplingMemArray.hxx"
+
+#include <fstream>
+#include <vector>
+
+//inclusion for the namespaces
+using namespace std;
+
+namespace ParaMEDMEM
+{
+  ParaMESH::ParaMESH( MEDCouplingPointSet *subdomain_mesh, MEDCouplingPointSet *subdomain_face,
+            DataArrayInt *CorrespElt_local2global, DataArrayInt *CorrespFace_local2global,
+            DataArrayInt *CorrespNod_local2global, const ProcessorGroup& proc_group ):
+    _cell_mesh(subdomain_mesh),
+    _face_mesh(subdomain_face),
+    _my_domain_id(proc_group.myRank()),
+    _block_topology (new BlockTopology(proc_group, subdomain_mesh->getNumberOfCells())),
+    _explicit_topology(0),
+    _node_global(CorrespNod_local2global),
+    _face_global(CorrespFace_local2global),
+    _cell_global(CorrespElt_local2global)
+  {
+    if(_cell_mesh)
+      _cell_mesh->incrRef();
+    if(_face_mesh)
+      _face_mesh->incrRef();
+    if(CorrespElt_local2global)
+      CorrespElt_local2global->incrRef();
+    if(CorrespFace_local2global)
+      CorrespFace_local2global->incrRef();
+    if(CorrespNod_local2global)
+      CorrespNod_local2global->incrRef();
+  }
+
+  ParaMESH::ParaMESH( MEDCouplingPointSet *mesh, const ProcessorGroup& proc_group, const std::string& name):
+    _cell_mesh(mesh),
+    _face_mesh(0),
+    _my_domain_id(proc_group.myRank()),
+    _block_topology (new BlockTopology(proc_group, mesh->getNumberOfCells())),
+    _node_global(0),
+    _face_global(0)
+  {
+    if(_cell_mesh)
+      _cell_mesh->incrRef();
+    int nb_elem=mesh->getNumberOfCells();
+    _explicit_topology=new BlockTopology(proc_group,nb_elem);
+    int nbOfCells=mesh->getNumberOfCells();
+    _cell_global = DataArrayInt::New();
+    _cell_global->alloc(nbOfCells,1);
+    int *cellglobal=_cell_global->getPointer();
+    int offset = _block_topology->localToGlobal(make_pair(_my_domain_id,0));
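+    // global cell ids are contiguous per domain: this domain's first global id plus the local index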
+    for (int i=0; i<nbOfCells; i++)
+      {
+        cellglobal[i]=offset+i;
+      }
+  }
+
+  void ParaMESH::setNodeGlobal(DataArrayInt *nodeGlobal)
+  {
+    if(nodeGlobal!=_node_global)
+      {
+        if(_node_global)
+          _node_global->decrRef();
+        _node_global=nodeGlobal;
+        if(_node_global)
+          _node_global->incrRef();
+      }
+  }
+
+  void ParaMESH::setCellGlobal(DataArrayInt *cellGlobal)
+  {
+    if(cellGlobal!=_cell_global)
+      {
+        if(_cell_global)
+          _cell_global->decrRef();
+        _cell_global=cellGlobal;
+        if(_cell_global)
+          _cell_global->incrRef();
+      }
+  }
+
+  ParaMESH::~ParaMESH()
+  {
+    if(_cell_mesh)
+      _cell_mesh->decrRef();
+    if(_face_mesh)
+      _face_mesh->decrRef();
+    delete _block_topology;
+    if(_node_global)
+      _node_global->decrRef();
+    if(_cell_global)
+      _cell_global->decrRef();
+    if(_face_global)
+      _face_global->decrRef();
+    delete _explicit_topology;
+  }
+
+}
diff --git a/src/ParaMEDMEM/ParaMESH.hxx b/src/ParaMEDMEM/ParaMESH.hxx
new file mode 100644 (file)
index 0000000..391bff5
--- /dev/null
@@ -0,0 +1,82 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __PARAMESH_HXX__
+#define __PARAMESH_HXX__
+
+#include "MEDCouplingPointSet.hxx"
+#include "ProcessorGroup.hxx"
+#include "MEDCouplingMemArray.hxx"
+
+#include <string>
+#include <vector>
+
+namespace ParaMEDMEM
+{
+  class Topology;
+  class BlockTopology;
+  class DataArrayInt;
+
+  class ParaMESH
+  {
+  public:
+    ParaMESH( MEDCouplingPointSet *subdomain_mesh,
+              MEDCouplingPointSet *subdomain_face,
+              DataArrayInt *CorrespElt_local2global,
+              DataArrayInt *CorrespFace_local2global,
+              DataArrayInt *CorrespNod_local2global,
+              const ProcessorGroup& proc_group ) ;
+    ParaMESH( MEDCouplingPointSet *mesh,
+              const ProcessorGroup& proc_group, const std::string& name);
+
+    virtual ~ParaMESH();
+    void setNodeGlobal(DataArrayInt *nodeGlobal);
+    void setCellGlobal(DataArrayInt *cellGlobal);
+    Topology* getTopology() const { return _explicit_topology; }
+    bool isStructured() const { return _cell_mesh->isStructured(); }
+    MEDCouplingPointSet *getCellMesh() const { return _cell_mesh; }
+    MEDCouplingPointSet *getFaceMesh() const { return _face_mesh; }
+    BlockTopology* getBlockTopology() const { return _block_topology; }
+
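+    // Note: the *DA accessors below increment the reference counter of the returned
+    // array, so the caller is expected to call decrRef() on it when done.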
+    DataArrayInt* getGlobalNumberingNodeDA() const { if(_node_global) _node_global->incrRef(); return _node_global; }
+    DataArrayInt* getGlobalNumberingFaceDA() const { if(_face_global) _face_global->incrRef(); return _face_global; }
+    DataArrayInt* getGlobalNumberingCellDA() const { if(_cell_global) _cell_global->incrRef(); return _cell_global; }
+    const int* getGlobalNumberingNode() const { if(_node_global) return _node_global->getConstPointer(); return 0; }
+    const int* getGlobalNumberingFace() const { if(_face_global) return _face_global->getConstPointer(); return 0; }
+    const int* getGlobalNumberingCell() const { if(_cell_global) return _cell_global->getConstPointer(); return 0; }
+
+  private:
+    //mesh object underlying the ParaMESH object
+    MEDCouplingPointSet *_cell_mesh ;
+    MEDCouplingPointSet *_face_mesh ;
+
+    //id of the local grid
+    int _my_domain_id;
+
+    //global topology of the cells
+    ParaMEDMEM::BlockTopology* _block_topology;
+    Topology*  _explicit_topology;
+    // pointers to global numberings
+    DataArrayInt* _node_global;
+    DataArrayInt* _face_global;
+    DataArrayInt* _cell_global;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/ProcessorGroup.cxx b/src/ParaMEDMEM/ProcessorGroup.cxx
new file mode 100644 (file)
index 0000000..0116950
--- /dev/null
@@ -0,0 +1,32 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ProcessorGroup.hxx"
+#include "InterpolationUtils.hxx"
+
+namespace ParaMEDMEM
+{
+  ProcessorGroup::ProcessorGroup (const CommInterface& interface, int start, int end):_comm_interface(interface)
+  {
+    if (start>end)
+      throw INTERP_KERNEL::Exception("wrong call to Processor group constructor");
+    for (int i=start; i<=end;i++)
+      _proc_ids.insert(i);
+  }
+}
diff --git a/src/ParaMEDMEM/ProcessorGroup.hxx b/src/ParaMEDMEM/ProcessorGroup.hxx
new file mode 100644 (file)
index 0000000..344704a
--- /dev/null
@@ -0,0 +1,60 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __PROCESSORGROUP_HXX__
+#define __PROCESSORGROUP_HXX__
+
+#include "CommInterface.hxx"
+
+#include <set>
+
+namespace ParaMEDMEM
+{
+  class ProcessorGroup
+  {
+  public:
+  
+    ProcessorGroup(const CommInterface& interface):_comm_interface(interface) { }
+    ProcessorGroup(const CommInterface& interface, std::set<int> proc_ids):
+      _comm_interface(interface),_proc_ids(proc_ids) { }
+    ProcessorGroup (const ProcessorGroup& proc_group, std::set<int> proc_ids):
+      _comm_interface(proc_group.getCommInterface()),_proc_ids(proc_ids) { }
+    ProcessorGroup (const ProcessorGroup& other):
+      _comm_interface(other.getCommInterface()),_proc_ids(other._proc_ids) { }
+    ProcessorGroup (const CommInterface& interface, int start, int end);
+    virtual ~ProcessorGroup() { }
+    virtual ProcessorGroup *deepCpy() const = 0;
+    virtual ProcessorGroup* fuse (const ProcessorGroup&) const = 0;
+    virtual void intersect (ProcessorGroup&) = 0;
+    bool contains(int rank) const { return _proc_ids.find(rank)!=_proc_ids.end(); }
+    virtual bool containsMyRank() const = 0;
+    int size() const  { return _proc_ids.size(); }
+    const CommInterface& getCommInterface()const { return _comm_interface; }
+    virtual int myRank() const = 0;
+    virtual int translateRank(const ProcessorGroup*, int) const = 0;
+    virtual ProcessorGroup* createComplementProcGroup() const = 0;
+    virtual ProcessorGroup* createProcGroup() const = 0;
+    virtual const std::set<int>& getProcIDs()const  { return _proc_ids; } 
+  protected:
+    const CommInterface _comm_interface;
+    std::set<int> _proc_ids;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/README_JR b/src/ParaMEDMEM/README_JR
new file mode 100644 (file)
index 0000000..762dc9e
--- /dev/null
@@ -0,0 +1,446 @@
+
+CVS:
+====
+
+Branch: BR_MEDPARA : MED_SRC
+setenv CVSROOT :pserver:rahuel@cvs.opencascade.com:/home/server/cvs/MED   
+cvs login
+...
+
+Directories:
+============
+
+Sources: /home/rahuel/MEDPARAsynch
+Build on awa: /data/tmpawa/rahuel/MEDPARAsynch/MED_Build
+Installation on awa: /data/tmpawa/rahuel/MEDPARAsynch/MED_Install
+
+
+Environment:
+============
+
+source /home/rahuel/MEDPARAsynch/env_products.csh
+
+We use:
+/data/tmpawa/vb144235/valgrind-3.2.1/valgrind_install/bin
+/data/tmpawa/adam/Salome3/V3_2_7_AWA_OCC/Python-2.4.1
+/data/tmpawa/vb144235/med_231_install
+/data/tmpawa2/adam/omniORB/omniORB-4.0.7
+/data/tmpawa/vb144235/lam_install
+/data/tmpawa/vb144235/cppunit_install
+/data/tmpawa/vb144235/fvm_install_lam
+/data/tmpawa/vb144235/bft_install
+/home/rahuel/MEDPARAsynch/ICoCo
+/data/tmpawa2/adam/Salome3/V3_2_0_maintainance/KERNEL/KERNEL_INSTALL
+
+
+Build_Configure and Configure:
+==============================
+
+MEDMEM is built "stand-alone", without KERNEL and without GUI.
+
+cd $MED_BUILD_DIR
+${MED_SRC_DIR}/build_configure --without-kernel --without-ihm
+rm ${MED_SRC_DIR}/adm_local_without_kernel/adm_local_without_kernel
+rm -fR $MED_BUILD_DIR/adm_local_without_kernel/adm_local_without_kernel 
+
+cd $MED_BUILD_DIR
+${MED_SRC_DIR}/configure --without-kernel --without-ihm --with-lam=/data/tmpawa/vb144235/lam_install --prefix=${MED_ROOT_DIR} --with-med2=/data/tmpawa/vb144235/med_231_install --with-python=/data/tmpawa/adam/Salome3/V3_2_7_AWA_OCC/Python-2.4.1 --with-cppunit=/data/tmpawa/vb144235/cppunit_install --with-cppunit_inc=/data/tmpawa/vb144235/cppunit_install/include --with-fvm=/data/tmpawa/vb144235/fvm_install_lam
+rm ${MED_SRC_DIR}/adm_local_without_kernel/adm_local_without_kernel
+rm -fR $MED_BUILD_DIR/adm_local_without_kernel/adm_local_without_kernel 
+
+
+Build:
+======
+
+cd $MED_BUILD_DIR
+make
+make install
+
+Build problems:
+===============
+
+List of the files modified and differing from the CVS base in order to
+be able to build and install:
+
+M MED_SRC/configure.in.base :
+-----------------------------
+CHECK_MPICH
+CHECK_LAM
+CHECK_OPENMPI commented out (it overrides the result of CHECK_LAM)
+CHECK_CPPUNIT was added
+
+M MED_SRC/adm_local_without_kernel/unix/config_files/check_lam.m4 :
+-------------------------------------------------------------------
+Debugging to find the right LAM configuration
+
+M MED_SRC/src/INTERP_KERNEL/Makefile.in :
+-----------------------------------------
+Problems building the tests
+
+M MED_SRC/src/ParaMEDMEM/Makefile.in :
+--------------------------------------
+. Build of libParaMEDMEM.a for gcov (static link)
+. Added compilation options: -fprofile-arcs -ftest-coverage -pg (gcov) ==>
+  code instrumentation
+
+C MED_SRC/src/ParaMEDMEM/Test/Makefile.in :
+-------------------------------------------
+. Build of libParaMEDMEMTest.a for gcov (static link)
+. Added compilation options: -fprofile-arcs -ftest-coverage -pg (gcov) ==>
+  code instrumentation
+. Handling of $(MED_WITH_KERNEL) with:
+  ifeq ($(MED_WITH_KERNEL),yes)
+    LDFLAGSFORBIN += $(LDFLAGS) -lm $(MED3_LIBS) $(HDF5_LIBS) $(MPI_LIBS) \
+                   -L$(CMAKE_BINARY_DIR)/lib@LIB_LOCATION_SUFFIX@/salome -lmed_V2_1 -lparamed -lmedmem \
+                   ${KERNEL_LDFLAGS} -lSALOMELocalTrace -lSALOMEBasics \
+                   $(CPPUNIT_LIBS) \
+                   -lParaMEDMEMTest 
+  endif
+  ifeq ($(MED_WITH_KERNEL),no)
+  LDFLAGSFORBIN += $(LDFLAGS) -lm $(MED3_LIBS) $(HDF5_LIBS) $(MPI_LIBS) \
+                   -L$(CMAKE_BINARY_DIR)/lib@LIB_LOCATION_SUFFIX@/salome -lmed_V2_1 -lparamed -linterpkernel -lmedmem \
+                   ${KERNEL_LDFLAGS} ${FVM_LIBS} ${CPPUNIT_LIBS} -L/data/tmpawa/vb144235/bft_install/lib -lbft\
+                  -lParaMEDMEMTest
+  endif
+
+M MED_SRC/src/ParaMEDMEM/Test/ParaMEDMEMTest.hxx :
+--------------------------------------------------
+Commented out the missing test:
+CPPUNIT_TEST(testNonCoincidentDEC_3D); 
+
+U MED_SRC/src/ParaMEDMEM/Test/ParaMEDMEMTest_NonCoincidentDEC.cxx :
+-------------------------------------------------------------------
+Missing from CVS
+
+To force rebuilding the tests:
+==============================
+
+cd $MED_BUILD_DIR
+rm src/ParaMEDMEM/*o
+rm src/ParaMEDMEM/*.la
+rm src/ParaMEDMEM/test_*
+rm src/ParaMEDMEM/.libs/*
+rm src/ParaMEDMEM/Test/*o
+rm src/ParaMEDMEM/Test/*.la
+rm src/ParaMEDMEM/Test/.libs/*
+rm core.*
+rm vgcore.*
+cd $MED_BUILD_DIR/src/ParaMEDMEM/Test
+make
+make install
+cd $MED_BUILD_DIR
+
+
+Problem with lam:
+=================
+
+jr[1175]> mpirun -np 5 -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
+21508 TestParaMEDMEM running on n0 (o)
+21509 TestParaMEDMEM running on n0 (o)
+21510 TestParaMEDMEM running on n0 (o)
+21511 TestParaMEDMEM running on n0 (o)
+21512 TestParaMEDMEM running on n0 (o)
+- Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
+- Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
+- Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
+- Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
+- Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
+-----------------------------------------------------------------------------
+The selected RPI failed to initialize during MPI_INIT.  This is a
+fatal error; I must abort.
+
+This occurred on host awa (n0).
+The PID of failed process was 21508 (MPI_COMM_WORLD rank: 0)
+-----------------------------------------------------------------------------
+-----------------------------------------------------------------------------
+One of the processes started by mpirun has exited with a nonzero exit
+code.  This typically indicates that the process finished in error.
+If your process did not finish in error, be sure to include a "return
+0" or "exit(0)" in your C code before exiting the application.
+
+PID 21510 failed on node n0 (127.0.0.1) with exit status 1.
+-----------------------------------------------------------------------------
+jr[1176]> 
+
+
+Workaround for the lam problem:
+===============================
+
+mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
+
+
+Valgrind:
+=========
+. The tests run under valgrind report errors in MPI_Init and
+  MPI_Finalize as well as in routines reported as "below main".
+. In addition we get a "Segmentation Violation" together with a
+  "vgcore.*" file (valgrind crash).
+. But we get "All heap blocks were freed -- no leaks are possible."
+  and there are no malloc/free or new/delete errors in ParaMEDMEM or
+  in the tests.
+
+. However, if the tests are run without valgrind, there is no
+  error and no "core.*" file.
+
+
+CPPUNIT tests from $MED_BUILD_DIR/src/ParaMEDMEM/Test:
+======================================================
+
+MPI_Init is called only once.
+It is followed by the execution of the whole suite of tests grouped
+into the three executables TestParaMEDMEM, TestMPIAccessDEC and
+TestMPIAccess.
+Finally there is a single call to MPI_Finalize.
+
+If one test of a test suite has a problem, this can have
+consequences for the execution of the following tests.
+
+While tuning the TestMPIAccessDEC test suite this was the case:
+messages posted in lam remained unread. The next test then ran
+more and more slowly without reporting any error (a problem that
+was hard to identify).
+
+
+Running the TestParaMEDMEM tests with CPPUNIT and TotalView (-tv option):
+==========================================================================
+
+mpirun -np 5 -ssi rpi tcp C -tv -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
+
+Sometimes TotalView cannot be used for lack of an available
+license.
+
+
+
+Running the TestParaMEDMEM tests with CPPUNIT and Valgrind with "memory leaks":
+================================================================================
+
+mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestParaMEDMEM
+
+
+Running the MPI_AccessDEC functional tests with CPPUNIT:
+========================================================
+
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestMPIAccessDEC
+
+
+Running the MPI_Access unit tests with CPPUNIT:
+===============================================
+
+mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestMPIAccess
+
+
+TestMPIAccess/TestMPIAccessDEC/TestParaMEDMEM and gcov:
+========================================================
+
+The results are in the following directories of $MED_BUILD_DIR/src/ParaMEDMEM/Test:
+-------------
+
+    TestParaMEDMEM-gcov/
+    TestMPIAccessDEC-gcov/
+    TestMPIAccess-gcov/
+
+I did not find any anomalies there.
+
+compilation: -fprofile-arcs -ftest-coverage
+-------------
+
+$MED_BUILD_DIR/src/ParaMEDMEM/makefile.in : LIB=libparamedar.a \
+-------------------------------------------     libparamed.la
+
+$MED_BUILD_DIR/src/ParaMEDMEM/Test/makefile.in : LIB = libParaMEDMEMTestar.a \
+------------------------------------------------       libParaMEDMEMTest.la
+
+manual static links:
+--------------------
+
+g++  -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestMPIAccess TestMPIAccess.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive    -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft  -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
+
+g++  -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestMPIAccessDEC TestMPIAccessDEC.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive    -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft  -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
+
+g++  -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestParaMEDMEM TestParaMEDMEM.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive    -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft  -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
+
+Do not forget to run "make install" afterwards ...
+
+execution and gcov:
+-------------------
+
+To process the .cxx files of both ${MED_BUILD_DIR}/src/ParaMEDMEM and
+${MED_BUILD_DIR}/src/ParaMEDMEM/Test, gcov is run twice: once in the Test
+directory and once with "-o ../" so that the object files of the parent
+directory are used.
+
+cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
+
+mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestMPIAccess
+
+gcov TestMPIAccess.cxx test_MPI_Access_Send_Recv.cxx \
+                       test_MPI_Access_Cyclic_Send_Recv.cxx \
+                       test_MPI_Access_SendRecv.cxx \
+                       test_MPI_Access_ISend_IRecv.cxx \
+                       test_MPI_Access_Cyclic_ISend_IRecv.cxx \
+                       test_MPI_Access_ISendRecv.cxx \
+                       test_MPI_Access_Probe.cxx \
+                       test_MPI_Access_IProbe.cxx \
+                       test_MPI_Access_Cancel.cxx \
+                       test_MPI_Access_Send_Recv_Length.cxx \
+                       test_MPI_Access_ISend_IRecv_Length.cxx \
+                       test_MPI_Access_ISend_IRecv_Length_1.cxx \
+                       test_MPI_Access_Time.cxx \
+                       test_MPI_Access_Time_0.cxx \
+                       test_MPI_Access_ISend_IRecv_BottleNeck.cxx \
+                       ../MPI_Access.cxx
+gcov -o ../ TestMPIAccess.cxx test_MPI_Access_Send_Recv.cxx \
+                              test_MPI_Access_Cyclic_Send_Recv.cxx \
+                              test_MPI_Access_SendRecv.cxx \
+                              test_MPI_Access_ISend_IRecv.cxx \
+                              test_MPI_Access_Cyclic_ISend_IRecv.cxx \
+                              test_MPI_Access_ISendRecv.cxx \
+                              test_MPI_Access_Probe.cxx \
+                              test_MPI_Access_IProbe.cxx \
+                              test_MPI_Access_Cancel.cxx \
+                              test_MPI_Access_Send_Recv_Length.cxx \
+                              test_MPI_Access_ISend_IRecv_Length.cxx \
+                              test_MPI_Access_ISend_IRecv_Length_1.cxx \
+                              test_MPI_Access_Time.cxx \
+                              test_MPI_Access_Time_0.cxx \
+                              test_MPI_Access_ISend_IRecv_BottleNeck.cxx \
+                              ../MPI_Access.cxx
+
+
+cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestMPIAccessDEC
+
+gcov TestMPIAccessDEC.cxx test_AllToAllDEC.cxx \
+                          test_AllToAllvDEC.cxx \
+                          test_AllToAllTimeDEC.cxx \
+                          test_AllToAllvTimeDEC.cxx \
+                          test_AllToAllvTimeDoubleDEC.cxx \
+                          ../TimeInterpolator.cxx \
+                          ../LinearTimeInterpolator.cxx \
+                          ../MPI_Access.cxx \
+                          ../MPI_AccessDEC.cxx
+gcov -o ../ TestMPIAccessDEC.cxx test_AllToAllDEC.cxx \
+                                 test_AllToAllvDEC.cxx \
+                                 test_AllToAllTimeDEC.cxx \
+                                 test_AllToAllvTimeDEC.cxx \
+                                 test_AllToAllvTimeDoubleDEC.cxx \
+                                 ../TimeInterpolator.cxx \
+                                 ../LinearTimeInterpolator.cxx \
+                                 ../MPI_Access.cxx \
+                                 ../MPI_AccessDEC.cxx
+
+cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
+mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
+
+gcov TestParaMEDMEM.cxx ParaMEDMEMTest.cxx \
+                             ParaMEDMEMTest_MPIProcessorGroup.cxx \
+                             ParaMEDMEMTest_BlockTopology.cxx \
+                             ParaMEDMEMTest_InterpKernelDEC.cxx \
+                        ../BlockTopology.cxx \
+                        ../ComponentTopology.cxx \
+                        ../DEC.cxx \
+                        ../ElementLocator.cxx \
+                        ../InterpolationMatrix.cxx \
+                        ../InterpKernelDEC.cxx \
+                        ../MPIProcessorGroup.cxx \
+                        ../MxN_Mapping.cxx \
+                        ../ParaFIELD.cxx \
+                        ../ParaMESH.cxx \
+                        ../ParaSUPPORT.cxx \
+                        ../ProcessorGroup.cxx \
+                        ../TimeInterpolator.cxx \
+                        ../LinearTimeInterpolator.cxx \
+                        ../MPI_Access.cxx \
+                        ../MPI_AccessDEC.cxx
+
+gcov -o ../ TestParaMEDMEM.cxx ParaMEDMEMTest.cxx \
+                                    ParaMEDMEMTest_MPIProcessorGroup.cxx \
+                                    ParaMEDMEMTest_BlockTopology.cxx \
+                                    ParaMEDMEMTest_InterpKernelDEC.cxx \
+                               ../BlockTopology.cxx \
+                               ../ComponentTopology.cxx \
+                               ../DEC.cxx \
+                               ../ElementLocator.cxx \
+                               ../InterpolationMatrix.cxx \
+                               ../InterpKernelDEC.cxx \
+                               ../MPIProcessorGroup.cxx \
+                               ../MxN_Mapping.cxx \
+                               ../ParaFIELD.cxx \
+                               ../ParaMESH.cxx \
+                               ../ParaSUPPORT.cxx \
+                               ../ProcessorGroup.cxx \
+                               ../TimeInterpolator.cxx \
+                               ../LinearTimeInterpolator.cxx \
+                               ../MPI_Access.cxx \
+                               ../MPI_AccessDEC.cxx
+
+
+
+
+
+Running the unit tests without CPPUNIT:
+=======================================
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Send_Recv
+
+mpirun -np 3  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cyclic_Send_Recv
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_SendRecv
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv
+
+mpirun -np 3  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cyclic_ISend_IRecv
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISendRecv
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Probe
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_IProbe
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cancel
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Send_Recv_Length
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_Length
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_Length_1
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Time
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Time_0 2 1
+
+
+#AllToAllDEC
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllDEC 0
+
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllDEC 1
+
+
+#AllToAllvDEC
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvDEC 0
+
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvDEC 1
+
+
+#AllToAllTimeDEC
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllTimeDEC 0
+
+mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllTimeDEC 1
+
+
+#AllToAllvTimeDEC
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 0 1
+
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 0
+
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 1
+
+
+
+#AllToAllvTimeDoubleDEC
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDoubleDEC 0
+
+mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDoubleDEC 1
+
+
+
+mpirun -np 2  -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_BottleNeck
+
diff --git a/src/ParaMEDMEM/StructuredCoincidentDEC.cxx b/src/ParaMEDMEM/StructuredCoincidentDEC.cxx
new file mode 100644 (file)
index 0000000..1e88a95
--- /dev/null
@@ -0,0 +1,412 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <mpi.h>
+#include "CommInterface.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaFIELD.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "StructuredCoincidentDEC.hxx"
+#include "InterpKernelUtilities.hxx"
+
+#include <iostream>
+
+using namespace std;
+
+namespace ParaMEDMEM
+{
+
+  /*!
+    \anchor StructuredCoincidentDEC-det
+    \class StructuredCoincidentDEC
+
+    This class is meant for remapping fields that have identical
+    supports with different parallel topologies. It can be used to couple
+    together multiphysics codes that operate on the same domain
+    with different partitionings, which can be useful if one of
+    the computations is much faster than the other. It can also be used
+    to couple together codes that share an interface that was generated
+    in the same manner (with identical global ids). 
+    Also, this \ref para-dec can be used for fields that have component topologies,
+    i.e., components that are scattered over several processors.
+
+    The remapping between the two supports is based on the identity of the global
+    ids, instead of on geometrical considerations, as is the case for
+    \ref NonCoincidentDEC-det "NonCoincidentDEC" and \ref InterpKernelDEC-det "InterpKernelDEC".
+    Therefore, this \ref para-dec "DEC" must not be used
+    for coincident meshes that do not have the same numbering.
+
+    As with all the other DECs, its use consists of two phases:
+    - a setup phase during which the topologies are exchanged so that
+    the target side knows from which processors it should expect 
+    the data.
+    - a send/recv phase during which the field data is actually transferred.
+
+    This example illustrates the sending of a field with 
+    the \c StructuredCoincidentDEC : 
+    \code
+    ...
+    StructuredCoincidentDEC dec(groupA, groupB);
+    dec.attachLocalField(field);
+    dec.synchronize();
+    if (groupA.containsMyRank())
+      dec.recvData();
+    else if (groupB.containsMyRank())
+      dec.sendData();
+    ...
+    \endcode
+
+    Creating a ParaFIELD to be attached to the DEC is exactly the same as for 
+    other DECs in the case when the remapping concerns similar meshes 
+    that only have different partitionings. When the fields also have
+    different component topologies, creating the ParaFIELD
+    requires some more effort. See the \ref para-over section for more details.
+  */
+
+
+  StructuredCoincidentDEC::StructuredCoincidentDEC():_topo_source(0),_topo_target(0),
+                                                     _send_counts(0),_recv_counts(0),
+                                                     _send_displs(0),_recv_displs(0),
+                                                     _recv_buffer(0),_send_buffer(0)
+  {  
+  }
+
+
+  StructuredCoincidentDEC::~StructuredCoincidentDEC()
+  {
+    delete [] _send_buffer;
+    delete [] _recv_buffer;
+    delete []_send_displs;
+    delete [] _recv_displs;
+    delete [] _send_counts;
+    delete [] _recv_counts;
+    if (! _source_group->containsMyRank())
+      delete _topo_source;
+    if(!_target_group->containsMyRank())
+      delete _topo_target;
+  }
+
+  StructuredCoincidentDEC::StructuredCoincidentDEC(ProcessorGroup& local_group, ProcessorGroup& distant_group):DisjointDEC(local_group,distant_group),
+                                                                                                               _topo_source(0),_topo_target(0),
+                                                                                                               _send_counts(0),_recv_counts(0),
+                                                                                                               _send_displs(0),_recv_displs(0),
+                                                                                                               _recv_buffer(0),_send_buffer(0)
+  {
+  }
+
+  /*! Synchronization process for exchanging topologies
+   */
+  void StructuredCoincidentDEC::synchronizeTopology()
+  {
+    if (_source_group->containsMyRank())
+      _topo_source = dynamic_cast<BlockTopology*>(_local_field->getTopology());
+    if (_target_group->containsMyRank())
+      _topo_target = dynamic_cast<BlockTopology*>(_local_field->getTopology());
+  
+    // Transmitting source topology to target code 
+    broadcastTopology(_topo_source,1000);
+    // Transmitting target topology to source code
+    broadcastTopology(_topo_target,2000);
+    if (_topo_source->getNbElements() != _topo_target->getNbElements())
+      throw INTERP_KERNEL::Exception("Incompatible dimensions for target and source topologies");
+
+  }
+
+  /*! Creates the arrays necessary for the data transfer
+   * and fills the send array with the values of the 
+   * source field
+   *  */
+  void StructuredCoincidentDEC::prepareSourceDE()
+  {
+    ////////////////////////////////////
+    //Step 1 : _buffer array creation 
+  
+    if (!_topo_source->getProcGroup()->containsMyRank())
+      return;
+    MPIProcessorGroup* group=new MPIProcessorGroup(_topo_source->getProcGroup()->getCommInterface());
+  
+    int myranksource = _topo_source->getProcGroup()->myRank();
+  
+    vector <int>* target_arrays=new vector<int>[_topo_target->getProcGroup()->size()];
+  
+    //cout<<" topotarget size"<<  _topo_target->getProcGroup()->size()<<endl;
+  
+    int nb_local = _topo_source-> getNbLocalElements();
+    for (int ielem=0; ielem< nb_local ; ielem++)
+      {
+        //  cout <<"source local :"<<myranksource<<","<<ielem<<endl; 
+        int global = _topo_source->localToGlobal(make_pair(myranksource, ielem));
+        //  cout << "global "<<global<<endl;
+        pair<int,int> target_local =_topo_target->globalToLocal(global);
+        //  cout << "target local : "<<target_local.first<<","<<target_local.second<<endl; 
+        target_arrays[target_local.first].push_back(target_local.second); 
+      }  
+  
+    int union_size=group->size();
+  
+    _send_counts=new int[union_size];
+    _send_displs=new int[union_size];
+    _recv_counts=new int[union_size];
+    _recv_displs=new int[union_size];
+     
+    for (int i=0; i< union_size; i++)
+      {
+        _send_counts[i]=0;
+        _recv_counts[i]=0;
+        _recv_displs[i]=0;
+      }
+    _send_displs[0]=0;
+  
+    for (int iproc=0; iproc < _topo_target->getProcGroup()->size(); iproc++)
+      {
+        //converts the rank in target to the rank in union communicator
+        int unionrank=group->translateRank(_topo_target->getProcGroup(),iproc);
+        _send_counts[unionrank]=target_arrays[iproc].size();
+      }
+  
+    for (int iproc=1; iproc<group->size();iproc++)
+      _send_displs[iproc]=_send_displs[iproc-1]+_send_counts[iproc-1];
+  
+    _send_buffer = new double [nb_local ];
+
+    /////////////////////////////////////////////////////////////
+    //Step 2 : filling the _buffers with the source field values 
+
+    int* counter=new int [_topo_target->getProcGroup()->size()];
+    counter[0]=0;  
+    for (int i=1; i<_topo_target->getProcGroup()->size(); i++)
+      counter[i]=counter[i-1]+target_arrays[i-1].size();
+    
+      
+    const double* value = _local_field->getField()->getArray()->getPointer();
+    //cout << "Nb local " << nb_local<<endl;
+    for (int ielem=0; ielem<nb_local ; ielem++)
+      {
+        int global = _topo_source->localToGlobal(make_pair(myranksource, ielem));
+        pair<int,int> target_local =_topo_target->globalToLocal(global);
+        //cout <<"global : "<< global<<" local :"<<target_local.first<<" "<<target_local.second;
+        //cout <<"counter[]"<<counter[target_local.first]<<endl;
+        _send_buffer[counter[target_local.first]++]=value[ielem];
+    
+      }
+    delete[] target_arrays;
+    delete[] counter;
+    delete group;
+  }
+
+  /*!
+   *  Creates the buffers for receiving the fields on the target side
+   */
+  void StructuredCoincidentDEC::prepareTargetDE()
+  {
+    if (!_topo_target->getProcGroup()->containsMyRank())
+      return;
+    MPIProcessorGroup* group=new MPIProcessorGroup(_topo_source->getProcGroup()->getCommInterface());
+  
+    int myranktarget = _topo_target->getProcGroup()->myRank();
+  
+    vector < vector <int> > source_arrays(_topo_source->getProcGroup()->size());
+    int nb_local = _topo_target-> getNbLocalElements();
+    for (int ielem=0; ielem< nb_local ; ielem++)
+      {
+        //  cout <<"TS target local :"<<myranktarget<<","<<ielem<<endl; 
+        int global = _topo_target->localToGlobal(make_pair(myranktarget, ielem));
+        //cout << "TS global "<<global<<endl;
+        pair<int,int> source_local =_topo_source->globalToLocal(global);
+        //  cout << "TS source local : "<<source_local.first<<","<<source_local.second<<endl; 
+        source_arrays[source_local.first].push_back(source_local.second); 
+      }  
+    int union_size=group->size();
+    _recv_counts=new int[union_size];
+    _recv_displs=new int[union_size];
+    _send_counts=new int[union_size];
+    _send_displs=new int[union_size];
+    
+    for (int i=0; i< union_size; i++)
+      {
+        _send_counts[i]=0;
+        _recv_counts[i]=0;
+        _recv_displs[i]=0;
+      }
+    for (int iproc=0; iproc < _topo_source->getProcGroup()->size(); iproc++)
+      {
+        //converts the rank in the source group to the rank in the union communicator
+        int unionrank=group->translateRank(_topo_source->getProcGroup(),iproc);
+        _recv_counts[unionrank]=source_arrays[iproc].size();
+      }
+    for (int i=1; i<union_size; i++)
+      _recv_displs[i]=_recv_displs[i-1]+_recv_counts[i-1];
+    _recv_buffer=new double[nb_local];
+    
+    delete group;
+  }
+
+  /*!
+   * Synchronizes a topology so that all the processes in the
+   * group possess it.
+   * 
+   * \param topo Topology that is transmitted. It is read on processes where it already exists, and it is created and filled on others.
+   * \param tag Communication tag associated with this operation.
+   */
+  void StructuredCoincidentDEC::broadcastTopology(BlockTopology*& topo, int tag)
+  {
+    MPI_Status status;
+  
+    int* serializer=0;
+    int size;
+  
+    MPIProcessorGroup* group=new MPIProcessorGroup(*_comm_interface);
+  
+    // The master proc creates a send buffer containing
+    // a serialized topology
+    int rank_master;
+  
+    if (topo!=0 && topo->getProcGroup()->myRank()==0)
+      {
+        MESSAGE ("Master rank");
+        topo->serialize(serializer, size);
+        rank_master = group->translateRank(topo->getProcGroup(),0);
+        MESSAGE("Master rank world number is "<<rank_master);
+        MESSAGE("World Size is "<<group->size());
+        for (int i=0; i< group->size(); i++)
+          {
+            if (i!= rank_master)
+              _comm_interface->send(&rank_master,1,MPI_INT, i,tag+i,*(group->getComm()));
+          }
+      }
+    else
+      {
+        MESSAGE(" rank "<<group->myRank()<< " waiting ...");
+        _comm_interface->recv(&rank_master, 1,MPI_INT, MPI_ANY_SOURCE, tag+group->myRank(), *(group->getComm()),&status);
+        MESSAGE(" rank "<<group->myRank()<< "received master rank"<<rank_master);
+      }
+    // The topology is broadcast to all processors in the group
+    _comm_interface->broadcast(&size, 1,MPI_INT,rank_master,*(group->getComm()));
+  
+    int* buffer=new int[size];
+    if (topo!=0 && topo->getProcGroup()->myRank()==0)
+      copy(serializer, serializer+size, buffer); 
+    _comm_interface->broadcast(buffer,size,MPI_INT,rank_master,*(group->getComm()));
+  
+    // Processors which did not possess the source topology 
+    // unserialize it
+  
+    BlockTopology* topotemp=new BlockTopology();
+    topotemp->unserialize(buffer, *_comm_interface);
+  
+    if (topo==0) 
+      topo=topotemp;
+    else 
+      delete topotemp;
+  
+    // Memory cleaning
+    delete[] buffer;
+    if (serializer!=0)
+      delete[] serializer;
+    MESSAGE (" rank "<<group->myRank()<< " unserialize is over");
+    delete group;
+  }
+
+
+
+  void StructuredCoincidentDEC::recvData()
+  {
+    //MPI_COMM_WORLD is used instead of group because there is no
+    //mechanism for creating the union group yet
+    MESSAGE("recvData");
+    for (int i=0; i< 4; i++)
+      cout << _recv_counts[i]<<" ";
+    cout <<endl;
+    for (int i=0; i< 4; i++)
+      cout << _recv_displs[i]<<" ";
+    cout <<endl;
+  
+    cout<<"start AllToAll"<<endl;
+    MPI_Comm comm = *(dynamic_cast<MPIProcessorGroup*>(_union_group)->getComm());
+    _comm_interface->allToAllV(_send_buffer, _send_counts, _send_displs, MPI_DOUBLE, 
+                               _recv_buffer, _recv_counts, _recv_displs, MPI_DOUBLE,comm);
+    cout<<"end AllToAll"<<endl;
+
+    int nb_local = _topo_target->getNbLocalElements();
+    //double* value=new double[nb_local];
+    double* value=const_cast<double*>(_local_field->getField()->getArray()->getPointer());
+  
+    int myranktarget=_topo_target->getProcGroup()->myRank();
+    vector<int> counters(_topo_source->getProcGroup()->size());
+    counters[0]=0;
+    for (int i=0; i<_topo_source->getProcGroup()->size()-1; i++)
+      {
+        MPIProcessorGroup* group=new MPIProcessorGroup(*_comm_interface);
+        int worldrank=group->translateRank(_topo_source->getProcGroup(),i);
+        counters[i+1]=counters[i]+_recv_counts[worldrank];
+        delete group;
+      }
+  
+    for (int ielem=0; ielem<nb_local ; ielem++)
+      {
+        int global = _topo_target->localToGlobal(make_pair(myranktarget, ielem));
+        pair<int,int> source_local =_topo_source->globalToLocal(global);
+        value[ielem]=_recv_buffer[counters[source_local.first]++];
+      }
+  
+  
+    //_local_field->getField()->setValue(value);
+  }
+
+  void StructuredCoincidentDEC::sendData()
+  {
+    MESSAGE ("sendData");
+    for (int i=0; i< 4; i++)
+      cout << _send_counts[i]<<" ";
+    cout <<endl;
+    for (int i=0; i< 4; i++)
+      cout << _send_displs[i]<<" ";
+    cout <<endl;
+    cout <<"start AllToAll"<<endl;
+    MPI_Comm comm = *(dynamic_cast<MPIProcessorGroup*>(_union_group)->getComm());
+    _comm_interface->allToAllV(_send_buffer, _send_counts, _send_displs, MPI_DOUBLE, 
+                               _recv_buffer, _recv_counts, _recv_displs, MPI_DOUBLE,comm);
+    cout<<"end AllToAll"<<endl;
+  }
+
+  /*! Prepares a DEC for data exchange
+
+    This method broadcasts the topologies from source to target 
+    so that the target side can analyse from which processors it
+    is expected to receive data. 
+  */
+  
+  void StructuredCoincidentDEC::synchronize()
+  {
+    if (_source_group->containsMyRank())
+      {
+        synchronizeTopology();
+        prepareSourceDE();
+      }
+    else if (_target_group->containsMyRank())
+      {
+        synchronizeTopology();
+        prepareTargetDE();
+      }
+  }
+}
+
diff --git a/src/ParaMEDMEM/StructuredCoincidentDEC.hxx b/src/ParaMEDMEM/StructuredCoincidentDEC.hxx
new file mode 100644 (file)
index 0000000..75f63b4
--- /dev/null
@@ -0,0 +1,58 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __STRUCTUREDCOINCIDENTDEC_HXX__
+#define __STRUCTUREDCOINCIDENTDEC_HXX__
+
+#include "DisjointDEC.hxx"
+#include "BlockTopology.hxx"
+
+
+namespace ParaMEDMEM
+{
+  class DEC;
+  class BlockTopology;
+  class StructuredCoincidentDEC : public DisjointDEC
+  {
+  public:
+    StructuredCoincidentDEC();
+    StructuredCoincidentDEC( ProcessorGroup& source, ProcessorGroup& target);
+    virtual ~StructuredCoincidentDEC();
+    void synchronize();
+    void recvData();
+    void sendData();
+    void prepareSourceDE();
+    void prepareTargetDE();
+
+  private :
+    void synchronizeTopology();
+    void broadcastTopology(BlockTopology*&, int tag);
+
+    BlockTopology* _topo_source;
+    BlockTopology* _topo_target;
+    int* _send_counts;
+    int* _recv_counts;
+    int* _send_displs;
+    int* _recv_displs;
+    double* _recv_buffer;
+    double* _send_buffer;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/TODO_JR b/src/ParaMEDMEM/TODO_JR
new file mode 100644 (file)
index 0000000..de2318d
--- /dev/null
@@ -0,0 +1,50 @@
+
+MPI_Access :
+============
+
+. Create point-to-point [I]SendRecv methods with one "target" for
+  the Send and one "target" for the Recv, like the MPI SendRecv.
+
+. Do not create a RequestStruct structure in synchronous mode.
+
+
+MPI_AccessDEC :
+===============
+
+. AllToAll, AllToAllv, AllToAllTime and AllToAllvTime contain
+  similar code sequences that could be factored out without
+  hurting the readability of the code.
+
+. In asynchronous mode there is no congestion control of the
+  messages sent in CheckSent(). It is true that in practice a
+  time synchronization is performed in AllToAllTime and
+  AllToAllvTime, but the problem could still occur with AllToAll
+  and AllToAllv. It would be possible to set a maximum number of
+  sent and "in flight" messages and compare it with the number of
+  requests reported by MPI_Access. If ?n?*UnionGroupSize were
+  exceeded, for example, CheckSent could run in "WithWait" mode,
+  so that Wait would be called instead of Test (see the sketch at
+  the end of this list).
+
+. Even though the interpolator prototype takes the parameters
+  int nStepBefore and int nStepAfter, the current code assumes
+  nStepBefore=1 and nStepAfter=1 only.
+  Thus we have (*_TimeMessages)[target][0] and (*_TimeMessages)[target][1]
+  as well as &(*_DataMessages)[target][0] and &(*_DataMessages)[target][1].
+
+. The nStepBefore and nStepAfter fields correspond to a requested
+  maximum. There should also be fields holding the numbers
+  actually available at a given time.
+
+. There is an OutOfTime field that is currently unused. Its usage
+  should be defined and it should probably be passed on to the
+  interpolator. Currently, the linear interpolator performs an
+  extrapolation when OutOfTime is true.
+
+. In CheckTime, the (*_DataMessages)[target][] buffers are
+  allocated and destroyed, whereas for a given "target" the
+  recvcount values could be considered constant over the whole
+  time loop. The buffers would then be allocated only once at the
+  start and freed only at the end.
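+
+A possible shape for the congestion control mentioned above (sketch only:
+maxSentRequests and sendRequestIdsSize() are illustrative names, not
+necessarily the actual MPI_Access/MPI_AccessDEC API):
+
+    // After posting an asynchronous send, before posting further messages:
+    int inFlight = _mpi_access->sendRequestIdsSize();          // illustrative query
+    bool withWait = ( inFlight > maxSentRequests * unionGroupSize );
+    checkSent( withWait );   // "WithWait" mode calls Wait instead of Test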
+
+
+
diff --git a/src/ParaMEDMEM/TimeInterpolator.cxx b/src/ParaMEDMEM/TimeInterpolator.cxx
new file mode 100644 (file)
index 0000000..86c3bfb
--- /dev/null
@@ -0,0 +1,34 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "TimeInterpolator.hxx"
+
+namespace ParaMEDMEM
+{
+  TimeInterpolator::TimeInterpolator( double InterpPrecision, int nStepBefore, int nStepAfter )
+  {
+    _interp_precision=InterpPrecision;
+    _n_step_before=nStepBefore;
+    _n_step_after=nStepAfter;
+  }
+
+  TimeInterpolator::~TimeInterpolator()
+  {
+  } 
+}
diff --git a/src/ParaMEDMEM/TimeInterpolator.hxx b/src/ParaMEDMEM/TimeInterpolator.hxx
new file mode 100644 (file)
index 0000000..30df1c5
--- /dev/null
@@ -0,0 +1,51 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __TIMEINTERPOLATOR_HXX__
+#define __TIMEINTERPOLATOR_HXX__
+
+#include "ProcessorGroup.hxx"
+
+#include <map>
+#include <iostream>
+
+namespace ParaMEDMEM
+{
+  class TimeInterpolator
+  {
+  public:  
+    TimeInterpolator( double InterpPrecision, int nStepBefore=1, int nStepAfter=1 );
+    virtual ~TimeInterpolator();
+
+    void setInterpParams( double InterpPrecision, int nStepBefore=1, int nStepAfter=1 ) { _interp_precision=InterpPrecision; _n_step_before=nStepBefore; _n_step_after=nStepAfter; }
+    void steps( int &nStepBefore, int &nStepAfter ) { nStepBefore=_n_step_before; nStepAfter=_n_step_after ; }
+    virtual void doInterp( double time0, double time1, double time, int recvcount ,
+                           int nbuff0, int nbuff1,
+                           int **recvbuff0, int **recvbuff1, int *result ) = 0;
+    virtual void doInterp( double time0, double time1, double time, int recvcount ,
+                           int nbuff0, int nbuff1,
+                           double **recvbuff0, double **recvbuff1, double *result ) = 0;
+  protected :
+    double _interp_precision;
+    int _n_step_before;
+    int _n_step_after;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEM/Topology.cxx b/src/ParaMEDMEM/Topology.cxx
new file mode 100644 (file)
index 0000000..49a7fc2
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "Topology.hxx"
+
+namespace ParaMEDMEM
+{
+  Topology::Topology()
+  {
+  }
+
+  Topology::~Topology()
+  {
+  }
+}
diff --git a/src/ParaMEDMEM/Topology.hxx b/src/ParaMEDMEM/Topology.hxx
new file mode 100644 (file)
index 0000000..4b10f8b
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __TOPOLOGY_HXX__
+#define __TOPOLOGY_HXX__
+
+#include <utility>
+
+namespace ParaMEDMEM
+{
+  class ProcessorGroup;
+
+  class Topology
+  {
+  public:
+    Topology();
+    virtual ~Topology();
+    virtual int getNbElements() const = 0;
+    virtual int getNbLocalElements() const  = 0;
+    virtual const ProcessorGroup* getProcGroup()const  = 0;
+  };
+}
+
+#endif
diff --git a/src/ParaMEDMEMTest/MPI2Connector.cxx b/src/ParaMEDMEMTest/MPI2Connector.cxx
new file mode 100644 (file)
index 0000000..616ac13
--- /dev/null
@@ -0,0 +1,153 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "MPI2Connector.hxx"
+
+#include <iostream>
+#include <cstring>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+MPI2Connector::MPI2Connector()
+{
+  MPI_Comm_size( MPI_COMM_WORLD, &_nb_proc );
+  MPI_Comm_rank( MPI_COMM_WORLD, &_num_proc );
+}
+
+MPI2Connector::~MPI2Connector()
+{
+}
+
+MPI_Comm MPI2Connector::remoteMPI2Connect(const std::string& service)
+{
+  int i;
+  char port_name[MPI_MAX_PORT_NAME];
+  char port_name_clt[MPI_MAX_PORT_NAME];
+  std::ostringstream msg;
+  MPI_Comm icom;
+
+  if( service.size() == 0 )
+    {
+      msg << "[" << _num_proc << "] You have to give a service name !";
+      std::cerr << msg.str().c_str() << std::endl;
+      throw std::exception();
+    }
+
+  _srv = false;
+
+  MPI_Barrier(MPI_COMM_WORLD);
+
+  MPI_Errhandler_set(MPI_COMM_WORLD, MPI_ERRORS_RETURN);
+  if( _num_proc == 0 )
+    { 
+      /* rank 0 tries to be a server. If the service is already published, it tries to be a client */
+      MPI_Open_port(MPI_INFO_NULL, port_name); 
+      if ( MPI_Lookup_name((char*)service.c_str(), MPI_INFO_NULL, port_name_clt) == MPI_SUCCESS )
+        {
+          std::cerr << "[" << _num_proc << "] I get the connection with " << service << " at " << port_name_clt << std::endl;
+          MPI_Close_port( port_name );
+        }
+      else if ( MPI_Publish_name((char*)service.c_str(), MPI_INFO_NULL, port_name) == MPI_SUCCESS )
+        {
+          _srv = true;
+          _port_name = port_name;
+          std::cerr << "[" << _num_proc << "] service " << service << " available at " << port_name << std::endl;
+        }      
+      else if ( MPI_Lookup_name((char*)service.c_str(), MPI_INFO_NULL, port_name_clt) == MPI_SUCCESS )
+        {
+          std::cerr << "[" << _num_proc << "] I get the connection with " << service << " at " << port_name_clt << std::endl;
+          MPI_Close_port( port_name );
+        }
+      else
+        {
+          msg << "[" << _num_proc << "] Error on connection with " << service << " at " << port_name_clt;
+          std::cerr << msg.str().c_str() << std::endl;
+          throw std::exception();
+        }
+    }
+  else
+    {
+      i=0;
+      /* Wait for rank 0 to publish the name, and try to be a client */
+      while ( i != TIMEOUT  ) 
+        {
+          sleep(1);
+          if ( MPI_Lookup_name((char*)service.c_str(), MPI_INFO_NULL, port_name_clt) == MPI_SUCCESS )
+            {
+              std::cerr << "[" << _num_proc << "] I get the connection with " << service << " at " << port_name_clt << std::endl;
+              break;
+            }
+          i++;
+        }
+      if(i==TIMEOUT)
+        {
+          msg << "[" << _num_proc << "] Error on connection with " << service << " at " << port_name_clt;
+          std::cerr << msg.str().c_str() << std::endl;
+          throw std::exception();
+        }
+    }
+  MPI_Errhandler_set(MPI_COMM_WORLD, MPI_ERRORS_ARE_FATAL);
+  
+  /* If rank 0 is server, all processes call MPI_Comm_accept */
+  /* If rank 0 is not server, all processes call MPI_Comm_connect */
+  int srv = (int)_srv;
+  MPI_Bcast(&srv,1,MPI_INT,0,MPI_COMM_WORLD);
+  _srv = (bool)srv;
+  if ( _srv )
+    MPI_Comm_accept( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &icom );
+  else
+    MPI_Comm_connect(port_name_clt, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &icom );
+
+  /* create global communicator: servers have low index in global communicator*/
+  MPI_Intercomm_merge(icom,!_srv,&_gcom);
+
+  /* only rank 0 can be server for unpublish name */
+  if(_num_proc != 0) _srv = false;
+
+  return _gcom;
+
+}
+
+void MPI2Connector::remoteMPI2Disconnect(const std::string& service)
+{
+  std::ostringstream msg;
+
+  if( service.size() == 0 )
+    {
+      msg << "[" << _num_proc << "] You have to give a service name !";
+      std::cerr << msg.str().c_str() << std::endl;
+      throw std::exception();
+    }
+
+  MPI_Comm_disconnect( &_gcom ); 
+  if ( _srv )
+    {
+
+      char port_name[MPI_MAX_PORT_NAME];
+      strcpy(port_name,_port_name.c_str());
+
+      MPI_Unpublish_name((char*)service.c_str(), MPI_INFO_NULL, port_name); 
+      std::cerr << "[" << _num_proc << "] " << service << ": close port " << _port_name << std::endl;
+      MPI_Close_port( port_name ); 
+    }
+  
+}
+
diff --git a/src/ParaMEDMEMTest/MPI2Connector.hxx b/src/ParaMEDMEMTest/MPI2Connector.hxx
new file mode 100644 (file)
index 0000000..57680b2
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef __MPI2CONNECTOR_HXX__
+#define __MPI2CONNECTOR_HXX__
+
+#include <mpi.h>
+#include <string>
+#include <sstream>
+
+class MPI2Connector
+{
+public:
+  MPI2Connector();
+  ~MPI2Connector();
+  // MPI2 connection
+  MPI_Comm remoteMPI2Connect(const std::string& service);
+  // MPI2 disconnection
+  void remoteMPI2Disconnect(const std::string& service);
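+
+  // Typical usage (illustrative sketch; "some_service" stands for any service
+  // name agreed upon by the two independently launched MPI programs):
+  //   MPI2Connector connector;
+  //   MPI_Comm gcom = connector.remoteMPI2Connect("some_service");
+  //   ... exchange data over gcom ...
+  //   connector.remoteMPI2Disconnect("some_service");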
+private:
+  // Process rank
+  int _num_proc;
+  // Number of processes
+  int _nb_proc;
+  MPI_Comm _gcom;
+  bool _srv;
+  std::string _port_name;
+private:
+  static const int TIMEOUT=5;
+};
+
+#endif
diff --git a/src/ParaMEDMEMTest/MPIAccessDECTest.cxx b/src/ParaMEDMEMTest/MPIAccessDECTest.cxx
new file mode 100644 (file)
index 0000000..c757e6e
--- /dev/null
@@ -0,0 +1,52 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include <sstream>
+#include <cmath>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+using namespace std;
+
+
+
+/*!
+ *  Tool to remove temporary files.
+ *  Allows automatic removal of temporary files in case of test failure.
+ */
+MPIAccessDECTest_TmpFilesRemover::~MPIAccessDECTest_TmpFilesRemover()
+{
+  set<string>::iterator it = myTmpFiles.begin();
+  for (; it != myTmpFiles.end(); it++) {
+    if (access((*it).data(), F_OK) == 0)
+      remove((*it).data());
+  }
+  myTmpFiles.clear();
+  //cout << "~MPIAccessTest_TmpFilesRemover()" << endl;
+}
+
+bool MPIAccessDECTest_TmpFilesRemover::Register(const string theTmpFile)
+{
+  return (myTmpFiles.insert(theTmpFile)).second;
+}
diff --git a/src/ParaMEDMEMTest/MPIAccessDECTest.hxx b/src/ParaMEDMEMTest/MPIAccessDECTest.hxx
new file mode 100644 (file)
index 0000000..5afb6e5
--- /dev/null
@@ -0,0 +1,108 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _MPIACCESSDECTEST_HXX_
+#define _MPIACCESSDECTEST_HXX_
+
+#include <cppunit/extensions/HelperMacros.h>
+
+#include <set>
+#include <string>
+#include <iostream>
+#include "mpi.h"
+
+// (ABN): too much text output in the MPIAccess tests - this makes
+// the analysis complicated:
+#define MPI_ACCESS_VERBOSE 0
+#define debugStream \
+    if (!MPI_ACCESS_VERBOSE) {} \
+    else std::cout
+
+class MPIAccessDECTest : public CppUnit::TestFixture
+{
+  CPPUNIT_TEST_SUITE( MPIAccessDECTest );
+  //  CPPUNIT_TEST( test_AllToAllDECSynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllDECAsynchronousPointToPoint ) ;
+  //CPPUNIT_TEST( test_AllToAllvDECSynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllvDECAsynchronousPointToPoint ) ;
+  //CPPUNIT_TEST( test_AllToAllTimeDECSynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllTimeDECAsynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllvTimeDECSynchronousNative ) ;
+  //CPPUNIT_TEST( test_AllToAllvTimeDECSynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllvTimeDECAsynchronousPointToPoint ) ;
+  //CPPUNIT_TEST( test_AllToAllvTimeDoubleDECSynchronousPointToPoint ) ;
+  CPPUNIT_TEST( test_AllToAllvTimeDoubleDECAsynchronousPointToPoint ) ;
+  CPPUNIT_TEST_SUITE_END();
+  
+
+public:
+  MPIAccessDECTest():CppUnit::TestFixture(){}
+  ~MPIAccessDECTest(){}  
+  void setUp(){}
+  void tearDown(){}
+  void test_AllToAllDECSynchronousPointToPoint() ;
+  void test_AllToAllDECAsynchronousPointToPoint() ;
+  void test_AllToAllvDECSynchronousPointToPoint() ;
+  void test_AllToAllvDECAsynchronousPointToPoint() ;
+  void test_AllToAllTimeDECSynchronousPointToPoint() ;
+  void test_AllToAllTimeDECAsynchronousPointToPoint() ;
+  void test_AllToAllvTimeDECSynchronousNative() ;
+  void test_AllToAllvTimeDECSynchronousPointToPoint() ;
+  void test_AllToAllvTimeDECAsynchronousPointToPoint() ;
+  void test_AllToAllvTimeDoubleDECSynchronousPointToPoint() ;
+  void test_AllToAllvTimeDoubleDECAsynchronousPointToPoint() ;
+
+private:
+  void test_AllToAllDEC( bool Asynchronous ) ;
+  void test_AllToAllvDEC( bool Asynchronous ) ;
+  void test_AllToAllTimeDEC( bool Asynchronous ) ;
+  void test_AllToAllvTimeDEC( bool Asynchronous , bool UseMPINative ) ;
+  void test_AllToAllvTimeDoubleDEC( bool Asynchronous ) ;
+  };
+
+// to automatically remove temporary files from disk
+class MPIAccessDECTest_TmpFilesRemover
+{
+public:
+  MPIAccessDECTest_TmpFilesRemover() {}
+  ~MPIAccessDECTest_TmpFilesRemover();
+  bool Register(const std::string theTmpFile);
+
+private:
+  std::set<std::string> myTmpFiles;
+};
+
+/*!
+ *  Tool to print array to stream.
+ */
+template<class T>
+void MPIAccessDECTest_DumpArray (std::ostream & stream, const T* array, const int length, const std::string text)
+{
+  stream << text << ": {";
+  if (length > 0) {
+    stream << array[0];
+    for (int i = 1; i < length; i++) {
+      stream << ", " << array[i];
+    }
+  }
+  stream << "}" << std::endl;
+};
+
+#endif
diff --git a/src/ParaMEDMEMTest/MPIAccessTest.cxx b/src/ParaMEDMEMTest/MPIAccessTest.cxx
new file mode 100644 (file)
index 0000000..a9714f8
--- /dev/null
@@ -0,0 +1,52 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include <sstream>
+#include <cmath>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+using namespace std;
+
+
+
+/*!
+ *  Tool to remove temporary files.
+ *  Allows automatic removal of temporary files in case of test failure.
+ */
+MPIAccessTest_TmpFilesRemover::~MPIAccessTest_TmpFilesRemover()
+{
+  set<string>::iterator it = myTmpFiles.begin();
+  for (; it != myTmpFiles.end(); it++) {
+    if (access((*it).data(), F_OK) == 0)
+      remove((*it).data());
+  }
+  myTmpFiles.clear();
+  //cout << "~MPIAccessTest_TmpFilesRemover()" << endl;
+}
+
+bool MPIAccessTest_TmpFilesRemover::Register(const string theTmpFile)
+{
+  return (myTmpFiles.insert(theTmpFile)).second;
+}
diff --git a/src/ParaMEDMEMTest/MPIAccessTest.hxx b/src/ParaMEDMEMTest/MPIAccessTest.hxx
new file mode 100644 (file)
index 0000000..f98fb55
--- /dev/null
@@ -0,0 +1,111 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _MPIACCESSTEST_HXX_
+#define _MPIACCESSTEST_HXX_
+
+#include <cppunit/extensions/HelperMacros.h>
+
+#include <set>
+#include <string>
+#include <iostream>
+#include "mpi.h"
+
+// (ABN): too much text output in the MPIAccess tests - this makes
+// the analysis complicated:
+#define MPI_ACCESS_VERBOSE 0
+#define debugStream \
+    if (!MPI_ACCESS_VERBOSE) {} \
+    else std::cout
+
+class MPIAccessTest : public CppUnit::TestFixture
+{
+  CPPUNIT_TEST_SUITE( MPIAccessTest );
+  CPPUNIT_TEST( test_MPI_Access_Send_Recv ) ;
+  CPPUNIT_TEST( test_MPI_Access_Cyclic_Send_Recv ) ;
+  CPPUNIT_TEST( test_MPI_Access_SendRecv ) ;
+  CPPUNIT_TEST( test_MPI_Access_ISend_IRecv ) ;
+  CPPUNIT_TEST( test_MPI_Access_Cyclic_ISend_IRecv ) ;
+  CPPUNIT_TEST( test_MPI_Access_ISendRecv ) ;
+  CPPUNIT_TEST( test_MPI_Access_Probe ) ;
+  CPPUNIT_TEST( test_MPI_Access_IProbe ) ;
+  CPPUNIT_TEST( test_MPI_Access_Cancel ) ;
+  CPPUNIT_TEST( test_MPI_Access_Send_Recv_Length ) ;
+  CPPUNIT_TEST( test_MPI_Access_ISend_IRecv_Length ) ;
+  CPPUNIT_TEST( test_MPI_Access_ISend_IRecv_Length_1 ) ;
+  CPPUNIT_TEST( test_MPI_Access_Time ) ;
+  CPPUNIT_TEST( test_MPI_Access_Time_0 ) ;
+  CPPUNIT_TEST( test_MPI_Access_ISend_IRecv_BottleNeck ) ;
+  CPPUNIT_TEST_SUITE_END();
+  
+
+public:
+  MPIAccessTest():CppUnit::TestFixture(){}
+  ~MPIAccessTest(){}  
+  void setUp(){}
+  void tearDown(){}
+  void test_MPI_Access_Send_Recv() ;
+  void test_MPI_Access_Cyclic_Send_Recv() ;
+  void test_MPI_Access_SendRecv() ;
+  void test_MPI_Access_ISend_IRecv() ;
+  void test_MPI_Access_Cyclic_ISend_IRecv() ;
+  void test_MPI_Access_ISendRecv() ;
+  void test_MPI_Access_Probe() ;
+  void test_MPI_Access_IProbe() ;
+  void test_MPI_Access_Cancel() ;
+  void test_MPI_Access_Send_Recv_Length() ;
+  void test_MPI_Access_ISend_IRecv_Length() ;
+  void test_MPI_Access_ISend_IRecv_Length_1() ;
+  void test_MPI_Access_Time() ;
+  void test_MPI_Access_Time_0() ;
+  void test_MPI_Access_ISend_IRecv_BottleNeck() ;
+
+private:
+  };
+
+// to automatically remove temporary files from disk
+class MPIAccessTest_TmpFilesRemover
+{
+public:
+  MPIAccessTest_TmpFilesRemover() {}
+  ~MPIAccessTest_TmpFilesRemover();
+  bool Register(const std::string theTmpFile);
+
+private:
+  std::set<std::string> myTmpFiles;
+};
+
+/*!
+ *  Tool to print array to stream.
+ */
+template<class T>
+void MPIAccessTest_DumpArray (std::ostream & stream, const T* array, const int length, const std::string text)
+{
+  stream << text << ": {";
+  if (length > 0) {
+    stream << array[0];
+    for (int i = 1; i < length; i++) {
+      stream << ", " << array[i];
+    }
+  }
+  stream << "}" << std::endl;
+}
+
+#endif
diff --git a/src/ParaMEDMEMTest/MPIMainTest.hxx b/src/ParaMEDMEMTest/MPIMainTest.hxx
new file mode 100644 (file)
index 0000000..eec6e5a
--- /dev/null
@@ -0,0 +1,105 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _MPIMAINTEST_HXX_
+#define _MPIMAINTEST_HXX_
+
+#include <cppunit/CompilerOutputter.h>
+#include <cppunit/TestResult.h>
+#include <cppunit/TestResultCollector.h>
+#include <cppunit/TextTestProgressListener.h>
+#include <cppunit/BriefTestProgressListener.h>
+#include <cppunit/extensions/TestFactoryRegistry.h>
+#include <cppunit/TestRunner.h>
+#include <stdexcept>
+
+#include <mpi.h>
+
+#include <iostream>
+#include <fstream>
+#ifndef WIN32
+#include <fpu_control.h>
+#endif
+
+// ============================================================================
+/*!
+ *  Main program source for unit tests with the CppUnit package. It does not
+ *  depend on the actual tests, so the same main is used for all partial unit tests.
+ */
+// ============================================================================
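+// (Each MPI test executable reuses this main by including this header at the
+//  end of its translation unit, as done below in ParaMEDMEMTestMPI2_1.cxx and
+//  ParaMEDMEMTestMPI2_2.cxx.)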
+
+int main(int argc, char* argv[])
+{
+#ifndef WIN32
+  fpu_control_t cw = _FPU_DEFAULT & ~(_FPU_MASK_IM | _FPU_MASK_ZM | _FPU_MASK_OM);
+  _FPU_SETCW(cw);
+#endif
+  MPI_Init(&argc,&argv);
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  
+  // --- Create the event manager and test controller
+  CPPUNIT_NS::TestResult controller;
+
+  // ---  Add a listener that collects test results
+  CPPUNIT_NS::TestResultCollector result;
+  controller.addListener( &result );        
+
+  // ---  Add a listener that prints dots as the tests run.
+#ifdef WIN32
+  CPPUNIT_NS::TextTestProgressListener progress;
+#else
+  CPPUNIT_NS::BriefTestProgressListener progress;
+#endif
+  controller.addListener( &progress );      
+
+  // ---  Get the top level suite from the registry
+
+  CPPUNIT_NS::Test *suite =
+    CPPUNIT_NS::TestFactoryRegistry::getRegistry().makeTest();
+
+  // ---  Add the test to the list of tests to run
+
+  CPPUNIT_NS::TestRunner runner;
+  runner.addTest( suite );
+  runner.run( controller);
+
+  // ---  Print test results in a compiler-compatible format.
+
+  std::ostringstream testFileName;
+  testFileName<<"UnitTestsResult"<<rank;
+  std::ofstream testFile;
+  testFile.open(testFileName.str().c_str(), std::ios::out |  std::ios::trunc);
+  //CPPUNIT_NS::CompilerOutputter outputter( &result, std::cerr );
+  CPPUNIT_NS::CompilerOutputter outputter( &result, testFile );
+  outputter.write(); 
+
+  // ---  Collect the overall result.
+
+  bool wasSuccessful = result.wasSuccessful();
+  testFile.close();
+
+  // ---  Return error code 1 if one of the tests failed.
+
+  MPI_Finalize();
+  
+  return wasSuccessful ? 0 : 1;
+}
+
+#endif
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest.cxx
new file mode 100644 (file)
index 0000000..42899d9
--- /dev/null
@@ -0,0 +1,132 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include <sstream>
+#include <cmath>
+#include <list>
+#include <stdexcept>
+#include <stdlib.h>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+//================================================================================
+/*!
+ * \brief Get the path to a resource file.
+ *
+ * When running 'make test', the resource file is taken from the MED_SRC/resources folder.
+ * Otherwise, the file is searched for in the ${MED_ROOT_DIR}/share/salome/resources/med folder.
+ * 
+ * \param filename name of the resource file (should not include a path)
+ * \return full path to the resource file
+ */
+//================================================================================
+
+std::string ParaMEDMEMTest::getResourceFile( const std::string& filename )
+{
+  std::string resourceFile = "";
+
+  if ( getenv("top_srcdir") ) {
+    // we are in 'make test' step
+    resourceFile = getenv("top_srcdir");
+    resourceFile += "/resources/";
+  }
+  else if ( getenv("MED_ROOT_DIR") ) {
+    // use MED_ROOT_DIR env.var
+    resourceFile = getenv("MED_ROOT_DIR");
+    resourceFile += "/share/salome/resources/med/";
+  }
+  resourceFile += filename;
+  return resourceFile;
+}
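+// Usage sketch (the file name and the expanded paths are illustrative only):
+//   std::string path = getResourceFile("some_input.med");
+//   // -> "<top_srcdir>/resources/some_input.med" during 'make test',
+//   //    "<MED_ROOT_DIR>/share/salome/resources/med/some_input.med" otherwise.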
+
+
+//================================================================================
+/*!
+ * \brief Returns writable temporary directory
+ * \return full path to the temporary directory
+ */
+//================================================================================
+
+std::string ParaMEDMEMTest::getTmpDirectory()
+{
+  std::string path;
+
+  std::list<std::string> dirs;
+  if ( getenv("TMP") )    dirs.push_back( getenv("TMP" ));
+  if ( getenv("TMPDIR") ) dirs.push_back( getenv("TMPDIR" ));
+  dirs.push_back( "/tmp" );
+
+  std::string tmpd = "";
+  for ( std::list<std::string>::iterator dir = dirs.begin(); dir != dirs.end() && tmpd == "" ; ++dir ) {
+    if ( access( dir->data(), W_OK ) == 0 ) {
+      tmpd = dir->data();
+    }
+  }
+
+  if ( tmpd == "" )
+    throw std::runtime_error("Can't find writable temporary directory. Set TMP environment variable");
+
+  return tmpd;
+}
+
+//================================================================================
+/*!
+ * \brief Creates a copy of the source file (if one is specified)
+ * in the temporary directory and returns the path to the temporary file
+ *
+ * \param tmpfile name of the temporary file (without path)
+ * \param srcfile source file
+ * \return path to the temporary file
+ */
+//================================================================================
+std::string ParaMEDMEMTest::makeTmpFile( const std::string& tmpfile, const std::string& srcfile )
+{
+  std::string tmpf = getTmpDirectory() + "/" + tmpfile;
+  if ( srcfile != "" ) {
+    std::string cmd  = "cp " + srcfile + " " + tmpf + " ; chmod +w " + tmpf;
+    system( cmd.c_str() );
+  }
+  return tmpf;
+}
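+// Typical usage sketch (illustration only, the file names are made up):
+//   std::string src = getResourceFile("some_input.med");
+//   std::string tmp = makeTmpFile("some_input_copy.med", src); // writable copy in the tmp dir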
+
+
+/*!
+ *  Tool to remove temporary files.
+ *  Allows automatic removal of temporary files in case of test failure.
+ */
+ParaMEDMEMTest_TmpFilesRemover::~ParaMEDMEMTest_TmpFilesRemover()
+{
+  std::set<std::string>::iterator it = myTmpFiles.begin();
+  for (; it != myTmpFiles.end(); it++) {
+    if (access((*it).data(), F_OK) == 0)
+      remove((*it).data());
+  }
+  myTmpFiles.clear();
+  //cout << "~ParaMEDMEMTest_TmpFilesRemover()" << endl;
+}
+
+bool ParaMEDMEMTest_TmpFilesRemover::Register(const std::string theTmpFile)
+{
+  return (myTmpFiles.insert(theTmpFile)).second;
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest.hxx b/src/ParaMEDMEMTest/ParaMEDMEMTest.hxx
new file mode 100644 (file)
index 0000000..a8bf2b4
--- /dev/null
@@ -0,0 +1,188 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifndef _ParaMEDMEMTEST_HXX_
+#define _ParaMEDMEMTEST_HXX_
+
+#include <cppunit/extensions/HelperMacros.h>
+
+#include <set>
+#include <string>
+#include <iostream>
+#include "mpi.h"
+
+
+class ParaMEDMEMTest : public CppUnit::TestFixture
+{
+  CPPUNIT_TEST_SUITE( ParaMEDMEMTest );
+  CPPUNIT_TEST(testMPIProcessorGroup_constructor);
+  CPPUNIT_TEST(testMPIProcessorGroup_boolean);
+  CPPUNIT_TEST(testMPIProcessorGroup_rank);
+  CPPUNIT_TEST(testBlockTopology_constructor);
+  CPPUNIT_TEST(testBlockTopology_serialize);
+  CPPUNIT_TEST(testInterpKernelDEC_1D);
+  CPPUNIT_TEST(testInterpKernelDEC_2DCurve);
+  CPPUNIT_TEST(testInterpKernelDEC_2D);
+  CPPUNIT_TEST(testInterpKernelDEC2_2D);
+  CPPUNIT_TEST(testInterpKernelDEC_2DP0P1);
+  CPPUNIT_TEST(testInterpKernelDEC_3D);
+  CPPUNIT_TEST(testInterpKernelDECNonOverlapp_2D_P0P0);
+  CPPUNIT_TEST(testInterpKernelDECNonOverlapp_2D_P0P1P1P0);
+  CPPUNIT_TEST(testInterpKernelDEC2DM1D_P0P0);
+  CPPUNIT_TEST(testInterpKernelDECPartialProcs);
+  CPPUNIT_TEST(testInterpKernelDEC3DSurfEmptyBBox);
+  CPPUNIT_TEST(testOverlapDEC1);
+
+  CPPUNIT_TEST(testSynchronousEqualInterpKernelWithoutInterpNativeDEC_2D);
+  CPPUNIT_TEST(testSynchronousEqualInterpKernelWithoutInterpDEC_2D);
+  CPPUNIT_TEST(testSynchronousEqualInterpKernelDEC_2D);
+  CPPUNIT_TEST(testSynchronousFasterSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testSynchronousSlowerSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testSynchronousSlowSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testSynchronousFastSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testAsynchronousEqualInterpKernelDEC_2D);
+  CPPUNIT_TEST(testAsynchronousFasterSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testAsynchronousSlowerSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testAsynchronousSlowSourceInterpKernelDEC_2D);
+  CPPUNIT_TEST(testAsynchronousFastSourceInterpKernelDEC_2D);
+#ifdef MED_ENABLE_FVM
+  //can be added again after FVM correction for 2D
+  //  CPPUNIT_TEST(testNonCoincidentDEC_2D);
+  CPPUNIT_TEST(testNonCoincidentDEC_3D);
+#endif
+  CPPUNIT_TEST(testStructuredCoincidentDEC);
+  CPPUNIT_TEST(testStructuredCoincidentDEC);
+  CPPUNIT_TEST(testICoco1);
+  CPPUNIT_TEST(testGauthier1);
+  CPPUNIT_TEST(testGauthier2);
+  CPPUNIT_TEST(testGauthier3);
+  CPPUNIT_TEST(testGauthier4);
+  CPPUNIT_TEST(testFabienAPI1);
+  CPPUNIT_TEST(testFabienAPI2);
+  CPPUNIT_TEST(testMEDLoaderRead1);
+  CPPUNIT_TEST(testMEDLoaderPolygonRead);
+  CPPUNIT_TEST(testMEDLoaderPolyhedronRead);
+  CPPUNIT_TEST_SUITE_END();
+  
+
+public:
+  ParaMEDMEMTest():CppUnit::TestFixture(){}
+  ~ParaMEDMEMTest(){}  
+  void setUp(){}
+  void tearDown(){}
+  void testMPIProcessorGroup_constructor();
+  void testMPIProcessorGroup_boolean();
+  void testMPIProcessorGroup_rank();
+  void testBlockTopology_constructor();
+  void testBlockTopology_serialize();
+  void testInterpKernelDEC_1D();
+  void testInterpKernelDEC_2DCurve();
+  void testInterpKernelDEC_2D();
+  void testInterpKernelDEC2_2D();
+  void testInterpKernelDEC_2DP0P1();
+  void testInterpKernelDEC_3D();
+  void testInterpKernelDECNonOverlapp_2D_P0P0();
+  void testInterpKernelDECNonOverlapp_2D_P0P1P1P0();
+  void testInterpKernelDEC2DM1D_P0P0();
+  void testInterpKernelDECPartialProcs();
+  void testInterpKernelDEC3DSurfEmptyBBox();
+  void testOverlapDEC1();
+#ifdef MED_ENABLE_FVM
+  void testNonCoincidentDEC_2D();
+  void testNonCoincidentDEC_3D();
+#endif
+  void testStructuredCoincidentDEC();
+  void testSynchronousEqualInterpKernelWithoutInterpNativeDEC_2D();
+  void testSynchronousEqualInterpKernelWithoutInterpDEC_2D();
+  void testSynchronousEqualInterpKernelDEC_2D();
+  void testSynchronousFasterSourceInterpKernelDEC_2D();
+  void testSynchronousSlowerSourceInterpKernelDEC_2D();
+  void testSynchronousSlowSourceInterpKernelDEC_2D();
+  void testSynchronousFastSourceInterpKernelDEC_2D();
+
+  void testAsynchronousEqualInterpKernelDEC_2D();
+  void testAsynchronousFasterSourceInterpKernelDEC_2D();
+  void testAsynchronousSlowerSourceInterpKernelDEC_2D();
+  void testAsynchronousSlowSourceInterpKernelDEC_2D();
+  void testAsynchronousFastSourceInterpKernelDEC_2D();
+  //
+  void testICoco1();
+  void testGauthier1();
+  void testGauthier2();
+  void testGauthier3();
+  void testGauthier4();
+  void testFabienAPI1();
+  void testFabienAPI2();
+  //
+  void testMEDLoaderRead1();
+  void testMEDLoaderPolygonRead();
+  void testMEDLoaderPolyhedronRead();
+  void testMEDLoaderWrite1();
+  void testMEDLoaderPolygonWrite();
+
+  std::string getResourceFile( const std::string& );
+  std::string getTmpDirectory();
+  std::string makeTmpFile( const std::string&, const std::string& = "" );
+
+private:
+#ifdef MED_ENABLE_FVM
+  void testNonCoincidentDEC(const std::string& filename1, 
+                            const std::string& meshname1, 
+                            const std::string& filename2, 
+                            const std::string& meshname2,
+                            int nbprocsource, double epsilon);
+#endif
+  void testAsynchronousInterpKernelDEC_2D(double dtA, double tmaxA, 
+                                          double dtB, double tmaxB,
+                                          bool WithPointToPoint, bool Asynchronous, bool WithInterp, const char *srcMeth, const char *targetMeth);
+  void testInterpKernelDEC_2D_(const char *srcMeth, const char *targetMeth);
+  void testInterpKernelDEC2_2D_(const char *srcMeth, const char *targetMeth);
+  void testInterpKernelDEC_3D_(const char *srcMeth, const char *targetMeth);
+};
+
+// to automatically remove temporary files from disk
+class ParaMEDMEMTest_TmpFilesRemover
+{
+public:
+  ParaMEDMEMTest_TmpFilesRemover() {}
+  ~ParaMEDMEMTest_TmpFilesRemover();
+  bool Register(const std::string theTmpFile);
+
+private:
+  std::set<std::string> myTmpFiles;
+};
+
+/*!
+ *  Tool to print array to stream.
+ */
+template<class T>
+void ParaMEDMEMTest_DumpArray (std::ostream & stream, const T* array, const int length, const std::string text)
+{
+  stream << text << ": {";
+  if (length > 0) {
+    stream << array[0];
+    for (int i = 1; i < length; i++) {
+      stream << ", " << array[i];
+    }
+  }
+  stream << "}" << std::endl;
+}
+
+#endif
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_1.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_1.cxx
new file mode 100644 (file)
index 0000000..e569721
--- /dev/null
@@ -0,0 +1,125 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <cppunit/extensions/HelperMacros.h>
+
+#include "MPI2Connector.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "InterpKernelDEC.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "CommInterface.hxx"
+
+#include <mpi.h>
+#include <iostream>
+#include <stdlib.h>
+
+class MPI2ParaMEDMEMTest : public CppUnit::TestFixture
+{
+  CPPUNIT_TEST_SUITE( MPI2ParaMEDMEMTest );
+  CPPUNIT_TEST( testBasicMPI2_1 );
+  CPPUNIT_TEST_SUITE_END();
+public:
+  void testBasicMPI2_1();
+};
+
+using namespace ParaMEDMEM;
+
+void MPI2ParaMEDMEMTest::testBasicMPI2_1()
+{
+  int lsize, lrank, gsize, grank;
+  MPI_Comm gcom;
+  std::string service = "SERVICE";
+  std::ostringstream meshfilename, meshname;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::MEDCouplingUMesh *mesh;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  ParaMEDMEM::CommInterface *interface;
+  ParaMEDMEM::MPIProcessorGroup *source, *target;
+
+  MPI_Comm_size( MPI_COMM_WORLD, &lsize );
+  MPI_Comm_rank( MPI_COMM_WORLD, &lrank );
+  if(lsize!=2)
+    {
+      CPPUNIT_ASSERT(false);
+      return;
+    }
+
+  /* Connection to the remote program */
+  MPI2Connector *mpio = new MPI2Connector;
+  gcom = mpio->remoteMPI2Connect(service);
+  MPI_Comm_size( gcom, &gsize );
+  MPI_Comm_rank( gcom, &grank );
+  if(gsize!=5)
+    {
+      CPPUNIT_ASSERT(false);
+      return;
+    }
+  interface = new ParaMEDMEM::CommInterface;
+  source = new ParaMEDMEM::MPIProcessorGroup(*interface,0,lsize-1,gcom);
+  target = new ParaMEDMEM::MPIProcessorGroup(*interface,lsize,gsize-1,gcom);
+
+  const double sourceCoordsAll[2][8]={{0.4,0.5,0.4,1.5,1.6,1.5,1.6,0.5},
+                                      {0.3,-0.5,1.6,-0.5,1.6,-1.5,0.3,-1.5}};
+  
+  int conn4All[8]={0,1,2,3,4,5,6,7};
+  
+  std::ostringstream stream; stream << "sourcemesh2D proc " << grank;
+  mesh=MEDCouplingUMesh::New(stream.str().c_str(),2);
+  mesh->allocateCells(2);
+  mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All);
+  mesh->finishInsertingCells();
+  DataArrayDouble *myCoords=DataArrayDouble::New();
+  myCoords->alloc(4,2);
+  const double *sourceCoords=sourceCoordsAll[grank];
+  std::copy(sourceCoords,sourceCoords+8,myCoords->getPointer());
+  mesh->setCoords(myCoords);
+  myCoords->decrRef();
+  paramesh=new ParaMESH(mesh,*source,"source mesh");
+  ParaMEDMEM::ComponentTopology comptopo;
+  parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+  double *value=parafield->getField()->getArray()->getPointer();
+  value[0]=34+13*((double)grank);
+
+  ParaMEDMEM::InterpKernelDEC dec(*source,*target);
+  parafield->getField()->setNature(ConservativeVolumic);
+
+
+  dec.setMethod("P0");
+  dec.attachLocalField(parafield);
+  dec.synchronize();
+  dec.setForcedRenormalization(false);
+  dec.sendData();
+  /* Disconnection from the remote program */
+  mpio->remoteMPI2Disconnect(service);
+  /* clean-up */
+  delete mpio;
+  delete parafield;
+  mesh->decrRef();
+  delete paramesh;
+  delete source;
+  delete target;
+  delete interface;
+}
+
+CPPUNIT_TEST_SUITE_REGISTRATION( MPI2ParaMEDMEMTest );
+
+#include "MPIMainTest.hxx"
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_2.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTestMPI2_2.cxx
new file mode 100644 (file)
index 0000000..102443e
--- /dev/null
@@ -0,0 +1,130 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <cppunit/extensions/HelperMacros.h>
+
+#include "MPI2Connector.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "InterpKernelDEC.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "CommInterface.hxx"
+
+#include <mpi.h>
+#include <iostream>
+#include <stdlib.h>
+
+class MPI2ParaMEDMEMTest : public CppUnit::TestFixture
+{
+  CPPUNIT_TEST_SUITE( MPI2ParaMEDMEMTest );
+  CPPUNIT_TEST( testBasicMPI2_1 );
+  CPPUNIT_TEST_SUITE_END();
+public:
+  void testBasicMPI2_1();
+};
+
+using namespace ParaMEDMEM;
+
+void MPI2ParaMEDMEMTest::testBasicMPI2_1()
+{
+  int lsize, lrank, gsize, grank;
+  MPI_Comm gcom;
+  std::string service = "SERVICE";
+  std::ostringstream meshfilename, meshname;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  ParaMEDMEM::CommInterface* interface;
+  ParaMEDMEM::MPIProcessorGroup* source, *target;
+  
+  MPI_Comm_size( MPI_COMM_WORLD, &lsize );
+  MPI_Comm_rank( MPI_COMM_WORLD, &lrank );
+  if(lsize!=3)
+    {
+      CPPUNIT_ASSERT(false);
+      return;
+    }
+
+  /* Connection to the remote program */
+  MPI2Connector *mpio = new MPI2Connector;
+  gcom = mpio->remoteMPI2Connect(service);
+  
+  MPI_Comm_size( gcom, &gsize );
+  MPI_Comm_rank( gcom, &grank );
+  if(gsize!=5)
+    {
+      CPPUNIT_ASSERT(false);
+      return;
+    }
+
+  interface = new ParaMEDMEM::CommInterface;
+  source = new ParaMEDMEM::MPIProcessorGroup(*interface,0,gsize-lsize-1,gcom);
+  target = new ParaMEDMEM::MPIProcessorGroup(*interface,gsize-lsize,gsize-1,gcom);
+
+  const double targetCoordsAll[3][16]={{0.7,1.45,0.7,1.65,0.9,1.65,0.9,1.45,  1.1,1.4,1.1,1.6,1.3,1.6,1.3,1.4},
+                                       {0.7,-0.6,0.7,0.7,0.9,0.7,0.9,-0.6,  1.1,-0.7,1.1,0.6,1.3,0.6,1.3,-0.7},
+                                       {0.7,-1.55,0.7,-1.35,0.9,-1.35,0.9,-1.55,  1.1,-1.65,1.1,-1.45,1.3,-1.45,1.3,-1.65}};
+  int conn4All[8]={0,1,2,3,4,5,6,7};
+  double targetResults[3][2]={{34.,34.},{38.333333333333336,42.666666666666664},{47.,47.}};
+
+  std::ostringstream stream; stream << "targetmesh2D proc " << grank-(gsize-lsize);
+  mesh=MEDCouplingUMesh::New(stream.str().c_str(),2);
+  mesh->allocateCells(2);
+  mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All);
+  mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All+4);
+  mesh->finishInsertingCells();
+  DataArrayDouble *myCoords=DataArrayDouble::New();
+  myCoords->alloc(8,2);
+  const double *targetCoords=targetCoordsAll[grank-(gsize-lsize)];
+  std::copy(targetCoords,targetCoords+16,myCoords->getPointer());
+  mesh->setCoords(myCoords);
+  myCoords->decrRef();
+  paramesh=new ParaMESH (mesh,*target,"target mesh");
+  ParaMEDMEM::ComponentTopology comptopo;
+  parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+
+  ParaMEDMEM::InterpKernelDEC dec(*source,*target);
+  parafield->getField()->setNature(ConservativeVolumic);
+
+  dec.setMethod("P0");
+  dec.attachLocalField(parafield);
+  dec.synchronize();
+  dec.setForcedRenormalization(false);
+  dec.recvData();
+  const double *res=parafield->getField()->getArray()->getConstPointer();
+  const double *expected=targetResults[grank-(gsize-lsize)];
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[0],res[0],1e-13);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[1],res[1],1e-13);
+  /* Disconnection from the remote program */
+  mpio->remoteMPI2Disconnect(service);
+  /* clean-up */
+  delete mpio;
+  delete parafield;
+  mesh->decrRef();
+  delete paramesh;
+  delete source;
+  delete target;
+  delete interface;
+}
+
+CPPUNIT_TEST_SUITE_REGISTRATION( MPI2ParaMEDMEMTest );
+
+#include "MPIMainTest.hxx"
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_BlockTopology.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_BlockTopology.cxx
new file mode 100644 (file)
index 0000000..dc129cc
--- /dev/null
@@ -0,0 +1,123 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "InterpolationUtils.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "BlockTopology.hxx"
+
+#include <string>
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+
+using namespace std;
+using namespace ParaMEDMEM;
+/*
+ * Check methods defined in BlockTopology.hxx
+ *
+  BlockTopology(){};
+  BlockTopology(const ProcessorGroup& group, const MEDMEM::GRID& grid); 
+  BlockTopology(const BlockTopology& geom_topo, const ComponentTopology& comp_topo);
+  (+) BlockTopology(const ProcessorGroup& group, int nb_elem);
+  virtual ~BlockTopology();
+  (+) inline int getNbElements()const;
+  (+) inline int getNbLocalElements() const;
+  const ProcessorGroup* getProcGroup()const {return _proc_group;};
+  (+) inline std::pair<int,int> globalToLocal (const int) const ;
+  (+) inline int localToGlobal (const std::pair<int,int>) const;
+  (+) std::vector<std::pair<int,int> > getLocalArrayMinMax() const ;
+  (+) int getDimension() const {return _dimension;};
+  (+) void serialize(int* & serializer, int& size) const ;
+  (+) void unserialize(const int* serializer, const CommInterface& comm_interface);
+  
+ */
+void ParaMEDMEMTest::testBlockTopology_constructor()
+{
+  //test constructor
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  CommInterface interface;
+  MPIProcessorGroup group(interface);
+  BlockTopology blocktopo(group,1);
+  CPPUNIT_ASSERT_EQUAL(1,blocktopo.getNbLocalElements());
+  CPPUNIT_ASSERT_EQUAL(size,blocktopo.getNbElements());
+  CPPUNIT_ASSERT_EQUAL(1,blocktopo.getDimension());
+  
+  //checking access methods
+  BlockTopology blocktopo2(group,2);
+  std::pair<int,int> local= blocktopo2.globalToLocal(0);
+  CPPUNIT_ASSERT_EQUAL(local.first,0);
+  CPPUNIT_ASSERT_EQUAL(local.second,0);
+  int global=blocktopo2.localToGlobal(local);
+  CPPUNIT_ASSERT_EQUAL(global,0);
+  
+  local = blocktopo2.globalToLocal(1);
+  CPPUNIT_ASSERT_EQUAL(local.first,0);
+  CPPUNIT_ASSERT_EQUAL(local.second,1);
+  global=blocktopo2.localToGlobal(local);
+  CPPUNIT_ASSERT_EQUAL(global,1);
+  
+  local = blocktopo2.globalToLocal(2*size-1);
+  CPPUNIT_ASSERT_EQUAL(local.first,size-1);
+  CPPUNIT_ASSERT_EQUAL(local.second,1);
+  global=blocktopo2.localToGlobal(local);
+  CPPUNIT_ASSERT_EQUAL(global,2*size-1);
+
+  std::vector<std::pair<int,int> > bounds = blocktopo2.getLocalArrayMinMax();
+  int vecsize = bounds.size();
+  CPPUNIT_ASSERT_EQUAL(1,vecsize);
+  CPPUNIT_ASSERT_EQUAL(2*rank, (bounds[0]).first);
+  CPPUNIT_ASSERT_EQUAL(2*rank+2, (bounds[0]).second);
+}
+
+void ParaMEDMEMTest::testBlockTopology_serialize()
+{
+
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  CommInterface interface;
+  MPIProcessorGroup group(interface);
+  BlockTopology blocktopo(group,3);
+
+  // Test the serialization process that is used to transfer a
+  // block topology via an MPI_Send/Recv communication.
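+  // (In a real exchange the buffer produced by serialize() could be sent with,
+  //  e.g., MPI_Send(serializer, sersize, MPI_INT, dest, tag, MPI_COMM_WORLD) and
+  //  received with a matching MPI_Recv() before calling unserialize(); here both
+  //  sides live in the same process, so the buffer is passed directly.)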
+  BlockTopology blocktopo_recv;
+  int* serializer;
+  int sersize;
+  blocktopo.serialize(serializer,sersize);
+  blocktopo_recv.unserialize(serializer,interface);
+  CPPUNIT_ASSERT_EQUAL(blocktopo.getNbElements(),blocktopo_recv.getNbElements());
+  delete [] serializer;
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_FabienAPI.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_FabienAPI.cxx
new file mode 100644 (file)
index 0000000..341ed7c
--- /dev/null
@@ -0,0 +1,199 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "InterpKernelDEC.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+
+#include <set>
+
+using namespace ParaMEDMEM;
+
+void ParaMEDMEMTest::testFabienAPI1()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  int procs_source_c[1]={0};
+  std::set<int> procs_source(procs_source_c,procs_source_c+1);
+  int procs_target_c[1]={1};
+  std::set<int> procs_target(procs_target_c,procs_target_c+1);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  double targetCoords[8]={ 0.,0., 1., 0., 0., 1., 1., 1. };
+  CommInterface comm;
+  //
+  ParaMEDMEM::InterpKernelDEC *dec=new ParaMEDMEM::InterpKernelDEC(procs_source,procs_target);
+  if(dec->isInSourceSide())
+    {    
+      mesh=MEDCouplingUMesh::New();
+      mesh->setMeshDimension(2);
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      int targetConn[4]={0,2,3,1};
+      mesh->allocateCells(1);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn);
+      mesh->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*dec->getSourceGrp(),"source mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+      double *vals=parafield->getField()->getArray()->getPointer();
+      vals[0]=7.;
+    }
+  if(dec->isInTargetSide())
+    {
+      mesh=MEDCouplingUMesh::New();
+      mesh->setMeshDimension(2);
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      int targetConn[6]={0,2,1,2,3,1};
+      mesh->allocateCells(2);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn+3);
+      mesh->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*dec->getTargetGrp(),"target mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+    }
+  dec->attachLocalField(parafield);
+  dec->synchronize();
+  dec->sendRecvData();
+  if(dec->isInTargetSide())
+    {
+      const double *valsToTest=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(valsToTest[0],7.,1e-14);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(valsToTest[1],7.,1e-14);
+    }
+  //
+  delete parafield;
+  delete paramesh;
+  if(mesh)
+    mesh->decrRef();
+  delete dec;
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*!
+ * Same as testFabienAPI1 except that the procs are shuffled. Tests the correct handling of group translation in the newly created communicator.
+ */
+void ParaMEDMEMTest::testFabienAPI2()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  int procs_source_c[1]={2};//difference with testFabienAPI1
+  std::set<int> procs_source(procs_source_c,procs_source_c+1);
+  int procs_target_c[1]={1};
+  std::set<int> procs_target(procs_target_c,procs_target_c+1);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  double targetCoords[8]={ 0.,0., 1., 0., 0., 1., 1., 1. };
+  CommInterface comm;
+  //
+  ParaMEDMEM::InterpKernelDEC *dec=new ParaMEDMEM::InterpKernelDEC(procs_source,procs_target);
+  if(dec->isInSourceSide())
+    {    
+      mesh=MEDCouplingUMesh::New();
+      mesh->setMeshDimension(2);
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      int targetConn[4]={0,2,3,1};
+      mesh->allocateCells(1);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn);
+      mesh->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*dec->getSourceGrp(),"source mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+      double *vals=parafield->getField()->getArray()->getPointer();
+      vals[0]=7.;
+    }
+  if(dec->isInTargetSide())
+    {
+      mesh=MEDCouplingUMesh::New();
+      mesh->setMeshDimension(2);
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      int targetConn[6]={0,2,1,2,3,1};
+      mesh->allocateCells(2);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn+3);
+      mesh->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*dec->getTargetGrp(),"target mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+    }
+  dec->attachLocalField(parafield);
+  dec->synchronize();
+  dec->sendRecvData();
+  if(dec->isInTargetSide())
+    {
+      const double *valsToTest=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(valsToTest[0],7.,1e-14);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(valsToTest[1],7.,1e-14);
+    }
+  //
+  delete parafield;
+  delete paramesh;
+  if(mesh)
+    mesh->decrRef();
+  delete dec;
+  MPI_Barrier(MPI_COMM_WORLD);
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_Gauthier1.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_Gauthier1.cxx
new file mode 100644 (file)
index 0000000..cc97ede
--- /dev/null
@@ -0,0 +1,665 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "DEC.hxx"
+#include "InterpKernelDEC.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+#include "BlockTopology.hxx"
+
+#include <set>
+#include <time.h>
+#include <iostream>
+#include <assert.h>
+#include <string>
+#include <math.h>
+
+using namespace std;
+using namespace ParaMEDMEM;
+using namespace ICoCo;
+
+void afficheGauthier1(const ParaFIELD& field, const double *vals, int lgth)
+{
+  const DataArrayDouble *valsOfField(field.getField()->getArray());
+  CPPUNIT_ASSERT_EQUAL(lgth,valsOfField->getNumberOfTuples());
+  for (int ele=0;ele<valsOfField->getNumberOfTuples();ele++)
+    CPPUNIT_ASSERT_DOUBLES_EQUAL(vals[ele],valsOfField->getIJ(ele,0),1e-12);
+}
+
+MEDCouplingUMesh *init_quadGauthier1(int is_master)
+{
+  MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> m(MEDCouplingUMesh::New("champ_quad",2));
+  MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> coo(DataArrayDouble::New());
+  if(is_master)
+    {
+      const double dataCoo[24]={0,0,0,1,0,0,0,0,1,1,0,1,0,1,0,1,1,0,0,1,1,1,1,1};
+      coo->alloc(8,3);
+      std::copy(dataCoo,dataCoo+24,coo->getPointer());
+      const int conn[8]={0,1,3,2,4,5,7,6};
+      m->allocateCells(2);
+      m->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+      m->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn+4);
+    }
+  else
+    {
+      coo->alloc(0,3);
+      m->allocateCells(0);
+    }
+  m->setCoords(coo);
+  return m.retn();
+}
+
+MEDCouplingUMesh *init_triangleGauthier1(int is_master)
+{
+  MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> m(MEDCouplingUMesh::New("champ_triangle",2));
+  MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> coo(DataArrayDouble::New());
+  if(is_master)
+    {
+      const double dataCoo[24]={0,0,0,1,0,0,0,0,1,1,0,1,0,1,0,1,1,0,0,1,1,1,1,1};
+      coo->alloc(8,3);
+      std::copy(dataCoo,dataCoo+24,coo->getPointer());
+      const int conn[12]={0,1,2,1,2,3,4,5,7,4,6,7};
+      m->allocateCells(2);
+      for(int i=0;i<4;i++)
+        m->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn+3*i);
+    }
+  else
+    {
+      coo->alloc(0,3);
+      m->allocateCells(0);
+    }
+  m->setCoords(coo);
+  return m.retn();
+}
+
+
+void ParaMEDMEMTest::testGauthier1()
+{
+  int num_cas=0;
+  int rank, size;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  
+  int is_master=0;
+
+  CommInterface comm;
+  set<int> emetteur_ids;
+  set<int> recepteur_ids;
+  emetteur_ids.insert(0);
+  if(size!=4)
+    return;
+  recepteur_ids.insert(1);
+  if (size >2) 
+    recepteur_ids.insert(2);
+  if (size >2) 
+    emetteur_ids.insert(3);
+  if ((rank==0)||(rank==1)) 
+    is_master=1;
+  
+  MPIProcessorGroup recepteur_group(comm,recepteur_ids);
+  MPIProcessorGroup emetteur_group(comm,emetteur_ids);
+
+  string cas;
+  if (recepteur_group.containsMyRank())
+    {
+      cas="recepteur";
+      //freopen("recpeteur.out","w",stdout);
+      //freopen("recepteur.err","w",stderr);
+    }
+  else
+    {
+      cas="emetteur";
+      // freopen("emetteur.out","w",stdout);
+      //freopen("emetteur.err","w",stderr);
+    }
+  double expected[8][4]={
+    {1.,1.,1.,1.},
+    {40., 40., 1., 1.},
+    {1.,1.,1e200,1e200},
+    {40.,1.,1e200,1e200},
+    {1.,1.,1.,1.},
+    {40.,1.,1.,1.},
+    {1.,1.,1e200,1e200},
+    {20.5,1.,1e200,1e200}
+  };
+  int expectedLgth[8]={4,4,2,2,4,4,2,2};
+  
+  for (int send=0;send<2;send++)
+    for (int rec=0;rec<2;rec++)
+      {
+        InterpKernelDEC dec_emetteur(emetteur_group, recepteur_group);
+        ParaMEDMEM::ParaFIELD *champ_emetteur(0),*champ_recepteur(0);
+        ParaMEDMEM::ParaMESH *paramesh(0);
+        MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> mesh;
+        dec_emetteur.setOrientation(2);
+        if (send==0)
+          {
+            mesh=init_quadGauthier1(is_master);
+          }
+        else
+          {
+            mesh=init_triangleGauthier1(is_master);
+          }
+        paramesh=new ParaMEDMEM::ParaMESH(mesh,recepteur_group.containsMyRank()?recepteur_group:emetteur_group,"emetteur mesh");
+        ParaMEDMEM::ComponentTopology comptopo;
+        champ_emetteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+        champ_emetteur->getField()->setNature(ConservativeVolumic);
+        champ_emetteur->setOwnSupport(true);
+        if (rec==0)
+          {
+            mesh=init_triangleGauthier1(is_master);
+          }
+        else
+          {
+            mesh=init_quadGauthier1(is_master);
+          }
+        paramesh=new ParaMEDMEM::ParaMESH(mesh,recepteur_group.containsMyRank()?recepteur_group:emetteur_group,"recepteur mesh");
+        champ_recepteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+        champ_recepteur->getField()->setNature(ConservativeVolumic);
+        champ_recepteur->setOwnSupport(true);
+        if (cas=="emetteur") 
+          {
+            champ_emetteur->getField()->getArray()->fillWithValue(1.);
+          }
+  
+  
+        MPI_Barrier(MPI_COMM_WORLD);
+
+        //clock_t clock0= clock ();
+        int compti=0;
+
+        bool init=true; // first time step ??
+        bool stop=false;
+        // loop over the time steps
+        while (!stop) {
+  
+          compti++;
+          //clock_t clocki= clock ();
+          //cout << compti << " CLOCK " << (clocki-clock0)*1.e-6 << endl; 
+          for (int non_unif=0;non_unif<2;non_unif++)
+            {
+              if (cas=="emetteur") 
+                {
+                  if (non_unif)
+                    if(rank!=3)
+                      champ_emetteur->getField()->getArray()->setIJ(0,0,40);
+                }
+              //bool ok=false; // Is the time interval successfully solved ?
+    
+              // Loop on the time interval tries
+              if(1) {
+      
+
+                if (cas=="emetteur")
+                  dec_emetteur.attachLocalField(champ_emetteur);
+                else
+                  dec_emetteur.attachLocalField(champ_recepteur);
+
+
+                if(init) dec_emetteur.synchronize();
+                init=false;
+
+                if (cas=="emetteur") {
+                  //    affiche(champ_emetteur);
+                  dec_emetteur.sendData();
+                }
+                else if (cas=="recepteur")
+                  {
+                    dec_emetteur.recvData();
+                    if (is_master)
+                      afficheGauthier1(*champ_recepteur,expected[num_cas],expectedLgth[num_cas]);
+                  }
+                else
+                  throw 0;
+                MPI_Barrier(MPI_COMM_WORLD);
+              }
+              stop=true;
+              num_cas++;
+            }
+        }
+        delete champ_emetteur;
+        delete champ_recepteur;
+      }
+}
+
+void ParaMEDMEMTest::testGauthier2()
+{
+  double valuesExpected1[2]={0.,0.};
+  double valuesExpected2[2]={0.95,0.970625};
+  
+  double valuesExpected30[]={0., 0., 0.05, 0., 0., 0.15, 0., 0., 0.25, 0., 0., 0.35, 0., 0., 0.45, 0., 0., 0.55, 0., 0., 0.65, 0., 0., 0.75, 0., 0., 0.85, 0., 0., 0.95};
+  double valuesExpected31[]={0.,  0.,  0.029375,  0.,  0.,  0.029375,  0.,  0.,  0.1,  0.,  0.,  0.1,  0.,  0.,  0.2,  0.,  0.,  0.2,  0.,  0.,  0.3,  0.,  0.,  0.3,  0.,  0.,  0.4,  0.,  0.,  0.4,  0.,  0.,  0.5,  0.,  0.,  0.5,  0.,  0.,  0.6,  0.,  0.,  0.6,  0.,  0.,  0.7,  0.,  0.,  0.7,  0.,  0.,  0.8,  0.,  0.,  0.8,  0.,  0.,  0.9,  0.,  0.,  0.9,  0.,  0.,  0.970625,  0.,  0.,  0.970625 };
+
+  double *valuesExpected3[2]={valuesExpected30,valuesExpected31};
+
+  int rank, size;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  if (size <2)
+    return ;
+  CommInterface comm;
+  set<int> Genepi_ids;
+  set<int> entree_chaude_ids;
+  Genepi_ids.insert(0);
+  for (int i=1;i<size;i++)
+    entree_chaude_ids.insert(i);
+  for (int type=0;type<2;type++)
+    {
+      MPIProcessorGroup entree_chaude_group(comm,entree_chaude_ids);
+      MPIProcessorGroup Genepi_group(comm,Genepi_ids);
+
+      ParaMEDMEM::ParaFIELD *vitesse(0);
+      InterpKernelDEC dec_vit_in_chaude(entree_chaude_group, Genepi_group);
+
+      if ( entree_chaude_group.containsMyRank())
+        {
+          MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> mesh(MEDCouplingUMesh::New("mesh",2));
+          MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> arr(DataArrayDouble::New()); arr->alloc(63,3);
+          const double cooData[189]={0.,0.,0.,0.5,0.,0.,0.5,0.05,0.,0.,0.1,0.,0.5,0.1,0.,0.5,0.15,0.,0.,0.2,0.,0.5,0.2,0.,0.5,0.25,0.,0.,0.3,0.,0.5,0.3,0.,0.5,0.35,0.,0.,0.4,0.,0.5,0.4,0.,0.5,0.45,0.,0.,0.5,0.,0.5,0.5,0.,0.5,0.55,0.,0.,0.6,0.,0.5,0.6,0.,0.5,0.65,0.,0.,0.7,0.,0.5,0.7,0.,0.5,0.75,0.,0.,0.8,0.,0.5,0.8,0.,0.5,0.85,0.,0.,0.9,0.,0.5,0.9,0.,0.5,0.95,0.,1.,0.,0.,1.,0.1,0.,1.,0.2,0.,1.,0.3,0.,1.,0.4,0.,1.,0.5,0.,1.,0.6,0.,1.,0.7,0.,1.,0.8,0.,1.,0.9,0.,1.,0.05,0.,1.,0.15,0.,1.,0.25,0.,1.,0.35,0.,1.,0.45,0.,1.,0.55,0.,1.,0.65,0.,1.,0.75,0.,1.,0.85,0.,1.,0.95,0.,1.,1.,0.,0.,1.,0.,0.5,1.,0.,0.,0.05,0.,0.,0.15,0.,0.,0.25,0.,0.,0.35,0.,0.,0.45,0.,0.,0.55,0.,0.,0.65,0.,0.,0.75,0.,0.,0.85,0.,0.,0.95,0.};
+          std::copy(cooData,cooData+189,arr->getPointer());
+          mesh->setCoords(arr);
+          mesh->allocateCells(80);
+          const int conn[240]={0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,2,1,31,5,4,32,8,7,33,11,10,34,14,13,35,17,16,36,20,19,37,23,22,38,26,25,39,29,28,30,40,2,31,41,5,32,42,8,33,43,11,34,44,14,35,45,17,36,46,20,37,47,23,38,48,26,39,49,29,31,2,40,32,5,41,33,8,42,34,11,43,35,14,44,36,17,45,37,20,46,38,23,47,39,26,48,50,29,49,3,2,4,6,5,7,9,8,10,12,11,13,15,14,16,18,17,19,21,20,22,24,23,25,27,26,28,51,29,52,31,4,2,32,7,5,33,10,8,34,13,11,35,16,14,36,19,17,37,22,20,38,25,23,39,28,26,50,52,29,0,2,53,3,5,54,6,8,55,9,11,56,12,14,57,15,17,58,18,20,59,21,23,60,24,26,61,27,29,62,3,53,2,6,54,5,9,55,8,12,56,11,15,57,14,18,58,17,21,59,20,24,60,23,27,61,26,51,62,29};
+          for(int i=0;i<80;i++)
+            mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn+3*i);
+          MEDCouplingAutoRefCountObjectPtr<MEDCouplingFieldDouble> f(MEDCouplingFieldDouble::New(ON_NODES,ONE_TIME));
+          const double valsOfField[189]={0.,0.,0.,0.,0.,0.,0.,0.,0.05,0.,0.,0.1,0.,0.,0.1,0.,0.,0.15,0.,0.,0.2,0.,0.,0.2,0.,0.,0.25,0.,0.,0.3,0.,0.,0.3,0.,0.,0.35,0.,0.,0.4,0.,0.,0.4,0.,0.,0.45,0.,0.,0.5,0.,0.,0.5,0.,0.,0.55,0.,0.,0.6,0.,0.,0.6,0.,0.,0.65,0.,0.,0.7,0.,0.,0.7,0.,0.,0.75,0.,0.,0.8,0.,0.,0.8,0.,0.,0.85,0.,0.,0.9,0.,0.,0.9,0.,0.,0.95,0.,0.,0.,0.,0.,0.1,0.,0.,0.2,0.,0.,0.3,0.,0.,0.4,0.,0.,0.5,0.,0.,0.6,0.,0.,0.7,0.,0.,0.8,0.,0.,0.9,0.,0.,0.05,0.,0.,0.15,0.,0.,0.25,0.,0.,0.35,0.,0.,0.45,0.,0.,0.55,0.,0.,0.65,0.,0.,0.75,0.,0.,0.85,0.,0.,0.95,0.,0.,1.,0.,0.,1.,0.,0.,1.,0.,0.,0.05,0.,0.,0.15,0.,0.,0.25,0.,0.,0.35,0.,0.,0.45,0.,0.,0.55,0.,0.,0.65,0.,0.,0.75,0.,0.,0.85,0.,0.,0.95};
+          f->setMesh(mesh); f->setName("VITESSE_P1_OUT");
+          arr=DataArrayDouble::New(); arr->alloc(63,3);
+          std::copy(valsOfField,valsOfField+189,arr->getPointer());
+          f->setArray(arr); f->setNature(ConservativeVolumic);
+          ParaMEDMEM::ParaMESH *paramesh(new ParaMEDMEM::ParaMESH(mesh,entree_chaude_group,"emetteur mesh"));
+          vitesse=new ParaMEDMEM::ParaFIELD(f,paramesh,entree_chaude_group);
+          vitesse->setOwnSupport(true);
+          dec_vit_in_chaude.setMethod("P1");
+        }
+      else
+        {
+          MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> mesh(MEDCouplingUMesh::New("mesh",2));
+          MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> arr(DataArrayDouble::New()); arr->alloc(22,3);
+          const double cooData[66]={0,0,0,1,0,0,0,0.1,0,1,0.1,0,0,0.2,0,1,0.2,0,0,0.3,0,1,0.3,0,0,0.4,0,1,0.4,0,0,0.5,0,1,0.5,0,0,0.6,0,1,0.6,0,0,0.7,0,1,0.7,0,0,0.8,0,1,0.8,0,0,0.9,0,1,0.9,0,0,1,0,1,1,0};
+          std::copy(cooData,cooData+66,arr->getPointer());
+          mesh->setCoords(arr);
+          mesh->allocateCells(10);
+          const int conn[40]={0,1,3,2,2,3,5,4,4,5,7,6,6,7,9,8,8,9,11,10,10,11,13,12,12,13,15,14,14,15,17,16,16,17,19,18,18,19,21,20};
+          for(int i=0;i<10;i++)
+            mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn+4*i);
+          MEDCouplingAutoRefCountObjectPtr<MEDCouplingFieldDouble> f(MEDCouplingFieldDouble::New(type==0?ON_CELLS:ON_NODES,ONE_TIME));
+          f->setMesh(mesh); f->setName("vitesse_in_chaude");
+          arr=DataArrayDouble::New(); arr->alloc(f->getNumberOfTuplesExpected()*3); arr->fillWithZero(); arr->rearrange(3);
+          f->setArray(arr); f->setNature(ConservativeVolumic);
+          ParaMEDMEM::ParaMESH *paramesh(new ParaMEDMEM::ParaMESH(mesh,Genepi_group,"recepteur mesh"));
+          vitesse=new ParaMEDMEM::ParaFIELD(f,paramesh,Genepi_group);
+          vitesse->setOwnSupport(true);
+          dec_vit_in_chaude.setMethod(f->getDiscretization()->getRepr());
+        }
+
+      dec_vit_in_chaude.attachLocalField(vitesse);
+      
+      dec_vit_in_chaude.synchronize();
+  
+  
+      // Sends - receives
+      if (entree_chaude_group.containsMyRank())
+        {
+          dec_vit_in_chaude.sendData();
+        }
+      else
+        {
+          dec_vit_in_chaude.recvData(); 
+        }
+      if ( !entree_chaude_group.containsMyRank() )
+        {
+          double pmin=1e38, pmax=-1e38;
+          const double *p(vitesse->getField()->getArray()->begin());
+          for(std::size_t i=0;i<vitesse->getField()->getArray()->getNbOfElems();i++,p++)
+            {
+              if (*p<pmin) pmin=*p;
+              if (*p>pmax) pmax=*p;
+            }
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(valuesExpected1[type],pmin,1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(valuesExpected2[type],pmax,1e-12);
+      
+          int nbCompo(vitesse->getField()->getNumberOfComponents());
+          p=vitesse->getField()->getArray()->begin();
+          for(int i=0;i<vitesse->getField()->getNumberOfTuples();i++)
+            for(int c=0;c<nbCompo;c++,p++)
+              CPPUNIT_ASSERT_DOUBLES_EQUAL(valuesExpected3[type][i*nbCompo+c],*p,1e-12);
+        }
+      delete vitesse;
+    }
+}
+
+/*!
+ * Non-regression test of the copy constructor of InterpKernelDEC.
+ */
+void ParaMEDMEMTest::testGauthier3()
+{
+  int num_cas=0;
+  int rank, size;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  
+  int is_master=0;
+
+  CommInterface comm;
+  set<int> emetteur_ids;
+  set<int> recepteur_ids;
+  emetteur_ids.insert(0);
+  if(size!=4)
+    return;
+  recepteur_ids.insert(1);
+  if (size >2) 
+    recepteur_ids.insert(2);
+  if (size >2) 
+    emetteur_ids.insert(3);
+  if ((rank==0)||(rank==1)) 
+    is_master=1;
+  
+  MPIProcessorGroup recepteur_group(comm,recepteur_ids);
+  MPIProcessorGroup emetteur_group(comm,emetteur_ids);
+
+  string cas;
+  if (recepteur_group.containsMyRank())
+    {
+      cas="recepteur";
+      //freopen("recpeteur.out","w",stdout);
+      //freopen("recepteur.err","w",stderr);
+    }
+  else
+    {
+      cas="emetteur";
+      // freopen("emetteur.out","w",stdout);
+      //freopen("emetteur.err","w",stderr);
+    }
+  double expected[8][4]={
+    {1.,1.,1.,1.},
+    {40., 40., 1., 1.},
+    {1.,1.,1e200,1e200},
+    {40.,1.,1e200,1e200},
+    {1.,1.,1.,1.},
+    {40.,1.,1.,1.},
+    {1.,1.,1e200,1e200},
+    {20.5,1.,1e200,1e200}
+  };
+  int expectedLgth[8]={4,4,2,2,4,4,2,2};
+  
+  for (int send=0;send<2;send++)
+    for (int rec=0;rec<2;rec++)
+      {
+        std::vector<InterpKernelDEC> decu(1);
+        decu[0]=InterpKernelDEC(emetteur_group,recepteur_group);
+        InterpKernelDEC& dec_emetteur=decu[0];
+        ParaMEDMEM::ParaFIELD *champ_emetteur(0),*champ_recepteur(0);
+        ParaMEDMEM::ParaMESH *paramesh(0);
+        MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> mesh;
+        dec_emetteur.setOrientation(2);
+        if (send==0)
+          {
+            mesh=init_quadGauthier1(is_master);
+          }
+        else
+          {
+            mesh=init_triangleGauthier1(is_master);
+          }
+        paramesh=new ParaMEDMEM::ParaMESH(mesh,recepteur_group.containsMyRank()?recepteur_group:emetteur_group,"emetteur mesh");
+        ParaMEDMEM::ComponentTopology comptopo;
+        champ_emetteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+        champ_emetteur->getField()->setNature(ConservativeVolumic);
+        champ_emetteur->setOwnSupport(true);
+        if (rec==0)
+          {
+            mesh=init_triangleGauthier1(is_master);
+          }
+        else
+          {
+            mesh=init_quadGauthier1(is_master);
+          }
+        paramesh=new ParaMEDMEM::ParaMESH(mesh,recepteur_group.containsMyRank()?recepteur_group:emetteur_group,"recepteur mesh");
+        champ_recepteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+        champ_recepteur->getField()->setNature(ConservativeVolumic);
+        champ_recepteur->setOwnSupport(true);
+        if (cas=="emetteur") 
+          {
+            champ_emetteur->getField()->getArray()->fillWithValue(1.);
+          }
+  
+  
+        MPI_Barrier(MPI_COMM_WORLD);
+
+        //clock_t clock0= clock ();
+        int compti=0;
+
+        bool init=true; // first time step ??
+        bool stop=false;
+        // loop over the time steps
+        while (!stop) {
+  
+          compti++;
+          //clock_t clocki= clock ();
+          //cout << compti << " CLOCK " << (clocki-clock0)*1.e-6 << endl; 
+          for (int non_unif=0;non_unif<2;non_unif++)
+            {
+              if (cas=="emetteur") 
+                {
+                  if (non_unif)
+                    if(rank!=3)
+                      champ_emetteur->getField()->getArray()->setIJ(0,0,40);
+                }
+              //bool ok=false; // Is the time interval successfully solved ?
+    
+              // Loop on the time interval tries
+              if(1) {
+      
+
+                if (cas=="emetteur")
+                  dec_emetteur.attachLocalField(champ_emetteur);
+                else
+                  dec_emetteur.attachLocalField(champ_recepteur);
+
+
+                if(init) dec_emetteur.synchronize();
+                init=false;
+
+                if (cas=="emetteur") {
+                  //    affiche(champ_emetteur);
+                  dec_emetteur.sendData();
+                }
+                else if (cas=="recepteur")
+                  {
+                    dec_emetteur.recvData();
+                    if (is_master)
+                      afficheGauthier1(*champ_recepteur,expected[num_cas],expectedLgth[num_cas]);
+                  }
+                else
+                  throw 0;
+                MPI_Barrier(MPI_COMM_WORLD);
+              }
+              stop=true;
+              num_cas++;
+            }
+        }
+        delete champ_emetteur;
+        delete champ_recepteur;
+      }
+}
+
+/*!
+ * This test is the parallel version of the MEDCouplingBasicsTest.test3D1DOnP1P0_1 test.
+ */
+void ParaMEDMEMTest::testGauthier4()
+{
+  //
+  const double sourceCoords[19*3]={0.5,0.5,0.1,0.5,0.5,1.2,0.5,0.5,1.6,0.5,0.5,1.8,0.5,0.5,2.43,0.5,0.5,2.55,0.5,0.5,4.1,0.5,0.5,4.4,0.5,0.5,4.9,0.5,0.5,5.1,0.5,0.5,7.6,0.5,0.5,7.7,0.5,0.5,8.2,0.5,0.5,8.4,0.5,0.5,8.6,0.5,0.5,8.8,0.5,0.5,9.2,0.5,0.5,9.6,0.5,0.5,11.5};
+  const int sourceConn[18*2]={0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18};
+  const double sourceVals[19]={0.49,2.8899999999999997,7.29,13.69,22.09,32.49,44.89,59.29,75.69,94.09, 114.49,136.89,161.29,187.69,216.09,246.49,278.89,313.29,349.69};
+  const double targetCoords0[20*3]={0.,0.,0.,1.,0.,0.,0.,1.,0.,1.,1.,0.,0.,0.,1.,1.,0.,1.,0.,1.,1.,1.,1.,1.,0.,0.,2.,1.,0.,2.,0.,1.,2.,1.,1.,2.,0.,0.,3.,1.,0.,3.,0.,1.,3.,1.,1.,3.,0.,0.,4.,1.,0.,4.,0.,1.,4.,1.,1.,4.};
+  const int targetConn0[8*4]={1,0,2,3,5,4,6,7,5,4,6,7,9,8,10,11,9,8,10,11,13,12,14,15,13,12,14,15,17,16,18,19};
+  const double targetCoords1[28*3]={0.,0.,4.,1.,0.,4.,0.,1.,4.,1.,1.,4.,0.,0.,5.,1.,0.,5.,0.,1.,5.,1.,1.,5.,0.,0.,6.,1.,0.,6.,0.,1.,6.,1.,1.,6.,0.,0.,7.,1.,0.,7.,0.,1.,7.,1.,1.,7.,0.,0.,8.,1.,0.,8.,0.,1.,8.,1.,1.,8.,0.,0.,9.,1.,0.,9.,0.,1.,9.,1.,1.,9.,0.,0.,10.,1.,0.,10.,0.,1.,10.,1.,1.,10.};
+  const int targetConn1[8*6]={1,0,2,3,5,4,6,7,5,4,6,7,9,8,10,11,9,8,10,11,13,12,14,15,13,12,14,15,17,16,18,19,17,16,18,19,21,20,22,23,21,20,22,23,25,24,26,27};
+  //
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  int nproc_source = 1;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD* parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      std::ostringstream stream; stream << "sourcemesh2D proc " << rank;
+      mesh=MEDCouplingUMesh::New(stream.str().c_str(),1);
+      mesh->allocateCells();
+      for(int i=0;i<18;i++)
+        mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,sourceConn+2*i);
+      mesh->finishInsertingCells();
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(19,3);
+      std::copy(sourceCoords,sourceCoords+19*3,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh,comptopo);
+      double *value=parafield->getField()->getArray()->getPointer();
+      std::copy(sourceVals,sourceVals+19,value);
+    }
+  else
+    {
+      if(rank==1)
+        {
+          std::ostringstream stream; stream << "targetmesh2D proc " << rank-nproc_source;
+          mesh=MEDCouplingUMesh::New(stream.str().c_str(),3);
+          mesh->allocateCells();
+          for(int i=0;i<4;i++)
+            mesh->insertNextCell(INTERP_KERNEL::NORM_HEXA8,8,targetConn0+8*i);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(20,3);
+          std::copy(targetCoords0,targetCoords0+20*3,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+          ParaMEDMEM::ComponentTopology comptopo;
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+        }
+      else if(rank==2)
+        {
+          std::ostringstream stream; stream << "targetmesh2D proc " << rank-nproc_source;
+          mesh=MEDCouplingUMesh::New(stream.str().c_str(),3);
+          mesh->allocateCells();
+          for(int i=0;i<6;i++)
+            mesh->insertNextCell(INTERP_KERNEL::NORM_HEXA8,8,targetConn1+8*i);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(28,3);
+          std::copy(targetCoords1,targetCoords1+28*3,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+          ParaMEDMEM::ComponentTopology comptopo;
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+        }
+    }
+  //test 1 - primaire -> secondaire
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  dec.setIntersectionType(INTERP_KERNEL::PointLocator);
+  parafield->getField()->setNature(ConservativeVolumic);//very important
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P1");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res(parafield->getField()->getArray()->getConstPointer());
+      if(rank==1)
+        {
+          const double expected0[4]={0.49,7.956666666666667,27.29,0.};
+          for(int i=0;i<4;i++)
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(expected0[i],res[i],1e-13);
+        }
+      else
+        {
+          const double expected1[6]={59.95666666666667,94.09,0.,125.69,202.89,296.09};
+          for(int i=0;i<6;i++)
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(expected1[i],res[i],1e-13);
+        }
+    }
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group->containsMyRank())
+    {
+      dec.recvData();
+      const double expected2[19]={0.49,7.956666666666667,7.956666666666667,7.956666666666667,27.29,27.29,59.95666666666667,59.95666666666667,59.95666666666667,94.09,125.69,125.69,202.89,202.89,202.89,202.89,296.09,296.09,0.};
+      const double *res(parafield->getField()->getArray()->getConstPointer());
+      for(int i=0;i<19;i++)
+        CPPUNIT_ASSERT_DOUBLES_EQUAL(expected2[i],res[i],1e-13);
+    }
+  else
+    {
+      dec.sendData();
+    }
+  delete parafield;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_ICoco.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_ICoco.cxx
new file mode 100644 (file)
index 0000000..f46c767
--- /dev/null
@@ -0,0 +1,194 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "ComponentTopology.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "InterpKernelDEC.hxx"
+
+#include "MEDCouplingUMesh.hxx"
+
+#include <set>
+#include <string>
+#include <time.h>
+#include <iostream>
+#include <assert.h>
+
+using namespace std;
+using namespace ParaMEDMEM;
+using namespace ICoCo;
+
+typedef enum {sync_and,sync_or} synctype;
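+// Combine a local boolean across all ranks: sync_and keeps 'stop' true only if
+// every rank set it (global AND via MPI_MIN), sync_or if any rank set it
+// (global OR via MPI_MAX).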
+void synchronize_bool(bool& stop, synctype s)
+{
+  int my_stop;
+  int my_stop_temp = stop?1:0;
+  if (s==sync_and)
+    MPI_Allreduce(&my_stop_temp,&my_stop,1,MPI_INT,MPI_MIN,MPI_COMM_WORLD);
+  else if (s==sync_or)
+    MPI_Allreduce(&my_stop_temp,&my_stop,1,MPI_INT,MPI_MAX,MPI_COMM_WORLD);
+  stop =(my_stop==1);
+}
+
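+// Align the time step across all ranks by taking the global minimum.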
+void synchronize_dt(double& dt)
+{
+  double dttemp=dt;
+  MPI_Allreduce(&dttemp,&dt,1,MPI_DOUBLE,MPI_MIN,MPI_COMM_WORLD);
+}
+
+
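+// Print the field name followed by one "index: value" line per tuple.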
+void affiche(const ParaFIELD& field)
+{
+  cout <<field.getField()->getName()<<endl;
+  const double *vals(field.getField()->getArray()->begin());
+  for(int ele=0;ele<field.getField()->getNumberOfTuples();ele++)
+    cout << ele <<": "<< vals[ele] << endl;
+}
+
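+// Build a 2-cell quadrangle mesh on 8 nodes embedded in 3D: two unit squares in
+// the XZ plane offset by 1e-05 along Y. Used as the receiving support in testICoco1.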
+MEDCouplingUMesh *init_quad()
+{
+  MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> m(MEDCouplingUMesh::New("champ_quad",2));
+  MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> coo(DataArrayDouble::New());
+  const double dataCoo[24]={0.,0.,0.,1.,0.,0.,0.,0.,1.,1.,0.,1.,0.,1e-05,0.,1.,1e-05,0.,0.,1e-05,1.,1.,1e-05,1.};
+  coo->alloc(8,3);
+  std::copy(dataCoo,dataCoo+24,coo->getPointer());
+  const int conn[8]={0,1,3,2,4,5,7,6};
+  m->allocateCells(2);
+  m->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+  m->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn+4);
+  m->setCoords(coo);
+  return m.retn();
+}
+
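+// Build a 4-cell triangle mesh on the same 8 nodes as init_quad (each square
+// split into two triangles). Used as the sending support in testICoco1.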
+MEDCouplingUMesh *init_triangle()
+{
+  MEDCouplingAutoRefCountObjectPtr<MEDCouplingUMesh> m(MEDCouplingUMesh::New("champ_triangle",2));
+  MEDCouplingAutoRefCountObjectPtr<DataArrayDouble> coo(DataArrayDouble::New());
+  const double dataCoo[24]={0.,0.,0.,1.,0.,0.,0.,0.,1.,1.,0.,1.,0.,1e-05,0.,1.,1e-05,0.,0.,1e-05,1.,1.,1e-05,1.};
+  coo->alloc(8,3);
+  std::copy(dataCoo,dataCoo+24,coo->getPointer());
+  const int conn[12]={0,1,2,1,2,3,4,5,7,4,6,7};
+  m->allocateCells(4);
+  for(int i=0;i<4;i++)
+    m->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn+3*i);
+  m->setCoords(coo);
+  return m.retn();
+}
+
+void ParaMEDMEMTest::testICoco1()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+
+  //the test is meant to run on 2 processors
+  if (size !=2) return ;
+  
+  CommInterface comm;
+  set<int> emetteur_ids;
+  set<int> recepteur_ids;
+  emetteur_ids.insert(0);
+  recepteur_ids.insert(1);
+
+  MPIProcessorGroup recepteur_group(comm,recepteur_ids);
+  MPIProcessorGroup emetteur_group(comm,emetteur_ids);
+
+  string cas;
+  if (recepteur_group.containsMyRank())
+    cas="recepteur";
+  else
+    cas="emetteur";
+
+  InterpKernelDEC dec_emetteur(emetteur_group,recepteur_group);
+  dec_emetteur.setOrientation(2);
+  ParaMEDMEM::ParaFIELD *champ_emetteur(0),*champ_recepteur(0);
+  ParaMEDMEM::ParaMESH *paramesh(0);
+  if (cas=="emetteur") 
+    {
+      MEDCouplingAutoRefCountObjectPtr<ParaMEDMEM::MEDCouplingUMesh> mesh_emetteur(init_triangle());
+      paramesh=new ParaMEDMEM::ParaMESH(mesh_emetteur,emetteur_group,"emetteur mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      champ_emetteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+      champ_emetteur->getField()->setNature(ConservativeVolumic);
+      champ_emetteur->setOwnSupport(true);
+      champ_emetteur->getField()->getArray()->fillWithValue(1.);
+    }
+  else
+    {
+      MEDCouplingAutoRefCountObjectPtr<ParaMEDMEM::MEDCouplingUMesh> mesh_recepteur(init_quad());
+      paramesh=new ParaMEDMEM::ParaMESH(mesh_recepteur,recepteur_group,"recepteur mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      champ_recepteur=new ParaMEDMEM::ParaFIELD(ON_CELLS,ONE_TIME,paramesh,comptopo);
+      champ_recepteur->getField()->setNature(ConservativeVolumic);
+      champ_recepteur->setOwnSupport(true);
+    }
+  
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+
+  clock_t clock0(clock());
+  int compti=0;
+
+  bool init(true),stop(false);
+  //loop over the time steps
+  while(!stop)
+    {
+      compti++;
+      clock_t clocki= clock ();
+      cout << compti << " CLOCK " << (clocki-clock0)*1.e-6 << endl; 
+      for (int non_unif=0;non_unif<2;non_unif++)
+        {
+          if (cas=="emetteur") 
+            if (non_unif)
+              champ_emetteur->getField()->getArray()->setIJ(0,0,40.);
+          //bool ok=false; // Is the time interval successfully solved ?
+    
+          // Loop on the time interval tries
+          if (cas=="emetteur")
+            dec_emetteur.attachLocalField(champ_emetteur);
+          else
+            dec_emetteur.attachLocalField(champ_recepteur);
+            
+          if(init)
+            dec_emetteur.synchronize();
+          init=false;
+            
+          if (cas=="emetteur")
+            {
+              dec_emetteur.sendData();
+              affiche(*champ_emetteur);
+            }
+          else if (cas=="recepteur")
+            {
+              dec_emetteur.recvData();
+              affiche(*champ_recepteur);
+            }
+          else
+            throw 0;
+        }
+      stop=true;
+    }
+  delete champ_recepteur;
+  delete champ_emetteur;
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_InterpKernelDEC.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_InterpKernelDEC.cxx
new file mode 100644 (file)
index 0000000..9ca93d3
--- /dev/null
@@ -0,0 +1,2293 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "DEC.hxx"
+#include "MxN_Mapping.hxx"
+#include "InterpKernelDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+#include "ICoCoMEDField.hxx"
+#include "ParaMEDLoader.hxx"
+#include "MEDLoader.hxx"
+
+#include <string>
+#include <iterator>
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void ParaMEDMEMTest::testInterpKernelDEC_2D()
+{
+  testInterpKernelDEC_2D_("P0","P0");
+}
+
+void ParaMEDMEMTest::testInterpKernelDEC2_2D()
+{
+  testInterpKernelDEC2_2D_("P0","P0");
+}
+
+void ParaMEDMEMTest::testInterpKernelDEC_3D()
+{
+  testInterpKernelDEC_3D_("P0","P0");
+}
+
+void ParaMEDMEMTest::testInterpKernelDEC_2DP0P1()
+{
+  //testInterpKernelDEC_2D_("P0","P1");
+}
+
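+// 1D DEC on 5 processes: ranks 0-2 carry source segments holding P0 values 7 to 10,
+// ranks 3-4 carry target segments. Values are sent to the targets, sent back to the
+// sources, and checked on both sides.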
+void ParaMEDMEMTest::testInterpKernelDEC_1D()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=5)
+    return ;
+  int nproc_source = 3;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafieldP0=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      if(rank==0)
+        {
+          double coords[4]={0.3,0.7, 0.9,1.0};
+          int conn[4]={0,1,2,3};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc0",1);
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn+2);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,1);
+          std::copy(coords,coords+4,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      if(rank==1)
+        {
+          double coords[2]={0.7,0.9};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc1",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,1);
+          std::copy(coords,coords+2,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      if(rank==2)
+        {
+          double coords[2]={1.,1.12};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc2",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,1);
+          std::copy(coords,coords+2,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+      if(rank==0)
+        {
+          valueP0[0]=7.; valueP0[1]=8.;
+        }
+      if(rank==1)
+        {
+          valueP0[0]=9.;
+        }
+      if(rank==2)
+        {
+          valueP0[0]=10.;
+        }
+    }
+  else
+    {
+      const char targetMeshName[]="target mesh";
+      if(rank==3)
+        {
+          double coords[2]={0.5,0.75};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc3",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,1);
+          std::copy(coords,coords+2,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      if(rank==4)
+        {
+          double coords[2]={0.75,1.2};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc4",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,1);
+          std::copy(coords,coords+2,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+    }
+  // test 1
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+      dec.recvData();
+      const double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,valueP0[0],1e-7);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[1],1e-7);
+        }
+      if(rank==1)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(8.64054054054054,valueP0[0],1e-7);
+        }
+      if(rank==2)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[0],1e-7);
+        }
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res=parafieldP0->getField()->getArray()->getConstPointer();
+      if(rank==3)
+        {
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,res[0],1e-12);
+        }
+      if(rank==4)
+        {
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540526,res[0],1e-12);
+        }
+      dec.sendData();
+    }
+  //
+  delete parafieldP0;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
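+// Same exchange as testInterpKernelDEC_1D, but the segments are described by 2D
+// coordinates lying on the line x=y (a "2D curve" mesh).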
+void ParaMEDMEMTest::testInterpKernelDEC_2DCurve()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=5)
+    return ;
+  int nproc_source = 3;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafieldP0=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      if(rank==0)
+        {
+          double coords[8]={0.3,0.3,0.7,0.7, 0.9,0.9,1.0,1.0};
+          int conn[4]={0,1,2,3};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc0",1);
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn+2);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,2);
+          std::copy(coords,coords+8,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      if(rank==1)
+        {
+          double coords[4]={0.7,0.7,0.9,0.9};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc1",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,2);
+          std::copy(coords,coords+4,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      if(rank==2)
+        {
+          double coords[4]={1.,1.,1.12,1.12};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc2",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,2);
+          std::copy(coords,coords+4,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+      if(rank==0)
+        {
+          valueP0[0]=7.; valueP0[1]=8.;
+        }
+      if(rank==1)
+        {
+          valueP0[0]=9.;
+        }
+      if(rank==2)
+        {
+          valueP0[0]=10.;
+        }
+    }
+  else
+    {
+      const char targetMeshName[]="target mesh";
+      if(rank==3)
+        {
+          double coords[4]={0.5,0.5,0.75,0.75};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc3",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,2);
+          std::copy(coords,coords+4,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      if(rank==4)
+        {
+          double coords[4]={0.75,0.75,1.2,1.2};
+          int conn[2]={0,1};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc4",1);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_SEG2,2,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(2,2);
+          std::copy(coords,coords+4,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+    }
+  // test 1
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+      dec.recvData();
+      const double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,valueP0[0],1e-7);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[1],1e-7);
+        }
+      if(rank==1)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(8.64054054054054,valueP0[0],1e-7);
+        }
+      if(rank==2)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[0],1e-7);
+        }
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res=parafieldP0->getField()->getArray()->getConstPointer();
+      if(rank==3)
+        {
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,res[0],1e-12);
+        }
+      if(rank==4)
+        {
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540526,res[0],1e-12);
+        }
+      dec.sendData();
+    }
+  //
+  delete parafieldP0;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
+
+/*
+ * Check methods defined in InterpKernelDEC.hxx
+ *
+ InterpKernelDEC();
+ InterpKernelDEC(ProcessorGroup& local_group, ProcessorGroup& distant_group);
+ virtual ~InterpKernelDEC();
+ void synchronize();
+ void recvData();
+ void sendData();
+*/
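+// Minimal usage sketch of the API listed above (illustrative only: 'srcGrp',
+// 'tgtGrp' and 'field' stand for an already-built source group, target group and
+// attached ParaFIELD; they are not objects defined in this file):
+//
+//   ParaMEDMEM::InterpKernelDEC dec(srcGrp,tgtGrp);
+//   dec.setMethod("P0");           // "P0" for cell fields, "P1" for node fields
+//   dec.attachLocalField(field);   // field living on this side's processor group
+//   dec.synchronize();             // compute the interpolation matrix (once)
+//   if(srcGrp.containsMyRank())
+//     dec.sendData();              // source ranks push their values
+//   else
+//     dec.recvData();              // target ranks receive the interpolated values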
+void ParaMEDMEMTest::testInterpKernelDEC_2D_(const char *srcMeth, const char *targetMeth)
+{
+  std::string srcM(srcMeth);
+  std::string targetM(targetMeth);
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+
+  //the test is meant to run on five processors
+  if (size !=5) return ;
+   
+  int nproc_source = 3;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  
+  //loading the geometry for the source group
+
+  ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+  ICoCo::MEDField* icocofield ;
+  
+  string filename_xml1              = getResourceFile("square1_split");
+  string filename_xml2              = getResourceFile("square2_split");
+  //string filename_seq_wr            = makeTmpFile("");
+  //string filename_seq_med           = makeTmpFile("myWrField_seq_pointe221.med");
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group->containsMyRank())
+    {
+      string master = filename_xml1;
+      
+      ostringstream strstream;
+      strstream <<master<<rank+1<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_2_"<< rank+1;
+      
+      mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      
+    
+      paramesh=new ParaMESH (mesh,*source_group,"source mesh");
+    
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT( support,*source_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(srcM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      int nb_local;
+      if(srcM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      //      double * value= new double[nb_local];
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=1.0;
+    
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+      dec.setMethod(srcMeth);
+      dec.attachLocalField(icocofield);
+    }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank())
+    {
+      string master= filename_xml2;
+      ostringstream strstream;
+      strstream << master<<(rank-nproc_source+1)<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_3_"<<rank-nproc_source+1;
+      mesh = MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      
+      paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT(support,*target_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(targetM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      int nb_local;
+      if(targetM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      //      double * value= new double[nb_local];
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+      dec.setMethod(targetMeth);
+      dec.attachLocalField(icocofield);
+    }
+    
+  
+  //attaching a DEC to the source group 
+  double field_before_int;
+  double field_after_int;
+  
+  if (source_group->containsMyRank())
+    { 
+      field_before_int = parafield->getVolumeIntegral(0,true);
+      dec.synchronize();
+      cout<<"DEC usage"<<endl;
+      dec.setForcedRenormalization(false);
+
+      dec.sendData();
+      ParaMEDLoader::WriteParaMesh("./sourcesquareb",paramesh);
+      if (source_group->myRank()==0)
+        aRemover.Register("./sourcesquareb");
+      ostringstream filename;
+      filename<<"./sourcesquareb_"<<source_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      //MEDLoader::WriteField("./sourcesquareb",parafield->getField());
+   
+      dec.recvData();
+      cout <<"writing"<<endl;
+      ParaMEDLoader::WriteParaMesh("./sourcesquare",paramesh);
+      if (source_group->myRank()==0)
+        aRemover.Register("./sourcesquare");
+      //MEDLoader::WriteField("./sourcesquare",parafield->getField());
+      
+     
+      filename<<"./sourcesquare_"<<source_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      field_after_int = parafield->getVolumeIntegral(0,true);
+      
+      
+      //      MPI_Bcast(&field_before_int,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
+      //       MPI_Bcast(&field_after_int,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
+
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(field_before_int, field_after_int, 1e-6);
+    
+    }
+  
+  //attaching a DEC to the target group
+  if (target_group->containsMyRank())
+    {
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+
+      dec.recvData();
+      ParaMEDLoader::WriteParaMesh("./targetsquareb",paramesh);
+      //MEDLoader::WriteField("./targetsquareb",parafield->getField());
+      if (target_group->myRank()==0)
+        aRemover.Register("./targetsquareb");
+      ostringstream filename;
+      filename<<"./targetsquareb_"<<target_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      dec.sendData();
+      ParaMEDLoader::WriteParaMesh("./targetsquare",paramesh);
+      //MEDLoader::WriteField("./targetsquare",parafield->getField());
+      
+      if (target_group->myRank()==0)
+        aRemover.Register("./targetsquare");
+      
+      filename<<"./targetsquare_"<<target_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      //    double field_before_int, field_after_int;
+      //       MPI_Bcast(&field_before_int,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
+      //       MPI_Bcast(&field_after_int,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
+      
+      //      CPPUNIT_ASSERT_DOUBLES_EQUAL(field_before_int, field_after_int, 1e-6);
+    
+    }
+  
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  delete parafield;
+  delete paramesh;
+  mesh->decrRef();
+
+  delete icocofield;
+
+  MPI_Barrier(MPI_COMM_WORLD);
+  cout << "end of InterpKernelDEC_2D test"<<endl;
+}
+
+void ParaMEDMEMTest::testInterpKernelDEC2_2D_(const char *srcMeth, const char *targetMeth)
+{
+  std::string srcM(srcMeth);
+  std::string targetM(targetMeth);
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+
+  //the test is meant to run on five processors
+  if (size !=5) return ;
+   
+  int nproc_source = 3;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  
+  //loading the geometry for the source group
+
+  ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::MEDCouplingFieldDouble* mcfield;
+  
+  string filename_xml1              = getResourceFile("square1_split");
+  string filename_xml2              = getResourceFile("square2_split");
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group->containsMyRank())
+    {
+      string master = filename_xml1;
+      
+      ostringstream strstream;
+      strstream <<master<<rank+1<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_2_"<< rank+1;
+      
+      mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(srcM=="P0")
+        {
+          mcfield = MEDCouplingFieldDouble::New(ON_CELLS,NO_TIME);
+          mcfield->setMesh(mesh);
+          DataArrayDouble *array=DataArrayDouble::New();
+          array->alloc(mcfield->getNumberOfTuples(),1);
+          mcfield->setArray(array);
+          array->decrRef();
+          mcfield->setNature(ConservativeVolumic);
+        }
+      else
+        {
+          mcfield = MEDCouplingFieldDouble::New(ON_CELLS,NO_TIME);
+          mcfield->setMesh(mesh);
+          DataArrayDouble *array=DataArrayDouble::New();
+          array->alloc(mcfield->getNumberOfTuples(),1);
+          mcfield->setArray(array);
+          array->decrRef();
+        }
+      int nb_local;
+      if(srcM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      double *value=mcfield->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=1.0;
+      dec.setMethod(srcMeth);
+      dec.attachLocalField(mcfield);
+      dec.attachLocalField(mcfield);
+    }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank())
+    {
+      string master= filename_xml2;
+      ostringstream strstream;
+      strstream << master<<(rank-nproc_source+1)<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_3_"<<rank-nproc_source+1;
+      mesh = MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(targetM=="P0")
+        {
+          mcfield = MEDCouplingFieldDouble::New(ON_CELLS,NO_TIME);
+          mcfield->setMesh(mesh);
+          DataArrayDouble *array=DataArrayDouble::New();
+          array->alloc(mcfield->getNumberOfTuples(),1);
+          mcfield->setArray(array);
+          array->decrRef();
+          mcfield->setNature(ConservativeVolumic);
+        }
+      else
+        {
+          mcfield = MEDCouplingFieldDouble::New(ON_NODES,NO_TIME);
+          mcfield->setMesh(mesh);
+          DataArrayDouble *array=DataArrayDouble::New();
+          array->alloc(mcfield->getNumberOfTuples(),1);
+          mcfield->setArray(array);
+          array->decrRef();
+        }
+      int nb_local;
+      if(targetM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      double *value=mcfield->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+      dec.setMethod(targetMeth);
+      dec.attachLocalField(mcfield);
+      dec.attachLocalField(mcfield);
+    }
+    
+  
+  //attaching a DEC to the source group 
+
+  if (source_group->containsMyRank())
+    { 
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+      dec.recvData();
+    }
+  
+  //attaching a DEC to the target group
+  if (target_group->containsMyRank())
+    {
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      dec.sendData();
+    }
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  mcfield->decrRef();
+  mesh->decrRef();
+
+  MPI_Barrier(MPI_COMM_WORLD);
+  cout << "end of InterpKernelDEC2_2D test"<<endl;
+}
+
+void ParaMEDMEMTest::testInterpKernelDEC_3D_(const char *srcMeth, const char *targetMeth)
+{
+  std::string srcM(srcMeth);
+  std::string targetM(targetMeth);
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+
+  //the test is meant to run on three processors
+  if (size !=3) return ;
+   
+  int nproc_source = 2;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  
+  //loading the geometry for the source group
+
+  ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+  ICoCo::MEDField* icocofield ;
+  
+  char * tmp_dir_c                    = getenv("TMP");
+  string tmp_dir;
+  if (tmp_dir_c != NULL)
+    tmp_dir = string(tmp_dir_c);
+  else
+    tmp_dir = "/tmp";
+  string filename_xml1              = getResourceFile("Mesh3D_10_2d");
+  string filename_xml2              = getResourceFile("Mesh3D_11");
+  //string filename_seq_wr            = makeTmpFile("");
+  //string filename_seq_med           = makeTmpFile("myWrField_seq_pointe221.med");
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group->containsMyRank())
+    {
+      string master = filename_xml1;
+      
+      ostringstream strstream;
+      strstream <<master<<rank+1<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_3_"<< rank+1;
+      
+      mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      
+    
+      paramesh=new ParaMESH (mesh,*source_group,"source mesh");
+    
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT( support,*source_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(srcM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      int nb_local;
+      if(srcM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      //      double * value= new double[nb_local];
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=1.0;
+    
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+      dec.setMethod(srcMeth);
+      dec.attachLocalField(icocofield);
+    }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank())
+    {
+      string master= filename_xml2;
+      ostringstream strstream;
+      strstream << master << ".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_6";
+      mesh = MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+      
+      paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT(support,*target_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(targetM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      int nb_local;
+      if(targetM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      //      double * value= new double[nb_local];
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+      dec.setMethod(targetMeth);
+      dec.attachLocalField(icocofield);
+    }  
+  //attaching a DEC to the source group 
+  double field_before_int;
+  double field_after_int;
+  
+  if (source_group->containsMyRank())
+    { 
+      field_before_int = parafield->getVolumeIntegral(0,true);
+      dec.synchronize();
+      cout<<"DEC usage"<<endl;
+      dec.setForcedRenormalization(false);
+
+      dec.sendData();
+      ParaMEDLoader::WriteParaMesh("./sourcesquareb",paramesh);
+      if (source_group->myRank()==0)
+        aRemover.Register("./sourcesquareb");
+      ostringstream filename;
+      filename<<"./sourcesquareb_"<<source_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      //MEDLoader::WriteField("./sourcesquareb",parafield->getField());
+   
+      dec.recvData();
+      cout <<"writing"<<endl;
+      ParaMEDLoader::WriteParaMesh("./sourcesquare",paramesh);
+      if (source_group->myRank()==0)
+        aRemover.Register("./sourcesquare");
+      //MEDLoader::WriteField("./sourcesquare",parafield->getField());
+      
+     
+      filename<<"./sourcesquare_"<<source_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      field_after_int = parafield->getVolumeIntegral(0,true);
+
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(field_before_int, field_after_int, 1e-6);
+    
+    }
+  
+  //attaching a DEC to the target group
+  if (target_group->containsMyRank())
+    {
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+
+      dec.recvData();
+      ParaMEDLoader::WriteParaMesh("./targetsquareb",paramesh);
+      //MEDLoader::WriteField("./targetsquareb",parafield->getField());
+      if (target_group->myRank()==0)
+        aRemover.Register("./targetsquareb");
+      ostringstream filename;
+      filename<<"./targetsquareb_"<<target_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+      dec.sendData();
+      ParaMEDLoader::WriteParaMesh("./targetsquare",paramesh);
+      //MEDLoader::WriteField("./targetsquare",parafield->getField());
+      
+      if (target_group->myRank()==0)
+        aRemover.Register("./targetsquare");
+      
+      filename<<"./targetsquare_"<<target_group->myRank()+1;
+      aRemover.Register(filename.str().c_str());
+    }
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  delete parafield;
+  delete paramesh;
+  mesh->decrRef();
+
+  delete icocofield;
+
+  MPI_Barrier(MPI_COMM_WORLD);
+  cout << "end of InterpKernelDEC_3D test"<<endl;
+}
+
+//Synchronous tests without interpolation, in native mode (AllToAll(v) from LAM/MPI):
+void ParaMEDMEMTest::testSynchronousEqualInterpKernelWithoutInterpNativeDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.1,1,0.1,1,false,false,false,"P0","P0");
+}
+
+//Synchronous tests without interpolation:
+void ParaMEDMEMTest::testSynchronousEqualInterpKernelWithoutInterpDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.1,1,0.1,1,true,false,false,"P0","P0");
+}
+
+//Synchronous tests with interpolation:
+void ParaMEDMEMTest::testSynchronousEqualInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.1,1,0.1,1,true,false,true,"P0","P0");
+}
+void ParaMEDMEMTest::testSynchronousFasterSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.09,1,0.1,1,true,false,true,"P0","P0");
+}
+void ParaMEDMEMTest::testSynchronousSlowerSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.11,1,0.1,1,true,false,true,"P0","P0");
+}
+void ParaMEDMEMTest::testSynchronousSlowSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.11,1,0.01,1,true,false,true,"P0","P0");
+}
+void ParaMEDMEMTest::testSynchronousFastSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.01,1,0.11,1,true,false,true,"P0","P0");
+}
+
+//Asynchronous tests with interpolation:
+void ParaMEDMEMTest::testAsynchronousEqualInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.1,1,0.1,1,true,true,true,"P0","P0");
+}
+void ParaMEDMEMTest::testAsynchronousFasterSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.09,1,0.1,1,true,true,true,"P0","P0");
+}
+void ParaMEDMEMTest::testAsynchronousSlowerSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.11,1,0.1,1,true,true,true,"P0","P0");
+}
+void ParaMEDMEMTest::testAsynchronousSlowSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.11,1,0.01,1,true,true,true,"P0","P0");
+}
+void ParaMEDMEMTest::testAsynchronousFastSourceInterpKernelDEC_2D()
+{
+  testAsynchronousInterpKernelDEC_2D(0.01,1,0.11,1,true,true,true,"P0","P0");
+}
+
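+// Non-overlapping 2D P0-P0 exchange: the same source/target layout is transferred
+// with each field nature (ConservativeVolumic, Integral, IntegralGlobConstraint,
+// RevIntegral), first source->target (tests 1-4), then target->source (tests 5-8).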
+void ParaMEDMEMTest::testInterpKernelDECNonOverlapp_2D_P0P0()
+{
+  //
+  const double sourceCoordsAll[2][8]={{0.4,0.5,0.4,1.5,1.6,1.5,1.6,0.5},
+                                      {0.3,-0.5,1.6,-0.5,1.6,-1.5,0.3,-1.5}};
+  const double targetCoordsAll[3][16]={{0.7,1.45,0.7,1.65,0.9,1.65,0.9,1.45,  1.1,1.4,1.1,1.6,1.3,1.6,1.3,1.4},
+                                       {0.7,-0.6,0.7,0.7,0.9,0.7,0.9,-0.6,  1.1,-0.7,1.1,0.6,1.3,0.6,1.3,-0.7},
+                                       {0.7,-1.55,0.7,-1.35,0.9,-1.35,0.9,-1.55,  1.1,-1.65,1.1,-1.45,1.3,-1.45,1.3,-1.65}};
+  int conn4All[8]={0,1,2,3,4,5,6,7};
+  double targetResults[3][2]={{34.,34.},{38.333333333333336,42.666666666666664},{47.,47.}};
+  double targetResults2[3][2]={{0.28333333333333344,0.56666666666666687},{1.8564102564102569,2.0128205128205132},{1.0846153846153845,0.36153846153846159}};
+  double targetResults3[3][2]={{3.7777777777777781,7.5555555555555562},{24.511111111111113,26.355555555555558},{14.1,4.7}};
+  double targetResults4[3][2]={{8.5,17},{8.8461538461538431, 9.8461538461538449},{35.25,11.75}};
+  //
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=5)
+    return ;
+  int nproc_source = 2;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD* parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      std::ostringstream stream; stream << "sourcemesh2D proc " << rank;
+      mesh=MEDCouplingUMesh::New(stream.str().c_str(),2);
+      mesh->allocateCells(2);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All);
+      mesh->finishInsertingCells();
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      const double *sourceCoords=sourceCoordsAll[rank];
+      std::copy(sourceCoords,sourceCoords+8,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      double *value=parafield->getField()->getArray()->getPointer();
+      value[0]=34+13*((double)rank);
+    }
+  else
+    {
+      std::ostringstream stream; stream << "targetmesh2D proc " << rank-nproc_source;
+      mesh=MEDCouplingUMesh::New(stream.str().c_str(),2);
+      mesh->allocateCells(2);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn4All+4);
+      mesh->finishInsertingCells();
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(8,2);
+      const double *targetCoords=targetCoordsAll[rank-nproc_source];
+      std::copy(targetCoords,targetCoords+16,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+    }
+  //test 1 - Conservative volumic
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  parafield->getField()->setNature(ConservativeVolumic);
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P0");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      const double *expected=targetResults[rank-nproc_source];
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[0],res[0],1e-13);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[1],res[1],1e-13);
+    }
+  //test 2 - Integral
+  ParaMEDMEM::InterpKernelDEC dec2(*source_group,*target_group);
+  parafield->getField()->setNature(Integral);
+  if (source_group->containsMyRank())
+    { 
+      dec2.setMethod("P0");
+      dec2.attachLocalField(parafield);
+      dec2.synchronize();
+      dec2.setForcedRenormalization(false);
+      dec2.sendData();
+    }
+  else
+    {
+      dec2.setMethod("P0");
+      dec2.attachLocalField(parafield);
+      dec2.synchronize();
+      dec2.setForcedRenormalization(false);
+      dec2.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      const double *expected=targetResults2[rank-nproc_source];
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[0],res[0],1e-13);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[1],res[1],1e-13);
+    }
+  //test 3 - Integral with global constraint
+  ParaMEDMEM::InterpKernelDEC dec3(*source_group,*target_group);
+  parafield->getField()->setNature(IntegralGlobConstraint);
+  if (source_group->containsMyRank())
+    { 
+      dec3.setMethod("P0");
+      dec3.attachLocalField(parafield);
+      dec3.synchronize();
+      dec3.setForcedRenormalization(false);
+      dec3.sendData();
+    }
+  else
+    {
+      dec3.setMethod("P0");
+      dec3.attachLocalField(parafield);
+      dec3.synchronize();
+      dec3.setForcedRenormalization(false);
+      dec3.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      const double *expected=targetResults3[rank-nproc_source];
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[0],res[0],1e-13);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[1],res[1],1e-13);
+    }
+  //test 4 - RevIntegral
+  ParaMEDMEM::InterpKernelDEC dec4(*source_group,*target_group);
+  parafield->getField()->setNature(RevIntegral);
+  if (source_group->containsMyRank())
+    { 
+      dec4.setMethod("P0");
+      dec4.attachLocalField(parafield);
+      dec4.synchronize();
+      dec4.setForcedRenormalization(false);
+      dec4.sendData();
+    }
+  else
+    {
+      dec4.setMethod("P0");
+      dec4.attachLocalField(parafield);
+      dec4.synchronize();
+      dec4.setForcedRenormalization(false);
+      dec4.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      const double *expected=targetResults4[rank-nproc_source];
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[0],res[0],1e-13);
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[1],res[1],1e-13);
+    }
+  //test 5 - Conservative volumic reversed
+  ParaMEDMEM::InterpKernelDEC dec5(*source_group,*target_group);
+  parafield->getField()->setNature(ConservativeVolumic);
+  if (source_group->containsMyRank())
+    { 
+      dec5.setMethod("P0");
+      dec5.attachLocalField(parafield);
+      dec5.synchronize();
+      dec5.setForcedRenormalization(false);
+      dec5.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_EQUAL(1,parafield->getField()->getNumberOfTuples());
+      const double expected[]={37.8518518518519,43.5333333333333};
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[rank],res[0],1e-13);
+    }
+  else
+    {
+      dec5.setMethod("P0");
+      dec5.attachLocalField(parafield);
+      dec5.synchronize();
+      dec5.setForcedRenormalization(false);
+      double *res=parafield->getField()->getArray()->getPointer();
+      const double *toSet=targetResults[rank-nproc_source];
+      res[0]=toSet[0];
+      res[1]=toSet[1];
+      dec5.sendData();
+    }
+  //test 6 - Integral reversed
+  ParaMEDMEM::InterpKernelDEC dec6(*source_group,*target_group);
+  parafield->getField()->setNature(Integral);
+  if (source_group->containsMyRank())
+    { 
+      dec6.setMethod("P0");
+      dec6.attachLocalField(parafield);
+      dec6.synchronize();
+      dec6.setForcedRenormalization(false);
+      dec6.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_EQUAL(1,parafield->getField()->getNumberOfTuples());
+      const double expected[]={0.794600591715977,1.35631163708087};
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[rank],res[0],1e-13);
+    }
+  else
+    {
+      dec6.setMethod("P0");
+      dec6.attachLocalField(parafield);
+      dec6.synchronize();
+      dec6.setForcedRenormalization(false);
+      double *res=parafield->getField()->getArray()->getPointer();
+      const double *toSet=targetResults2[rank-nproc_source];
+      res[0]=toSet[0];
+      res[1]=toSet[1];
+      dec6.sendData();
+    }
+  //test 7 - Integral with global constraint reversed
+  ParaMEDMEM::InterpKernelDEC dec7(*source_group,*target_group);
+  parafield->getField()->setNature(IntegralGlobConstraint);
+  if (source_group->containsMyRank())
+    { 
+      dec7.setMethod("P0");
+      dec7.attachLocalField(parafield);
+      dec7.synchronize();
+      dec7.setForcedRenormalization(false);
+      dec7.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_EQUAL(1,parafield->getField()->getNumberOfTuples());
+      const double expected[]={36.4592592592593,44.5407407407407};
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[rank],res[0],1e-13);
+    }
+  else
+    {
+      dec7.setMethod("P0");
+      dec7.attachLocalField(parafield);
+      dec7.synchronize();
+      dec7.setForcedRenormalization(false);
+      double *res=parafield->getField()->getArray()->getPointer();
+      const double *toSet=targetResults3[rank-nproc_source];
+      res[0]=toSet[0];
+      res[1]=toSet[1];
+      dec7.sendData();
+    }
+  //test 8 - RevIntegral reversed
+  ParaMEDMEM::InterpKernelDEC dec8(*source_group,*target_group);
+  parafield->getField()->setNature(RevIntegral);
+  if (source_group->containsMyRank())
+    { 
+      dec8.setMethod("P0");
+      dec8.attachLocalField(parafield);
+      dec8.synchronize();
+      dec8.setForcedRenormalization(false);
+      dec8.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_EQUAL(1,parafield->getField()->getNumberOfTuples());
+      const double expected[]={0.81314102564102553,1.3428994082840233};
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(expected[rank],res[0],1e-13);
+    }
+  else
+    {
+      dec8.setMethod("P0");
+      dec8.attachLocalField(parafield);
+      dec8.synchronize();
+      dec8.setForcedRenormalization(false);
+      double *res=parafield->getField()->getArray()->getPointer();
+      const double *toSet=targetResults4[rank-nproc_source];
+      res[0]=toSet[0];
+      res[1]=toSet[1];
+      dec8.sendData();
+    }
+  //
+  delete parafield;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
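+/*!
+ * Non-overlapping exchange where the two source procs hold a P0 (cell) field and the three target
+ * procs hold a P1 (node) field: the P0 data are sent to the P1 side, checked there, then sent back
+ * and checked on the source side.
+ */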
+void ParaMEDMEMTest::testInterpKernelDECNonOverlapp_2D_P0P1P1P0()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=5)
+    return ;
+  int nproc_source = 2;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafieldP0=0,*parafieldP1=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      if(rank==0)
+        {
+          double coords[6]={-0.3,-0.3, 0.7,0.7, 0.7,-0.3};
+          int conn[3]={0,1,2};
+          //int globalNode[3]={1,2,0};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc0",2);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(3,2);
+          std::copy(coords,coords+6,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      if(rank==1)
+        {
+          double coords[6]={-0.3,-0.3, -0.3,0.7, 0.7,0.7};
+          int conn[3]={0,1,2};
+          //int globalNode[3]={1,3,2};
+          mesh=MEDCouplingUMesh::New("Source mesh Proc1",2);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(3,2);
+          std::copy(coords,coords+6,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+        }
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafieldP1 = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      double *valueP1=parafieldP1->getField()->getArray()->getPointer();
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+      parafieldP1->getField()->setNature(ConservativeVolumic);
+      if(rank==0)
+        {
+          valueP0[0]=31.;
+          valueP1[0]=34.; valueP1[1]=77.; valueP1[2]=53.;
+        }
+      if(rank==1)
+        {
+          valueP0[0]=47.;
+          valueP1[0]=34.; valueP1[1]=57.; valueP1[2]=77.;
+        }
+    }
+  else
+    {
+      const char targetMeshName[]="target mesh";
+      if(rank==2)
+        {
+          double coords[10]={-0.3,-0.3, 0.2,-0.3, 0.7,-0.3, -0.3,0.2, 0.2,0.2 };
+          int conn[7]={0,3,4,1, 1,4,2};
+          //int globalNode[5]={4,3,0,2,1};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc2",2);
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn+4);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(5,2);
+          std::copy(coords,coords+10,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+          DataArrayInt *da=DataArrayInt::New();
+          const int globalNumberingP2[5]={0,1,2,3,4};
+          da->useArray(globalNumberingP2,false,CPP_DEALLOC,5,1);
+          paramesh->setNodeGlobal(da);
+          da->decrRef();
+        }
+      if(rank==3)
+        {
+          double coords[6]={0.2,0.2, 0.7,-0.3, 0.7,0.2};
+          int conn[3]={0,2,1};
+          //int globalNode[3]={1,0,5};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc3",2);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(3,2);
+          std::copy(coords,coords+6,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+          DataArrayInt *da=DataArrayInt::New();
+          const int globalNumberingP3[3]={4,2,5};
+          da->useArray(globalNumberingP3,false,CPP_DEALLOC,3,1);
+          paramesh->setNodeGlobal(da);
+          da->decrRef();
+        }
+      if(rank==4)
+        {
+          double coords[12]={-0.3,0.2, -0.3,0.7, 0.2,0.7, 0.2,0.2, 0.7,0.7, 0.7,0.2};
+          int conn[8]={0,1,2,3, 3,2,4,5};
+          //int globalNode[6]={2,6,7,1,8,5};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc4",2);
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn+4);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(6,2);
+          std::copy(coords,coords+12,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+          DataArrayInt *da=DataArrayInt::New();
+          const int globalNumberingP4[6]={3,6,7,4,8,5};
+          da->useArray(globalNumberingP4,false,CPP_DEALLOC,6,1);
+          paramesh->setNodeGlobal(da);
+          da->decrRef();
+        }
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafieldP1 = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+      parafieldP1->getField()->setNature(ConservativeVolumic);
+    }
+  // test 1 - P0 P1
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+      dec.recvData();
+      const double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(34.42857143,valueP0[0],1e-7);
+        }
+      if(rank==1)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(44.,valueP0[0],1e-7);
+        }
+    }
+  else
+    {
+      dec.setMethod("P1");
+      dec.attachLocalField(parafieldP1);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res=parafieldP1->getField()->getArray()->getConstPointer();
+      if(rank==2)
+        {
+          const double expectP2[5]={39.0, 31.0, 31.0, 47.0, 39.0};
+          CPPUNIT_ASSERT_EQUAL(5,parafieldP1->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP1->getField()->getNumberOfComponents());
+          for(int kk=0;kk<5;kk++)
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(expectP2[kk],res[kk],1e-12);
+        }
+      if(rank==3)
+        {
+          const double expectP3[3]={39.0, 31.0, 31.0};
+          CPPUNIT_ASSERT_EQUAL(3,parafieldP1->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP1->getField()->getNumberOfComponents());
+          for(int kk=0;kk<3;kk++)
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(expectP3[kk],res[kk],1e-12);
+        }
+      if(rank==4)
+        {
+          const double expectP4[6]={47.0, 47.0, 47.0, 39.0, 39.0, 31.0};
+          CPPUNIT_ASSERT_EQUAL(6,parafieldP1->getField()->getNumberOfTuples());
+          CPPUNIT_ASSERT_EQUAL(1,parafieldP1->getField()->getNumberOfComponents());
+          for(int kk=0;kk<6;kk++)
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(expectP4[kk],res[kk],1e-12);
+        }
+      dec.sendData();
+    }
+  //
+  delete parafieldP0;
+  delete parafieldP1;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
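+/*!
+ * Couples a 2D source mesh split over two procs with a "-1 D" target mesh holding a single value,
+ * and checks the values obtained after a round trip for the ConservativeVolumic,
+ * IntegralGlobConstraint, Integral and RevIntegral natures.
+ */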
+void ParaMEDMEMTest::testInterpKernelDEC2DM1D_P0P0()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  int nproc_source=2;
+  set<int> procs_source;
+  set<int> procs_target;
+  //
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source;i<size; i++)
+    procs_target.insert(i);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      double targetCoords[18]={-0.3,-0.3, 0.2,-0.3, 0.7,-0.3, -0.3,0.2, 0.2,0.2, 0.7,0.2, -0.3,0.7, 0.2,0.7, 0.7,0.7 };
+      mesh=MEDCouplingUMesh::New();
+      mesh->setMeshDimension(2);
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(9,2);
+      std::copy(targetCoords,targetCoords+18,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      if(rank==0)
+        {
+          int targetConn[7]={0,3,4,1, 1,4,2};
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn+4);
+          mesh->finishInsertingCells();
+        }
+      else
+        { 
+          int targetConn[11]={4,5,2, 6,7,4,3, 7,8,5,4};
+          mesh->allocateCells(3);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn+3);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn+7);
+          mesh->finishInsertingCells();
+        }
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+      double *vals=parafield->getField()->getArray()->getPointer();
+      if(rank==0)
+        { vals[0]=7.; vals[1]=8.; }
+      else
+        { vals[0]=9.; vals[1]=10.; vals[2]=11.; }
+    }
+  else
+    {
+      mesh=MEDCouplingUMesh::New("an example of -1 D mesh",-1);
+      ParaMEDMEM::ComponentTopology comptopo;
+      paramesh=new ParaMESH(mesh,*target_group,"target mesh");
+      parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafield->getField()->setNature(ConservativeVolumic);
+    }
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  if(source_group->containsMyRank())
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.sendData();
+      dec.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[1],1e-12);
+        }
+      else
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[1],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[2],1e-12);
+        }
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafield);
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
+      dec.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+      dec.sendData();
+    }
+  ParaMEDMEM::InterpKernelDEC dec2(*source_group,*target_group);
+  dec2.setMethod("P0");
+  parafield->getField()->setNature(IntegralGlobConstraint);
+  if(source_group->containsMyRank())
+    {
+      double *vals=parafield->getField()->getArray()->getPointer();
+      if(rank==0)
+        { vals[0]=7.; vals[1]=8.; }
+      else
+        { vals[0]=9.; vals[1]=10.; vals[2]=11.; }
+      dec2.attachLocalField(parafield);
+      dec2.synchronize();
+      dec2.sendData();
+      dec2.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(5.625,res[1],1e-12);
+        }
+      else
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(5.625,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[1],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[2],1e-12);
+        }
+    }
+  else
+    {
+      dec2.attachLocalField(parafield);
+      dec2.synchronize();
+      dec2.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(45.,res[0],1e-12);
+      dec2.sendData();
+    }
+  //
+  ParaMEDMEM::InterpKernelDEC dec3(*source_group,*target_group);
+  dec3.setMethod("P0");
+  parafield->getField()->setNature(Integral);
+  if(source_group->containsMyRank())
+    {
+      double *vals=parafield->getField()->getArray()->getPointer();
+      if(rank==0)
+        { vals[0]=7.; vals[1]=8.; }
+      else
+        { vals[0]=9.; vals[1]=10.; vals[2]=11.; }
+      dec3.attachLocalField(parafield);
+      dec3.synchronize();
+      dec3.sendData();
+      dec3.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(5.625,res[1],1e-12);
+        }
+      else
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(5.625,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[1],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(11.25,res[2],1e-12);
+        }
+    }
+  else
+    {
+      dec3.attachLocalField(parafield);
+      dec3.synchronize();
+      dec3.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(45.,res[0],1e-12);
+      dec3.sendData();
+    }
+  //
+  ParaMEDMEM::InterpKernelDEC dec4(*source_group,*target_group);
+  dec4.setMethod("P0");
+  parafield->getField()->setNature(RevIntegral);
+  if(source_group->containsMyRank())
+    {
+      double *vals=parafield->getField()->getArray()->getPointer();
+      if(rank==0)
+        { vals[0]=7.; vals[1]=8.; }
+      else
+        { vals[0]=9.; vals[1]=10.; vals[2]=11.; }
+      dec4.attachLocalField(parafield);
+      dec4.synchronize();
+      dec4.sendData();
+      dec4.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      if(rank==0)
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[1],1e-12);
+        }
+      else
+        {
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[1],1e-12);
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[2],1e-12);
+        }
+    }
+  else
+    {
+      dec4.attachLocalField(parafield);
+      dec4.synchronize();
+      dec4.recvData();
+      const double *res=parafield->getField()->getArray()->getConstPointer();
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(9.125,res[0],1e-12);
+      dec4.sendData();
+    }
+  delete parafield;
+  delete paramesh;
+  mesh->decrRef();
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
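+/*!
+ * Runs the DEC on a partial communicator: out of three procs only ranks 0 (source) and 1 (target)
+ * take part, the processor groups being built on a sub-communicator obtained by MPI group inclusion.
+ */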
+void ParaMEDMEMTest::testInterpKernelDECPartialProcs()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  set<int> procs_source;
+  set<int> procs_target;
+  //
+  procs_source.insert(0);
+  procs_target.insert(1);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafield=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  double targetCoords[8]={ 0.,0., 1., 0., 0., 1., 1., 1. };
+  CommInterface comm;
+  int grpIds[2]={0,1};
+  MPI_Group grp,group_world;
+  comm.commGroup(MPI_COMM_WORLD,&group_world);
+  comm.groupIncl(group_world,2,grpIds,&grp);
+  MPI_Comm partialComm;
+  comm.commCreate(MPI_COMM_WORLD,grp,&partialComm);
+  //
+  ProcessorGroup* target_group=0;
+  ProcessorGroup* source_group=0;
+  //
+  ParaMEDMEM::InterpKernelDEC *dec=0;
+  if(rank==0 || rank==1)
+    {
+      target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target,partialComm);
+      source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source,partialComm);
+      if(source_group->containsMyRank())
+        {    
+          mesh=MEDCouplingUMesh::New();
+          mesh->setMeshDimension(2);
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,2);
+          std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          int targetConn[4]={0,2,3,1};
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,targetConn);
+          mesh->finishInsertingCells();
+          ParaMEDMEM::ComponentTopology comptopo;
+          paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+          parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+          double *vals=parafield->getField()->getArray()->getPointer();
+          vals[0]=7.;
+          dec=new ParaMEDMEM::InterpKernelDEC(*source_group,*target_group);
+          dec->attachLocalField(parafield);
+          dec->synchronize();
+          dec->sendData();
+          dec->recvData();
+        }
+      else
+        {
+          mesh=MEDCouplingUMesh::New();
+          mesh->setMeshDimension(2);
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,2);
+          std::copy(targetCoords,targetCoords+8,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          int targetConn[6]={0,2,1,2,3,1};
+          mesh->allocateCells(2);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,targetConn+3);
+          mesh->finishInsertingCells();
+          ParaMEDMEM::ComponentTopology comptopo;
+          paramesh=new ParaMESH(mesh,*target_group,"target mesh");
+          parafield=new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);
+          dec=new ParaMEDMEM::InterpKernelDEC(*source_group,*target_group);
+          dec->attachLocalField(parafield);
+          dec->synchronize();
+          dec->recvData();
+          dec->sendData();
+        }
+    }
+  delete parafield;
+  delete paramesh;
+  if(mesh)
+    mesh->decrRef();
+  delete target_group;
+  delete source_group;
+  delete dec;
+  if(partialComm != MPI_COMM_NULL)
+    comm.commFree(&partialComm);
+  comm.groupFree(&grp);
+  comm.groupFree(&group_world);
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*!
+ * This test reproduces a bug reported by Gauthier on 13/9/2010 concerning 3DSurf meshes.
+ * A deadlock can occur in InterpKernelDEC when the global bounding boxes of the 3DSurf meshes
+ * intersect while the cell bounding boxes intersect only on one side.
+ */
+void ParaMEDMEMTest::testInterpKernelDEC3DSurfEmptyBBox()
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //
+  if(size!=3)
+    return ;
+  int nproc_source = 1;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  //
+  ParaMEDMEM::MEDCouplingUMesh *mesh=0;
+  ParaMEDMEM::ParaMESH *paramesh=0;
+  ParaMEDMEM::ParaFIELD *parafieldP0=0;
+  //
+  ParaMEDMEM::CommInterface interface;
+  //
+  ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(source_group->containsMyRank())
+    {
+      double coords[15]={1.,0.,0., 2.,0.,0., 2.,2.,0., 0.,2.,0., 0.5,0.5,1.};
+      int conn[7]={0,1,2,3,0,3,4};
+      mesh=MEDCouplingUMesh::New("Source mesh Proc0",2);
+      mesh->allocateCells(2);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+      mesh->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,conn+4);
+      mesh->finishInsertingCells();
+      DataArrayDouble *myCoords=DataArrayDouble::New();
+      myCoords->alloc(5,3);
+      std::copy(coords,coords+15,myCoords->getPointer());
+      mesh->setCoords(myCoords);
+      myCoords->decrRef();
+      //
+      paramesh=new ParaMESH(mesh,*source_group,"source mesh");
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+      valueP0[0]=7.; valueP0[1]=8.;
+    }
+  else
+    {
+      const char targetMeshName[]="target mesh";
+      if(rank==1)
+        {
+          double coords[12]={0.25,0.25,0.5, 0.,0.25,0.5, 0.,0.,0.5, 0.25,0.,0.5};
+          int conn[4]={0,1,2,3};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc1",2);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,3);
+          std::copy(coords,coords+12,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      if(rank==2)
+        {
+          double coords[12]={0.,0.25,0.5, 0.,0.,0.5, -1.,0.,0.5, -1.,0.25,0.5};
+          int conn[4]={0,1,2,3};
+          mesh=MEDCouplingUMesh::New("Target mesh Proc2",2);
+          mesh->allocateCells(1);
+          mesh->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,conn);
+          mesh->finishInsertingCells();
+          DataArrayDouble *myCoords=DataArrayDouble::New();
+          myCoords->alloc(4,3);
+          std::copy(coords,coords+12,myCoords->getPointer());
+          mesh->setCoords(myCoords);
+          myCoords->decrRef();
+          paramesh=new ParaMESH(mesh,*target_group,targetMeshName);
+        }
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafieldP0 = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+      parafieldP0->getField()->setNature(ConservativeVolumic);
+    }
+  // test 1
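+  // (only attachLocalField/synchronize are exercised here; the data exchange and the value checks
+  //  below are left commented out; per the comment above, the point is that synchronize() must not
+  //  deadlock)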
+  ParaMEDMEM::InterpKernelDEC dec(*source_group,*target_group);
+  if (source_group->containsMyRank())
+    { 
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      // dec.setForcedRenormalization(false);
+      // dec.sendData();
+      // dec.recvData();
+      // const double *valueP0=parafieldP0->getField()->getArray()->getPointer();
+      // if(rank==0)
+      //   {
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,valueP0[0],1e-7);
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[1],1e-7);
+      //   }
+      // if(rank==1)
+      //   {
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(8.64054054054054,valueP0[0],1e-7);
+      //   }
+      // if(rank==2)
+      //   {
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540544,valueP0[0],1e-7);
+      //   }
+    }
+  else
+    {
+      dec.setMethod("P0");
+      dec.attachLocalField(parafieldP0);
+      dec.synchronize();
+      // dec.setForcedRenormalization(false);
+      // dec.recvData();
+      // const double *res=parafieldP0->getField()->getArray()->getConstPointer();
+      // if(rank==3)
+      //   {
+      //     CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+      //     CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(7.4,res[0],1e-12);
+      //   }
+      // if(rank==4)
+      //   {
+      //     CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfTuples());
+      //     CPPUNIT_ASSERT_EQUAL(1,parafieldP0->getField()->getNumberOfComponents());
+      //     CPPUNIT_ASSERT_DOUBLES_EQUAL(9.0540540540540526,res[0],1e-12);
+      //   }
+      // dec.sendData();
+    }
+  //
+  delete parafieldP0;
+  mesh->decrRef();
+  delete paramesh;
+  delete self_group;
+  delete target_group;
+  delete source_group;
+  //
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
+/*!
+ * Tests an asynchronous exchange between two codes:
+ * one sends data with time step dtA, up to a maximum time tmaxA;
+ * the other one receives with time step dtB, up to a maximum time tmaxB.
+ */
+void ParaMEDMEMTest::testAsynchronousInterpKernelDEC_2D(double dtA, double tmaxA, 
+                                                        double dtB, double tmaxB, bool WithPointToPoint, bool Asynchronous,
+                                                        bool WithInterp, const char *srcMeth, const char *targetMeth)
+{
+  std::string srcM(srcMeth);
+  std::string targetM(targetMeth);
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  //the test is meant to run on five processors
+  if (size !=5) return ;
+   
+  int nproc_source = 3;
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+    
+  //loading the geometry for the source group
+
+  ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+  
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+  
+  ICoCo::MEDField* icocofield ;
+
+  char * tmp_dir_c                    = getenv("TMP");
+  string tmp_dir;
+  if (tmp_dir_c != NULL)
+    tmp_dir = string(tmp_dir_c);
+  else
+    tmp_dir = "/tmp";
+  string filename_xml1              = getResourceFile("square1_split");
+  string filename_xml2              = getResourceFile("square2_split"); 
+  //string filename_seq_wr            = makeTmpFile("");
+  //string filename_seq_med           = makeTmpFile("myWrField_seq_pointe221.med");
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+
+  if (source_group->containsMyRank())
+    {
+      string master = filename_xml1;
+      
+      ostringstream strstream;
+      strstream <<master<<rank+1<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_2_"<< rank+1;
+      
+      mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+
+      paramesh=new ParaMESH (mesh,*source_group,"source mesh");
+    
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT( support,*source_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(srcM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);//InvertIntegral);//ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+
+      int nb_local;
+      if(srcM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+      //      double * value= new double[nb_local];
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+    
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+     
+      dec.attachLocalField(icocofield);
+
+
+    }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank())
+    {
+      string master= filename_xml2;
+      ostringstream strstream;
+      strstream << master<<(rank-nproc_source+1)<<".med";
+      ostringstream meshname ;
+      meshname<< "Mesh_3_"<<rank-nproc_source+1;
+      
+      mesh = MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+
+      paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+      //      ParaMEDMEM::ParaSUPPORT* parasupport=new UnstructuredParaSUPPORT(support,*target_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      if(targetM=="P0")
+        {
+          parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+          parafield->getField()->setNature(ConservativeVolumic);//InvertIntegral);//ConservativeVolumic);
+        }
+      else
+        parafield = new ParaFIELD(ON_NODES,NO_TIME,paramesh, comptopo);
+      
+      int nb_local;
+      if(targetM=="P0")
+        nb_local=mesh->getNumberOfCells();
+      else
+        nb_local=mesh->getNumberOfNodes();
+                        
+      double *value=parafield->getField()->getArray()->getPointer();
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+      //      ICoCo::Field* icocofield=new ICoCo::MEDField(paramesh,parafield);
+      icocofield=new ICoCo::MEDField(parafield->getField());
+      
+      dec.attachLocalField(icocofield);
+    }
+    
+  
+  //attaching a DEC to the source group 
+  
+  if (source_group->containsMyRank())
+    { 
+      cout<<"DEC usage"<<endl;
+      dec.setAsynchronous(Asynchronous);
+      if ( WithInterp ) {
+        dec.setTimeInterpolationMethod(LinearTimeInterp);
+      }
+      if ( WithPointToPoint ) {
+        dec.setAllToAllMethod(PointToPoint);
+      }
+      else {
+        dec.setAllToAllMethod(Native);
+      }
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
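+      // The source advances with step dtA up to tmaxA: at each step it sends the current field with
+      // dtA as validity interval (0 on the last step), then bumps the field values to time+dtA.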
+      for (double time=0; time<tmaxA+1e-10; time+=dtA)
+        {
+          cout << "testAsynchronousInterpKernelDEC_2D" << rank << " time " << time
+               << " dtA " << dtA << " tmaxA " << tmaxA << endl ;
+          if ( time+dtA < tmaxA+1e-7 ) {
+            dec.sendData( time , dtA );
+          }
+          else {
+            dec.sendData( time , 0 );
+          }
+          double* value = parafield->getField()->getArray()->getPointer();
+          int nb_local=parafield->getField()->getMesh()->getNumberOfCells();
+          for (int i=0; i<nb_local;i++)
+            value[i]= time+dtA;
+
+       
+        }
+    }
+  
+  //attaching a DEC to the target group
+  if (target_group->containsMyRank())
+    {
+      cout<<"DEC usage"<<endl;
+      dec.setAsynchronous(Asynchronous);
+      if ( WithInterp ) {
+        dec.setTimeInterpolationMethod(LinearTimeInterp);
+      }
+      if ( WithPointToPoint ) {
+        dec.setAllToAllMethod(PointToPoint);
+      }
+      else {
+        dec.setAllToAllMethod(Native);
+      }
+      dec.synchronize();
+      dec.setForcedRenormalization(false);
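+      // The target requests data at its own times (step dtB up to tmaxB); the received field's
+      // volume integral is expected to match time*10000 within 0.001.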
+      vector<double> times;
+      for (double time=0; time<tmaxB+1e-10; time+=dtB)
+        {
+          cout << "testAsynchronousInterpKernelDEC_2D" << rank << " time " << time
+               << " dtB " << dtB << " tmaxB " << tmaxB << endl ;
+          dec.recvData( time );
+          double vi = parafield->getVolumeIntegral(0,true);
+          cout << "testAsynchronousInterpKernelDEC_2D" << rank << " time " << time
+               << " VolumeIntegral " << vi
+               << " time*10000 " << time*10000 << endl ;
+          
+          CPPUNIT_ASSERT_DOUBLES_EQUAL(vi,time*10000,0.001);
+        }
+      
+    }
+  
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  delete parafield ;
+  delete paramesh ;
+  mesh->decrRef() ;
+  delete icocofield ;
+
+  cout << "testAsynchronousInterpKernelDEC_2D" << rank << " MPI_Barrier " << endl ;
+  if (Asynchronous) MPI_Barrier(MPI_COMM_WORLD);
+  cout << "end of InterpKernelDEC_2D test"<<endl;
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_MEDLoader.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_MEDLoader.cxx
new file mode 100644 (file)
index 0000000..f8962a7
--- /dev/null
@@ -0,0 +1,399 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include "MEDLoader.hxx"
+#include "MEDCouplingUMesh.hxx"
+#include "MEDCouplingFieldDouble.hxx"
+
+#include <cppunit/TestAssert.h>
+
+#include <algorithm>
+#include <numeric>
+#include <iostream>
+#include <iterator>
+
+using namespace std;
+using namespace INTERP_KERNEL;
+using namespace ParaMEDMEM;
+
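+// The tests below re-read small reference MED files (via getResourceFile) and check the mesh
+// topology, connectivity checksums and field values against hard-coded expected data.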
+void ParaMEDMEMTest::testMEDLoaderRead1()
+{
+  string fileName=getResourceFile("pointe.med");
+  vector<string> meshNames=MEDLoader::GetMeshNames(fileName.c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)meshNames.size());
+  MEDCouplingUMesh *mesh=MEDLoader::ReadUMeshFromFile(fileName.c_str(),meshNames[0].c_str(),0);
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(16,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)mesh->getAllGeoTypes().size());
+  for(int i=0;i<12;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,mesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,mesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,mesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,mesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,mesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)90,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(701,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+90,0));
+  CPPUNIT_ASSERT_EQUAL(705,std::accumulate(mesh->getNodalConnectivityIndex()->getPointer(),mesh->getNodalConnectivityIndex()->getPointer()+17,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+57,0),1e-12);
+  mesh->decrRef();
+  //
+  vector<string> families=MEDLoader::GetMeshFamiliesNames(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(8,(int)families.size());
+  CPPUNIT_ASSERT(families[2]=="FAMILLE_ELEMENT_3");
+  //
+  vector<string> families2;
+  families2.push_back(families[2]);
+  mesh=MEDLoader::ReadUMeshFromFamilies(fileName.c_str(),meshNames[0].c_str(),0,families2);
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(2,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(2,(int)mesh->getAllGeoTypes().size());
+  CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,mesh->getTypeOfCell(0));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,mesh->getTypeOfCell(1));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)11,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(132,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+11,0));
+  CPPUNIT_ASSERT_EQUAL(16,std::accumulate(mesh->getNodalConnectivityIndex()->getPointer(),mesh->getNodalConnectivityIndex()->getPointer()+3,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+57,0),1e-12);
+  mesh->decrRef();
+  //
+  vector<string> groups=MEDLoader::GetMeshGroupsNames(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(5,(int)groups.size());
+  CPPUNIT_ASSERT(groups[0]=="groupe1");
+  CPPUNIT_ASSERT(groups[1]=="groupe2");
+  CPPUNIT_ASSERT(groups[2]=="groupe3");
+  CPPUNIT_ASSERT(groups[3]=="groupe4");
+  CPPUNIT_ASSERT(groups[4]=="groupe5");
+  vector<string> groups2;
+  groups2.push_back(groups[0]);
+  mesh=MEDLoader::ReadUMeshFromGroups(fileName.c_str(),meshNames[0].c_str(),0,groups2);
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(7,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(2,(int)mesh->getAllGeoTypes().size());
+  for(int i=0;i<6;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,mesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,mesh->getTypeOfCell(6));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)36,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(254,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+36,0));
+  CPPUNIT_ASSERT_EQUAL(141,std::accumulate(mesh->getNodalConnectivityIndex()->getPointer(),mesh->getNodalConnectivityIndex()->getPointer()+8,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+57,0),1e-12);
+  mesh->decrRef();
+  //
+  std::vector<std::string> fieldsName=MEDLoader::GetCellFieldNamesOnMesh(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(2,(int)fieldsName.size());
+  CPPUNIT_ASSERT(fieldsName[0]=="fieldcelldoublescalar");
+  CPPUNIT_ASSERT(fieldsName[1]=="fieldcelldoublevector");
+  std::vector<std::pair<int,int> > its0=MEDLoader::GetCellFieldIterations(fileName.c_str(),meshNames[0].c_str(),fieldsName[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)its0.size());
+  CPPUNIT_ASSERT_EQUAL(-1,its0[0].first);
+  CPPUNIT_ASSERT_EQUAL(-1,its0[0].second);
+  std::vector<std::pair<int,int> > its1=MEDLoader::GetCellFieldIterations(fileName.c_str(),meshNames[0].c_str(),fieldsName[1].c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)its1.size());
+  CPPUNIT_ASSERT_EQUAL(-1,its1[0].first);
+  CPPUNIT_ASSERT_EQUAL(-1,its1[0].second);
+  //
+  MEDCouplingFieldDouble *field0=MEDLoader::ReadFieldCell(fileName.c_str(),meshNames[0].c_str(),0,fieldsName[0].c_str(),its0[0].first,its0[0].second);
+  field0->checkCoherency();
+  CPPUNIT_ASSERT(field0->getName()==fieldsName[0]);
+  CPPUNIT_ASSERT_EQUAL(1,field0->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(16,field0->getNumberOfTuples());
+  const double expectedValues[16]={1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,2.,3.,3.,2.};
+  double diffValue[16];
+  std::transform(field0->getArray()->getPointer(),field0->getArray()->getPointer()+16,expectedValues,diffValue,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue,diffValue+16),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue,diffValue+16),1e-12);
+  const MEDCouplingUMesh *constMesh=dynamic_cast<const MEDCouplingUMesh *>(field0->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(16,constMesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,constMesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)constMesh->getAllGeoTypes().size());
+  for(int i=0;i<12;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,constMesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)90,constMesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(701,std::accumulate(constMesh->getNodalConnectivity()->getConstPointer(),constMesh->getNodalConnectivity()->getConstPointer()+90,0));
+  CPPUNIT_ASSERT_EQUAL(705,std::accumulate(constMesh->getNodalConnectivityIndex()->getConstPointer(),constMesh->getNodalConnectivityIndex()->getConstPointer()+17,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+57,0),1e-12);
+  field0->decrRef();
+  //
+  MEDCouplingFieldDouble *field1=MEDLoader::ReadFieldCell(fileName.c_str(),meshNames[0].c_str(),0,fieldsName[1].c_str(),its1[0].first,its1[0].second);
+  field1->checkCoherency();
+  CPPUNIT_ASSERT(field1->getName()==fieldsName[1]);
+  CPPUNIT_ASSERT_EQUAL(3,field1->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(16,field1->getNumberOfTuples());
+  const double expectedValues2[48]={1.,0.,1.,1.,0.,1.,1.,0.,1.,2.,1.,0.,2.,1.,0.,2.,1.,0.,3.,0.,1.,3.,0.,1.,3.,0.,1.,4.,1.,0.,4.,1.,0.,4.,1.,0.,5.,0.,0.,6.,1.,1.,6.,0.,0.,5.,1.,1.};
+  double diffValue2[48];
+  std::transform(field1->getArray()->getPointer(),field1->getArray()->getPointer()+48,expectedValues2,diffValue2,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue2,diffValue2+48),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue2,diffValue2+48),1e-12);
+  constMesh=dynamic_cast<const MEDCouplingUMesh *>(field1->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(16,constMesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,constMesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)constMesh->getAllGeoTypes().size());
+  for(int i=0;i<12;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,constMesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)90,constMesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(701,std::accumulate(constMesh->getNodalConnectivity()->getConstPointer(),constMesh->getNodalConnectivity()->getConstPointer()+90,0));
+  CPPUNIT_ASSERT_EQUAL(705,std::accumulate(constMesh->getNodalConnectivityIndex()->getConstPointer(),constMesh->getNodalConnectivityIndex()->getConstPointer()+17,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+57,0),1e-12);
+  field1->decrRef();
+  //fields on nodes
+  std::vector<std::string> fieldsNameNode=MEDLoader::GetNodeFieldNamesOnMesh(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(2,(int)fieldsNameNode.size());
+  CPPUNIT_ASSERT(fieldsNameNode[0]=="fieldnodedouble");
+  CPPUNIT_ASSERT(fieldsNameNode[1]=="fieldnodeint");
+  std::vector<std::pair<int,int> > its0Node=MEDLoader::GetNodeFieldIterations(fileName.c_str(),meshNames[0].c_str(),fieldsNameNode[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(3,(int)its0Node.size());
+  CPPUNIT_ASSERT_EQUAL(-1,its0Node[0].first);
+  CPPUNIT_ASSERT_EQUAL(-1,its0Node[0].second);
+  CPPUNIT_ASSERT_EQUAL(1,its0Node[1].first);
+  CPPUNIT_ASSERT_EQUAL(-1,its0Node[1].second);
+  CPPUNIT_ASSERT_EQUAL(2,its0Node[2].first);
+  CPPUNIT_ASSERT_EQUAL(-1,its0Node[2].second);
+  MEDCouplingFieldDouble *field0Nodes=MEDLoader::ReadFieldNode(fileName.c_str(),meshNames[0].c_str(),0,fieldsNameNode[0].c_str(),its0Node[0].first,its0Node[0].second);
+  field0Nodes->checkCoherency();
+  CPPUNIT_ASSERT(field0Nodes->getName()==fieldsNameNode[0]);
+  CPPUNIT_ASSERT_EQUAL(1,field0Nodes->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(19,field0Nodes->getNumberOfTuples());
+  const double expectedValues3[19]={1.,1.,1.,2.,2.,2.,3.,3.,3.,4.,4.,4.,5.,5.,5.,6.,6.,6.,7.};
+  double diffValue3[19];
+  std::transform(field0Nodes->getArray()->getPointer(),field0Nodes->getArray()->getPointer()+19,expectedValues3,diffValue3,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue3,diffValue3+19),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue3,diffValue3+19),1e-12);
+  constMesh=dynamic_cast<const MEDCouplingUMesh *>(field0Nodes->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  field0Nodes->decrRef();
+  //
+  field0Nodes=MEDLoader::ReadFieldNode(fileName.c_str(),meshNames[0].c_str(),0,fieldsNameNode[0].c_str(),its0Node[2].first,its0Node[2].second);
+  field0Nodes->checkCoherency();
+  CPPUNIT_ASSERT(field0Nodes->getName()==fieldsNameNode[0]);
+  CPPUNIT_ASSERT_EQUAL(1,field0Nodes->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(19,field0Nodes->getNumberOfTuples());
+  const double expectedValues4[19]={1.,2.,2.,2.,3.,3.,3.,4.,4.,4.,5.,5.,5.,6.,6.,6.,7.,7.,7.};
+  std::transform(field0Nodes->getArray()->getPointer(),field0Nodes->getArray()->getPointer()+19,expectedValues4,diffValue3,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue3,diffValue3+19),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue3,diffValue3+19),1e-12);
+  constMesh=dynamic_cast<const MEDCouplingUMesh *>(field0Nodes->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(16,constMesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,constMesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)constMesh->getAllGeoTypes().size());
+  for(int i=0;i<12;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,constMesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)90,constMesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(701,std::accumulate(constMesh->getNodalConnectivity()->getConstPointer(),constMesh->getNodalConnectivity()->getConstPointer()+90,0));
+  CPPUNIT_ASSERT_EQUAL(705,std::accumulate(constMesh->getNodalConnectivityIndex()->getConstPointer(),constMesh->getNodalConnectivityIndex()->getConstPointer()+17,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+57,0),1e-12);
+  field0Nodes->decrRef();
+  //
+  field0Nodes=MEDLoader::ReadFieldNode(fileName.c_str(),meshNames[0].c_str(),0,fieldsNameNode[0].c_str(),its0Node[0].first,its0Node[0].second);
+  field0Nodes->checkCoherency();
+  CPPUNIT_ASSERT(field0Nodes->getName()==fieldsNameNode[0]);
+  CPPUNIT_ASSERT_EQUAL(1,field0Nodes->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(19,field0Nodes->getNumberOfTuples());
+  const double expectedValues5[19]={1.,1.,1.,2.,2.,2.,3.,3.,3.,4.,4.,4.,5.,5.,5.,6.,6.,6.,7.};
+  std::transform(field0Nodes->getArray()->getPointer(),field0Nodes->getArray()->getPointer()+19,expectedValues5,diffValue3,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue3,diffValue3+19),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue3,diffValue3+19),1e-12);
+  constMesh=dynamic_cast<const MEDCouplingUMesh *>(field0Nodes->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(16,constMesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,constMesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)constMesh->getAllGeoTypes().size());
+  for(int i=0;i<12;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,constMesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_HEXA8,constMesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_PYRA5,constMesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)90,constMesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(701,std::accumulate(constMesh->getNodalConnectivity()->getConstPointer(),constMesh->getNodalConnectivity()->getConstPointer()+90,0));
+  CPPUNIT_ASSERT_EQUAL(705,std::accumulate(constMesh->getNodalConnectivityIndex()->getConstPointer(),constMesh->getNodalConnectivityIndex()->getConstPointer()+17,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(46.,std::accumulate(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+57,0),1e-12);
+  field0Nodes->decrRef();
+}
+
+void ParaMEDMEMTest::testMEDLoaderPolygonRead()
+{
+  string fileName=getResourceFile("polygones.med");
+  vector<string> meshNames=MEDLoader::GetMeshNames(fileName.c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)meshNames.size());
+  CPPUNIT_ASSERT(meshNames[0]=="Bord");
+  MEDCouplingUMesh *mesh=MEDLoader::ReadUMeshFromFile(fileName.c_str(),meshNames[0].c_str(),0);
+  mesh->checkCoherency();
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(2,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(538,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(579,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(2,(int)mesh->getAllGeoTypes().size());
+  for(int i=0;i<514;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(i));
+  for(int i=514;i<538;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,mesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+1737,0),1e-12);
+  const double expectedVals1[12]={1.4851585216522212,-0.5,0.,1.4851585216522212,-0.4,0.,1.4851585216522212,-0.3,0., 1.5741585216522211, -0.5, 0. };
+  double diffValue1[12];
+  std::transform(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+12,expectedVals1,diffValue1,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue1,diffValue1+12),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue1,diffValue1+12),1e-12);
+  CPPUNIT_ASSERT_EQUAL((std::size_t)2768,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(651050,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+2768,0));
+  CPPUNIT_ASSERT_EQUAL(725943,std::accumulate(mesh->getNodalConnectivityIndex()->getPointer(),mesh->getNodalConnectivityIndex()->getPointer()+539,0));
+  mesh->decrRef();
+  //
+  std::vector<std::string> fieldsName=MEDLoader::GetCellFieldNamesOnMesh(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(3,(int)fieldsName.size());
+  CPPUNIT_ASSERT(fieldsName[0]=="bord_:_distorsion");
+  CPPUNIT_ASSERT(fieldsName[1]=="bord_:_familles");
+  CPPUNIT_ASSERT(fieldsName[2]=="bord_:_non-ortho");
+  std::vector<std::pair<int,int> > its0=MEDLoader::GetCellFieldIterations(fileName.c_str(),meshNames[0].c_str(),fieldsName[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)its0.size());
+  MEDCouplingFieldDouble *field=MEDLoader::ReadFieldCell(fileName.c_str(),meshNames[0].c_str(),0,fieldsName[0].c_str(),its0[0].first,its0[0].second);
+  field->checkCoherency();
+  CPPUNIT_ASSERT(field->getName()==fieldsName[0]);
+  CPPUNIT_ASSERT_EQUAL(1,field->getNumberOfComponents());
+  CPPUNIT_ASSERT_EQUAL(538,field->getNumberOfTuples());
+  const MEDCouplingUMesh *constMesh=dynamic_cast<const MEDCouplingUMesh *>(field->getMesh());
+  CPPUNIT_ASSERT(constMesh);
+  CPPUNIT_ASSERT_EQUAL(3,constMesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(2,constMesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(538,constMesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(579,constMesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(2,(int)constMesh->getAllGeoTypes().size());
+  for(int i=0;i<514;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,constMesh->getTypeOfCell(i));
+  for(int i=514;i<538;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,constMesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,std::accumulate(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+1737,0),1e-12);
+  std::transform(constMesh->getCoords()->getConstPointer(),constMesh->getCoords()->getConstPointer()+12,expectedVals1,diffValue1,std::minus<double>());
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::max_element(diffValue1,diffValue1+12),1e-12);
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(0.,*std::min_element(diffValue1,diffValue1+12),1e-12);
+  CPPUNIT_ASSERT_EQUAL((std::size_t)2768,constMesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(651050,std::accumulate(constMesh->getNodalConnectivity()->getConstPointer(),constMesh->getNodalConnectivity()->getConstPointer()+2768,0));
+  CPPUNIT_ASSERT_EQUAL(725943,std::accumulate(constMesh->getNodalConnectivityIndex()->getConstPointer(),constMesh->getNodalConnectivityIndex()->getConstPointer()+539,0));
+  const double *values=field->getArray()->getPointer();
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(2.87214203182918,std::accumulate(values,values+538,0.),1e-12);
+  field->decrRef();
+}
+
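+// Note on the third argument of MEDLoader::ReadUMeshFromFile used below: it is the mesh
+// dimension relative to the maximal one stored in the file (0 = cells of the highest
+// dimension, -1 = the entities one level below), which is why the same file yields both a
+// volume mesh and a face mesh in this test.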
+void ParaMEDMEMTest::testMEDLoaderPolyhedronRead()
+{
+  string fileName=getResourceFile("poly3D.med");
+  vector<string> meshNames=MEDLoader::GetMeshNames(fileName.c_str());
+  CPPUNIT_ASSERT_EQUAL(1,(int)meshNames.size());
+  CPPUNIT_ASSERT(meshNames[0]=="poly3D");
+  MEDCouplingUMesh *mesh=MEDLoader::ReadUMeshFromFile(fileName.c_str(),meshNames[0].c_str(),0);
+  mesh->checkCoherency();
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(2,(int)mesh->getAllGeoTypes().size());
+  CPPUNIT_ASSERT_EQUAL(NORM_TETRA4,mesh->getTypeOfCell(0));
+  CPPUNIT_ASSERT_EQUAL(NORM_POLYHED,mesh->getTypeOfCell(1));
+  CPPUNIT_ASSERT_EQUAL(NORM_POLYHED,mesh->getTypeOfCell(2));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)98,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(725,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+98,0));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(110.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+57,0),1e-12);
+  CPPUNIT_ASSERT_EQUAL(155,std::accumulate(mesh->getNodalConnectivityIndex()->getPointer(),mesh->getNodalConnectivityIndex()->getPointer()+4,0));
+  mesh->decrRef();
+  //
+  mesh=MEDLoader::ReadUMeshFromFile(fileName.c_str(),meshNames[0].c_str(),-1);
+  mesh->checkCoherency();
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(2,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(17,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,(int)mesh->getAllGeoTypes().size());
+  CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,mesh->getTypeOfCell(0));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(1));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(2));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(3));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(4));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(5));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(6));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(7));
+  CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,mesh->getTypeOfCell(8));
+  CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,mesh->getTypeOfCell(9));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(10));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(11));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(12));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(13));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(14));
+  CPPUNIT_ASSERT_EQUAL(NORM_QUAD4,mesh->getTypeOfCell(15));
+  CPPUNIT_ASSERT_EQUAL(NORM_TRI3,mesh->getTypeOfCell(16));
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(110.,std::accumulate(mesh->getCoords()->getPointer(),mesh->getCoords()->getPointer()+57,0),1e-12);
+  CPPUNIT_ASSERT_EQUAL((std::size_t)83,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(619,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+83,0));
+  mesh->decrRef();
+  //
+  vector<string> families=MEDLoader::GetMeshFamiliesNames(fileName.c_str(),meshNames[0].c_str());
+  CPPUNIT_ASSERT_EQUAL(4,(int)families.size());
+  CPPUNIT_ASSERT(families[0]=="FAMILLE_FACE_POLYGONS3");
+  CPPUNIT_ASSERT(families[1]=="FAMILLE_FACE_QUAD41");
+  CPPUNIT_ASSERT(families[2]=="FAMILLE_FACE_TRIA32");
+  CPPUNIT_ASSERT(families[3]=="FAMILLE_ZERO");
+  vector<string> families2;
+  families2.push_back(families[0]);
+  mesh=MEDLoader::ReadUMeshFromFamilies(fileName.c_str(),meshNames[0].c_str(),-1,families2);
+  mesh->checkCoherency();
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(2,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(1,(int)mesh->getAllGeoTypes().size());
+  for(int i=0;i<3;i++)
+    CPPUNIT_ASSERT_EQUAL(NORM_POLYGON,mesh->getTypeOfCell(i));
+  CPPUNIT_ASSERT_EQUAL((std::size_t)19,mesh->getNodalConnectivity()->getNbOfElems());
+  CPPUNIT_ASSERT_EQUAL(117,std::accumulate(mesh->getNodalConnectivity()->getPointer(),mesh->getNodalConnectivity()->getPointer()+19,0));
+  mesh->decrRef();
+  //
+  mesh=MEDLoader::ReadUMeshFromFamilies(fileName.c_str(),meshNames[0].c_str(),0,families2);
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getSpaceDimension());
+  CPPUNIT_ASSERT_EQUAL(0,mesh->getNumberOfCells());
+  CPPUNIT_ASSERT_EQUAL(19,mesh->getNumberOfNodes());
+  CPPUNIT_ASSERT_EQUAL(3,mesh->getMeshDimension());
+  CPPUNIT_ASSERT_EQUAL(0,(int)mesh->getAllGeoTypes().size());
+  mesh->decrRef();
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_MPIProcessorGroup.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_MPIProcessorGroup.cxx
new file mode 100644 (file)
index 0000000..91ef8bb
--- /dev/null
@@ -0,0 +1,149 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "InterpolationUtils.hxx"
+
+#include <string>
+
+// Use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// Use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+
+using namespace std;
+using namespace ParaMEDMEM;
+/*
+ * Check methods defined in MPIProcessorGroup.hxx
+ *
+ (+) MPIProcessorGroup(const CommInterface& interface);
+ (+) MPIProcessorGroup(const CommInterface& interface, set<int> proc_ids);
+ (u) MPIProcessorGroup (const ProcessorGroup& proc_group, set<int> proc_ids);
+ (+) MPIProcessorGroup(const CommInterface& interface,int pstart, int pend);
+ (+) virtual ~MPIProcessorGroup();
+ (+) virtual ProcessorGroup* fuse (const ProcessorGroup&) const;
+ (u) void intersect (ProcessorGroup&){};
+ (+) int myRank() const {int rank; MPI_Comm_rank(_comm,&rank); return rank;}
+ (+) bool containsMyRank() const { int rank; MPI_Group_rank(_group, &rank); return (rank!=MPI_UNDEFINED);}
+ (+) int translateRank(const ProcessorGroup* group, int rank) const;
+ (+) const MPI_Comm* getComm() const {return &_comm;}
+ (+) ProcessorGroup* createComplementProcGroup() const;
+ (o) ProcessorGroup* createProcGroup() const;
+   
+*/
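+
+// A minimal illustrative sketch (added for exposition, not called by the tests below): it
+// builds a group spanning MPI_COMM_WORLD and checks that the group-local rank matches the
+// raw MPI rank, mirroring what testMPIProcessorGroup_rank() verifies with fused groups.
+static void exampleWorldGroupSketch()
+{
+  CommInterface comm_interface;            // C++ wrapper over the MPI C API
+  MPIProcessorGroup world(comm_interface); // processor group over all ranks of MPI_COMM_WORLD
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  if (world.containsMyRank())
+    CPPUNIT_ASSERT_EQUAL(rank,world.myRank());
+}
+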
+void ParaMEDMEMTest::testMPIProcessorGroup_constructor()
+{
+  CommInterface comm_interface;
+  MPIProcessorGroup* group = new MPIProcessorGroup(comm_interface);
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD, &size);
+  CPPUNIT_ASSERT_EQUAL(size,group->size());
+  int size2;
+  const MPI_Comm* communicator=group->getComm();
+  MPI_Comm_size(*communicator, &size2);
+  CPPUNIT_ASSERT_EQUAL(size,size2);
+  delete group;
+
+  set <int> procs;
+
+  procs.insert(0);
+  procs.insert(1);
+  if (size==1)
+    CPPUNIT_ASSERT_THROW(group=new MPIProcessorGroup(comm_interface,procs),INTERP_KERNEL::Exception);
+  else
+    {
+      CPPUNIT_ASSERT_NO_THROW(  group=new MPIProcessorGroup(comm_interface,procs));
+      CPPUNIT_ASSERT_EQUAL (group->size(),2);
+      delete group;
+    }
+
+  //throws because plast<pfirst
+  CPPUNIT_ASSERT_THROW(group=new MPIProcessorGroup(comm_interface,1,0),INTERP_KERNEL::Exception);
+  //throws because plast is beyond size-1
+  CPPUNIT_ASSERT_THROW(group=new MPIProcessorGroup(comm_interface,0,size),INTERP_KERNEL::Exception);
+  if (size>1)
+    {
+      group=new MPIProcessorGroup(comm_interface,0,size-2);
+      CPPUNIT_ASSERT_EQUAL(group->size(),size-1);
+      delete group;
+    }
+
+}
+void ParaMEDMEMTest::testMPIProcessorGroup_boolean()
+{
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD, &size);
+  
+  CommInterface comm_interface;
+  MPIProcessorGroup group(comm_interface,0,0);
+  MPIProcessorGroup group2(comm_interface,size-1,size-1);
+  ProcessorGroup* group_fuse=group.fuse(group2);
+  int group_fuse_size=(size==1)?1:2;
+  CPPUNIT_ASSERT_EQUAL(group_fuse_size,group_fuse->size());
+  ProcessorGroup* group_complement=((MPIProcessorGroup*)group_fuse)->createComplementProcGroup();
+  CPPUNIT_ASSERT_EQUAL(group_complement->size(),size-group_fuse_size);
+  
+  delete group_fuse;
+  delete group_complement;
+
+  //intersect not implemented yet
+  //   if (size>1)
+  //   {
+  //     MPIProcessorGroup group3(comm_interface,0,size-2);
+  //     MPIProcessorGroup group4(comm_interface,1,size-1);
+  //     group3.intersect(group4);
+  //     CPPUNIT_ASSERT_EQUAL(group3.size(),size-2);
+  //   }
+}
+
+void ParaMEDMEMTest::testMPIProcessorGroup_rank()
+{
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD, &size);
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  
+  CommInterface comm_interface;
+  MPIProcessorGroup group(comm_interface,0,0);
+  MPIProcessorGroup group2(comm_interface,size-1,size-1);
+  ProcessorGroup* group_fuse=group2.fuse(group);
+  
+  if (group.containsMyRank())
+    CPPUNIT_ASSERT_EQUAL (group.myRank(), rank);
+
+  if (group2.containsMyRank())
+    {
+      int trank=group_fuse->translateRank(&group2,0);
+      if (size==1)
+        CPPUNIT_ASSERT_EQUAL(trank,0);
+      else  
+        CPPUNIT_ASSERT_EQUAL(trank,1);
+    }
+  delete group_fuse;
+}
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_NonCoincidentDEC.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_NonCoincidentDEC.cxx
new file mode 100644 (file)
index 0000000..6ab7130
--- /dev/null
@@ -0,0 +1,256 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#ifdef MED_ENABLE_FVM
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "MEDMEM_Exception.hxx"
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "DEC.hxx"
+#include "NonCoincidentDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "UnstructuredParaSUPPORT.hxx"
+#include "ICoCoMEDField.hxx"
+#include <string>
+
+// Use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// Use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+
+using namespace std;
+using namespace ParaMEDMEM;
+using namespace MEDMEM;
+/*
+ * Check methods defined in NonCoincidentDEC.hxx
+ *
+ NonCoincidentDEC(ProcessorGroup& local_group, ProcessorGroup& distant_group);
+ virtual ~NonCoincidentDEC();
+ void synchronize();
+ void recvData();
+ void sendData();
+*/
+
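+// Sketch of the exchange pattern exercised below (assuming the processor groups, the
+// MEDMEM fields and the ICoCo wrappers are already built, as done in testNonCoincidentDEC()):
+//
+//   NonCoincidentDEC dec(*source_group,*target_group);
+//   dec.attachLocalField(icocofield);            // done on both the source and the target side
+//   dec.synchronize();                           // builds the interpolation topology
+//   dec.setOption("ForcedRenormalization",false);
+//   // source ranks then call dec.sendData(), target ranks call dec.recvData()
+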
+void ParaMEDMEMTest::testNonCoincidentDEC_2D()
+{
+
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+
+  //the test is meant to run on five processors
+  if (size !=5) return ;
+  
+  testNonCoincidentDEC( "/share/salome/resources/med/square1_split",
+                        "Mesh_2",
+                        "/share/salome/resources/med/square2_split",
+                        "Mesh_3",
+                        3,
+                        1e-6);
+} 
+
+void ParaMEDMEMTest::testNonCoincidentDEC_3D()
+{
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  
+  //the test is meant to run on four processors
+  if (size !=4) return ;
+  
+  testNonCoincidentDEC( "/share/salome/resources/med/blade_12000_split2",
+                        "Mesh_1",
+                        "/share/salome/resources/med/blade_3000_split2",
+                        "Mesh_1",
+                        2,
+                        1e4);
+} 
+
+void ParaMEDMEMTest::testNonCoincidentDEC(const string& filename1,
+                                          const string& meshname1,
+                                          const string& filename2,
+                                          const string& meshname2,
+                                          int nproc_source,
+                                          double epsilon)
+{
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+   
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  
+  ParaMEDMEM::ParaMESH* source_mesh=0;
+  ParaMEDMEM::ParaMESH* target_mesh=0;
+  ParaMEDMEM::ParaSUPPORT* parasupport=0;
+  //loading the geometry for the source group
+
+  ParaMEDMEM::NonCoincidentDEC dec (*source_group,*target_group);
+
+  MEDMEM::MESH* mesh;
+  MEDMEM::SUPPORT* support;
+  MEDMEM::FIELD<double>* field;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+  
+  string filename_xml1              = getResourceFile(filename1);
+  string filename_xml2              = getResourceFile(filename2); 
+  //string filename_seq_wr            = makeTmpFile("");
+  //string filename_seq_med           = makeTmpFile("myWrField_seq_pointe221.med");
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  //aRemover.Register(filename_seq_wr);
+  //aRemover.Register(filename_seq_med);
+  MPI_Barrier(MPI_COMM_WORLD);
+  ICoCo::Field* icocofield;
+  if (source_group->containsMyRank())
+    {
+      string master = filename_xml1;
+      
+      ostringstream strstream;
+      strstream <<master<<rank+1<<".med";
+      ostringstream meshname ;
+      meshname<< meshname1<<"_"<< rank+1;
+      
+      CPPUNIT_ASSERT_NO_THROW(mesh = new MESH(MED_DRIVER,strstream.str(),meshname.str()));
+      support=new MEDMEM::SUPPORT(mesh,"all elements",MED_EN::MED_CELL);
+    
+      paramesh=new ParaMESH (*mesh,*source_group,"source mesh");
+    
+      parasupport=new UnstructuredParaSUPPORT( support,*source_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafield = new ParaFIELD(parasupport, comptopo);
+
+      
+      int nb_local=support->getNumberOfElements(MED_EN::MED_ALL_ELEMENTS);
+      double * value= new double[nb_local];
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=1.0;
+      parafield->getField()->setValue(value);
+
+      icocofield=new ICoCo::MEDField(paramesh,parafield);
+     
+      dec.attachLocalField(icocofield);
+      delete [] value;
+    }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank())
+    {
+      string master= filename_xml2;
+      ostringstream strstream;
+      strstream << master<<(rank-nproc_source+1)<<".med";
+      ostringstream meshname ;
+      meshname<< meshname2<<"_"<<rank-nproc_source+1;
+      
+      CPPUNIT_ASSERT_NO_THROW(mesh = new MESH(MED_DRIVER,strstream.str(),meshname.str()));
+      support=new MEDMEM::SUPPORT(mesh,"all elements",MED_EN::MED_CELL);
+      
+      paramesh=new ParaMESH (*mesh,*target_group,"target mesh");
+      parasupport=new UnstructuredParaSUPPORT(support,*target_group);
+      ParaMEDMEM::ComponentTopology comptopo;
+      parafield = new ParaFIELD(parasupport, comptopo);
+
+      
+      int nb_local=support->getNumberOfElements(MED_EN::MED_ALL_ELEMENTS);
+      double * value= new double[nb_local];
+      for(int ielem=0; ielem<nb_local;ielem++)
+        value[ielem]=0.0;
+      parafield->getField()->setValue(value);
+      icocofield=new ICoCo::MEDField(paramesh,parafield);
+      
+      dec.attachLocalField(icocofield);
+      delete [] value;
+    }
+    
+  
+  //on the source side: compute the integral, synchronize the DEC and send the data
+  double field_before_int;
+  double field_after_int;
+  
+  if (source_group->containsMyRank())
+    { 
+      field_before_int = parafield->getVolumeIntegral(1);
+      MPI_Bcast(&field_before_int, 1,MPI_DOUBLE, 0,MPI_COMM_WORLD);
+      dec.synchronize();
+      cout<<"DEC usage"<<endl;
+      dec.setOption("ForcedRenormalization",false);
+
+      dec.sendData();
+      //      paramesh->write(MED_DRIVER,"./sourcesquarenc");
+      //parafield->write(MED_DRIVER,"./sourcesquarenc","boundary");  
+   
+      
+    }
+  
+  //on the target side: synchronize the DEC, receive the data and compute the integral
+  if (target_group->containsMyRank())
+    {
+      MPI_Bcast(&field_before_int, 1,MPI_DOUBLE, 0,MPI_COMM_WORLD);
+     
+      dec.synchronize();
+      dec.setOption("ForcedRenormalization",false);
+      dec.recvData();
+      //paramesh->write(MED_DRIVER, "./targetsquarenc");
+      //parafield->write(MED_DRIVER, "./targetsquarenc", "boundary");
+      field_after_int = parafield->getVolumeIntegral(1);
+      
+    }
+  MPI_Bcast(&field_before_int,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
+  MPI_Bcast(&field_after_int, 1,MPI_DOUBLE, size-1,MPI_COMM_WORLD);
+     
+  CPPUNIT_ASSERT_DOUBLES_EQUAL(field_before_int, field_after_int, epsilon);
+    
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  delete icocofield;
+  delete paramesh;
+  delete parafield;
+  delete support;
+  delete parasupport;
+  delete mesh;
+  MPI_Barrier(MPI_COMM_WORLD);
+  
+}
+#endif
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_OverlapDEC.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_OverlapDEC.cxx
new file mode 100644 (file)
index 0000000..c7a6104
--- /dev/null
@@ -0,0 +1,212 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "OverlapDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+
+#include "MEDCouplingUMesh.hxx"
+
+#include <set>
+
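+// OverlapDEC works with a single processor group in which every rank may carry both a piece
+// of the source field and a piece of the target field; the test below therefore builds two
+// meshes and two ParaFIELDs per rank and performs one bidirectional exchange:
+//
+//   OverlapDEC dec(procs);                  // the whole group of participating ranks
+//   dec.attachSourceLocalField(parafieldS);
+//   dec.attachTargetLocalField(parafieldT);
+//   dec.synchronize();
+//   dec.sendRecvData(true);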
+void ParaMEDMEMTest::testOverlapDEC1()
+{
+  std::string srcM("P0");
+  std::string targetM("P0");
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+
+  if (size != 3) return ;
+   
+  int nproc = 3;
+  std::set<int> procs;
+  
+  for (int i=0; i<nproc; i++)
+    procs.insert(i);
+  
+  ParaMEDMEM::CommInterface interface;
+
+  ParaMEDMEM::OverlapDEC dec(procs);
+
+  ParaMEDMEM::MEDCouplingUMesh* meshS=0;
+  ParaMEDMEM::MEDCouplingUMesh* meshT=0;
+  ParaMEDMEM::ParaMESH* parameshS=0;
+  ParaMEDMEM::ParaMESH* parameshT=0;
+  ParaMEDMEM::ParaFIELD* parafieldS=0;
+  ParaMEDMEM::ParaFIELD* parafieldT=0;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+  if(rank==0)
+    {
+      const double coordsS[10]={0.,0.,0.5,0.,1.,0.,0.,0.5,0.5,0.5};
+      const double coordsT[6]={0.,0.,1.,0.,1.,1.};
+      meshS=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshS->setMeshDimension(2);
+      ParaMEDMEM::DataArrayDouble *myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(5,2);
+      std::copy(coordsS,coordsS+10,myCoords->getPointer());
+      meshS->setCoords(myCoords);
+      myCoords->decrRef();
+      int connS[7]={0,3,4,1, 1,4,2};
+      meshS->allocateCells(2);
+      meshS->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,connS);
+      meshS->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,connS+4);
+      meshS->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      parameshS=new ParaMEDMEM::ParaMESH(meshS,*dec.getGrp(),"source mesh");
+      parafieldS=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshS,comptopo);
+      parafieldS->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsS=parafieldS->getField()->getArray()->getPointer();
+      valsS[0]=7.; valsS[1]=8.;
+      //
+      meshT=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshT->setMeshDimension(2);
+      myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(3,2);
+      std::copy(coordsT,coordsT+6,myCoords->getPointer());
+      meshT->setCoords(myCoords);
+      myCoords->decrRef();
+      int connT[3]={0,2,1};
+      meshT->allocateCells(1);
+      meshT->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,connT);
+      meshT->finishInsertingCells();
+      parameshT=new ParaMEDMEM::ParaMESH(meshT,*dec.getGrp(),"target mesh");
+      parafieldT=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshT,comptopo);
+      parafieldT->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsT=parafieldT->getField()->getArray()->getPointer();
+      valsT[0]=7.;
+    }
+  //
+  if(rank==1)
+    {
+      const double coordsS[10]={1.,0.,0.5,0.5,1.,0.5,0.5,1.,1.,1.};
+      const double coordsT[6]={0.,0.,0.5,0.5,0.,1.};
+      meshS=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshS->setMeshDimension(2);
+      ParaMEDMEM::DataArrayDouble *myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(5,2);
+      std::copy(coordsS,coordsS+10,myCoords->getPointer());
+      meshS->setCoords(myCoords);
+      myCoords->decrRef();
+      int connS[7]={0,1,2, 1,3,4,2};
+      meshS->allocateCells(2);
+      meshS->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,connS);
+      meshS->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,connS+3);
+      meshS->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      parameshS=new ParaMEDMEM::ParaMESH(meshS,*dec.getGrp(),"source mesh");
+      parafieldS=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshS,comptopo);
+      parafieldS->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsS=parafieldS->getField()->getArray()->getPointer();
+      valsS[0]=9.; valsS[1]=11.;
+      //
+      meshT=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshT->setMeshDimension(2);
+      myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(3,2);
+      std::copy(coordsT,coordsT+6,myCoords->getPointer());
+      meshT->setCoords(myCoords);
+      myCoords->decrRef();
+      int connT[3]={0,2,1};
+      meshT->allocateCells(1);
+      meshT->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,connT);
+      meshT->finishInsertingCells();
+      parameshT=new ParaMEDMEM::ParaMESH(meshT,*dec.getGrp(),"target mesh");
+      parafieldT=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshT,comptopo);
+      parafieldT->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsT=parafieldT->getField()->getArray()->getPointer();
+      valsT[0]=8.;
+    }
+  //
+  if(rank==2)
+    {
+      const double coordsS[8]={0.,0.5, 0.5,0.5, 0.,1., 0.5,1.};
+      const double coordsT[6]={0.5,0.5,0.,1.,1.,1.};
+      meshS=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshS->setMeshDimension(2);
+      ParaMEDMEM::DataArrayDouble *myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(4,2);
+      std::copy(coordsS,coordsS+8,myCoords->getPointer());
+      meshS->setCoords(myCoords);
+      myCoords->decrRef();
+      int connS[4]={0,2,3,1};
+      meshS->allocateCells(1);
+      meshS->insertNextCell(INTERP_KERNEL::NORM_QUAD4,4,connS);
+      meshS->finishInsertingCells();
+      ParaMEDMEM::ComponentTopology comptopo;
+      parameshS=new ParaMEDMEM::ParaMESH(meshS,*dec.getGrp(),"source mesh");
+      parafieldS=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshS,comptopo);
+      parafieldS->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsS=parafieldS->getField()->getArray()->getPointer();
+      valsS[0]=10.;
+      //
+      meshT=ParaMEDMEM::MEDCouplingUMesh::New();
+      meshT->setMeshDimension(2);
+      myCoords=ParaMEDMEM::DataArrayDouble::New();
+      myCoords->alloc(3,2);
+      std::copy(coordsT,coordsT+6,myCoords->getPointer());
+      meshT->setCoords(myCoords);
+      myCoords->decrRef();
+      int connT[3]={0,1,2};
+      meshT->allocateCells(1);
+      meshT->insertNextCell(INTERP_KERNEL::NORM_TRI3,3,connT);
+      meshT->finishInsertingCells();
+      parameshT=new ParaMEDMEM::ParaMESH(meshT,*dec.getGrp(),"target mesh");
+      parafieldT=new ParaMEDMEM::ParaFIELD(ParaMEDMEM::ON_CELLS,ParaMEDMEM::NO_TIME,parameshT,comptopo);
+      parafieldT->getField()->setNature(ParaMEDMEM::ConservativeVolumic);//IntegralGlobConstraint
+      double *valsT=parafieldT->getField()->getArray()->getPointer();
+      valsT[0]=9.;
+    }
+  dec.attachSourceLocalField(parafieldS);
+  dec.attachTargetLocalField(parafieldT);
+  dec.synchronize();
+  dec.sendRecvData(true);
+  //
+  if(rank==0)
+    {
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(8.75,parafieldT->getField()->getArray()->getIJ(0,0),1e-12);
+    }
+  if(rank==1)
+    {
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(8.5,parafieldT->getField()->getArray()->getIJ(0,0),1e-12);
+    }
+  if(rank==2)
+    {
+      CPPUNIT_ASSERT_DOUBLES_EQUAL(10.5,parafieldT->getField()->getArray()->getIJ(0,0),1e-12);
+    }
+  delete parafieldS;
+  delete parafieldT;
+  delete parameshS;
+  delete parameshT;
+  meshS->decrRef();
+  meshT->decrRef();
+
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
diff --git a/src/ParaMEDMEMTest/ParaMEDMEMTest_StructuredCoincidentDEC.cxx b/src/ParaMEDMEMTest/ParaMEDMEMTest_StructuredCoincidentDEC.cxx
new file mode 100644 (file)
index 0000000..491bbf9
--- /dev/null
@@ -0,0 +1,160 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "DEC.hxx"
+#include "StructuredCoincidentDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+#include "ICoCoMEDField.hxx"
+#include "MEDLoader.hxx"
+
+#include <string>
+
+// Use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// Use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+/*
+ * Check methods defined in StructuredCoincidentDEC.hxx
+ *
+ StructuredCoincidentDEC();
+ StructuredCoincidentDEC(ProcessorGroup& local_group, ProcessorGroup& distant_group);
+ virtual ~StructuredCoincidentDEC();
+ void synchronize();
+ void recvData();
+ void sendData();
+*/
+
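+// Minimal exchange sketch (illustrative only, assuming the caller already built the DEC and
+// the distributed field): the same attach/synchronize/send/recv sequence is run with real
+// data in testStructuredCoincidentDEC() below.
+static void exampleStructuredCoincidentExchange(ParaMEDMEM::StructuredCoincidentDEC& dec,
+                                                ParaMEDMEM::ParaFIELD* parafield,
+                                                bool onSourceSide)
+{
+  dec.attachLocalField(parafield);  // field distributed over the local processor group
+  dec.synchronize();                // builds the redistribution topology
+  if (onSourceSide)
+    dec.sendData();                 // source ranks push their values
+  else
+    dec.recvData();                 // target ranks receive the redistributed values
+}
+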
+void ParaMEDMEMTest::testStructuredCoincidentDEC() {
+  string testname="ParaMEDMEM - testStructured CoincidentDEC";
+  //  MPI_Init(&argc, &argv); 
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD, &size);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  if (size<4) {
+    return;
+  }
+
+  ParaMEDMEM::CommInterface interface;
+
+  ParaMEDMEM::MPIProcessorGroup self_group (interface,rank,rank);
+  ParaMEDMEM::MPIProcessorGroup target_group(interface,3,size-1);
+  ParaMEDMEM::MPIProcessorGroup source_group (interface,0,2);
+
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+
+  string filename_xml1 = getResourceFile("square1_split");
+  string filename_2    = getResourceFile("square1.med");
+  //string filename_seq_wr  = makeTmpFile("");
+  //string filename_seq_med = makeTmpFile("myWrField_seq_pointe221.med");
+
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+
+  //loading the geometry for the source group
+
+  ParaMEDMEM::StructuredCoincidentDEC dec(source_group, target_group);
+
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group.containsMyRank()) {
+    string master = filename_xml1;
+
+    ostringstream strstream;
+    strstream <<master<<rank+1<<".med";
+    ostringstream meshname;
+    meshname<< "Mesh_2_"<< rank+1;
+
+    mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+    
+
+    paramesh=new ParaMESH (mesh,source_group,"source mesh");
+
+    ParaMEDMEM::ComponentTopology comptopo(6);
+    parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+
+    int nb_local=mesh->getNumberOfCells();
+    const int* global_numbering = paramesh->getGlobalNumberingCell();
+    
+    double *value=parafield->getField()->getArray()->getPointer();
+    for(int ielem=0; ielem<nb_local;ielem++)
+      for (int icomp=0; icomp<6; icomp++)
+        value[ielem*6+icomp]=global_numbering[ielem]*6+icomp;
+
+    //ICoCo::Field* icocofield=new ICoCo::MEDField((MEDCouplingUMesh *)paramesh->getCellMesh(),parafield->getField());
+
+    dec.attachLocalField(parafield);
+    dec.synchronize();
+    dec.sendData();
+    //delete icocofield;
+  }
+
+  //loading the geometry for the target group
+  if (target_group.containsMyRank()) {
+
+    string meshname2("Mesh_2");
+    mesh = MEDLoader::ReadUMeshFromFile(filename_2.c_str(),meshname2.c_str(),0);
+    
+    paramesh=new ParaMESH (mesh,self_group,"target mesh");
+    ParaMEDMEM::ComponentTopology comptopo(6, &target_group);
+
+    parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+
+    int nb_local=mesh->getNumberOfCells();
+    double *value=parafield->getField()->getArray()->getPointer();
+    for (int ielem=0; ielem<nb_local; ielem++)
+      for (int icomp=0; icomp<comptopo.nbLocalComponents(); icomp++)
+        value[ielem*comptopo.nbLocalComponents()+icomp]=0.0;
+    //ICoCo::Field* icocofield=new ICoCo::MEDField((MEDCouplingUMesh *)paramesh->getCellMesh(),parafield->getField());
+
+    dec.attachLocalField(parafield);
+    dec.synchronize();
+    dec.recvData();
+
+    //checking validity of field
+    const double* recv_value = parafield->getField()->getArray()->getPointer();
+    for (int i=0; i< nb_local; i++) {
+      int first = comptopo.firstLocalComponent();
+      for (int icomp = 0; icomp < comptopo.nbLocalComponents(); icomp++)
+        CPPUNIT_ASSERT_DOUBLES_EQUAL(recv_value[i*comptopo.nbLocalComponents()+icomp],(double)(i*6+icomp+first),1e-12);
+    }
+    //delete icocofield;
+  }
+  delete parafield;
+  delete paramesh;
+  mesh->decrRef();
+
+  //  MPI_Barrier(MPI_COMM_WORLD);
+
+}
diff --git a/src/ParaMEDMEMTest/TestMPIAccess.cxx b/src/ParaMEDMEMTest/TestMPIAccess.cxx
new file mode 100644 (file)
index 0000000..3b456a5
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+// --- include all MPIAccess Test
+//
+#include "MPIAccessTest.hxx"
+
+// --- Registers the fixture into the 'registry'
+
+CPPUNIT_TEST_SUITE_REGISTRATION( MPIAccessTest );
+
+// --- generic Main program from KERNEL_SRC/src/Basics/Test
+
+#include "MPIMainTest.hxx"
diff --git a/src/ParaMEDMEMTest/TestMPIAccessDEC.cxx b/src/ParaMEDMEMTest/TestMPIAccessDEC.cxx
new file mode 100644 (file)
index 0000000..15ed208
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+// --- include all MPIAccessDEC Test
+//
+#include "MPIAccessDECTest.hxx"
+
+// --- Registers the fixture into the 'registry'
+
+CPPUNIT_TEST_SUITE_REGISTRATION( MPIAccessDECTest );
+
+// --- generic Main program from KERNEL_SRC/src/Basics/Test
+
+#include "MPIMainTest.hxx"
diff --git a/src/ParaMEDMEMTest/TestParaMEDMEM.cxx b/src/ParaMEDMEMTest/TestParaMEDMEM.cxx
new file mode 100644 (file)
index 0000000..e1b804c
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+// --- include all ParaMEDMEM Test
+//
+#include "ParaMEDMEMTest.hxx"
+
+// --- Registers the fixture into the 'registry'
+
+CPPUNIT_TEST_SUITE_REGISTRATION( ParaMEDMEMTest );
+
+// --- generic Main program from KERNEL_SRC/src/Basics/Test
+
+#include "MPIMainTest.hxx"
diff --git a/src/ParaMEDMEMTest/test_AllToAllDEC.cxx b/src/ParaMEDMEMTest/test_AllToAllDEC.cxx
new file mode 100644 (file)
index 0000000..7b08256
--- /dev/null
@@ -0,0 +1,170 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+#include "MPIAccessDEC.hxx"
+
+// Use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// Use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessDECTest::test_AllToAllDECSynchronousPointToPoint() {
+  test_AllToAllDEC( false ) ;
+}
+void MPIAccessDECTest::test_AllToAllDECAsynchronousPointToPoint() {
+  test_AllToAllDEC( true ) ;
+}
+
+static void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "===========================================================" << endl
+              << "test_AllToAllDEC" << myrank << " KO" << endl
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
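+// The test body below repeats the same exchange maxreq times: every rank sends datamsglength
+// integers to every other rank through MPIAccessDEC::allToAll(), then drains the pending
+// receive requests through the underlying MPIAccess object:
+//
+//   MyMPIAccessDEC->allToAll( sendbuf, sendcount, MPI_INT, recvbuf, recvcount, MPI_INT );
+//   int nRecvReq = mpi_access->recvRequestIdsSize();
+//   mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests );
+//   mpi_access->waitAll( ... );  mpi_access->deleteRequests( ... );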
+void MPIAccessDECTest::test_AllToAllDEC( bool Asynchronous ) {
+
+  debugStream << "test_AllToAllDEC" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 || size > 11 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_AllToAllDEC" << endl
+              << " (nbprocs >=2)" << endl
+              << "test must be runned with more than 1 proc and less than 12 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  debugStream << "test_AllToAllDEC" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+  std::set<int> sourceprocs;
+  std::set<int> targetprocs;
+  int i ;
+  for ( i = 0 ; i < size/2 ; i++ ) {
+    sourceprocs.insert(i);
+  }
+  for ( i = size/2 ; i < size ; i++ ) {
+    targetprocs.insert(i);
+  }
+
+  ParaMEDMEM::MPIProcessorGroup* sourcegroup = new ParaMEDMEM::MPIProcessorGroup(interface,sourceprocs) ;
+  ParaMEDMEM::MPIProcessorGroup* targetgroup = new ParaMEDMEM::MPIProcessorGroup(interface,targetprocs) ;
+
+  MPIAccessDEC * MyMPIAccessDEC = new MPIAccessDEC( *sourcegroup , *targetgroup ,
+                                                    Asynchronous ) ;
+  
+  MPIAccess * mpi_access = MyMPIAccessDEC->getMPIAccess() ;
+
+#define maxreq 100
+#define datamsglength 10
+
+  //  int sts ;
+  int sendcount = datamsglength ;
+  int recvcount = datamsglength ;
+  int * recvbuf = new int[datamsglength*size] ;
+
+  int ireq ;
+  for ( ireq = 0 ; ireq < maxreq ; ireq++ ) {
+    int * sendbuf = new int[datamsglength*size] ;
+    int j ;
+    for ( j = 0 ; j < datamsglength*size ; j++ ) {
+      sendbuf[j] = myrank*1000000 + ireq*1000 + j ;
+      recvbuf[j] = -1 ;
+    }
+
+    MyMPIAccessDEC->allToAll( sendbuf, sendcount , MPI_INT ,
+                              recvbuf, recvcount , MPI_INT ) ;
+
+    int nRecvReq = mpi_access->recvRequestIdsSize() ;
+    int *ArrayOfRecvRequests = new int[nRecvReq] ;
+    int nReq = mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests ) ;
+    mpi_access->waitAll( nReq , ArrayOfRecvRequests ) ;
+    mpi_access->deleteRequests( nReq , ArrayOfRecvRequests ) ;
+    delete [] ArrayOfRecvRequests ;
+  }
+
+  int nSendReq = mpi_access->sendRequestIdsSize() ;
+  debugStream << "test_AllToAllDEC" << myrank << " final SendRequestIds " << nSendReq << " SendRequests"
+       << endl ;
+  if ( nSendReq ) {
+    int *ArrayOfSendRequests = new int[nSendReq] ;
+    int nReq = mpi_access->sendRequestIds( nSendReq, ArrayOfSendRequests ) ;
+    mpi_access->waitAll( nReq , ArrayOfSendRequests ) ;
+    delete [] ArrayOfSendRequests ;
+  }
+
+  int nRecvReq = mpi_access->recvRequestIdsSize() ;
+  if ( nRecvReq ) {
+    ostringstream strstream ;
+    strstream << "test_AllToAllDEC" << myrank << " final RecvRequestIds " << nRecvReq
+              << " RecvRequests # 0 Error" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "test_AllToAllDEC" << myrank << " final RecvRequestIds " << nRecvReq
+         << " RecvRequests = 0 OK" << endl ;
+  }
+
+  mpi_access->barrier() ;
+
+  delete sourcegroup ;
+  delete targetgroup ;
+  delete MyMPIAccessDEC ;
+  delete [] recvbuf ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test_AllToAllDEC" << myrank << " OK" << endl ;
+
+  return ;
+}
diff --git a/src/ParaMEDMEMTest/test_AllToAllTimeDEC.cxx b/src/ParaMEDMEMTest/test_AllToAllTimeDEC.cxx
new file mode 100644 (file)
index 0000000..2f50e66
--- /dev/null
@@ -0,0 +1,267 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccessDEC.hxx"
+#include "LinearTimeInterpolator.hxx"
+
+// Use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// Use this define to enable CPPUNIT asserts and failures that expose bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessDECTest::test_AllToAllTimeDECSynchronousPointToPoint() {
+  test_AllToAllTimeDEC( false ) ;
+}
+void MPIAccessDECTest::test_AllToAllTimeDECAsynchronousPointToPoint() {
+  test_AllToAllTimeDEC( true ) ;
+}
+
+static void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess * mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access->errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test_AllToAllTimeDEC" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << "test_AllToAllTimeDEC" << myrank << " KO"
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
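+// Compared with test_AllToAllDEC, the time-stamped variant below registers a linear time
+// interpolator and advances a per-rank clock before each exchange:
+//
+//   MyMPIAccessDEC->setTimeInterpolator( LinearTimeInterp ) ;
+//   MyMPIAccessDEC->setTime( time , nextdeltatime ) ;
+//   MyMPIAccessDEC->allToAllTime( sendbuf, sendcount, MPI_INT, recvbuf, recvcount, MPI_INT ) ;
+//
+// Each rank uses its own step deltatime[myrank], so the exchange is responsible for bringing
+// the received contributions onto the local time line before they are checked.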
+void MPIAccessDECTest::test_AllToAllTimeDEC( bool Asynchronous ) {
+
+  debugStream << "test_AllToAllTimeDEC" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 || size > 11 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_AllToAllTimeDEC" << endl
+              << " (nbprocs >=2)" << endl
+              << "test must be runned with more than 1 proc and less than 12 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  //  int Asynchronous = atoi(argv[1]);
+
+  debugStream << "test_AllToAllTimeDEC" << myrank << " Asynchronous " << Asynchronous << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+  std::set<int> sourceprocs;
+  std::set<int> targetprocs;
+  int i ;
+  for ( i = 0 ; i < size/2 ; i++ ) {
+    sourceprocs.insert(i);
+  }
+  for ( i = size/2 ; i < size ; i++ ) {
+    targetprocs.insert(i);
+  }
+
+  ParaMEDMEM::MPIProcessorGroup* sourcegroup = new ParaMEDMEM::MPIProcessorGroup(interface,sourceprocs) ;
+  ParaMEDMEM::MPIProcessorGroup* targetgroup = new ParaMEDMEM::MPIProcessorGroup(interface,targetprocs) ;
+
+  //  LinearTimeInterpolator * aLinearInterpDEC = new LinearTimeInterpolator( 0.5 ) ;
+  MPIAccessDEC * MyMPIAccessDEC = new MPIAccessDEC( *sourcegroup , *targetgroup ,
+                                                    Asynchronous ) ;
+  //                                                    Asynchronous , LinearInterp , 0.5 ) ;
+  MyMPIAccessDEC->setTimeInterpolator( LinearTimeInterp ) ;
+  MPIAccess * mpi_access = MyMPIAccessDEC->getMPIAccess() ;
+
+  debugStream << "test_AllToAllTimeDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_AllToAllTimeDEC" << myrank << " Barrier done" << endl ;
+  
+#define maxproc 11
+#define maxreq 10000
+#define datamsglength 10
+
+  int sts ;
+  int sendcount = datamsglength ;
+  int recvcount = datamsglength ;
+
+  double time = 0 ;
+  //  double deltatime[maxproc] = {1.,2.1,3.2,4.3,5.4,6.5,7.6,8.7,9.8,10.9,11.} ;
+  double deltatime[maxproc] = {1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,11.} ;
+  double maxtime = maxreq ;
+  double nextdeltatime = deltatime[myrank] ;
+  //  MyMPIAccessDEC->InitTime( time , deltatime[myrank] , maxtime ) ;
+  //  for ( time = 0 ; time <= maxtime ; time+=deltatime[myrank] ) {
+  for ( time = 0 ; time <= maxtime && nextdeltatime != 0 ; time+=nextdeltatime ) {
+    if ( time != 0 ) {
+      nextdeltatime = deltatime[myrank] ;
+      if ( time+nextdeltatime > maxtime ) {
+        nextdeltatime = 0 ;
+      }
+      //       MyMPIAccessDEC->NextTime( nextdeltatime ) ;
+    }
+    MyMPIAccessDEC->setTime( time , nextdeltatime ) ;
+    debugStream << "test_AllToAllTimeDEC" << myrank << "=====TIME " << time << "=====DELTATIME "
+         << nextdeltatime << "=====MAXTIME " << maxtime << " ======" << endl ; 
+    int * sendbuf = new int[datamsglength*size] ;
+    //     int * sendbuf = (int *) malloc(sizeof(int)*datamsglength*size) ;
+    int * recvbuf = new int[datamsglength*size] ;
+    int j ;
+    for ( j = 0 ; j < datamsglength*size ; j++ ) {
+      sendbuf[j] = myrank*1000000 + (j/datamsglength)*1000 + j ;
+      recvbuf[j] = -1 ;
+    }
+
+    int sts = MyMPIAccessDEC->allToAllTime( sendbuf, sendcount , MPI_INT ,
+                                            recvbuf, recvcount , MPI_INT ) ;
+    chksts( sts , myrank , mpi_access ) ;
+
+    //     debugStream << "test_AllToAllTimeDEC" << myrank << " recvbuf before CheckSent" ;
+    //     for ( i = 0 ; i < datamsglength*size ; i++ ) {
+    //        debugStream << " " << recvbuf[i] ;
+    //     }
+    //     debugStream << endl ;
+
+    //     debugStream << "test_AllToAllTimeDEC" << myrank << " sendbuf " << sendbuf << endl ;
+    //     MyMPIAccessDEC->CheckSent() ;
+
+    int nRecvReq = mpi_access->recvRequestIdsSize() ;
+    if ( nRecvReq != 0 ) {
+      ostringstream strstream ;
+      strstream << "=============================================================" << endl
+                << "test_AllToAllTimeDEC" << myrank << " WaitAllRecv " << nRecvReq << " Requests # 0 ERROR"
+                << endl << "============================================================="
+                << endl ;
+      int *ArrayOfRecvRequests = new int[nRecvReq] ;
+      int nReq = mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests ) ;
+      mpi_access->waitAll( nReq , ArrayOfRecvRequests ) ;
+      delete [] ArrayOfRecvRequests ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+
+    //     debugStream << "test_AllToAllTimeDEC" << myrank << " recvbuf" << endl ;
+    bool badrecvbuf = false ;
+    for ( i = 0 ; i < datamsglength*size ; i++ ) {
+      if ( recvbuf[i] != (i/datamsglength)*1000000 + myrank*1000 +
+           myrank*datamsglength+(i%datamsglength) ) {
+        badrecvbuf = true ;
+        debugStream << "test_AllToAllTimeDEC" << myrank << " recvbuf[" << i << "] "
+             << recvbuf[i] << " # " << (i/datamsglength)*1000000 + myrank*1000 +
+          myrank*datamsglength+(i%datamsglength) << endl ;
+      }
+      else if ( badrecvbuf ) {
+        debugStream << "test_AllToAllTimeDEC" << myrank << " recvbuf[" << i << "] "
+             << recvbuf[i] << " == " << (i/datamsglength)*1000000 + myrank*1000 +
+          myrank*datamsglength+(i%datamsglength) << endl ;
+      }
+    }
+    if ( badrecvbuf ) {
+      ostringstream strstream ;
+      strstream << "==============================================================" << endl
+                << "test_AllToAllTimeDEC" << myrank << " badrecvbuf"
+                << endl << "============================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    delete [] recvbuf ;
+  }
+
+  debugStream << "test_AllToAllTimeDEC" << myrank << " final CheckSent" << endl ;
+  sts = MyMPIAccessDEC->checkSent() ;
+  if ( sts != MPI_SUCCESS ) {
+    ostringstream strstream ;
+    strstream << "================================================================" << endl
+              << "test_AllToAllTimeDEC" << myrank << " final CheckSent ERROR"
+              << endl << "================================================================"
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  int nSendReq = mpi_access->sendRequestIdsSize() ;
+  debugStream << "test_AllToAllTimeDEC" << myrank << " final SendRequestIds " << nSendReq << " SendRequests"
+       << endl ;
+  if ( nSendReq ) {
+    int *ArrayOfSendRequests = new int[nSendReq] ;
+    int nReq = mpi_access->sendRequestIds( nSendReq, ArrayOfSendRequests ) ;
+    mpi_access->waitAll( nReq , ArrayOfSendRequests ) ;
+    delete [] ArrayOfSendRequests ;
+  }
+
+  int nRecvReq = mpi_access->recvRequestIdsSize() ;
+  if ( nRecvReq ) {
+    ostringstream strstream ;
+    strstream << "===============================================================" << endl
+              << "test_AllToAllTimeDEC" << myrank << " RecvRequestIds " << nRecvReq
+              << " RecvRequests # 0 Error"
+              << endl << "==============================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "test_AllToAllTimeDEC" << myrank << " RecvRequestIds " << nRecvReq
+         << " RecvRequests = 0 OK" << endl ;
+  }
+
+  debugStream << "test_AllToAllTimeDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_AllToAllTimeDEC" << myrank << " Barrier done" << endl ;
+
+  delete sourcegroup ;
+  delete targetgroup ;
+  //  delete aLinearInterpDEC ;
+  delete MyMPIAccessDEC ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test_AllToAllTimeDEC" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_AllToAllvDEC.cxx b/src/ParaMEDMEMTest/test_AllToAllvDEC.cxx
new file mode 100644 (file)
index 0000000..490b404
--- /dev/null
@@ -0,0 +1,212 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccessDEC.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessDECTest::test_AllToAllvDECSynchronousPointToPoint() {
+  test_AllToAllvDEC( false ) ;
+}
+void MPIAccessDECTest::test_AllToAllvDECAsynchronousPointToPoint() {
+  test_AllToAllvDEC( true ) ;
+}
+
+static void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess & mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test_AllToAllvDEC" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << "test_AllToAllvDEC" << myrank << " KO"
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
+void MPIAccessDECTest::test_AllToAllvDEC( bool Asynchronous ) {
+
+  debugStream << "test_AllToAllvDEC" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 || size > 11 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_AllToAllvDEC" << endl
+              << " (nbprocs >=2)" << endl
+              << "test must be runned with more than 1 proc and less than 12 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  //  int Asynchronous = atoi(argv[1]);
+
+  debugStream << "test_AllToAllvDEC" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+  std::set<int> sourceprocs;
+  std::set<int> targetprocs;
+  int i ;
+  for ( i = 0 ; i < size/2 ; i++ ) {
+    sourceprocs.insert(i);
+  }
+  for ( i = size/2 ; i < size ; i++ ) {
+    targetprocs.insert(i);
+  }
+
+  ParaMEDMEM::MPIProcessorGroup* sourcegroup = new ParaMEDMEM::MPIProcessorGroup(interface,sourceprocs) ;
+  ParaMEDMEM::MPIProcessorGroup* targetgroup = new ParaMEDMEM::MPIProcessorGroup(interface,targetprocs) ;
+
+  MPIAccessDEC * MyMPIAccessDEC = new MPIAccessDEC( *sourcegroup , *targetgroup ,
+                                                    Asynchronous ) ;
+  
+  MPIAccess * mpi_access = MyMPIAccessDEC->getMPIAccess() ;
+
+#define maxreq 100
+#define datamsglength 10
+
+  //  int sts ;
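+  // Variable-length exchange pattern: this rank sends (datamsglength - i) ints to rank i
+  // and expects (datamsglength - myrank) ints from every rank, both buffers using a
+  // fixed stride of datamsglength.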
+  int *sendcounts = new int[size] ;
+  int *sdispls = new int[size] ;
+  int *recvcounts = new int[size] ;
+  int *rdispls = new int[size] ;
+  for ( i = 0 ; i < size ; i++ ) {
+    sendcounts[i] = datamsglength-i;
+    sdispls[i] = i*datamsglength ;
+    recvcounts[i] = datamsglength-myrank;
+    rdispls[i] = i*datamsglength ;
+  }
+  int * recvbuf = new int[datamsglength*size] ;
+
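+  // Repeat the exchange maxreq times: sendbuf is refilled with rank/iteration-encoded
+  // values and recvbuf is reset to -1 before every allToAllv call.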
+  int ireq ;
+  for ( ireq = 0 ; ireq < maxreq ; ireq++ ) {
+    int * sendbuf = new int[datamsglength*size] ;
+    //    int * sendbuf = (int *) malloc( sizeof(int)*datamsglength*size) ;
+    int j ;
+    for ( j = 0 ; j < datamsglength*size ; j++ ) {
+      sendbuf[j] = myrank*1000000 + ireq*1000 + j ;
+      recvbuf[j] = -1 ;
+    }
+
+    MyMPIAccessDEC->allToAllv( sendbuf, sendcounts , sdispls , MPI_INT ,
+                               recvbuf, recvcounts , rdispls , MPI_INT ) ;
+
+    //    debugStream << "test_AllToAllvDEC" << myrank << " recvbuf before CheckSent" ;
+    //    for ( i = 0 ; i < datamsglength*size ; i++ ) {
+    //       debugStream << " " << recvbuf[i] ;
+    //    }
+    //    debugStream << endl ;
+
+    //    debugStream << "test_AllToAllvDEC" << myrank << " sendbuf " << sendbuf << endl ;
+    //    MyMPIAccessDEC->CheckSent() ;
+
+    int nRecvReq = mpi_access->recvRequestIdsSize() ;
+    //    debugStream << "test_AllToAllvDEC" << myrank << " WaitAllRecv " << nRecvReq << " Requests" << endl ;
+    int *ArrayOfRecvRequests = new int[nRecvReq] ;
+    int nReq = mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests ) ;
+    mpi_access->waitAll( nReq , ArrayOfRecvRequests ) ;
+    mpi_access->deleteRequests( nReq , ArrayOfRecvRequests ) ;
+    delete [] ArrayOfRecvRequests ;
+
+    //    debugStream << "test_AllToAllvDEC" << myrank << " recvbuf" ;
+    //    for ( i = 0 ; i < datamsglength*size ; i++ ) {
+    //       debugStream << " " << recvbuf[i] ;
+    //    }
+    //    debugStream << endl ;
+  }
+
+  //  debugStream << "test_AllToAllvDEC" << myrank << " final CheckSent" << endl ;
+  //  MyMPIAccessDEC->CheckSent() ;
+
+  int nSendReq = mpi_access->sendRequestIdsSize() ;
+  debugStream << "test_AllToAllvDEC" << myrank << " final SendRequestIds " << nSendReq << " SendRequests"
+       << endl ;
+  if ( nSendReq ) {
+    int *ArrayOfSendRequests = new int[nSendReq] ;
+    int nReq = mpi_access->sendRequestIds( nSendReq, ArrayOfSendRequests ) ;
+    mpi_access->waitAll( nReq , ArrayOfSendRequests ) ;
+    delete [] ArrayOfSendRequests ;
+  }
+
+  int nRecvReq = mpi_access->recvRequestIdsSize() ;
+  if ( nRecvReq ) {
+    ostringstream strstream ;
+    strstream << "test_AllToAllvDEC" << myrank << " final RecvRequestIds " << nRecvReq
+              << " RecvRequests # 0 Error" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "test_AllToAllvDEC" << myrank << " final RecvRequestIds " << nRecvReq
+         << " RecvRequests = 0 OK" << endl ;
+  }
+
+  mpi_access->barrier() ;
+
+  delete sourcegroup ;
+  delete targetgroup ;
+  delete MyMPIAccessDEC ;
+  delete [] sendcounts ;
+  delete [] sdispls ;
+  delete [] recvcounts ;
+  delete [] rdispls ;
+  delete [] recvbuf ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test_AllToAllvDEC" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_AllToAllvTimeDEC.cxx b/src/ParaMEDMEMTest/test_AllToAllvTimeDEC.cxx
new file mode 100644 (file)
index 0000000..bd6f4b5
--- /dev/null
@@ -0,0 +1,363 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+#include <ctime>
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccessDEC.hxx"
+#include "LinearTimeInterpolator.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessDECTest::test_AllToAllvTimeDECSynchronousNative() {
+  test_AllToAllvTimeDEC( false , true ) ;
+}
+void MPIAccessDECTest::test_AllToAllvTimeDECSynchronousPointToPoint() {
+  test_AllToAllvTimeDEC( false , false ) ;
+}
+void MPIAccessDECTest::test_AllToAllvTimeDECAsynchronousPointToPoint() {
+  test_AllToAllvTimeDEC( true , false ) ;
+}
+
+static void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess * mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access->errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test_AllToAllvTimeDEC" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << "test_AllToAllvTimeDEC" << myrank << " KO"
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
+void MPIAccessDECTest::test_AllToAllvTimeDEC( bool Asynchronous , bool UseMPINative ) {
+
+  debugStream << "test_AllToAllvTimeDEC" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 || size > 11 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_AllToAllTimeDEC" << endl
+              << " (nbprocs >=2)" << endl
+              << "test must be runned with more than 1 proc and less than 12 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  //  int Asynchronous = atoi(argv[1]) ;
+  int UseMPI_Alltoallv = UseMPINative ;
+  //  if ( argc == 3 ) {
+  //    UseMPI_Alltoallv = atoi(argv[2]) ;
+  //  }
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Asynchronous " << Asynchronous
+       << " UseMPI_Alltoallv " << UseMPI_Alltoallv << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+  std::set<int> sourceprocs;
+  std::set<int> targetprocs;
+  int i ;
+  for ( i = 0 ; i < size/2 ; i++ ) {
+    sourceprocs.insert(i);
+  }
+  for ( i = size/2 ; i < size ; i++ ) {
+    targetprocs.insert(i);
+  }
+
+  ParaMEDMEM::MPIProcessorGroup* sourcegroup = new ParaMEDMEM::MPIProcessorGroup(interface,sourceprocs) ;
+  ParaMEDMEM::MPIProcessorGroup* targetgroup = new ParaMEDMEM::MPIProcessorGroup(interface,targetprocs) ;
+
+  //  TimeInterpolator * aLinearInterpDEC = new LinearTimeInterpolator( 0.5 ) ;
+  MPIAccessDEC * MyMPIAccessDEC = new MPIAccessDEC( *sourcegroup , *targetgroup ,
+                                                    Asynchronous ) ;
+  //                                                    Asynchronous , LinearInterp , 0.5 ) ;
+  MyMPIAccessDEC->setTimeInterpolator( LinearTimeInterp , 0.5 ) ;
+  MPIAccess * mpi_access = MyMPIAccessDEC->getMPIAccess() ;
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier done" << endl ;
+
+#define maxproc 11
+#define maxreq 10000
+#define datamsglength 10
+
+  int sts ;
+  int *sendcounts = new int[size] ;
+  int *sdispls = new int[size] ;
+  int *recvcounts = new int[size] ;
+  int *rdispls = new int[size] ;
+  int *sendtimecounts = new int[size] ;
+  int *stimedispls = new int[size] ;
+  int *recvtimecounts = new int[size] ;
+  int *rtimedispls = new int[size] ;
+  for ( i = 0 ; i < size ; i++ ) {
+    sendcounts[i] = datamsglength-i ;
+    sdispls[i] = i*datamsglength ;
+    recvcounts[i] = datamsglength-myrank ;
+    rdispls[i] = i*datamsglength ;
+    sendtimecounts[i] = 1 ;
+    stimedispls[i] = 0 ;
+    recvtimecounts[i] = 1 ;
+    rtimedispls[i] = i ;
+    //rtimedispls[i] = i*mpi_access->TimeExtent() ;
+  }
+
+  double timeLoc = 0 ;
+  double deltatime[maxproc] = {1.,2.1,3.2,4.3,5.4,6.5,7.6,8.7,9.8,10.9,11.} ;
+  double maxtime ;
+  double nextdeltatime = deltatime[myrank] ;
+  if ( UseMPI_Alltoallv ) {
+    maxtime = maxreq*nextdeltatime - 0.1 ;
+  }
+  else {
+    maxtime = maxreq ;
+    //    MyMPIAccessDEC->InitTime( time , nextdeltatime , maxtime ) ;
+  }
+  time_t begintime = time(NULL) ;
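+  // Each step advances the local time by deltatime[myrank]; nextdeltatime is forced to 0
+  // on the final step, which terminates the loop at maxtime.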
+  //  for ( time = 0 ; time <= maxtime ; time+=deltatime[myrank] ) {
+  for ( timeLoc = 0 ; timeLoc <= maxtime && nextdeltatime != 0 ; timeLoc+=nextdeltatime ) {
+    nextdeltatime = deltatime[myrank] ;
+    if ( timeLoc != 0 ) {
+      nextdeltatime = deltatime[myrank] ;
+      if ( timeLoc+nextdeltatime > maxtime ) {
+        nextdeltatime = 0 ;
+      }
+      //       MyMPIAccessDEC->NextTime( nextdeltatime ) ;
+    }
+    MyMPIAccessDEC->setTime( timeLoc , nextdeltatime ) ;
+    debugStream << "test_AllToAllvTimeDEC" << myrank << "=====TIME " << time << "=====DELTATIME "
+         << nextdeltatime << "=====MAXTIME " << maxtime << " ======" << endl ; 
+    int * sendbuf = new int[datamsglength*size] ;
+    //     int * sendbuf = (int *) malloc(sizeof(int)*datamsglength*size) ;
+    int * recvbuf = new int[datamsglength*size] ;
+    int j ;
+    for ( j = 0 ; j < datamsglength*size ; j++ ) {
+      sendbuf[j] = myrank*1000000 + (j/datamsglength)*1000 + j ;
+      recvbuf[j] = -1 ;
+    }
+
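+    // Native mode exchanges the TimeMessage stamps with a plain interface.allToAllV call
+    // before the data; otherwise the exchange is delegated to MPIAccessDEC::allToAllvTime.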
+    if ( UseMPI_Alltoallv ) {
+      const MPI_Comm* comm = MyMPIAccessDEC->getComm();
+      TimeMessage * aSendTimeMessage = new TimeMessage ;
+      aSendTimeMessage->time = timeLoc ;
+      //       aSendTimeMessage->deltatime = deltatime[myrank] ;
+      aSendTimeMessage->deltatime = nextdeltatime ;
+      //       aSendTimeMessage->maxtime = maxtime ;
+      aSendTimeMessage->tag = (int ) (timeLoc/deltatime[myrank]) ;
+      TimeMessage * aRecvTimeMessage = new TimeMessage[size] ;
+      interface.allToAllV(aSendTimeMessage, sendtimecounts , stimedispls ,
+                          mpi_access->timeType() ,
+                          aRecvTimeMessage, recvtimecounts , rtimedispls ,
+                          mpi_access->timeType() , *comm ) ;
+      //       for ( j = 0 ; j < size ; j++ ) {
+      //          debugStream << "test_AllToAllvTimeDEC" << myrank << " TimeMessage received " << j << " "
+      //               << aRecvTimeMessage[j] << endl ;
+      //       }
+      delete aSendTimeMessage ;
+      delete [] aRecvTimeMessage ;
+      interface.allToAllV(sendbuf, sendcounts , sdispls , MPI_INT ,
+                          recvbuf, recvcounts , rdispls , MPI_INT , *comm ) ;
+      //       free(sendbuf) ;
+      delete [] sendbuf ;
+    }
+    else {
+      int sts = MyMPIAccessDEC->allToAllvTime( sendbuf, sendcounts , sdispls , MPI_INT ,
+                                               recvbuf, recvcounts , rdispls , MPI_INT ) ;
+      chksts( sts , myrank , mpi_access ) ;
+    }
+
+    //     debugStream << "test_AllToAllvTimeDEC" << myrank << " recvbuf before CheckSent" ;
+    //     for ( i = 0 ; i < datamsglength*size ; i++ ) {
+    //        debugStream << " " << recvbuf[i] ;
+    //     }
+    //     debugStream << endl ;
+
+    //     debugStream << "test_AllToAllvTimeDEC" << myrank << " sendbuf " << sendbuf << endl ;
+    //     MyMPIAccessDEC->CheckSent() ;
+
+    int nRecvReq = mpi_access->recvRequestIdsSize() ;
+    if ( nRecvReq != 0 ) {
+      ostringstream strstream ;
+      strstream << "=============================================================" << endl
+                << "test_AllToAllvTimeDEC" << myrank << " WaitAllRecv " << nRecvReq << " Requests # 0 ERROR"
+                << endl << "============================================================="
+                << endl ;
+      int *ArrayOfRecvRequests = new int[nRecvReq] ;
+      int nReq = mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests ) ;
+      mpi_access->waitAll( nReq , ArrayOfRecvRequests ) ;
+      delete [] ArrayOfRecvRequests ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+
+    //     debugStream << "test_AllToAllvTimeDEC" << myrank << " check of recvbuf" << endl ;
+    bool badrecvbuf = false ;
+    for ( i = 0 ; i < size ; i++ ) {
+      int j ;
+      for ( j = 0 ; j < datamsglength ; j++ ) {
+        int index = i*datamsglength+j ;
+        if ( j < recvcounts[i] ) {
+          if ( recvbuf[index] != (index/datamsglength)*1000000 + myrank*1000 +
+               myrank*datamsglength+(index%datamsglength) ) {
+            badrecvbuf = true ;
+            debugStream << "test_AllToAllvTimeDEC" << myrank << " recvbuf[" << index << "] "
+                 << recvbuf[index] << " # " << (index/datamsglength)*1000000 +
+              myrank*1000 +
+              myrank*datamsglength+(index%datamsglength) << endl ;
+          }
+          else if ( badrecvbuf ) {
+            debugStream << "test_AllToAllvTimeDEC" << myrank << " recvbuf[" << index << "] "
+                 << recvbuf[index] << " == " << (index/datamsglength)*1000000 +
+              myrank*1000 +
+              myrank*datamsglength+(index%datamsglength) << endl ;
+          }
+        }
+        else if ( recvbuf[index] != -1 ) {
+          badrecvbuf = true ;
+          debugStream << "test_AllToAllvTimeDEC" << myrank << " recvbuf[" << index << "] "
+               << recvbuf[index] << " # -1" << endl ;
+        }
+      }
+    }
+    if ( badrecvbuf ) {
+      ostringstream strstream ;
+      strstream << "==============================================================" << endl
+                << "test_AllToAllvTimeDEC" << myrank << " badrecvbuf"
+                << endl << "============================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    delete [] recvbuf ;
+  }
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier done" << endl ;
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " CheckFinalSent" << endl ;
+  sts = MyMPIAccessDEC->checkFinalSent() ;
+  if ( sts != MPI_SUCCESS ) {
+    ostringstream strstream ;
+    strstream << "================================================================" << endl
+              << "test_AllToAllvTimeDEC" << myrank << " final CheckSent ERROR"
+              << endl << "================================================================"
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " CheckFinalRecv" << endl ;
+  sts = MyMPIAccessDEC->checkFinalRecv() ;
+  if ( sts != MPI_SUCCESS ) {
+    ostringstream strstream ;
+    strstream << "================================================================" << endl
+              << "test_AllToAllvTimeDEC" << myrank << " CheckFinalRecv ERROR"
+              << endl << "================================================================"
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  int nRecvReq = mpi_access->recvRequestIdsSize() ;
+  if ( nRecvReq ) {
+    ostringstream strstream ;
+    strstream << "===============================================================" << endl
+              << "test_AllToAllvTimeDEC" << myrank << " RecvRequestIds " << nRecvReq
+              << " RecvRequests # 0 Error"
+              << endl << "==============================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "test_AllToAllvTimeDEC" << myrank << " RecvRequestIds " << nRecvReq
+         << " RecvRequests = 0 OK" << endl ;
+  }
+
+  time_t endtime = time(NULL) ;
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " begintime " << begintime << " endtime " << endtime
+       << " elapse " << endtime-begintime << " " << maxtime/deltatime[myrank]
+       << " calls to AllToAll" << endl ;
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " Barrier done" << endl ;
+
+  delete sourcegroup ;
+  delete targetgroup ;
+  delete MyMPIAccessDEC ;
+  //  delete aLinearInterpDEC ;
+
+  delete [] sendcounts ;
+  delete [] sdispls ;
+  delete [] recvcounts ;
+  delete [] rdispls ;
+  delete [] sendtimecounts ;
+  delete [] stimedispls ;
+  delete [] recvtimecounts ;
+  delete [] rtimedispls ;
+
+  //  MPI_Finalize();
+
+  endtime = time(NULL) ;
+
+  debugStream << "test_AllToAllvTimeDEC" << myrank << " OK begintime " << begintime << " endtime " << endtime
+       << " elapse " << endtime-begintime << " " << maxtime/deltatime[myrank]
+       << " calls to AllToAll" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_AllToAllvTimeDoubleDEC.cxx b/src/ParaMEDMEMTest/test_AllToAllvTimeDoubleDEC.cxx
new file mode 100644 (file)
index 0000000..01b7bd6
--- /dev/null
@@ -0,0 +1,337 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <math.h>
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+#include <ctime>
+
+#include "MPIAccessDECTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccessDEC.hxx"
+#include "LinearTimeInterpolator.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessDECTest::test_AllToAllvTimeDoubleDECSynchronousPointToPoint() {
+  test_AllToAllvTimeDoubleDEC( false ) ;
+}
+void MPIAccessDECTest::test_AllToAllvTimeDoubleDECAsynchronousPointToPoint() {
+  test_AllToAllvTimeDoubleDEC( true ) ;
+}
+
+static void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess * mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access->errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << "test" << myrank << " KO"
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
+void MPIAccessDECTest::test_AllToAllvTimeDoubleDEC( bool Asynchronous ) {
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 || size > 11 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_AllToAllTimeDEC" << endl
+              << " (nbprocs >=2)" << endl
+              << "test must be runned with more than 1 proc and less than 12 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+//  int Asynchronous = atoi(argv[1]) ;
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " Asynchronous " << Asynchronous << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+  std::set<int> sourceprocs;
+  std::set<int> targetprocs;
+  int i ;
+  for ( i = 0 ; i < size/2 ; i++ ) {
+     sourceprocs.insert(i);
+  }
+  for ( i = size/2 ; i < size ; i++ ) {
+     targetprocs.insert(i);
+  }
+
+  ParaMEDMEM::MPIProcessorGroup* sourcegroup = new ParaMEDMEM::MPIProcessorGroup(interface,sourceprocs) ;
+  ParaMEDMEM::MPIProcessorGroup* targetgroup = new ParaMEDMEM::MPIProcessorGroup(interface,targetprocs) ;
+
+//  TimeInterpolator * aLinearInterpDEC = new LinearTimeInterpolator( 0 ) ;
+  MPIAccessDEC * MyMPIAccessDEC = new MPIAccessDEC( *sourcegroup , *targetgroup ,
+                                                    Asynchronous ) ;
+//                                                    Asynchronous , LinearInterp , 0.5 ) ;
+  MyMPIAccessDEC->setTimeInterpolator( LinearTimeInterp ) ;
+  MPIAccess * mpi_access = MyMPIAccessDEC->getMPIAccess() ;
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+
+#define maxproc 11
+#define maxreq 100
+#define datamsglength 10
+
+  int sts ;
+  int *sendcounts = new int[size] ;
+  int *sdispls = new int[size] ;
+  int *recvcounts = new int[size] ;
+  int *rdispls = new int[size] ;
+  int *sendtimecounts = new int[size] ;
+  int *stimedispls = new int[size] ;
+  int *recvtimecounts = new int[size] ;
+  int *rtimedispls = new int[size] ;
+  for ( i = 0 ; i < size ; i++ ) {
+     sendcounts[i] = datamsglength-i ;
+     sdispls[i] = i*datamsglength ;
+     recvcounts[i] = datamsglength-myrank ;
+     rdispls[i] = i*datamsglength ;
+     sendtimecounts[i] = 1 ;
+     stimedispls[i] = 0 ;
+     recvtimecounts[i] = 1 ;
+     rtimedispls[i] = i ;
+  }
+
+  double timeLoc[maxproc] ;
+  double deltatime[maxproc] = {1.,2.1,3.2,4.3,5.4,6.5,7.6,8.7,9.8,10.9,11.} ;
+  double maxtime[maxproc] ;
+  double nextdeltatime[maxproc] ;
+  for ( i = 0 ; i < size ; i++ ) {
+     timeLoc[i] = 0 ;
+     maxtime[i] = maxreq ;
+     nextdeltatime[i] = deltatime[i] ;
+  }
+  time_t begintime = time(NULL) ;
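+  // Only timeLoc[myrank] drives the loop; the other entries track an estimate of each
+  // sender's clock so the expected buffer contents can be recomputed locally.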
+  for ( timeLoc[myrank] = 0 ; timeLoc[myrank] <= maxtime[myrank] && nextdeltatime[myrank] != 0 ;
+        timeLoc[myrank]+=nextdeltatime[myrank] ) {
+//local and target times
+     int target ;
+     for ( target = 0 ; target < size ; target++ ) {
+        nextdeltatime[target] = deltatime[target] ;
+        if ( timeLoc[target] != 0 ) {
+          if ( timeLoc[target]+nextdeltatime[target] > maxtime[target] ) {
+            nextdeltatime[target] = 0 ;
+          }
+        }
+        if ( target != myrank ) {
+          while ( timeLoc[myrank] >= timeLoc[target] ) {
+               timeLoc[target] += deltatime[target] ;
+          }
+        }
+     }
+     MyMPIAccessDEC->setTime( timeLoc[myrank] , nextdeltatime[myrank] ) ;
+     debugStream << "test" << myrank << "=====TIME " << timeLoc[myrank] << "=====DELTATIME "
+          << nextdeltatime[myrank] << "=====MAXTIME " << maxtime[myrank] << " ======"
+          << endl ; 
+     double * sendbuf = new double[datamsglength*size] ;
+//     double * sendbuf = (double *) malloc(sizeof(double)*datamsglength*size) ;
+     double * recvbuf = new double[datamsglength*size] ;
+     int j ;
+     //debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " sendbuf" ;
+     for ( target = 0 ; target < size ; target++ ) {
+        for ( j = 0 ; j < datamsglength ; j++ ) {
+           //sendbuf[j] = myrank*10000 + (j/datamsglength)*100 + j ;
+           sendbuf[target*datamsglength+j] = myrank*1000000 + target*10000 +
+                                             (timeLoc[myrank]/deltatime[myrank])*100 + j ;
+           //debugStream << " " << (int ) sendbuf[target*datamsglength+j] ;
+           recvbuf[target*datamsglength+j] = -1 ;
+        }
+        //debugStream << endl ;
+     }
+
+     int sts = MyMPIAccessDEC->allToAllvTime( sendbuf, sendcounts , sdispls , MPI_DOUBLE ,
+                                            recvbuf, recvcounts , rdispls , MPI_DOUBLE ) ;
+     chksts( sts , myrank , mpi_access ) ;
+
+//     debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " recvbuf before CheckSent" ;
+//     for ( i = 0 ; i < datamsglength*size ; i++ ) {
+//        debugStream << " " << recvbuf[i] ;
+//     }
+//     debugStream << endl ;
+
+     int nRecvReq = mpi_access->recvRequestIdsSize() ;
+     if ( nRecvReq != 0 ) {
+       ostringstream strstream ;
+       strstream << "=============================================================" << endl
+                 << "test_AllToAllvTimeDoubleDEC" << myrank << " WaitAllRecv "
+                 << nRecvReq << " Requests # 0 ERROR"
+                 << endl << "============================================================"
+                 << endl ;
+       int *ArrayOfRecvRequests = new int[nRecvReq] ;
+       int nReq = mpi_access->recvRequestIds( nRecvReq, ArrayOfRecvRequests ) ;
+       mpi_access->waitAll( nReq , ArrayOfRecvRequests ) ;
+       delete [] ArrayOfRecvRequests ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+
+//     debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " check of recvbuf" << endl ;
+     bool badrecvbuf = false ;
+     for ( target = 0 ; target < size ; target++ ) {
+        int j ;
+        for ( j = 0 ; j < datamsglength ; j++ ) {
+           int index = target*datamsglength+j ;
+           if ( j < recvcounts[target] ) {
+             if ( fabs(recvbuf[index] - (target*1000000 + myrank*10000 +
+                  (timeLoc[target]/deltatime[target])*100 + j)) > 101) {
+               badrecvbuf = true ;
+               debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " target " << target << " timeLoc[target] "
+                    << timeLoc[target] << " recvbuf[" << index << "] " << (int ) recvbuf[index]
+                    << " # " << (int ) (target*1000000 +
+                       myrank*10000 + (timeLoc[target]/deltatime[target])*100 + j)
+                    << endl ;
+             }
+             else if ( badrecvbuf ) {
+               debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " recvbuf[" << index << "] "
+                    << recvbuf[index] << " ~= " << (int ) (target*1000000 +
+                       myrank*10000 + (timeLoc[target]/deltatime[target])*100 + j) << endl ;
+             }
+           }
+           else if ( recvbuf[index] != -1 ) {
+             badrecvbuf = true ;
+             debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " recvbuf[" << index << "] "
+                  << recvbuf[index] << " # -1" << endl ;
+           }
+        }
+     }
+     if ( badrecvbuf ) {
+       ostringstream strstream ;
+       strstream << "==================================================================" << endl
+                 << "test_AllToAllvTimeDoubleDEC" << myrank << " badrecvbuf"
+                 << endl << "=================================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     delete [] recvbuf ;
+  }
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " CheckFinalSent" << endl ;
+  sts = MyMPIAccessDEC->checkFinalSent() ;
+  if ( sts != MPI_SUCCESS ) {
+    ostringstream strstream ;
+    strstream << "=================================================================" << endl
+              << "test_AllToAllvTimeDoubleDEC" << myrank << " CheckFinalSent ERROR"
+              << endl << "================================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " CheckFinalRecv" << endl ;
+  sts = MyMPIAccessDEC->checkFinalRecv() ;
+  if ( sts != MPI_SUCCESS ) {
+    ostringstream strstream ;
+    strstream << "=================================================================" << endl
+              << "test_AllToAllvTimeDoubleDEC" << myrank << " CheckFinalRecv ERROR"
+              << endl << "================================================================"
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  int nRecvReq = mpi_access->recvRequestIdsSize() ;
+  if ( nRecvReq ) {
+    ostringstream strstream ;
+    strstream << "===============================================================" << endl
+              << "test_AllToAllvTimeDoubleDEC" << myrank << " RecvRequestIds " << nRecvReq
+              << " RecvRequests # 0 Error"
+              << endl << "==============================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " RecvRequestIds " << nRecvReq
+         << " RecvRequests = 0 OK" << endl ;
+  }
+
+  time_t endtime = time(NULL) ;
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " begintime " << begintime << " endtime " << endtime
+       << " elapse " << endtime-begintime << " " << maxtime[myrank]/deltatime[myrank]
+       << " calls to AllToAll" << endl ;
+
+  debugStream << "test" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+
+  delete sourcegroup ;
+  delete targetgroup ;
+  delete MyMPIAccessDEC ;
+//  delete aLinearInterpDEC ;
+
+  delete [] sendcounts ;
+  delete [] sdispls ;
+  delete [] recvcounts ;
+  delete [] rdispls ;
+  delete [] sendtimecounts ;
+  delete [] stimedispls ;
+  delete [] recvtimecounts ;
+  delete [] rtimedispls ;
+
+//  MPI_Finalize();
+
+  endtime = time(NULL) ;
+
+  debugStream << "test_AllToAllvTimeDoubleDEC" << myrank << " OK begintime " << begintime << " endtime " << endtime
+       << " elapse " << endtime-begintime << " " << maxtime[myrank]/deltatime[myrank]
+       << " calls to AllToAll" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Cancel.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Cancel.cxx
new file mode 100644 (file)
index 0000000..8b26823
--- /dev/null
@@ -0,0 +1,326 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <time.h>
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Cancel() {
+
+  debugStream << "test_MPI_Access_Cancel" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_Cancel must be runned with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Cancel" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int intsendbuf[5] ;
+  double doublesendbuf[10] ;
+  int RequestId[10] ;
+  int sts ;
+  int i , j ;
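+  // Three rounds: rank 0 posts 5 MPI_INT and 5 MPI_DOUBLE ISends per round; rank 1
+  // probes them, receives and checks the ints, then cancels the doubles further below.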
+  for ( j = 0 ; j < 3 ; j++ ) {
+     for ( i = 0 ; i < 10 ; i++ ) {
+        debugStream << "test" << myrank << " ============================ i " << i
+             << "============================" << endl ;
+        if ( myrank == 0 ) {
+          if ( i < 5 ) {
+            intsendbuf[i] = i ;
+            sts = mpi_access.ISend(&intsendbuf[i],1,MPI_INT,target, RequestId[i]) ;
+            debugStream << "test" << myrank << " Send MPI_INT RequestId " << RequestId[i]
+                 << endl ;
+          }
+          else {
+            doublesendbuf[i] = i ;
+            sts = mpi_access.ISend(&doublesendbuf[i],1,MPI_DOUBLE,target,
+                                   RequestId[i]) ;
+            debugStream << "test" << myrank << " Send MPI_DOUBLE RequestId " << RequestId[i]
+                 << endl ;
+          }
+        }
+        else {
+          int flag = false ;
+          while ( !flag ) {
+               int source, tag, outcount ;
+               MPI_Datatype datatype ;
+               sts = mpi_access.IProbe(target, source, tag, datatype, outcount,
+                                       flag ) ;
+               if ( flag ) {
+                 debugStream << "test" << myrank << " " << i << " IProbe target " << target
+                      << " source " << source << " tag " << tag
+                      << " outcount " << outcount << " flag " << flag << endl ;
+               }
+               else {
+                 debugStream << "test" << myrank << " flag " << flag << endl ;
+                 sleep( 1 ) ;
+               }
+               if ( flag ) {
+                 int recvbuf ;
+                 sts = mpi_access.IRecv(&recvbuf,outcount,MPI_INT,source,
+                                        RequestId[i] ) ;
+                 if ( datatype == MPI_INT ) {
+                   int source, tag, error, outcount ;
+                   mpi_access.wait( RequestId[i] ) ;
+                   mpi_access.status( RequestId[i], source, tag, error, outcount,
+                                      true ) ;
+                   if ( (outcount != 1) || (recvbuf != i) ) {
+                     ostringstream strstream ;
+                     strstream << "======================================================"
+                               << endl << "test" << myrank << " outcount " << outcount
+                               << " recvbuf " << recvbuf << " KO" << endl
+                               << "======================================================"
+                               << endl ;
+                     debugStream << strstream.str() << endl ;
+                     CPPUNIT_FAIL( strstream.str() ) ;
+                   }
+                   debugStream << "========================================================"
+                        << endl << "test" << myrank << " outcount " << outcount
+                        << " recvbuf " << recvbuf << " OK" << endl
+                        << "========================================================"
+                        << endl ;
+                 }
+               }
+          }
+        }
+        char msgerr[MPI_MAX_ERROR_STRING] ;
+        int lenerr ;
+        mpi_access.errorString(sts, msgerr, &lenerr) ;
+        debugStream << "test" << myrank << " lenerr " << lenerr << " "
+             << msgerr << endl ;
+        if ( sts != MPI_SUCCESS ) {
+          ostringstream strstream ;
+          strstream << "==========================================================="
+                    << endl << "test" << myrank << " KO"
+                    << "==========================================================="
+                    << endl ;
+          debugStream << strstream.str() << endl ;
+          CPPUNIT_FAIL( strstream.str() ) ;
+        }
+        if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+     }
+
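+     // The 5 MPI_DOUBLE messages are still pending on rank 1: probe each one, cancel the
+     // matched message, then cancel the corresponding posted IRecv request.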
+     if ( myrank != 0 ) {
+       int iprobe ;
+       for ( iprobe = 5 ; iprobe < 10 ; iprobe++ ) {
+          debugStream << "test" << myrank << " ============================ iprobe "
+               << iprobe << "============================" << endl ;
+          int source, tag, outcount ;
+          MPI_Datatype datatype ;
+          int probeflag = false ;
+          while ( !probeflag ) {
+               sts = mpi_access.IProbe( target, source, tag, datatype, outcount,
+                                        probeflag ) ;
+               char msgerr[MPI_MAX_ERROR_STRING] ;
+               int lenerr ;
+               mpi_access.errorString(sts, msgerr, &lenerr) ;
+               debugStream << "test" << myrank << " IProbe iprobe " << iprobe
+                    << " target " << target << " probeflag " << probeflag
+                    << " tag " << tag << " outcount " << outcount << " datatype "
+                    << datatype << " lenerr " << lenerr << " " << msgerr << endl ;
+               if ( sts != MPI_SUCCESS ) {
+                 ostringstream strstream ;
+                 strstream << "=========================================================="
+                           << endl << "test" << myrank << " IProbe KO iprobe " << iprobe
+                           << endl
+                           << "=========================================================="
+                           << endl ;
+                 debugStream << strstream.str() << endl ;
+                 CPPUNIT_FAIL( strstream.str() ) ;
+               }
+               if ( !probeflag ) {
+                 //debugStream << "========================================================"
+                 //     << endl << "test" << myrank << " IProbe KO(OK) iprobe " << iprobe
+                 //     << " probeflag " << probeflag << endl
+                 //     << "========================================================"
+                 //     << endl ;
+               }
+               else {
+                 debugStream << "test" << myrank << " " << iprobe << " IProbe target "
+                      << target << " source " << source << " tag " << tag
+                      << " outcount " << outcount << " probeflag " << probeflag
+                      << endl ;
+                 if ( datatype != MPI_DOUBLE ) {
+                   ostringstream strstream ;
+                   strstream << "========================================================"
+                             << endl << "test" << myrank << " MPI_DOUBLE KO" << endl
+                             << "========================================================"
+                             << endl ;
+                   debugStream << strstream.str() << endl ;
+                   CPPUNIT_FAIL( strstream.str() ) ;
+                 }
+                 else {
+                   int flag ;
+                   sts = mpi_access.cancel( source, tag, datatype, outcount, flag ) ;
+                   if ( sts != MPI_SUCCESS || !flag ) {
+                     mpi_access.errorString(sts, msgerr, &lenerr) ;
+                     debugStream << "======================================================"
+                          << endl << "test" << myrank << " lenerr " << lenerr << " "
+                          << msgerr << endl << "test" << myrank
+                          << " Cancel PendingIrecv KO flag " << flag << " iprobe "
+                          << iprobe << " Irecv completed" << endl
+                          << "======================================================"
+                          << endl ;
+                     //return 1 ;
+                   }
+                   else {
+                     debugStream << "======================================================"
+                          << endl << "test" << myrank
+                          << " Cancel PendingIrecv OK RequestId " << " flag "
+                          << flag << " iprobe " << iprobe << endl
+                          << "======================================================"
+                          << endl ;
+                   }
+                 }
+                 int Reqtarget, Reqtag, Reqerror, Reqoutcount ;
+                 mpi_access.status( RequestId[iprobe], Reqtarget, Reqtag, Reqerror,
+                                    Reqoutcount, true ) ;
+                 debugStream << "test" << myrank << " Status Reqtarget "<< Reqtarget
+                      << " Reqtag " << Reqtag << " Reqoutcount " << Reqoutcount
+                      << endl ;
+                 int Reqflag ;
+                 sts = mpi_access.cancel( RequestId[iprobe] , Reqflag ) ;
+                 debugStream << "test" << myrank << " " << iprobe
+                      << " Cancel Irecv done Reqtarget " << Reqtarget
+                      << " Reqtag " << Reqtag << " Reqoutcount " << Reqoutcount
+                      << " Reqflag " << Reqflag << endl ;
+                 if ( sts != MPI_SUCCESS || !Reqflag ) {
+                   mpi_access.errorString(sts, msgerr, &lenerr) ;
+                   ostringstream strstream ;
+                   strstream << "========================================================"
+                             << endl << "test" << myrank << " lenerr " << lenerr << " "
+                             << msgerr << endl << "test" << myrank
+                             << " Cancel Irecv KO Reqflag " << Reqflag << " iprobe "
+                             << iprobe << endl
+                             << "========================================================"
+                             << endl ;
+                   debugStream << strstream.str() << endl ;
+                   CPPUNIT_FAIL( strstream.str() ) ;
+                 }
+                 else {
+                   debugStream << "========================================================"
+                        << endl << "test" << myrank
+                        << " Cancel Irecv OK RequestId " << RequestId[iprobe]
+                        << " Reqflag " << Reqflag << " iprobe " << iprobe << endl
+                        << "========================================================"
+                        << endl ;
+                   probeflag = Reqflag ;
+                 }
+               }
+          }
+       }
+     }
+     mpi_access.waitAll(10,RequestId) ;
+     mpi_access.deleteRequests(10,RequestId) ;
+  }
+
+  int source, tag, outcount, flag ;
+  MPI_Datatype datatype ;
+  sts = mpi_access.IProbe(target, source, tag, datatype, outcount, flag ) ;
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  mpi_access.errorString(sts, msgerr, &lenerr) ;
+  debugStream << "test" << myrank << " lenerr " << lenerr << " "
+       << msgerr << endl ;
+  if ( sts != MPI_SUCCESS || flag ) {
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << endl << "test" << myrank << " IProbe KO flag " << flag
+              << " remaining unread/cancelled message :" << endl
+              << " source " << source << " tag " << tag << endl
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  mpi_access.testAll(10,RequestId,flag) ;
+  mpi_access.waitAll(10,RequestId) ;
+  mpi_access.deleteRequests(10,RequestId) ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Cyclic_ISend_IRecv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Cyclic_ISend_IRecv.cxx
new file mode 100644 (file)
index 0000000..00ce3e6
--- /dev/null
@@ -0,0 +1,271 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Cyclic_ISend_IRecv() {
+
+  debugStream << "test_MPI_Access_Cyclic_ISend_IRecv" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 3 ) {
+      cerr << "test_MPI_Access_Cyclic_ISend_IRecv must be runned with 3 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_Cyclic_ISend_IRecv must be runned with 3 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Cyclic_ISend_IRecv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxsend 100
+
+  if ( myrank >= 3 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int alltarget[3] = {1 , 2 , 0 } ;
+  int allsource[3] = {2 , 0 , 1 } ;
+  int SendRequestId[maxsend] ;
+  int RecvRequestId[maxsend] ;
+  int sendbuf[maxsend] ;
+  int recvbuf[maxsend] ;
+  int sts ;
+  int i = 0 ;
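+  // 3-rank ring: every rank posts maxsend IRecv from allsource[myrank] and ISends one
+  // integer per iteration to alltarget[myrank]; rank 0 sends first to prime the ring.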
+  if ( myrank == 0 ) {
+    sendbuf[i] = i ;
+    sts = mpi_access.ISend(&sendbuf[i],1,MPI_INT,alltarget[myrank],
+                           SendRequestId[i]) ;
+    debugStream << "test" << myrank << " Send RequestId " << SendRequestId[i]
+         << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+  }
+  for ( i = 0 ; i < maxsend ; i++ ) {
+     recvbuf[i] = -1 ;
+     sts = mpi_access.IRecv(&recvbuf[i],1,MPI_INT,allsource[myrank],
+                            RecvRequestId[i]) ;
+     debugStream << "test" << myrank << " Recv RequestId " << RecvRequestId[i]
+          << " tag " << mpi_access.recvMPITag(allsource[myrank]) << endl ;
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr
+          << " " << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     int j ;
+     for (j = 0 ; j <= i ; j++) {
+        int flag ;
+        if ( j < i ) {
+          debugStream << "test" << myrank << " " << j << " -> Test-Send("<< SendRequestId[j]
+               << ")" << endl ;
+          mpi_access.test( SendRequestId[j], flag ) ;
+          if ( flag ) {
+            int target, tag, error, outcount ;
+            mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                               true ) ;
+            debugStream << "test" << myrank << " Send RequestId " << SendRequestId[j]
+                 << " target " << target << " tag " << tag << " error " << error
+                 << endl ;
+            mpi_access.deleteRequest( SendRequestId[j] ) ;
+          }
+        }
+        debugStream << "test" << myrank << " " << j << " -> Test-Recv("<< SendRequestId[j]
+             << ")" << endl ;
+        mpi_access.test( RecvRequestId[j], flag ) ;
+        if ( flag ) {
+          int source, tag, error, outcount ;
+          mpi_access.status( RecvRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Recv RequestId" << j << " "
+               << RecvRequestId[j] << " source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount << endl ;
+          if ( (outcount != 1) || (recvbuf[j] != j) ) {
+            ostringstream strstream ;
+            strstream << "====================================================="
+                      << endl << "test" << myrank << " outcount "
+                      << outcount << " recvbuf[ " << j << " ] " << recvbuf[j] << " KO"
+                      << endl << "====================================================="
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+        }
+     }
+     if ( myrank == 0 ) {
+       if ( i != maxsend-1 ) {
+         sendbuf[i+1] = i + 1 ;
+         sts = mpi_access.ISend(&sendbuf[i+1],1,MPI_INT,alltarget[myrank],
+                                SendRequestId[i+1]) ;
+         debugStream << "test" << myrank << " Send RequestId " << SendRequestId[i+1]
+              << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+       }
+     }
+     else {
+       sendbuf[i] = i ;
+       sts = mpi_access.ISend(&sendbuf[i],1,MPI_INT,alltarget[myrank],
+                              SendRequestId[i]) ;
+       debugStream << "test" << myrank << " Send RequestId " << SendRequestId[i]
+            << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+     }
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr
+          << " " << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+
+  int flag ;
+  mpi_access.testAll(maxsend,SendRequestId,flag) ;
+  mpi_access.testAll(maxsend,RecvRequestId,flag) ;
+  mpi_access.waitAll(maxsend,SendRequestId) ;
+  mpi_access.deleteRequests(maxsend,SendRequestId) ;
+  mpi_access.waitAll(maxsend,RecvRequestId) ;
+  mpi_access.deleteRequests(maxsend,RecvRequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  mpi_access.testAll(maxsend,SendRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " TestAllSendflag " << flag << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " TestAllSendflag " << flag << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+  mpi_access.testAll(maxsend,RecvRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " TestAllRecvflag " << flag << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " TestAllRecvflag " << flag << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+
+  int sendrequests[maxsend] ;
+  int sendreqsize = mpi_access.sendRequestIds( alltarget[myrank] , maxsend ,
+                                               sendrequests ) ;
+  if ( sendreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    int source, tag, error, outcount ;
+    mpi_access.status(sendrequests[0], source, tag, error, outcount, true) ;
+    debugStream << "test" << myrank << " RequestId " << sendrequests[0]
+         << " source " << source << " tag " << tag << " error " << error
+         << " outcount " << outcount << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+  int recvrequests[maxsend] ;
+  int recvreqsize = mpi_access.recvRequestIds( allsource[myrank] , maxsend ,
+                                               recvrequests ) ;
+  if ( recvreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Cyclic_Send_Recv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Cyclic_Send_Recv.cxx
new file mode 100644 (file)
index 0000000..67c687f
--- /dev/null
@@ -0,0 +1,188 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Cyclic_Send_Recv() {
+
+  debugStream << "test_MPI_Access_Cyclic_Send_Recv" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 3 ) {
+    cerr << "test_MPI_Access_Cyclic_Send_Recv must be run with 3 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_Cyclic_Send_Recv must be run with 3 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Cyclic_Send_Recv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 3 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
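+  // Ring topology on 3 ranks: each rank sends to alltarget[myrank] and receives
+  // from allsource[myrank], i.e. 0 -> 1 -> 2 -> 0.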
+  int alltarget[3] = {1 , 2 , 0 } ;
+  int allsource[3] = {2 , 0 , 1 } ;
+  int RequestId[10] ;
+  int sts ;
+  int i = 0 ;
+  if ( myrank == 0 ) {
+    sts = mpi_access.send(&i,1,MPI_INT,alltarget[myrank], RequestId[i]) ;
+    debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+         << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+  }
+  for ( i = 0 ; i < 10 ; i++ ) {
+     int recvbuf ;
+     int outcount ;
+     if ( i & 1 ) {
+       outcount = 0 ;
+       sts = mpi_access.recv(&recvbuf,1,MPI_INT,allsource[myrank], RequestId[i],
+                             &outcount) ;
+     }
+     else {
+       sts = mpi_access.recv(&recvbuf,1,MPI_INT,allsource[myrank], RequestId[i]) ;
+       outcount = 1 ;
+     }
+     //int source, tag, error, outcount ;
+     //mpi_access.Status( RequestId[i], source, tag, error, outcount, true) ;
+     debugStream << "test" << myrank << " Recv RequestId " << RequestId[i]
+          << " tag " << mpi_access.recvMPITag(allsource[myrank])
+          << " outcount " << outcount << endl ;
+     if ( (outcount != 1) || (recvbuf != i) ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " outcount "
+                 << outcount << " recvbuf " << recvbuf << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if ( myrank == 0 ) {
+       if ( i != 9 ) {
+         int ii = i + 1 ;
+         sts = mpi_access.send(&ii,1,MPI_INT,alltarget[myrank], RequestId[i]) ;
+         debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+              << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+       }
+     }
+     else {
+       sts = mpi_access.send(&i,1,MPI_INT,alltarget[myrank], RequestId[i]) ;
+       debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+            << " tag " << mpi_access.sendMPITag(alltarget[myrank]) << endl ;
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr
+          << " " << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+
+  int flag ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,RequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  int sendrequests[10] ;
+  int sendreqsize = mpi_access.sendRequestIds( alltarget[myrank] , 10 ,
+                                               sendrequests ) ;
+  if ( sendreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  int recvrequests[10] ;
+  int recvreqsize = mpi_access.recvRequestIds( allsource[myrank] , 10 ,
+                                               recvrequests ) ;
+  if ( recvreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_IProbe.cxx b/src/ParaMEDMEMTest/test_MPI_Access_IProbe.cxx
new file mode 100644 (file)
index 0000000..5fd4c2d
--- /dev/null
@@ -0,0 +1,173 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <time.h>
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#ifndef WIN32
+#include <unistd.h>
+#endif
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_IProbe() {
+
+  debugStream << "test_MPI_Access_IProbe" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_IProbe must be runned with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_IProbe" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int sendbuf[10] ;
+  int RequestId[10] ;
+  int sts ;
+  int i ;
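+  // Rank 0 posts 10 non-blocking sends; rank 1 polls with IProbe until a message from
+  // rank 0 is pending, then receives it using the probed source, datatype and count.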
+  for ( i = 0 ; i < 10 ; i++ ) {
+     if ( myrank == 0 ) {
+       sendbuf[i] = i ;
+       sts = mpi_access.ISend(&sendbuf[i],1,MPI_INT,target, RequestId[i]) ;
+       debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+            << endl ;
+     }
+     else {
+       int flag = false ;
+       while ( !flag ) {
+            int source, tag, outcount ;
+            MPI_Datatype datatype ;
+            sts = mpi_access.IProbe(target, source, tag, datatype, outcount, flag ) ;
+            if ( flag ) {
+              debugStream << "test" << myrank << " " << i << " IProbe target " << target
+                   << " source " << source << " tag " << tag
+                   << " outcount " << outcount << " flag " << flag << endl ;
+            }
+            else {
+              debugStream << "test" << myrank << " IProbe flag " << flag << endl ;
+              sleep( 1 ) ;
+            }
+            if ( flag ) {
+              int recvbuf ;
+              sts = mpi_access.recv(&recvbuf,outcount,datatype,source, RequestId[i],
+                                    &outcount) ;
+              if ( (outcount != 1) || (recvbuf != i) ) {
+                ostringstream strstream ;
+                strstream << "==========================================================="
+                          << endl << "test" << myrank << " outcount " << outcount
+                          << " recvbuf " << recvbuf << " KO" << endl
+                          << "==========================================================="
+                          << endl ;
+                debugStream << strstream.str() << endl ;
+                CPPUNIT_FAIL( strstream.str() ) ;
+              }
+              debugStream << "==========================================================="
+                   << endl << "test" << myrank << " outcount " << outcount
+                   << " recvbuf " << recvbuf << " OK" << endl
+                   << "==========================================================="
+                   << endl ;
+            }
+       }
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+  int flag ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  mpi_access.waitAll(10,RequestId) ;
+  mpi_access.deleteRequests(10,RequestId) ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_ISendRecv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_ISendRecv.cxx
new file mode 100644 (file)
index 0000000..126cc8e
--- /dev/null
@@ -0,0 +1,217 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_ISendRecv() {
+
+  debugStream << "test_MPI_Access_ISendRecv" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    cerr << "test_MPI_Access_ISendRecv must be run with 2 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_ISendRecv must be run with 2 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_ISendRecv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendRequestId[10] ;
+  int RecvRequestId[10] ;
+  int sendbuf[10] ;
+  int recvbuf[10] ;
+  int sts ;
+  int i ;
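+  // Each iteration posts a combined ISendRecv (non-blocking send and receive) to the
+  // peer, then tests every request issued so far: completed sends are deleted and
+  // completed receives are checked against the expected value.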
+  for ( i = 0 ; i < 10 ; i++ ) {
+     sendbuf[i] = i ;
+     sts = mpi_access.ISendRecv(&sendbuf[i],1,MPI_INT,target, SendRequestId[i],
+                                &recvbuf[i],1,MPI_INT,target, RecvRequestId[i]) ;
+     debugStream << "test" << myrank << " Send sendRequestId " << SendRequestId[i]
+          << " tag " << mpi_access.sendMPITag(target)
+          << " recvRequestId " << RecvRequestId[i]
+          << " tag " << mpi_access.recvMPITag(target) << endl ;
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr
+          << " " << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     int j ;
+     for (j = 0 ; j <= i ; j++) {
+        int flag ;
+        if ( j < i ) {
+          debugStream << "test" << myrank << " " << j << " -> Test-Send("<< SendRequestId[j]
+               << ")" << endl ;
+          mpi_access.test( SendRequestId[j], flag ) ;
+          if ( flag ) {
+            int target, tag, error, outcount ;
+            mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                               true ) ;
+            debugStream << "test" << myrank << " Send RequestId " << SendRequestId[j]
+                 << " target " << target << " tag " << tag << " error " << error
+                 << endl ;
+            mpi_access.deleteRequest( SendRequestId[j] ) ;
+          }
+        }
+        debugStream << "test" << myrank << " " << j << " -> Test-Recv("<< RecvRequestId[j]
+             << ")" << endl ;
+        mpi_access.test( RecvRequestId[j], flag ) ;
+        if ( flag ) {
+          int source, tag, error, outcount ;
+          mpi_access.status( RecvRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Recv RequestId" << j << " "
+               << RecvRequestId[j] << " source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount << endl ;
+          if ( (outcount != 1) || (recvbuf[j] != j) ) {
+            ostringstream strstream ;
+            strstream << "==========================================================="
+                      << "test" << myrank << " outcount "
+                      << outcount << " recvbuf[ " << j << " ] " << recvbuf[j] << " KO"
+                      << "==========================================================="
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+        }
+     }
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+
+  int flag ;
+  mpi_access.testAll(10,SendRequestId,flag) ;
+  mpi_access.waitAll(10,SendRequestId) ;
+  mpi_access.deleteRequests(10,SendRequestId) ;
+  mpi_access.testAll(10,SendRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  mpi_access.testAll(10,RecvRequestId,flag) ;
+  mpi_access.waitAll(10,RecvRequestId) ;
+  mpi_access.deleteRequests(10,RecvRequestId) ;
+  mpi_access.testAll(10,RecvRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  int sendrequests[10] ;
+  int sendreqsize = mpi_access.sendRequestIds( target , 10 , sendrequests ) ;
+  if ( sendreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+  int recvrequests[10] ;
+  int recvreqsize = mpi_access.recvRequestIds( target , 10 , recvrequests ) ;
+  if ( recvreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  else {
+    debugStream << "=========================================================" << endl
+         << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+         << "=========================================================" << endl ;
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv.cxx
new file mode 100644 (file)
index 0000000..baa3572
--- /dev/null
@@ -0,0 +1,222 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_ISend_IRecv() {
+
+  debugStream << "test_MPI_Access_ISend_IRecv" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    cerr << "test_MPI_Access_ISend_IRecv must be run with 2 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_ISend_IRecv must be run with 2 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_ISend_IRecv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxreq 100
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[maxreq] ;
+  int recvbuf[maxreq] ;
+  int i ;
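+  // Rank 0 posts maxreq non-blocking sends and rank 1 the matching non-blocking
+  // receives; after each post, all requests issued so far are polled with test().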
+  for ( i = 0 ; i < maxreq ; i++ ) {
+    if ( myrank == 0 ) {
+      sendbuf[i] = i ;
+      sts = mpi_access.ISend(&sendbuf[i],1,MPI_INT,target, SendRequestId[i]) ;
+      debugStream << "test" << myrank << " ISend RequestId " << SendRequestId[i]
+           << " tag " << mpi_access.sendMPITag(target) << endl ;
+    }
+    else {
+      sts = mpi_access.IRecv(&recvbuf[i],1,MPI_INT,target, RecvRequestId[i]) ;
+      debugStream << "test" << myrank << " IRecv RequestId " << RecvRequestId[i]
+           << " tag " << mpi_access.recvMPITag(target) << endl ;
+    }
+    int j ;
+    for (j = 0 ; j <= i ; j++) {
+      int flag ;
+      if ( myrank == 0 ) {
+        mpi_access.test( SendRequestId[j], flag ) ;
+      }
+      else {
+        mpi_access.test( RecvRequestId[j], flag ) ;
+      }
+      if ( flag ) {
+        int target,source, tag, error, outcount ;
+        if ( myrank == 0 ) {
+          mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Send RequestId " << SendRequestId[j]
+               << ") : target " << target << " tag " << tag << " error " << error
+               << " flag " << flag << endl ;
+        }
+        else {
+          mpi_access.status( RecvRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Recv RequestId "
+               << RecvRequestId[j] << ") : source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount
+               << " flag " << flag << endl ;
+          if ( (outcount != 1) || (recvbuf[j] != j) ) {
+            ostringstream strstream ;
+            strstream << "==========================================================="
+                      << endl << "test" << myrank << " outcount "
+                      << outcount << " recvbuf " << recvbuf[j] << " KO" << endl
+                      << "==========================================================="
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+          //else {
+          //  debugStream << "==========================================================="
+          //       << endl << "test" << myrank << " outcount " << outcount
+          //       << " RequestId " << RecvRequestId[j] << " recvbuf "
+          //       << recvbuf[j] << " OK" << endl
+          //       << "==========================================================="
+          //       << endl ;
+          //}
+        }
+      }
+    }
+    char msgerr[MPI_MAX_ERROR_STRING] ;
+    int lenerr ;
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+
+    if ( sts != MPI_SUCCESS ) {
+      ostringstream strstream ;
+      strstream << "==========================================================="
+                << "test" << myrank << " KO"
+                << "==========================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  if ( myrank == 0 ) {
+    mpi_access.waitAll(maxreq, SendRequestId) ;
+    mpi_access.deleteRequests(maxreq, SendRequestId) ;
+  }
+  else {
+    mpi_access.waitAll(maxreq, RecvRequestId) ;
+    mpi_access.deleteRequests(maxreq, RecvRequestId) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[maxreq] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , maxreq , sendrequests ) ;
+    int i ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      for ( i = 0 ; i < sendreqsize ; i++ ) {
+        debugStream << "test" << myrank << " sendrequests[ " << i << " ] = "
+             << sendrequests[i] << endl ;
+      }
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[maxreq] ;
+    int recvreqsize = mpi_access.recvRequestIds( target , maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_BottleNeck.cxx b/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_BottleNeck.cxx
new file mode 100644 (file)
index 0000000..8203401
--- /dev/null
@@ -0,0 +1,226 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <time.h>
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_ISend_IRecv_BottleNeck() {
+
+  debugStream << "test_MPI_Access_ISend_IRecv_BottleNeck" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_ISend_IRecv_BottleNeck must be runned with 2 procs"
+              << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_ISend_IRecv_BottleNeck" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxreq 10000
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[maxreq] ;
+  int recvbuf[maxreq] ;
+  int i ;
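+  // Bottleneck scenario: rank 0 posts maxreq non-blocking sends of growing length
+  // without testing them inside the loop, while rank 1 posts the matching receives and
+  // deletes whichever of its pending receive requests have already completed.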
+  for ( i = 0 ; i < maxreq ; i++ ) {
+     if ( myrank == 0 ) {
+       sendbuf[i] = i ;
+       sts = mpi_access.ISend(sendbuf,i,MPI_INT,target, SendRequestId[i]) ;
+       debugStream << "test" << myrank << " ISend RequestId " << SendRequestId[i]
+            << " tag " << mpi_access.sendMPITag(target) << endl ;
+     }
+     else {
+       //sleep( 1 ) ;
+       sts = mpi_access.IRecv(recvbuf,i,MPI_INT,target, RecvRequestId[i]) ;
+       debugStream << "test" << myrank << " IRecv RequestId " << RecvRequestId[i]
+            << " tag " << mpi_access.recvMPITag(target) << endl ;
+       int recvreqsize = mpi_access.recvRequestIdsSize() ;
+       int * recvrequests = new int[ recvreqsize ] ;
+       recvreqsize = mpi_access.recvRequestIds( target , recvreqsize , recvrequests ) ;
+       int j ;
+       for (j = 0 ; j < recvreqsize ; j++) {
+          int flag ;
+          mpi_access.test( recvrequests[j], flag ) ;
+          if ( flag ) {
+            int source, tag, error, outcount ;
+            mpi_access.status( recvrequests[j], source, tag, error, outcount,
+                               true ) ;
+            debugStream << "test" << myrank << " Test(Recv RequestId "
+                 << recvrequests[j] << ") : source " << source << " tag " << tag
+                 << " error " << error << " outcount " << outcount
+                 << " flag " << flag << " : DeleteRequest" << endl ;
+            mpi_access.deleteRequest( recvrequests[j] ) ;
+          }
+          else {
+//            debugStream << "test" << myrank << " Test(Recv RequestId "
+//                 << recvrequests[j] << ") flag " << flag << endl ;
+          }
+       }
+       delete [] recvrequests ;
+     }
+     if ( sts != MPI_SUCCESS ) {
+       char msgerr[MPI_MAX_ERROR_STRING] ;
+       int lenerr ;
+       mpi_access.errorString(sts, msgerr, &lenerr) ;
+       debugStream << "test" << myrank << " lenerr " << lenerr << " "
+            << msgerr << endl ;
+     }
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+  }
+
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  if ( myrank == 0 ) {
+    int size = mpi_access.sendRequestIdsSize() ;
+    debugStream << "test" << myrank << " before WaitAll sendreqsize " << size << endl ;
+    mpi_access.waitAll(maxreq, SendRequestId) ;
+    size = mpi_access.sendRequestIdsSize() ;
+    debugStream << "test" << myrank << " after WaitAll sendreqsize " << size << endl ;
+    int * ArrayOfSendRequests = new int[ size ] ;
+    int nSendRequest = mpi_access.sendRequestIds( size , ArrayOfSendRequests ) ;
+    int i ;
+    for ( i = 0 ; i < nSendRequest ; i++ ) {
+       mpi_access.deleteRequest( ArrayOfSendRequests[i] ) ;
+    }
+    delete [] ArrayOfSendRequests ;
+  }
+  else {
+    int size = mpi_access.recvRequestIdsSize() ;
+    debugStream << "test" << myrank << " before WaitAll recvreqsize " << size << endl ;
+    mpi_access.waitAll(maxreq, RecvRequestId) ;
+    size = mpi_access.recvRequestIdsSize() ;
+    debugStream << "test" << myrank << " after WaitAll recvreqsize " << size << endl ;
+    int * ArrayOfRecvRequests = new int[ size ] ;
+    int nRecvRequest = mpi_access.recvRequestIds( size , ArrayOfRecvRequests ) ;
+    int i ;
+    for ( i = 0 ; i < nRecvRequest ; i++ ) {
+       mpi_access.deleteRequest( ArrayOfRecvRequests[i] ) ;
+    }
+    delete [] ArrayOfRecvRequests ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[maxreq] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , maxreq , sendrequests ) ;
+    int i ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      for ( i = 0 ; i < sendreqsize ; i++ ) {
+         debugStream << "test" << myrank << " sendrequests[ " << i << " ] = "
+              << sendrequests[i] << endl ;
+      }
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[maxreq] ;
+    int recvreqsize = mpi_access.recvRequestIds( target , maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length.cxx b/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length.cxx
new file mode 100644 (file)
index 0000000..d1b556c
--- /dev/null
@@ -0,0 +1,234 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_ISend_IRecv_Length() {
+
+  debugStream << "test_MPI_Access_ISend_IRecv_Length" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_ISend_IRecv_Length must be runned with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_ISend_IRecv_Length" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxreq 10
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[1000*(maxreq-1)] ;
+  int recvbuf[maxreq][1000*(maxreq-1)] ;
+  int i ;
+  for ( i = 0 ; i < 1000*(maxreq-1) ; i++ ) {
+    sendbuf[i] = i ;
+  }
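+  // Message lengths grow with the request index: request i carries 1000*i ints
+  // (request 0 is empty).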
+  for ( i = 0 ; i < maxreq ; i++ ) {
+    if ( myrank == 0 ) {
+      sts = mpi_access.ISend( sendbuf, 1000*i, MPI_INT, target, SendRequestId[i] ) ;
+      debugStream << "test" << myrank << " ISend RequestId " << SendRequestId[i]
+           << " tag " << mpi_access.sendMPITag(target) << endl ;
+    }
+    else {
+      sts = mpi_access.IRecv( recvbuf[i], 1000*i, MPI_INT, target,
+                              RecvRequestId[i] ) ;
+      debugStream << "test" << myrank << " IRecv RequestId " << RecvRequestId[i]
+           << " tag " << mpi_access.recvMPITag(target) << endl ;
+    }
+    int j ;
+    for (j = 0 ; j <= i ; j++) {
+      int flag ;
+      if ( myrank == 0 ) {
+        mpi_access.test( SendRequestId[j], flag ) ;
+      }
+      else {
+        mpi_access.test( RecvRequestId[j], flag ) ;
+      }
+      if ( flag ) {
+        int target,source, tag, error, outcount ;
+        if ( myrank == 0 ) {
+          mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Send RequestId " << SendRequestId[j]
+               << ") : target " << target << " tag " << tag << " error " << error
+               << " flag " << flag << endl ;
+        }
+        else {
+          mpi_access.status( RecvRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Recv RequestId "
+               << RecvRequestId[j] << ") : source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount
+               << " flag " << flag << endl ;
+          if ( outcount != 0 ) {
+            if ( (outcount != 1000*j) ||
+                 (recvbuf[j][outcount-1] != (outcount-1)) ) {
+              ostringstream strstream ;
+              strstream << "==========================================================="
+                        << endl << "test" << myrank << " outcount "
+                        << outcount << " recvbuf " << recvbuf[j][outcount-1] << " KO"
+                        << endl
+                        << "==========================================================="
+                        << endl ;
+              debugStream << strstream.str() << endl ;
+              CPPUNIT_FAIL( strstream.str() ) ;
+            }
+            else {
+              debugStream << "==========================================================="
+                   << endl << "test" << myrank << " outcount " << outcount
+                   << " RequestId " << RecvRequestId[j] << " recvbuf "
+                   << recvbuf[j][outcount-1] << " OK" << endl
+                   << "==========================================================="
+                   << endl ;
+            }
+          }
+          else {
+            debugStream << "==========================================================="
+                 << endl << "test" << myrank << " outcount " << outcount
+                 << " RequestId " << RecvRequestId[j] << " OK" << endl
+                 << "==========================================================="
+                 << endl ;
+          }
+        }
+      }
+    }
+    char msgerr[MPI_MAX_ERROR_STRING] ;
+    int lenerr ;
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+
+    if ( sts != MPI_SUCCESS ) {
+      ostringstream strstream ;
+      strstream << "==========================================================="
+                << "test" << myrank << " KO"
+                << "==========================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  debugStream << "test" << myrank << " WaitAll" << endl ;
+  if ( myrank == 0 ) {
+    mpi_access.waitAll(maxreq, SendRequestId) ;
+    mpi_access.deleteRequests(maxreq, SendRequestId) ;
+  }
+  else {
+    mpi_access.waitAll(maxreq, RecvRequestId) ;
+    mpi_access.deleteRequests(maxreq, RecvRequestId) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[maxreq] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , maxreq , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[maxreq] ;
+    int recvreqsize = mpi_access.recvRequestIds( target , maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length_1.cxx b/src/ParaMEDMEMTest/test_MPI_Access_ISend_IRecv_Length_1.cxx
new file mode 100644 (file)
index 0000000..966bfc4
--- /dev/null
@@ -0,0 +1,306 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a Segmentation Fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_ISend_IRecv_Length_1() {
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_ISend_IRecv_Length_1 must be runned with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_ISend_IRecv_Length_1" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxreq 10
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[1000*(maxreq-1)] ;
+  int recvbuf[maxreq][1000*(maxreq-1)] ;
+  int maxirecv = 1 ;
+  int i ;
+  RecvRequestId[0] = -1 ;
+  for ( i = 0 ; i < 1000*(maxreq-1) ; i++ ) {
+    sendbuf[i] = i ;
+  }
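+  // Variant of the Length test: the receiver posts each IRecv only after IProbe reports
+  // a pending message, using the probed source, datatype and count; on the last
+  // iteration any receives not yet posted are issued directly with length 1000*j.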
+  for ( i = 0 ; i < maxreq ; i++ ) {
+    sts = MPI_SUCCESS ;
+    if ( myrank == 0 ) {
+      sts = mpi_access.ISend( sendbuf, 1000*i, MPI_INT, target, SendRequestId[i] ) ;
+      debugStream << "test" << myrank << " ISend RequestId " << SendRequestId[i]
+           << " tag " << mpi_access.sendMPITag(target) << endl ;
+    }
+    int j ;
+    for (j = 1 ; j <= i ; j++) {
+      int source ;
+      MPI_Datatype datatype ;
+      int outcount ;
+      int flag ;
+      if ( myrank == 0 ) {
+        mpi_access.test( SendRequestId[j], flag ) ;
+      }
+      else {
+        int MPITag ;
+        sts = mpi_access.IProbe( target , source, MPITag, datatype,
+                                 outcount, flag) ;
+        char msgerr[MPI_MAX_ERROR_STRING] ;
+        int lenerr ;
+        mpi_access.errorString(sts, msgerr, &lenerr) ;
+        debugStream << "test" << myrank << " IProbe lenerr " << lenerr << " "
+             << msgerr << endl ;
+        if ( sts != MPI_SUCCESS ) {
+          ostringstream strstream ;
+          strstream << "==========================================================="
+                    << "test" << myrank << " IProbe KO"
+                    << "==========================================================="
+                    << endl ;
+          debugStream << strstream.str() << endl ;
+          CPPUNIT_FAIL( strstream.str() ) ;
+        }
+        debugStream << "test" << myrank << " IProbe i/j " << i << "/" << j
+             << " MPITag " << MPITag << " datatype " << datatype
+             << " outcount " << outcount << " flag " << flag << endl ;
+      }
+      if ( flag ) {
+        if ( myrank == 0 ) {
+          int target, tag, error, outcount ;
+          mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Send RequestId " << SendRequestId[j]
+               << ") : target " << target << " tag " << tag << " error " << error
+               << " flag " << flag << endl ;
+        }
+        else {
+          sts = mpi_access.IRecv( recvbuf[maxirecv], outcount, datatype, source,
+                                  RecvRequestId[maxirecv] ) ;
+          debugStream << "test" << myrank << " maxirecv " << maxirecv << " IRecv RequestId "
+               << RecvRequestId[maxirecv] << " source " << source
+               << " outcount " << outcount << " tag "
+               << mpi_access.recvMPITag(target) << endl ;
+          maxirecv = maxirecv + 1 ;
+        }
+      }
+      else if ( myrank == 1 && i == maxreq-1 && j >= maxirecv ) {
+        sts = mpi_access.IRecv( recvbuf[j], 1000*j, MPI_INT, target,
+                                RecvRequestId[j] ) ;
+        debugStream << "test" << myrank << " maxirecv " << maxirecv << " IRecv RequestId "
+             << RecvRequestId[j] << " target " << target << " length " << 1000*j
+             << " tag " << mpi_access.recvMPITag(target) << endl ;
+        maxirecv = maxirecv + 1 ;
+      }
+    }
+    char msgerr[MPI_MAX_ERROR_STRING] ;
+    int lenerr ;
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+
+    if ( sts != MPI_SUCCESS ) {
+      ostringstream strstream ;
+      strstream << "==========================================================="
+                << endl << "test" << myrank << " KO" << endl 
+                << "==========================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  int flag ;
+  if ( myrank == 0 ) {
+    mpi_access.testAll( maxreq, SendRequestId, flag ) ;
+    debugStream << "test" << myrank << " TestAll SendRequest flag " << flag << endl ;
+  }
+  else {
+    int i ;
+    int source ;
+    int outcount ;
+    int flag ;
+    if ( maxirecv != maxreq ) {
+      ostringstream strstream ;
+      strstream << "==========================================================="
+                << endl << "test" << myrank << " KO" << " maxirecv " << maxirecv
+                << " != maxreq " << maxreq << endl 
+                << "==========================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    while ( maxirecv > 0 ) {
+      for ( i = 1 ; i < maxreq ; i++ ) {
+        debugStream << "test" << myrank << " Test : " << endl ;
+        sts = mpi_access.test( RecvRequestId[i] , flag ) ;
+        char msgerr[MPI_MAX_ERROR_STRING] ;
+        int lenerr ;
+        mpi_access.errorString(sts, msgerr, &lenerr) ;
+        debugStream << "test" << myrank << " flag " << flag << " lenerr "
+             << lenerr << " " << msgerr << " maxirecv " << maxirecv << endl ;
+        if ( sts != MPI_SUCCESS ) {
+          ostringstream strstream ;
+          strstream << "==========================================================="
+                    << "test" << myrank << " KO"
+                    << "==========================================================="
+                    << endl ;
+          debugStream << strstream.str() << endl ;
+          CPPUNIT_FAIL( strstream.str() ) ;
+        }
+        debugStream << "test" << myrank << " Test flag " << flag << endl ;
+        if ( flag ) {
+          int tag, error ;
+          mpi_access.status( RecvRequestId[i] , source , tag , error ,
+                             outcount ) ;
+          if ( i != 0 ) {
+            if ( outcount != 1000*i ||
+                 (recvbuf[i][outcount-1] != (outcount-1)) ) {
+              ostringstream strstream ;
+              strstream << "========================================================"
+                        << endl << "test" << myrank << " outcount " << outcount
+                        << " KO" << " i " << i
+                        << " recvbuf " << recvbuf[i][outcount-1] << endl
+                        << "========================================================"
+                        << endl ;
+              debugStream << strstream.str() << endl ;
+              CPPUNIT_FAIL( strstream.str() ) ;
+            }
+          }
+          else if ( outcount != 0 ) {
+            ostringstream strstream ;
+            strstream << "========================================================"
+                      << endl << "test" << myrank << " outcount " << outcount
+                      << " KO" << " i " << i << endl
+                      << "========================================================"
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+          maxirecv = maxirecv - 1 ;
+        }
+      }
+    }
+    mpi_access.testAll( maxreq, RecvRequestId, flag ) ;
+    debugStream << "test" << myrank << " TestAll RecvRequest flag " << flag << endl ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  debugStream << "test" << myrank << " WaitAll :" << endl ;
+  if ( myrank == 0 ) {
+    mpi_access.waitAll( maxreq, SendRequestId ) ;
+    mpi_access.deleteRequests( maxreq, SendRequestId ) ;
+  }
+  else {
+    mpi_access.waitAll( maxreq, RecvRequestId ) ;
+    mpi_access.deleteRequests( maxreq, RecvRequestId ) ;
+  }
+
+  if ( myrank == 0 ) {
+    int sendrequests[maxreq] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , maxreq , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[maxreq] ;
+    int recvreqsize = mpi_access.sendRequestIds( target , maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
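The test above drives the probe-then-receive handshake through the MPIAccess wrapper: rank 0 posts non-blocking sends of growing length while rank 1 calls IProbe to learn the source, tag, datatype and length of the pending message before posting a matching IRecv. For reference only (this sketch is not part of the commit), the same pattern in plain MPI looks roughly like this:

#include <mpi.h>
#include <vector>

// Non-blocking probe: ask MPI whether a message is pending and how long it is,
// then post a receive of exactly that size.
void probe_then_recv(int source, int tag, MPI_Comm comm)
{
  MPI_Status st;
  int pending = 0;
  MPI_Iprobe(source, tag, comm, &pending, &st);
  if (pending) {
    int count = 0;
    MPI_Get_count(&st, MPI_INT, &count);   // length of the pending message, in MPI_INTs
    std::vector<int> buf(count);
    MPI_Recv(buf.data(), count, MPI_INT, st.MPI_SOURCE, st.MPI_TAG, comm, MPI_STATUS_IGNORE);
  }
}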
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Probe.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Probe.cxx
new file mode 100644 (file)
index 0000000..632c5d2
--- /dev/null
@@ -0,0 +1,145 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Probe() {
+
+  debugStream << "test_MPI_Access_Probe" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    cerr << "test_MPI_Access_Probe must be run with 2 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_Probe must be run with 2 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Probe" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int RequestId[10] ;
+  int sts ;
+  int i ;
+  for ( i = 0 ; i < 10 ; i++ ) {
+     if ( myrank == 0 ) {
+       sts = mpi_access.send(&i,1,MPI_INT,target, RequestId[i]) ;
+       debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+            << endl ;
+     }
+     else {
+       int source, tag, outcount ;
+       MPI_Datatype datatype ;
+       sts = mpi_access.probe(target, source, tag, datatype, outcount ) ;
+       debugStream << "test" << myrank << " Probe target " << target << " source " << source
+            << " tag " << tag << " outcount " << outcount << endl ;
+       int recvbuf ;
+       sts = mpi_access.recv(&recvbuf,outcount,datatype,source, RequestId[i],
+                             &outcount) ;
+       if ( (outcount != 1) || (recvbuf != i) ) {
+         ostringstream strstream ;
+         strstream << "==========================================================="
+                   << "test" << myrank << " outcount " << outcount
+                   << " recvbuf " << recvbuf << " KO"
+                   << "==========================================================="
+                   << endl ;
+         debugStream << strstream.str() << endl ;
+         CPPUNIT_FAIL( strstream.str() ) ;
+       }
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+  int flag ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,RequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
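test_MPI_Access_Probe above is the blocking variant of the same idea: probe first, then receive exactly what was announced. Below is a condensed sketch written against the MPIAccess calls used in this file (probe, and recv with an output count); it is an illustration, not code from the commit, and assumes mpi_access and target are set up as in the test:

// The sender transmits single ints, so a one-int buffer is enough here.
int source, tag, outcount, requestId;
MPI_Datatype datatype;
int sts = mpi_access.probe(target, source, tag, datatype, outcount); // blocks until a message from target is pending
if (sts == MPI_SUCCESS) {
  int value;
  sts = mpi_access.recv(&value, outcount, datatype, source, requestId, &outcount);
}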
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_SendRecv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_SendRecv.cxx
new file mode 100644 (file)
index 0000000..c7cbf7d
--- /dev/null
@@ -0,0 +1,181 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_SendRecv() {
+
+  debugStream << "MPIAccessTest::test_MPI_Access_SendRecv" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    cerr << "MPIAccessTest::test_MPI_Access_SendRecv must be run with 2 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_SendRecv must be run with 2 procs") ;
+    return;
+  }
+
+  debugStream << "MPIAccessTest::test_MPI_Access_SendRecv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int sendRequestId[10] ;
+  int recvRequestId[10] ;
+  int sts ;
+  int i ;
+  for ( i = 0 ; i < 10 ; i++ ) {
+     int recvbuf ;
+     int outcount ;
+     if ( i & 1 ) {
+       outcount = -1 ;
+       sts = mpi_access.sendRecv(&i,1,MPI_INT,target, sendRequestId[i],
+                                 &recvbuf,1,MPI_INT,target, recvRequestId[i],
+                                 &outcount) ;
+     }
+     else {
+       sts = mpi_access.sendRecv(&i,1,MPI_INT,target, sendRequestId[i],
+                                 &recvbuf,1,MPI_INT,target, recvRequestId[i]) ;
+//       outcount = mpi_access.MPIOutCount( recvRequestId[i] ) ;
+       outcount = 1 ;
+     }
+     debugStream << "test" << myrank << " Send sendRequestId " << sendRequestId[i]
+          << " tag " << mpi_access.sendMPITag(target)
+          << " recvRequestId " << recvRequestId[i]
+          << " tag " << mpi_access.recvMPITag(target)
+          << " outcount " << outcount << " MPIOutCount "
+          << mpi_access.MPIOutCount( recvRequestId[i] ) << endl ;
+     if ( (outcount != 1) || (recvbuf != i) ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " outcount " << outcount
+                 << " recvbuf " << recvbuf << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+
+  int flag ;
+  mpi_access.testAll(10,sendRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,sendRequestId) ;
+  mpi_access.testAll(10,recvRequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,recvRequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  int sendrequests[10] ;
+  int sendreqsize = mpi_access.sendRequestIds( target , 10 , sendrequests ) ;
+  if ( sendreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  int recvrequests[10] ;
+  int recvreqsize = mpi_access.sendRequestIds( target , 10 , recvrequests ) ;
+  if ( recvreqsize != 0 ) {
+    ostringstream strstream ;
+    strstream << "=========================================================" << endl
+              << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+              << "=========================================================" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
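The sendRecv call exercised above pairs one outgoing and one incoming message in a single operation, so neither rank has to worry about the ordering (and potential deadlock) of two blocking sends issued towards each other. As an illustrative sketch only, the plain-MPI counterpart is MPI_Sendrecv:

// Each rank sends its own rank number and receives the peer's; tag 0 on both sides.
int sendval = myrank, recvval = -1;
MPI_Status st;
MPI_Sendrecv(&sendval, 1, MPI_INT, target, 0,
             &recvval, 1, MPI_INT, target, 0,
             MPI_COMM_WORLD, &st);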
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Send_Recv.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Send_Recv.cxx
new file mode 100644 (file)
index 0000000..cdaaabf
--- /dev/null
@@ -0,0 +1,167 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Send_Recv() {
+
+  debugStream << "test_MPI_Access_Send_Recv" << endl ;
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    cerr << "test_MPI_Access_Send_Recv must be run with 2 procs" << endl ;
+    //CPPUNIT_FAIL("test_MPI_Access_Send_Recv must be run with 2 procs") ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Send_Recv" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int RequestId[10] ;
+  int sts ;
+  int i ;
+  for ( i = 0 ; i < 10 ; i++ ) {
+     if ( myrank == 0 ) {
+       sts = mpi_access.send(&i,1,MPI_INT,target, RequestId[i]) ;
+       debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+            << " tag " << mpi_access.sendMPITag(target) << endl ;
+     }
+     else {
+       int recvbuf ;
+       int outcount ;
+       sts = mpi_access.recv(&recvbuf,1,MPI_INT,target, RequestId[i],&outcount) ;
+       //int source, tag, error, outcount ;
+       //mpi_access.Status( RequestId[i], source, tag, error, outcount, true) ;
+       debugStream << "test" << myrank << " Recv RequestId " << RequestId[i]
+            << " tag " << mpi_access.recvMPITag(target)
+            << " outcount " << outcount << endl ;
+       if ( (outcount != 1) || (recvbuf != i) ) {
+         ostringstream strstream ;
+         strstream << "==========================================================="
+                   << "test" << myrank << " outcount " << outcount
+                   << " recvbuf " << recvbuf << " KO"
+                   << "==========================================================="
+                   << endl ;
+         debugStream << strstream.str() << endl ;
+         CPPUNIT_FAIL( strstream.str() ) ;
+       }
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check();
+  }
+  int flag ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,RequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check();
+
+  if ( myrank == 0 ) {
+    int sendrequests[10] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , 10 , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+  else {
+    int recvrequests[10] ;
+    int recvreqsize = mpi_access.sendRequestIds( target , 10 , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
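Every call in these tests is followed by the same status check: turn the returned code into a readable string and fail loudly if it is not MPI_SUCCESS. The tests go through mpi_access.errorString; a minimal plain-MPI sketch of the underlying pattern (not part of the commit) is:

#include <mpi.h>
#include <iostream>
#include <string>

// Translate an MPI return code into a human-readable message.
void check_status(int sts)
{
  if (sts != MPI_SUCCESS) {
    char msg[MPI_MAX_ERROR_STRING];
    int len = 0;
    MPI_Error_string(sts, msg, &len);
    std::cerr << "MPI error: " << std::string(msg, len) << std::endl;
  }
}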
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Send_Recv_Length.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Send_Recv_Length.cxx
new file mode 100644 (file)
index 0000000..d3385cf
--- /dev/null
@@ -0,0 +1,191 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Send_Recv_Length() {
+
+  debugStream << "test_MPI_Access_Send_Recv_Length" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_Send_Recv_Length must be run with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Send_Recv_Length" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+  if ( myrank >= 2 ) {
+    mpi_access.barrier() ;
+    delete group ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int RequestId[10] ;
+  int sendbuf[9000] ;
+  int recvbuf[9000] ;
+  bool recvbufok ;
+  int sts ;
+  int i , j ;
+  for ( i = 0 ; i < 9000 ; i++ ) {
+     sendbuf[i] = i ;
+  }
+  for ( i = 0 ; i < 10 ; i++ ) {
+     if ( myrank == 0 ) {
+       sts = mpi_access.send( sendbuf, 1000*i, MPI_INT, target, RequestId[i] ) ;
+       debugStream << "test" << myrank << " Send RequestId " << RequestId[i]
+            << " tag " << mpi_access.sendMPITag(target) << endl ;
+     }
+     else {
+       sts = MPI_SUCCESS ;
+       RequestId[i] = -1 ;
+       int outcount = 0 ;
+       if ( i != 0 ) {
+         sts = mpi_access.recv( recvbuf,1000*i+1,MPI_INT,target, RequestId[i],
+                                &outcount ) ;
+       }
+       //int source, tag, error, outcount ;
+       //mpi_access.Status( RequestId[i], source, tag, error, outcount, true) ;
+       debugStream << "test" << myrank << " Recv RequestId " << RequestId[i]
+            << " tag " << mpi_access.recvMPITag(target)
+            << " outcount " << outcount << endl ;
+       recvbufok = true ;
+       for ( j = 0 ; j < outcount ; j++ ) {
+          if ( recvbuf[j] != j ) {
+            debugStream << "test" << myrank << " recvbuf[ " << j << " ] = " << recvbuf[j]
+                 << endl ;
+            recvbufok = false ;
+            break ;
+          }
+       }
+       if ( (outcount != 1000*i) || !recvbufok ) {
+         ostringstream strstream ;
+         strstream << "==========================================================="
+                   << endl << "test" << myrank << " outcount " << outcount
+                   << " recvbuf " << recvbuf << " KO"
+                   << "==========================================================="
+                   << endl ;
+         debugStream << strstream.str() << endl ;
+         CPPUNIT_FAIL( strstream.str() ) ;
+       }
+     }
+     char msgerr[MPI_MAX_ERROR_STRING] ;
+     int lenerr ;
+     mpi_access.errorString(sts, msgerr, &lenerr) ;
+     debugStream << "test" << myrank << " lenerr " << lenerr << " "
+          << msgerr << endl ;
+
+     if ( sts != MPI_SUCCESS ) {
+       ostringstream strstream ;
+       strstream << "==========================================================="
+                 << "test" << myrank << " KO"
+                 << "==========================================================="
+                 << endl ;
+       debugStream << strstream.str() << endl ;
+       CPPUNIT_FAIL( strstream.str() ) ;
+     }
+     if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  }
+  int flag ;
+  mpi_access.testAll(10,RequestId,flag) ;
+  if ( !flag ) {
+    ostringstream strstream ;
+    strstream << "test" << myrank << " flag " << flag << " KO" << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  mpi_access.waitAll(10,RequestId) ;
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[10] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , 10 , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+  else {
+    int recvrequests[10] ;
+    int recvreqsize = mpi_access.sendRequestIds( target , 10 , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+  }
+
+  mpi_access.barrier() ;
+
+  delete group ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
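test_MPI_Access_Send_Recv_Length above relies on the MPI rule that a receive buffer may be larger than the incoming message; only the number of elements that actually arrived is reported back, here through the outcount argument of mpi_access.recv. A plain-MPI illustration of that rule (source and tag are assumed variables; this is not taken from the commit):

// The buffer holds up to 1001 ints, but the sender may have sent fewer;
// MPI_Get_count reports how many actually arrived.
std::vector<int> buf(1001);
MPI_Status st;
MPI_Recv(buf.data(), static_cast<int>(buf.size()), MPI_INT, source, tag,
         MPI_COMM_WORLD, &st);
int received = 0;
MPI_Get_count(&st, MPI_INT, &received);   // 0 <= received <= 1001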
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Time.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Time.cxx
new file mode 100644 (file)
index 0000000..166af2e
--- /dev/null
@@ -0,0 +1,291 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void MPIAccessTest::test_MPI_Access_Time() {
+
+  debugStream << "test_MPI_Access_Time" << endl ;
+
+  //  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "test_MPI_Access_Time must be run with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+  debugStream << "test_MPI_Access_Time" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess mpi_access( group ) ;
+
+#define maxreq 10
+
+  if ( myrank >= 2 ) {
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " --> mpi_access->Barrier" << endl ;
+    mpi_access.barrier() ;
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " <-- mpi_access->Barrier" << endl ;
+    delete group ;
+    debugStream << "test_MPI_Access_Time" << myrank << " OK" << endl ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendTimeRequestId[maxreq] ;
+  int RecvTimeRequestId[maxreq] ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[maxreq] ;
+  int recvbuf[maxreq] ;
+  int i = 0 ;
+  ParaMEDMEM::TimeMessage aSendTimeMsg[maxreq] ;
+  ParaMEDMEM::TimeMessage aRecvTimeMsg[maxreq] ;
+  double t ;
+  double dt = 1. ;
+  double maxt = 10. ;
+  for ( t = 0 ; t < maxt ; t = t+dt ) {
+    if ( myrank == 0 ) {
+      aSendTimeMsg[i].time = t ;
+      aSendTimeMsg[i].deltatime = dt ;
+      //aSendTimeMsg[i].maxtime = maxt ;
+      //sts = mpi_access.ISend( &aSendTimeMsg , mpi_access.timeExtent() ,
+      sts = mpi_access.ISend( &aSendTimeMsg[i] , 1 ,
+                              mpi_access.timeType() , target ,
+                              SendTimeRequestId[i]) ;
+      debugStream << "test" << myrank << " ISend RequestId " << SendTimeRequestId[i]
+           << " tag " << mpi_access.sendMPITag(target) << endl ;
+      sendbuf[i] = i ;
+      sts = mpi_access.ISend(&sendbuf[i],1,MPI_INT,target, SendRequestId[i]) ;
+      debugStream << "test" << myrank << " ISend RequestId " << SendRequestId[i]
+           << " tag " << mpi_access.sendMPITag(target) << endl ;
+    }
+    else {
+      //sts = mpi_access.IRecv( &aRecvTimeMsg , mpi_access.timeExtent() ,
+      sts = mpi_access.IRecv( &aRecvTimeMsg[i] , 1 ,
+                              mpi_access.timeType() , target ,
+                              RecvTimeRequestId[i]) ;
+      debugStream << "test" << myrank << " IRecv RequestId " << RecvTimeRequestId[i]
+           << " tag " << mpi_access.recvMPITag(target) << endl ;
+      sts = mpi_access.IRecv(&recvbuf[i],1,MPI_INT,target, RecvRequestId[i]) ;
+      debugStream << "test" << myrank << " IRecv RequestId " << RecvRequestId[i]
+           << " tag " << mpi_access.recvMPITag(target) << endl ;
+    }
+    int j ;
+    for (j = 0 ; j <= i ; j++) {
+      int flag ;
+      if ( myrank == 0 ) {
+        mpi_access.test( SendTimeRequestId[j], flag ) ;
+      }
+      else {
+        mpi_access.test( RecvTimeRequestId[j], flag ) ;
+      }
+      if ( flag ) {
+        int target,source, tag, error, outcount ;
+        if ( myrank == 0 ) {
+          mpi_access.status( SendTimeRequestId[j], target, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Send TimeRequestId " << SendTimeRequestId[j]
+               << ") : target " << target << " tag " << tag << " error " << error
+               << " flag " << flag << aSendTimeMsg[j] << endl ;
+        }
+        else {
+          mpi_access.status( RecvTimeRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Recv TimeRequestId "
+               << RecvTimeRequestId[j] << ") : source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount
+               << " flag " << flag << aRecvTimeMsg[j] << endl ;
+          if ( (outcount != 1) || (aRecvTimeMsg[j].time != j) ) {
+            ostringstream strstream ;
+            strstream << "==========================================================="
+                      << endl << "test" << myrank << " outcount " << outcount << " KO"
+                      << " RecvTimeRequestId " << RecvTimeRequestId[j] << endl
+                      << "==========================================================="
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+          else {
+            debugStream << "==========================================================="
+                 << endl << "test" << myrank << " outcount " << outcount
+                 << " RecvTimeRequestId " << RecvTimeRequestId[j] << " OK" << endl
+                 << "==========================================================="
+                 << endl ;
+          }
+        }
+      }
+      if ( myrank == 0 ) {
+        mpi_access.test( SendRequestId[j], flag ) ;
+      }
+      else {
+        mpi_access.test( RecvRequestId[j], flag ) ;
+      }
+      if ( flag ) {
+        int target,source, tag, error, outcount ;
+        if ( myrank == 0 ) {
+          mpi_access.status( SendRequestId[j], target, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Send RequestId " << SendRequestId[j]
+               << ") : target " << target << " tag " << tag << " error " << error
+               << " flag " << flag << endl ;
+        }
+        else {
+          mpi_access.status( RecvRequestId[j], source, tag, error, outcount,
+                             true ) ;
+          debugStream << "test" << myrank << " Test(Recv RequestId "
+               << RecvRequestId[j] << ") : source " << source << " tag " << tag
+               << " error " << error << " outcount " << outcount
+               << " flag " << flag << endl ;
+          if ( (outcount != 1) || (recvbuf[j] != j) ) {
+            ostringstream strstream ;
+            strstream << "==========================================================="
+                      << endl << "test" << myrank << " outcount "
+                      << outcount << " recvbuf " << recvbuf[j] << " KO" << endl
+                      << "==========================================================="
+                      << endl ;
+            debugStream << strstream.str() << endl ;
+            CPPUNIT_FAIL( strstream.str() ) ;
+          }
+          else {
+            debugStream << "==========================================================="
+                 << endl << "test" << myrank << " outcount " << outcount
+                 << " RequestId " << RecvRequestId[j] << " OK" << endl
+                 << "==========================================================="
+                 << endl ;
+          }
+        }
+      }
+    }
+    char msgerr[MPI_MAX_ERROR_STRING] ;
+    int lenerr ;
+    mpi_access.errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+
+    if ( sts != MPI_SUCCESS ) {
+      ostringstream strstream ;
+      strstream << "==========================================================="
+                << "test" << myrank << " KO"
+                << "==========================================================="
+                << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    i = i + 1 ;
+  }
+
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+  if ( myrank == 0 ) {
+    mpi_access.waitAll(maxreq, SendTimeRequestId) ;
+    mpi_access.deleteRequests(maxreq, SendTimeRequestId) ;
+    mpi_access.waitAll(maxreq, SendRequestId) ;
+    mpi_access.deleteRequests(maxreq, SendRequestId) ;
+  }
+  else {
+    mpi_access.waitAll(maxreq, RecvTimeRequestId) ;
+    mpi_access.deleteRequests(maxreq, RecvTimeRequestId) ;
+    mpi_access.waitAll(maxreq, RecvRequestId) ;
+    mpi_access.deleteRequests(maxreq, RecvRequestId) ;
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access.check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[2*maxreq] ;
+    int sendreqsize = mpi_access.sendRequestIds( target , 2*maxreq , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[2*maxreq] ;
+    int recvreqsize = mpi_access.sendRequestIds( target , 2*maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  debugStream << "test_MPI_Access_Time_0 rank" << myrank << " --> mpi_access->Barrier" << endl ;
+  mpi_access.barrier() ;
+  debugStream << "test_MPI_Access_Time_0 rank" << myrank << " <-- mpi_access->Barrier" << endl ;
+
+  delete group ;
+
+  //  MPI_Finalize();
+
+  debugStream << "test_MPI_Access_Time" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
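test_MPI_Access_Time above interleaves a small time-stamp record (fields time and deltatime, sent with mpi_access.timeType()) with every data message, so the receiver always knows which simulated time the next payload belongs to. As a hedged illustration only, with a hypothetical TimeStamp struct standing in for the library's TimeMessage, such a record can be exchanged through a contiguous MPI datatype; the record is then passed to the send/receive calls with count 1 and this datatype:

#include <mpi.h>

// Hypothetical two-double record standing in for the time message above.
struct TimeStamp { double time; double deltatime; };

// Build (once) an MPI datatype describing two consecutive doubles.
MPI_Datatype makeTimeStampType()
{
  MPI_Datatype t;
  MPI_Type_contiguous(2, MPI_DOUBLE, &t);
  MPI_Type_commit(&t);
  return t;
}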
diff --git a/src/ParaMEDMEMTest/test_MPI_Access_Time_0.cxx b/src/ParaMEDMEMTest/test_MPI_Access_Time_0.cxx
new file mode 100644 (file)
index 0000000..9000e57
--- /dev/null
@@ -0,0 +1,472 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <mpi.h>
+
+#include "MPIAccessTest.hxx"
+#include <cppunit/TestAssert.h>
+
+//#include "CommInterface.hxx"
+//#include "ProcessorGroup.hxx"
+//#include "MPIProcessorGroup.hxx"
+#include "MPIAccess.hxx"
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and fails, showing bugs
+#define ENABLE_FORCED_FAILURES
+
+using namespace std;
+using namespace ParaMEDMEM;
+
+void chksts( int sts , int myrank , ParaMEDMEM::MPIAccess * mpi_access ) {
+  char msgerr[MPI_MAX_ERROR_STRING] ;
+  int lenerr ;
+  if ( sts != MPI_SUCCESS ) {
+    mpi_access->errorString(sts, msgerr, &lenerr) ;
+    debugStream << "test" << myrank << " lenerr " << lenerr << " "
+         << msgerr << endl ;
+    ostringstream strstream ;
+    strstream << "==========================================================="
+              << "test" << myrank << " KO"
+              << "==========================================================="
+              << endl ;
+    debugStream << strstream.str() << endl ;
+    CPPUNIT_FAIL( strstream.str() ) ;
+  }
+  return ;
+}
+
+void MPIAccessTest::test_MPI_Access_Time_0() {
+
+  debugStream << "test_MPI_Access_Time_0" << endl ;
+
+//  MPI_Init(&argc, &argv) ; 
+
+  int size ;
+  int myrank ;
+  MPI_Comm_size(MPI_COMM_WORLD,&size) ;
+  MPI_Comm_rank(MPI_COMM_WORLD,&myrank) ;
+
+  if ( size < 2 ) {
+    ostringstream strstream ;
+    strstream << "usage :" << endl
+              << "mpirun -np <nbprocs> test_MPI_Access_Time_0" <<endl
+              << " nbprocs =2" << endl
+              << "test must be run with 2 procs" << endl ;
+    cerr << strstream.str() << endl ;
+    //CPPUNIT_FAIL( strstream.str() ) ;
+    return;
+  }
+
+#define maxreq 100
+
+  double t ;
+  double dt[2] = {2., 1.} ;
+  double maxt = maxreq/dt[myrank] ;
+
+  debugStream << "test_MPI_Access_Time_0 rank" << myrank << endl ;
+
+  ParaMEDMEM::CommInterface interface ;
+
+  ParaMEDMEM::MPIProcessorGroup* group = new ParaMEDMEM::MPIProcessorGroup(interface) ;
+
+  ParaMEDMEM::MPIAccess * mpi_access = new ParaMEDMEM::MPIAccess( group ) ;
+
+  if ( myrank >= 2 ) {
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " --> mpi_access->barrier" << endl ;
+    mpi_access->barrier() ;
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " <-- mpi_access->barrier" << endl ;
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " --> mpi_access->barrier" << endl ;
+    mpi_access->barrier() ;
+    debugStream << "test_MPI_Access_Time_0 rank" << myrank << " <-- mpi_access->barrier" << endl ;
+    delete group ;
+    delete mpi_access ;
+    debugStream << "test_MPI_Access_Time" << myrank << " OK" << endl ;
+    return ;
+  }
+
+  int target = 1 - myrank ;
+  int SendTimeRequestId[maxreq] ;
+  int RecvTimeRequestId[maxreq] ;
+  int SendRequestId[maxreq] ;
+  int RecvRequestId[maxreq] ;
+  int sts ;
+  int sendbuf[maxreq] ;
+  int recvbuf[maxreq] ;
+  ParaMEDMEM::TimeMessage aSendTimeMsg[maxreq] ;
+  int lasttime = -1 ;
+  ParaMEDMEM::TimeMessage RecvTimeMessages[maxreq+1] ;
+  ParaMEDMEM::TimeMessage *aRecvTimeMsg = &RecvTimeMessages[1] ;
+//  mpi_access->Trace() ;
+  int istep = 0 ;
+  for ( t = 0 ; t < maxt ; t = t+dt[myrank] ) {
+     debugStream << "test" << myrank << " ==========================TIME " << t
+          << " ==========================" << endl ;
+     if ( myrank == 0 ) {
+       aSendTimeMsg[istep].time = t ;
+       aSendTimeMsg[istep].deltatime = dt[myrank] ;
+       //aSendTimeMsg[istep].maxtime = maxt ;
+       if ( t+dt[myrank] >= maxt ) {
+         aSendTimeMsg[istep].deltatime = 0 ;
+       }
+       sts = mpi_access->ISend( &aSendTimeMsg[istep] , 1 ,
+                               mpi_access->timeType() , target ,
+                               SendTimeRequestId[istep]) ;
+       debugStream << "test" << myrank << " ISend TimeRequestId " << SendTimeRequestId[istep]
+            << " tag " << mpi_access->MPITag(SendTimeRequestId[istep]) << endl ;
+       chksts( sts , myrank , mpi_access ) ;
+       sendbuf[istep] = istep ;
+       sts = mpi_access->ISend(&sendbuf[istep],1,MPI_INT,target, SendRequestId[istep]) ;
+       debugStream << "test" << myrank << " ISend Data RequestId " << SendRequestId[istep]
+            << " tag " << mpi_access->MPITag(SendRequestId[istep]) << endl ;
+       chksts( sts , myrank , mpi_access ) ;
+//CheckSent
+//=========
+       int sendrequests[2*maxreq] ;
+       int sendreqsize = mpi_access->sendRequestIds( target , 2*maxreq ,
+                                                    sendrequests ) ;
+       int j , flag ;
+       for ( j = 0 ; j < sendreqsize ; j++ ) {
+          sts = mpi_access->test( sendrequests[j] , flag ) ;
+          chksts( sts , myrank , mpi_access ) ;
+          if ( flag ) {
+            mpi_access->deleteRequest( sendrequests[j] ) ;
+            debugStream << "test" << myrank << " " << j << ". " << sendrequests[j]
+                 << " sendrequest deleted" << endl ;
+          }
+       }
+     }
+     else {
+//InitRecv
+//========
+       if ( t == 0 ) {
+         aRecvTimeMsg[lasttime].time = 0 ;
+         sts = mpi_access->IRecv( &aRecvTimeMsg[lasttime+1] , 1 ,
+                                 mpi_access->timeType() ,
+                                 target , RecvTimeRequestId[lasttime+1]) ;
+         debugStream << "test" << myrank << " t == 0 IRecv TimeRequestId "
+              << RecvTimeRequestId[lasttime+1]
+              << " MPITag " << mpi_access->MPITag( RecvTimeRequestId[lasttime+1] )
+              << " MPICompleted "
+              << mpi_access->MPICompleted( RecvTimeRequestId[lasttime+1] ) << endl ;
+         chksts( sts , myrank , mpi_access ) ;
+       }
+       else {
+         debugStream << "test" << myrank << " t # 0 lasttime " << lasttime << endl ;
+//InitialOutTime
+//==============
+         bool outtime = false ;
+         if ( lasttime != -1 ) {
+           if ( t <= aRecvTimeMsg[lasttime-1].time ) {
+             ostringstream strstream ;
+             strstream << "==========================================================="
+                       << endl << "test" << myrank << " t " << t << " <= "
+                       << "aRecvTimeMsg[ " << lasttime << "-1 ].time "
+                       << aRecvTimeMsg[lasttime-1].time << " KO" << endl
+                       << "==========================================================="
+                       << endl ;
+             debugStream << strstream.str() << endl ;
+             CPPUNIT_FAIL( strstream.str() ) ;
+           }
+           else {
+             debugStream << "==========================================================="
+                  << endl << "test" << myrank << " t " << t << " > "
+                  << "aRecvTimeMsg[ " << lasttime << "-1 ].time "
+                  << aRecvTimeMsg[lasttime-1].time << " OK" << endl
+                  << "==========================================================="
+                  << endl ;
+           }
+           //outtime = ((aRecvTimeMsg[lasttime].time +
+           //            aRecvTimeMsg[lasttime].deltatime) >=
+           //           aRecvTimeMsg[lasttime].maxtime) ;
+           outtime = aRecvTimeMsg[lasttime].deltatime == 0 ;
+         }
+// CheckRecv - CheckTime
+// We currently have lasttime such that:
+// aRecvTimeMsg[ lasttime-1 ].time < T(i-1) <= aRecvTimeMsg[ lasttime ].time
+// We look for lasttime such that:
+// aRecvTimeMsg[ lasttime-1 ].time < T(i) <= aRecvTimeMsg[ lasttime ].time
+         if ( t <= aRecvTimeMsg[lasttime].time ) {
+           outtime = false ;
+         }
+         debugStream << "test" << myrank << " while outtime( " << outtime << " && t " << t
+              << " > aRecvTimeMsg[ " << lasttime << " ] "
+              << aRecvTimeMsg[lasttime].time << " )" << endl ;
+         while ( !outtime && (t > aRecvTimeMsg[lasttime].time) ) {
+              lasttime += 1 ;
+//TimeMessage
+//===========
+              sts = mpi_access->wait( RecvTimeRequestId[lasttime] ) ;
+              chksts( sts , myrank , mpi_access ) ;
+              debugStream << "test" << myrank << " Wait done RecvTimeRequestId "
+                   << RecvTimeRequestId[lasttime] << " lasttime " << lasttime
+                   << " tag " << mpi_access->MPITag(RecvTimeRequestId[lasttime])
+                   << aRecvTimeMsg[lasttime] << endl ;
+              if ( lasttime == 0 ) {
+                aRecvTimeMsg[lasttime-1] = aRecvTimeMsg[lasttime] ;
+              }
+              mpi_access->deleteRequest( RecvTimeRequestId[lasttime] ) ;
+
+              double deltatime = aRecvTimeMsg[lasttime].deltatime ;
+              //double maxtime = aRecvTimeMsg[lasttime].maxtime ;
+              double nexttime = aRecvTimeMsg[lasttime].time + deltatime ;
+              debugStream << "test" << myrank << " t " << t << " lasttime " << lasttime
+                   << " deltatime " << deltatime
+                   << " nexttime " << nexttime << endl ;
+              //if ( nexttime < maxtime && t > nexttime ) {
+              if ( deltatime != 0 && t > nexttime ) {
+//CheckRecv :
+//=========   
+                //while ( nexttime < maxtime && t > nexttime ) {
+                while ( deltatime != 0 && t > nexttime ) {
+                     int source, MPITag, outcount ;
+                     MPI_Datatype datatype ;
+                     sts = mpi_access->probe( target , source, MPITag, datatype,
+                                             outcount ) ;
+                     chksts( sts , myrank , mpi_access ) ;
+// Cancel DataMessages until a TimeMessage is reached
+                     int cancelflag ;
+                     while ( !mpi_access->isTimeMessage( MPITag ) ) {
+                          sts = mpi_access->cancel( source, MPITag, datatype, outcount ,
+                          //sts = mpi_access->cancel( source, datatype, outcount ,
+                                                   //RecvRequestId[lasttime] ,
+                                                   cancelflag ) ;
+                          debugStream << "test" << myrank << " Recv TO CANCEL RequestId "
+                               << RecvRequestId[lasttime]
+                               << " tag " << mpi_access->recvMPITag( target )
+                               << " cancelflag " << cancelflag << endl ;
+                          chksts( sts , myrank , mpi_access ) ;
+                          sts = mpi_access->probe( target , source, MPITag, datatype,
+                                                  outcount ) ;
+                          chksts( sts , myrank , mpi_access ) ;
+                     }
+// We can now advance in time
+                     nexttime += deltatime ;
+                     //if ( nexttime < maxtime && t > nexttime ) {
+                     if ( deltatime != 0 && t > nexttime ) {
+// Cancel the TimeMessage
+                       sts = mpi_access->cancel( source, MPITag, datatype, outcount ,
+                       //sts = mpi_access->cancel( source, datatype, outcount ,
+                                                //RecvRequestId[lasttime] ,
+                                                cancelflag ) ;
+                       debugStream << "test" << myrank << " Time TO CANCEL RequestId "
+                            << RecvRequestId[lasttime]
+                            << " tag " << mpi_access->recvMPITag( target )
+                            << " cancelflag " << cancelflag << endl ;
+                       chksts( sts , myrank , mpi_access ) ;
+                     }
+                }
+              }
+              else {
+//DoRecv
+//======
+                debugStream << "test" << myrank << " Recv target " << target
+                     << " lasttime " << lasttime
+                     << " lasttime-1 " << aRecvTimeMsg[lasttime-1]
+                     << " lasttime " << aRecvTimeMsg[lasttime]
+                     << endl ;
+                sts = mpi_access->recv(&recvbuf[lasttime],1,MPI_INT,target,
+                                       RecvRequestId[lasttime]) ;
+                debugStream << "test" << myrank << " Recv RequestId "
+                     << RecvRequestId[lasttime]
+                     << " tag " << mpi_access->recvMPITag( target )
+                     << endl ;
+                chksts( sts , myrank , mpi_access ) ;
+              }
+              //outtime = ((aRecvTimeMsg[lasttime].time +
+              //            aRecvTimeMsg[lasttime].deltatime) >=
+              //           aRecvTimeMsg[lasttime].maxtime) ;
+              outtime = aRecvTimeMsg[lasttime].deltatime == 0 ;
+              if ( !outtime ) {
+// Post an asynchronous read of the next time message in advance
+                sts = mpi_access->IRecv( &aRecvTimeMsg[lasttime+1] , 1 ,
+                                        mpi_access->timeType() , target ,
+                                        RecvTimeRequestId[lasttime+1]) ;
+                debugStream << "test" << myrank << " IRecv TimeRequestId "
+                     << RecvTimeRequestId[lasttime+1] << " MPITag "
+                     << mpi_access->MPITag( RecvTimeRequestId[lasttime+1] )
+                     << " MPICompleted "
+                     << mpi_access->MPICompleted( RecvTimeRequestId[lasttime+1] )
+                     << endl ;
+                chksts( sts , myrank , mpi_access ) ;
+              }
+              else if ( t <= aRecvTimeMsg[lasttime].time ) {
+                outtime = false ;
+              }
+         }
+         
+         //printf("DEBUG t %.15f Msg[lasttime-1] %.15f Msg[lasttime] %.15f \n",t,
+         //       aRecvTimeMsg[lasttime-1].time,aRecvTimeMsg[lasttime].time) ;
+         if ( ((t <= aRecvTimeMsg[lasttime-1].time) ||
+               (t > aRecvTimeMsg[lasttime].time)) && !outtime ) {
+           ostringstream strstream ;
+           strstream << "==========================================================="
+                     << endl << "test" << myrank << " t " << t << " <= "
+                     << "aRecvTimeMsg[ " << lasttime << "-1 ].time "
+                     << aRecvTimeMsg[lasttime-1].time << " or t " << t << " > "
+                     << "aRecvTimeMsg[ " << lasttime << " ].time "
+                     << aRecvTimeMsg[lasttime].time << endl
+                     << " or outtime " << outtime << " KO RequestTimeIds "
+                     << RecvTimeRequestId[lasttime-1] << " " << RecvTimeRequestId[lasttime]
+                     << " RequestIds "
+                     << RecvRequestId[lasttime-1] << " " << RecvRequestId[lasttime] << endl
+                     << "==========================================================="
+                     << endl ;
+           debugStream << strstream.str() << endl ;
+           CPPUNIT_FAIL( strstream.str() ) ;
+         }
+         else {
+           debugStream << "==========================================================="
+                << endl << "test" << myrank 
+                << " aRecvTimeMsg[ " << lasttime << "-1 ].time "
+                << aRecvTimeMsg[lasttime-1].time << " < t " << t << " <= "
+                << "aRecvTimeMsg[ " << lasttime << " ].time "
+                << aRecvTimeMsg[lasttime].time << endl
+                << " or outtime " << outtime << " OK RequestTimeIds "
+                << RecvTimeRequestId[lasttime-1] << " " << RecvTimeRequestId[lasttime]
+                << " RequestIds "
+                << RecvRequestId[lasttime-1] << " " << RecvRequestId[lasttime] << endl
+                << "==========================================================="
+                << endl ;
+         }
+       }
+     }
+     chksts( sts , myrank , mpi_access ) ;
+     istep = istep + 1 ;
+  }
+
+  debugStream << "test" << myrank << " Barrier :" << endl ;
+  mpi_access->barrier() ;
+
+  if (MPI_ACCESS_VERBOSE) mpi_access->check() ;
+
+  if ( myrank == 0 ) {
+//CheckFinalSent
+//==============
+    debugStream << "test" << myrank << " CheckFinalSent :" << endl ;
+    int sendrequests[2*maxreq] ;
+    int sendreqsize = mpi_access->sendRequestIds( target , 2*maxreq , sendrequests ) ;
+    int j ;
+    for ( j = 0 ; j < sendreqsize ; j++ ) {
+       sts = mpi_access->wait( sendrequests[j] ) ;
+       chksts( sts , myrank , mpi_access ) ;
+       mpi_access->deleteRequest( sendrequests[j] ) ;
+       debugStream << "test" << myrank << " " << j << ". " << sendrequests[j] << " deleted"
+            << endl ;
+    }
+  }
+  else {
+    debugStream << "test" << myrank << " CheckFinalRecv :" << endl ;
+    int recvrequests[2*maxreq] ;
+    int recvreqsize = mpi_access->recvRequestIds( target , 2*maxreq , recvrequests ) ;
+    int cancelflag ;
+    int j ;
+    for ( j = 0 ; j < recvreqsize ; j++ ) {
+       sts = mpi_access->cancel( recvrequests[j] , cancelflag ) ;
+       chksts( sts , myrank , mpi_access ) ;
+       mpi_access->deleteRequest( recvrequests[j] ) ;
+       debugStream << "test" << myrank << " " << j << ". " << recvrequests[j] << " deleted"
+            << " cancelflag " << cancelflag << endl ;
+    }
+    int source, MPITag, outcount , flag ;
+    MPI_Datatype datatype ;
+    sts = mpi_access->IProbe( target , source, MPITag, datatype,
+                             outcount , flag ) ;
+    chksts( sts , myrank , mpi_access ) ;
+    while ( flag ) {
+         sts = mpi_access->cancel( source, MPITag, datatype, outcount ,
+         //sts = mpi_access->cancel( source, datatype, outcount ,
+                                  //RecvRequestId[lasttime] ,
+                                  cancelflag ) ;
+         debugStream << "test" << myrank << " TO CANCEL RequestId "
+              << RecvRequestId[lasttime]
+              << " tag " << mpi_access->recvMPITag( target )
+              << " cancelflag " << cancelflag << endl ;
+         chksts( sts , myrank , mpi_access ) ;
+         sts = mpi_access->IProbe( target , source, MPITag, datatype,
+                                  outcount , flag ) ;
+         chksts( sts , myrank , mpi_access ) ;
+    }
+  }
+  if(MPI_ACCESS_VERBOSE) mpi_access->check() ;
+
+  if ( myrank == 0 ) {
+    int sendrequests[2*maxreq] ;
+    int sendreqsize = mpi_access->sendRequestIds( target , 2*maxreq , sendrequests ) ;
+    if ( sendreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " sendreqsize " << sendreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " sendreqsize " << sendreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+  else {
+    int recvrequests[2*maxreq] ;
+    int recvreqsize = mpi_access->recvRequestIds( target , 2*maxreq , recvrequests ) ;
+    if ( recvreqsize != 0 ) {
+      ostringstream strstream ;
+      strstream << "=========================================================" << endl
+                << "test" << myrank << " recvreqsize " << recvreqsize << " KO" << endl
+                << "=========================================================" << endl ;
+      debugStream << strstream.str() << endl ;
+      CPPUNIT_FAIL( strstream.str() ) ;
+    }
+    else {
+      debugStream << "=========================================================" << endl
+           << "test" << myrank << " recvreqsize " << recvreqsize << " OK" << endl
+           << "=========================================================" << endl ;
+    }
+  }
+
+  int i ;
+  for ( i = 0 ; i <= lasttime ; i++ ) {
+     debugStream << "test" << myrank << " " << i << ". RecvTimeMsg "
+          << aRecvTimeMsg[i].time << " recvbuf " << recvbuf[i] << endl ;
+  }
+
+  debugStream << "test_MPI_Access_Time_0 rank" << myrank << " --> mpi_access->barrier" << endl ;
+  mpi_access->barrier() ;
+  debugStream << "test_MPI_Access_Time_0 rank" << myrank << " <-- mpi_access->barrier" << endl ;
+
+  delete group ;
+  delete mpi_access ;
+
+//  MPI_Finalize();
+
+  debugStream << "test" << myrank << " OK" << endl ;
+
+  return ;
+}
+
+
+
+
diff --git a/src/ParaMEDMEMTest/test_perf.cxx b/src/ParaMEDMEMTest/test_perf.cxx
new file mode 100644 (file)
index 0000000..2250280
--- /dev/null
@@ -0,0 +1,337 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+#include <time.h>
+#include <sys/times.h>
+#include <sys/time.h>
+#include "ParaMEDMEMTest.hxx"
+#include <cppunit/TestAssert.h>
+
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "DEC.hxx"
+#include "MxN_Mapping.hxx"
+#include "InterpKernelDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ComponentTopology.hxx"
+#include "ICoCoMEDField.hxx"
+#include "MEDLoader.hxx"
+#include <string>
+#include <cstring>
+
+// use this define to enable lines whose execution leads to a segmentation fault
+#define ENABLE_FAULTS
+
+// use this define to enable CPPUNIT asserts and failures that expose known bugs
+#define ENABLE_FORCED_FAILURES
+
+#ifndef CLK_TCK 
+#include <unistd.h>
+#define CLK_TCK sysconf(_SC_CLK_TCK)
+#endif 
+
+using namespace std;
+using namespace ParaMEDMEM;
+void testInterpKernelDEC_2D(const string& filename1, const string& meshname1,
+                            const string& filename2, const string& meshname2,
+                            int nproc_source, double epsilon, bool tri, bool all);
+void get_time( float *telps, float *tuser, float *tsys, float *tcpu );
+
+int main(int argc, char *argv[])
+{
+  string filename1, filename2;
+  string meshname1, meshname2;
+  int nproc_source=1, rank;
+  double epsilon=1.e-6;
+  int count=0;
+  bool tri=false;
+  bool all=false;
+
+  MPI_Init(&argc,&argv);
+
+  for(int i=1;i<argc;i++){
+    if( strcmp(argv[i],"-f1") == 0 ){
+      filename1 = argv[++i];
+      count++;
+    }
+    else if( strcmp(argv[i],"-f2") == 0 ){
+      filename2 = argv[++i];
+      count++;
+    }
+    else if( strcmp(argv[i],"-m1") == 0 ){
+      meshname1 = argv[++i];
+      count++;
+    }
+    else if( strcmp(argv[i],"-m2") == 0 ){
+      meshname2 = argv[++i];
+      count++;
+    }
+    else if( strcmp(argv[i],"-ns") == 0 ){
+      nproc_source = atoi(argv[++i]);
+    }
+    else if( strcmp(argv[i],"-eps") == 0 ){
+      epsilon = atof(argv[++i]);
+    }
+    else if( strcmp(argv[i],"-tri") == 0 ){
+      tri = true;
+    }
+    else if( strcmp(argv[i],"-all") == 0 ){
+      all = true;
+    }
+  }
+
+  if( count != 4 ){
+    cout << "usage test_perf -f1 filename1 -m1 meshname1 -f2 filename2 -m2 meshname2 (-ns nproc_source -eps epsilon -tri -all)" << endl;
+    exit(0);
+  }
+
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  testInterpKernelDEC_2D(filename1,meshname1,filename2,meshname2,nproc_source,epsilon,tri,all);
+
+  MPI_Finalize();
+}
+
+void testInterpKernelDEC_2D(const string& filename_xml1, const string& meshname1,
+                            const string& filename_xml2, const string& meshname2,
+                            int nproc_source, double epsilon, bool tri, bool all)
+{
+  float tcpu, tcpu_u, tcpu_s, telps;
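+  // get_time() returns deltas since its previous call, so calling it before and after a phase yields that phase's timings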
+  int size;
+  int rank;
+  MPI_Comm_size(MPI_COMM_WORLD,&size);
+  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
+  set<int> self_procs;
+  set<int> procs_source;
+  set<int> procs_target;
+  
+  for (int i=0; i<nproc_source; i++)
+    procs_source.insert(i);
+  for (int i=nproc_source; i<size; i++)
+    procs_target.insert(i);
+  self_procs.insert(rank);
+  
+  ParaMEDMEM::CommInterface interface;
+    
+  ParaMEDMEM::ProcessorGroup* self_group = new ParaMEDMEM::MPIProcessorGroup(interface,self_procs);
+  ParaMEDMEM::ProcessorGroup* target_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_target);
+  ParaMEDMEM::ProcessorGroup* source_group = new ParaMEDMEM::MPIProcessorGroup(interface,procs_source);
+  
+  //loading the geometry for the source group
+
+  ParaMEDMEM::InterpKernelDEC dec (*source_group,*target_group);
+  if(tri)
+    dec.setIntersectionType(INTERP_KERNEL::Triangulation);
+  else
+    dec.setIntersectionType(INTERP_KERNEL::Convex);
+
+  ParaMEDMEM::MEDCouplingUMesh* mesh;
+  ParaMEDMEM::ParaMESH* paramesh;
+  ParaMEDMEM::ParaFIELD* parafield;
+  ICoCo::MEDField* icocofield ;
+  
+  // To remove tmp files from disk
+  ParaMEDMEMTest_TmpFilesRemover aRemover;
+  
+  MPI_Barrier(MPI_COMM_WORLD);
+  if (source_group->containsMyRank()){
+    string master = filename_xml1;
+      
+    ostringstream strstream;
+    if( nproc_source == 1 )
+      strstream <<master<<".med";
+    else
+      strstream <<master<<rank+1<<".med";
+
+    ostringstream meshname ;
+    if( nproc_source == 1 )
+      meshname<< meshname1;
+    else
+      meshname<< meshname1<<"_"<< rank+1;
+      
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    mesh=MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    if( rank == 0 )
+      cout << "IO : Telapse = " << telps << " TuserCPU = " << tcpu_u << " TsysCPU = " << tcpu_s << " TCPU = " << tcpu << endl;
+    mesh->incrRef();
+    
+    paramesh=new ParaMESH (mesh,*source_group,"source mesh");
+    
+    ParaMEDMEM::ComponentTopology comptopo;
+    parafield = new ParaFIELD(ON_CELLS, NO_TIME, paramesh, comptopo);
+
+    int nb_local=mesh->getNumberOfCells();
+    double *value=parafield->getField()->getArray()->getPointer();
+    for(int ielem=0; ielem<nb_local;ielem++)
+      value[ielem]=1.0;
+    
+    icocofield=new ICoCo::MEDField(parafield->getField());
+     
+    dec.attachLocalField(icocofield);
+  }
+  
+  //loading the geometry for the target group
+  if (target_group->containsMyRank()){
+    string master= filename_xml2;
+    ostringstream strstream;
+    if( (size-nproc_source) == 1 )
+      strstream << master<<".med";
+    else
+      strstream << master<<(rank-nproc_source+1)<<".med";
+    ostringstream meshname ;
+    if( (size-nproc_source) == 1 )
+      meshname<< meshname2;
+    else
+      meshname<< meshname2<<"_"<<rank-nproc_source+1;
+      
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    mesh = MEDLoader::ReadUMeshFromFile(strstream.str().c_str(),meshname.str().c_str(),0);
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    mesh->incrRef();
+
+    paramesh=new ParaMESH (mesh,*target_group,"target mesh");
+    ParaMEDMEM::ComponentTopology comptopo;
+    parafield = new ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo);
+
+    int nb_local=mesh->getNumberOfCells();
+    double *value=parafield->getField()->getArray()->getPointer();
+    for(int ielem=0; ielem<nb_local;ielem++)
+      value[ielem]=0.0;
+    icocofield=new ICoCo::MEDField(parafield->getField());
+      
+    dec.attachLocalField(icocofield);
+  }
+    
+  
+  //attaching a DEC to the source group 
+  double field_before_int;
+  double field_after_int;
+  
+  if (source_group->containsMyRank()){ 
+    field_before_int = parafield->getVolumeIntegral(0,true);
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.synchronize();
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    if( rank == 0 )
+      cout << "SYNCHRONIZE : Telapse = " << telps << " TuserCPU = " << tcpu_u << " TsysCPU = " << tcpu_s << " TCPU = " << tcpu << endl;
+    cout<<"DEC usage"<<endl;
+    dec.setForcedRenormalization(false);
+    if(all)
+      dec.setAllToAllMethod(PointToPoint);
+
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.sendData();
+    
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    if( rank == 0 )
+      cout << "SEND DATA : Telapse = " << telps << " TuserCPU = " << tcpu_u << " TsysCPU = " << tcpu_s << " TCPU = " << tcpu << endl;
+    dec.recvData();
+     
+    field_after_int = parafield->getVolumeIntegral(0,true);
+//    CPPUNIT_ASSERT_DOUBLES_EQUAL(field_before_int, field_after_int, epsilon);
+      
+  }
+  
+  //attaching a DEC to the target group
+  if (target_group->containsMyRank()){
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.synchronize();
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.setForcedRenormalization(false);
+    if(all)
+      dec.setAllToAllMethod(PointToPoint);
+
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.recvData();
+    get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+    dec.sendData();
+  }
+  
+  get_time( &telps, &tcpu_u, &tcpu_s, &tcpu );
+  if( rank == 0 )
+    cout << "RECV DATA : Telapse = " << telps << " TuserCPU = " << tcpu_u << " TsysCPU = " << tcpu_s << " TCPU = " << tcpu << endl;
+
+  delete source_group;
+  delete target_group;
+  delete self_group;
+  delete paramesh;
+  delete parafield;
+  mesh->decrRef() ;
+  delete icocofield;
+
+  MPI_Barrier(MPI_COMM_WORLD);
+  cout << "end of InterpKernelDEC_2D test"<<endl;
+}
+
+void get_time( float *telps, float *tuser, float *tsys, float *tcpu )
+{
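+  /* Returns, through its arguments, the elapsed, user CPU, system CPU and total CPU
+     times spent since the previous call; static variables keep the previous readings. */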
+
+  /* Variables declaration */
+  static time_t zsec = 0;
+  static long zusec = 0;
+  time_t nsec;
+  long nusec;
+  static clock_t zclock = 0;
+  clock_t nclock;
+  static clock_t zuser = 0;
+  static clock_t zsys = 0;
+  clock_t nuser, nsys;
+
+  struct timeval tp;
+  struct timezone tzp;
+  struct tms local;
+
+  MPI_Barrier(MPI_COMM_WORLD);
+
+  /* Elapsed time reading */
+
+  gettimeofday(&tp,&tzp);
+  nsec = tp.tv_sec;
+  nusec = tp.tv_usec;
+  *telps = (float)(nsec-zsec) + (float)(nusec-zusec)/1.0e6f; /* tv_usec is in microseconds */
+  
+  zsec = nsec;
+  zusec = nusec;
+
+  /* User and system CPU time reading */
+
+  times(&local);
+  nuser = local.tms_utime;
+  nsys = local.tms_stime;
+  *tuser = (float)(nuser-zuser) / (float)CLK_TCK;
+  *tsys = (float)(nsys-zsys) / (float)CLK_TCK;
+
+  zuser = nuser;
+  zsys = nsys;
+
+  /* CPU time reading */
+
+  nclock = clock();
+  *tcpu = (float)(nclock-zclock) / (float)CLOCKS_PER_SEC;
+  zclock = nclock;
+
+}
+
+
diff --git a/src/ParaMEDMEM_Swig/CMakeLists.txt b/src/ParaMEDMEM_Swig/CMakeLists.txt
new file mode 100644 (file)
index 0000000..f56c794
--- /dev/null
@@ -0,0 +1,61 @@
+# Copyright (C) 2012-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+ADD_DEFINITIONS(${PYTHON_DEFINITIONS})
+
+FIND_PACKAGE(SWIG REQUIRED)
+INCLUDE(${SWIG_USE_FILE})
+
+SET_SOURCE_FILES_PROPERTIES(ParaMEDMEM.i PROPERTIES CPLUSPLUS ON)
+SET_SOURCE_FILES_PROPERTIES(ParaMEDMEM.i PROPERTIES SWIG_DEFINITIONS "-shadow")
+SET(SWIG_MODULE_ParaMEDMEM_EXTRA_FLAGS ${SWIG_EXTRA_FLAGS_FOR_NUMPYANDSCIPY})
+
+SET (ParaMEDMEM_SWIG_DPYS_FILES
+    ParaMEDMEM.typemap)
+
+INCLUDE_DIRECTORIES(
+  ${PYTHON_INCLUDE_DIRS}
+  ${MEDFILE_INCLUDE_DIRS}
+  ${HDF5_INCLUDE_DIRS}
+  ${MPI_INCLUDE_DIRS}
+  ${CMAKE_CURRENT_SOURCE_DIR}
+  ${CMAKE_CURRENT_SOURCE_DIR}/../ParaMEDMEM
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDCoupling_Swig
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDLoader/Swig
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDLoader
+  ${CMAKE_CURRENT_SOURCE_DIR}/../MEDCoupling
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL
+  ${CMAKE_CURRENT_SOURCE_DIR}/../INTERP_KERNEL/Bases
+  )
+
+SET (SWIG_MODULE_ParaMEDMEM_EXTRA_DEPS ${ParaMEDMEM_SWIG_DPYS_FILES}
+    ${paramedmem_HEADERS_HXX}
+    ${medloader_HEADERS_HXX}
+    ${medcoupling_HEADERS_HXX} ${medcoupling_HEADERS_TXX}
+    ${interpkernel_HEADERS_HXX} ${interpkernel_HEADERS_TXX})
+
+SWIG_ADD_MODULE(ParaMEDMEM python ParaMEDMEM.i)
+SWIG_LINK_LIBRARIES(ParaMEDMEM ${PYTHON_LIBRARIES} paramedmem medloader)
+
+SET_SOURCE_FILES_PROPERTIES(ParaMEDMEM.i PROPERTIES CPLUSPLUS ON)
+SET_SOURCE_FILES_PROPERTIES(ParaMEDMEM.i PROPERTIES SWIG_DEFINITIONS "-shadow")
+
+INSTALL(TARGETS _ParaMEDMEM DESTINATION ${MEDTOOL_INSTALL_PYTHON})
+INSTALL(FILES ParaMEDMEM.i DESTINATION ${MEDTOOL_INSTALL_HEADERS})
+INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/ParaMEDMEM.py test_InterpKernelDEC.py test_NonCoincidentDEC.py test_StructuredCoincidentDEC.py DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHON})
diff --git a/src/ParaMEDMEM_Swig/ParaMEDMEM.i b/src/ParaMEDMEM_Swig/ParaMEDMEM.i
new file mode 100644 (file)
index 0000000..38f4c47
--- /dev/null
@@ -0,0 +1,348 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+%module ParaMEDMEM
+
+%include "ParaMEDMEM.typemap"
+%include "MEDLoaderCommon.i"
+
+%{
+#include "CommInterface.hxx"
+#include "ProcessorGroup.hxx"
+#include "Topology.hxx"
+#include "MPIProcessorGroup.hxx"
+#include "DEC.hxx"
+#include "InterpKernelDEC.hxx"
+#include "NonCoincidentDEC.hxx"
+#include "StructuredCoincidentDEC.hxx"
+#include "ParaMESH.hxx"
+#include "ParaFIELD.hxx"
+#include "ICoCoMEDField.hxx"
+#include "ComponentTopology.hxx"
+
+#include <mpi.h>
+
+using namespace ParaMEDMEM;
+using namespace ICoCo;
+      
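+// Placeholder values for the MPI constants exposed to Python; they are mapped onto the real MPI values by the typemaps below.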
+enum mpi_constants { mpi_comm_world, mpi_comm_self, mpi_double, mpi_int };
+%}
+
+%include "CommInterface.hxx"
+%include "ProcessorGroup.hxx"
+%include "DECOptions.hxx"
+%include "ParaMESH.hxx"
+%include "ParaFIELD.hxx"
+%include "MPIProcessorGroup.hxx"
+%include "ComponentTopology.hxx"
+%include "DEC.hxx"
+%include "InterpKernelDEC.hxx"
+%include "StructuredCoincidentDEC.hxx"
+
+%rename(ICoCoMEDField) ICoCo::MEDField;
+%include "ICoCoMEDField.hxx"
+
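+// Do not generate default constructors for the classes wrapped below.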
+%nodefaultctor;
+
+/* This object can be used only if MED_ENABLE_FVM is defined*/
+#ifdef MED_ENABLE_FVM
+class NonCoincidentDEC : public DEC
+{
+public:
+  NonCoincidentDEC(ProcessorGroup& source, ProcessorGroup& target);
+};
+#endif
+
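+// Python-oriented helpers added to ParaMESH: return the global numbering arrays as Python lists.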
+%extend ParaMEDMEM::ParaMESH
+{
+  PyObject *getGlobalNumberingCell2() const
+  {
+    const int *tmp=self->getGlobalNumberingCell();
+    int size=self->getCellMesh()->getNumberOfCells();
+    PyObject *ret=PyList_New(size);
+    for(int i=0;i<size;i++)
+      PyList_SetItem(ret,i,PyInt_FromLong(tmp[i])); 
+    return ret;
+  }
+
+  PyObject *getGlobalNumberingFace2() const
+  {
+    const int *tmp=self->getGlobalNumberingFace();
+    int size=self->getFaceMesh()->getNumberOfCells();
+    PyObject *ret=PyList_New(size);
+    for(int i=0;i<size;i++)
+      PyList_SetItem(ret,i,PyInt_FromLong(tmp[i])); 
+    return ret;
+  }
+
+  PyObject *getGlobalNumberingNode2() const
+  {
+    const int *tmp=self->getGlobalNumberingNode();
+    int size=self->getCellMesh()->getNumberOfNodes();
+    PyObject *ret=PyList_New(size);
+    for(int i=0;i<size;i++)
+      PyList_SetItem(ret,i,PyInt_FromLong(tmp[i])); 
+    return ret;
+  }
+}
+
+//=============================================================================================
+// Interface for MPI-implementation-specific constants like MPI_COMM_WORLD.
+//
+// The type and values of constants like MPI_COMM_WORLD depend on the MPI implementation,
+// and such constants are usually macros. To expose these symbols in Python
+// and translate them into the correct values, we use the following technique:
+// we define placeholder constants (enum mpi_constants) and map them onto the real MPI
+// values with typemaps, then create the corresponding Python symbols equal to the
+// 'mpi_constants' values via the %pythoncode directive.
+
+// Constants corresponding to similar MPI definitions
+enum mpi_constants { mpi_comm_world, mpi_comm_self, mpi_double, mpi_int };
+
+// Map mpi_comm_world and mpi_comm_self -> MPI_COMM_WORLD and MPI_COMM_SELF
+%typemap(in) MPI_Comm
+{ 
+  switch (PyInt_AsLong($input))
+    {
+    case mpi_comm_world: $1 = MPI_COMM_WORLD; break;
+    case mpi_comm_self:  $1 = MPI_COMM_SELF;  break;
+    default:
+      PyErr_SetString(PyExc_TypeError,"unexpected value of MPI_Comm");
+      return NULL;
+    }
+}
+// Map mpi_double and mpi_int -> MPI_DOUBLE and MPI_INT
+%typemap(in) MPI_Datatype
+{ 
+  switch (PyInt_AsLong($input))
+    {
+    case mpi_double:     $1 = MPI_DOUBLE;     break;
+    case mpi_int:        $1 = MPI_INT;        break;
+    default:
+      PyErr_SetString(PyExc_TypeError,"unexpected value of MPI_Datatype");
+      return NULL;
+    }
+}
+// The following code gets inserted into the result python file:
+// create needed python symbols
+%pythoncode %{
+MPI_COMM_WORLD = mpi_comm_world
+MPI_COMM_SELF  = mpi_comm_self
+MPI_DOUBLE     = mpi_double
+MPI_INT        = mpi_int
+%}
+//=============================================================================================
+
+// ==============
+// MPI_Comm_size
+// ==============
+%inline %{ PyObject* MPI_Comm_size(MPI_Comm comm)
+  {
+    int res = 0;
+    int err = MPI_Comm_size(comm, &res);
+    if ( err != MPI_SUCCESS )
+      {
+        PyErr_SetString(PyExc_RuntimeError,"Error in MPI_Comm_size()");
+        return NULL;
+      }
+    return PyInt_FromLong( res );
+  } %}
+
+// ==============
+// MPI_Comm_rank
+// ==============
+%inline %{ PyObject* MPI_Comm_rank(MPI_Comm comm)
+  {
+    int res = 0;
+    int err = MPI_Comm_rank(comm, &res);
+    if ( err != MPI_SUCCESS )
+      {
+        PyErr_SetString(PyExc_RuntimeError,"Error in MPI_Comm_rank()");
+        return NULL;
+      }
+    return PyInt_FromLong( res );
+  } 
+  %}
+
+int MPI_Init(int *argc, char ***argv );
+int MPI_Barrier(MPI_Comm comm);
+int MPI_Finalize();
+
+// ==========
+// MPI_Bcast
+// ==========
+
+%inline %{ PyObject* MPI_Bcast(PyObject* buffer, int nb, MPI_Datatype type, int root, MPI_Comm c)
+  {
+    // buffer must be a list
+    if (!PyList_Check(buffer))
+      {
+        PyErr_SetString(PyExc_TypeError, "buffer is expected to be a list");
+        return NULL;
+      }
+    // check list size
+    int aSize = PyList_Size(buffer);
+    if ( aSize != nb )
+      {
+        std::ostringstream stream; stream << "buffer is expected to be of size " << nb;
+        PyErr_SetString(PyExc_ValueError, stream.str().c_str());
+        return NULL;
+      }
+    // allocate and fill a buffer
+    void* aBuf = 0;
+    int* intBuf = 0;
+    double* dblBuf = 0;
+    if ( type == MPI_DOUBLE )
+      {
+        aBuf = (void*) ( dblBuf = new double[ nb ] );
+        for ( int i = 0; i < aSize; ++i )
+          dblBuf[i] = PyFloat_AS_DOUBLE( PyList_GetItem( buffer, i ));
+      }
+    else if ( type == MPI_INT )
+      {
+        aBuf = (void*) ( intBuf = new int[ nb ] );
+        for ( int i = 0; i < aSize; ++i )
+          intBuf[i] = int( PyInt_AS_LONG( PyList_GetItem( buffer, i )));
+      }
+    else
+      {
+        PyErr_SetString(PyExc_TypeError, "Only MPI_DOUBLE and MPI_INT supported");
+        return NULL;
+      }
+    // call MPI_Bcast
+    int err = MPI_Bcast(aBuf, nb, type, root, c);
+    // treat error
+    if ( err != MPI_SUCCESS )
+      {
+        PyErr_SetString(PyExc_RuntimeError,"Error in MPI_Bcast()");
+        delete [] intBuf; delete [] dblBuf;
+        return NULL;
+      }
+    // put received data into the list
+    int pyerr = 0;
+    if ( type == MPI_DOUBLE )
+      {
+        for ( int i = 0; i < aSize && !pyerr; ++i )
+          pyerr = PyList_SetItem(buffer, i, PyFloat_FromDouble( dblBuf[i] ));
+        delete [] dblBuf;
+      }
+    else
+      {
+        for ( int i = 0; i < aSize && !pyerr; ++i )
+          pyerr = PyList_SetItem(buffer, i, PyInt_FromLong( intBuf[i] ));
+        delete [] intBuf;
+      }
+    if ( pyerr )
+      {
+        PyErr_SetString(PyExc_RuntimeError, "Error of PyList_SetItem()");
+        return NULL;
+      }
+    return PyInt_FromLong( err );
+
+  }
+  %}
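+// Example usage of the wrapper above from Python (illustrative sketch):
+//   buf = [0.0, 0.0, 0.0]
+//   if MPI_Comm_rank(MPI_COMM_WORLD) == 0:
+//       buf = [1.0, 2.0, 3.0]
+//   MPI_Bcast(buf, 3, MPI_DOUBLE, 0, MPI_COMM_WORLD)   # buf holds the root's values afterwards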
+
+%pythoncode %{
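+# Module-level helpers that forward __new__ and the in-place operators of the wrapped array/field classes to the _ParaMEDMEM extension module.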
+def ParaMEDMEMDataArrayDoublenew(cls,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____new___(cls,args)
+def ParaMEDMEMDataArrayDoubleIadd(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____iadd___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleIsub(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____isub___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleImul(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____imul___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleIdiv(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____idiv___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleIpow(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDouble____ipow___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleTupleIadd(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDoubleTuple____iadd___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleTupleIsub(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDoubleTuple____isub___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleTupleImul(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDoubleTuple____imul___(self, self, *args)
+def ParaMEDMEMDataArrayDoubleTupleIdiv(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayDoubleTuple____idiv___(self, self, *args)
+def ParaMEDMEMMEDCouplingFieldDoublenew(cls,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____new___(cls,args)
+def ParaMEDMEMMEDCouplingFieldDoubleIadd(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____iadd___(self, self, *args)
+def ParaMEDMEMMEDCouplingFieldDoubleIsub(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____isub___(self, self, *args)
+def ParaMEDMEMMEDCouplingFieldDoubleImul(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____imul___(self, self, *args)
+def ParaMEDMEMMEDCouplingFieldDoubleIdiv(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____idiv___(self, self, *args)
+def ParaMEDMEMMEDCouplingFieldDoubleIpow(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.MEDCouplingFieldDouble____ipow___(self, self, *args)
+def ParaMEDMEMDataArrayIntnew(cls,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____new___(cls,args)
+def ParaMEDMEMDataArrayIntIadd(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____iadd___(self, self, *args)
+def ParaMEDMEMDataArrayIntIsub(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____isub___(self, self, *args)
+def ParaMEDMEMDataArrayIntImul(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____imul___(self, self, *args)
+def ParaMEDMEMDataArrayIntIdiv(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____idiv___(self, self, *args)
+def ParaMEDMEMDataArrayIntImod(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____imod___(self, self, *args)
+def ParaMEDMEMDataArrayIntIpow(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayInt____ipow___(self, self, *args)
+def ParaMEDMEMDataArrayIntTupleIadd(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayIntTuple____iadd___(self, self, *args)
+def ParaMEDMEMDataArrayIntTupleIsub(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayIntTuple____isub___(self, self, *args)
+def ParaMEDMEMDataArrayIntTupleImul(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayIntTuple____imul___(self, self, *args)
+def ParaMEDMEMDataArrayIntTupleIdiv(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayIntTuple____idiv___(self, self, *args)
+def ParaMEDMEMDataArrayIntTupleImod(self,*args):
+    import _ParaMEDMEM
+    return _ParaMEDMEM.DataArrayIntTuple____imod___(self, self, *args)
+%}
+
+%include "MEDCouplingFinalize.i"
diff --git a/src/ParaMEDMEM_Swig/ParaMEDMEM.typemap b/src/ParaMEDMEM_Swig/ParaMEDMEM.typemap
new file mode 100644 (file)
index 0000000..80eb725
--- /dev/null
@@ -0,0 +1,84 @@
+// Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+//
+// See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+//
+
+%include std_set.i
+%include std_string.i
+
+%template() std::set<int>;
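+// Instantiate std::set<int> without generating a proxy class; Python lists are converted to it by the typemap below.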
+
+// Creates "int *argc, char ***argv" parameters from input list
+%typemap(in) (int *argc, char ***argv) {
+  int i;
+  if (!PyList_Check($input)) {
+    PyErr_SetString(PyExc_ValueError, "Expecting a list");
+    return NULL;
+  }
+  int aSize = PyList_Size($input);
+  $1 = &aSize;
+  char** aStrs = (char **) malloc((aSize+1)*sizeof(char *));
+  for (i = 0; i < aSize; i++) {
+    PyObject *s = PyList_GetItem($input,i);
+    if (!PyString_Check(s)) {
+        free(aStrs);
+        PyErr_SetString(PyExc_ValueError, "List items must be strings");
+        return NULL;
+    }
+    aStrs[i] = PyString_AsString(s);
+  }
+  aStrs[i] = 0;
+  $2 = &aStrs;
+}
+
+%typemap(freearg) (int *argc, char ***argv) {
+   if ($2) free(*($2));
+}
+
+/*  MACRO: IN typemap for std::set<TYPE> C++ object */
+%define TYPEMAP_INPUT_SET_BY_VALUE( TYPE )
+{
+  /* typemap in for set<TYPE> */
+  /* Check if is a list */
+  if (PyList_Check($input))
+  {
+    int size = PyList_Size($input);
+    std::set< TYPE > tmpSet;
+
+    for (int i=0; i < size; i++)
+    {
+      PyObject * tmp = PyList_GetItem($input,i);
+      TYPE elem = PyInt_AsLong(tmp);
+      tmpSet.insert(elem);
+    }
+    $1 = tmpSet;
+  }
+  else
+  {
+    PyErr_SetString(PyExc_TypeError,"not a list");
+    return NULL;
+  }
+}
+%enddef
+
+%typemap(in) std::set<int>
+{ 
+  TYPEMAP_INPUT_SET_BY_VALUE( int ) 
+}
+%typecheck(SWIG_TYPECHECK_POINTER) std::set<int> {
+  $1 = PyList_Check($input) ? 1 : 0;
+}
diff --git a/src/ParaMEDMEM_Swig/test_InterpKernelDEC.py b/src/ParaMEDMEM_Swig/test_InterpKernelDEC.py
new file mode 100755 (executable)
index 0000000..0d6d6cd
--- /dev/null
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#  -*- coding: iso-8859-1 -*-
+# Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+from ParaMEDMEM import *
+import sys, os
+import unittest
+import math
+
+class ParaMEDMEMBasicsTest(unittest.TestCase):
+    def testInterpKernelDEC_2D(self):
+        MPI_Init(sys.argv)
+        size = MPI_Comm_size(MPI_COMM_WORLD)
+        rank = MPI_Comm_rank(MPI_COMM_WORLD)
+        if size != 5:
+            raise RuntimeError, "Expect MPI_COMM_WORLD size == 5"
+        print rank
+        nproc_source = 3
+        procs_source = range( nproc_source )
+        procs_target = range( size - nproc_source + 1, size)
+        
+        interface = CommInterface()
+        target_group = MPIProcessorGroup(interface, procs_target)
+        source_group = MPIProcessorGroup(interface, procs_source)
+        dec = InterpKernelDEC(source_group, target_group)
+        
+        mesh       =0
+        support    =0
+        paramesh   =0
+        parafield  =0
+        icocofield =0
+        data_dir = os.environ['MED_ROOT_DIR']
+        tmp_dir  = os.environ['TMP']
+        
+        if not tmp_dir or len(tmp_dir)==0:
+            tmp_dir = "/tmp"
+            pass
+        
+        filename_xml1 = os.path.join(data_dir, "share/salome/resources/med/square1_split")
+        filename_xml2 = os.path.join(data_dir, "share/salome/resources/med/square2_split")
+        
+        MPI_Barrier(MPI_COMM_WORLD)
+        if source_group.containsMyRank():
+            filename = filename_xml1 + str(rank+1) + ".med"
+            meshname = "Mesh_2_" + str(rank+1)
+            mesh=MEDLoader.ReadUMeshFromFile(filename,meshname,0)
+            paramesh=ParaMESH(mesh,source_group,"source mesh")
+            comptopo = ComponentTopology()
+            parafield = ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo)
+            parafield.getField().setNature(ConservativeVolumic)
+            nb_local=mesh.getNumberOfCells()
+            value = [1.0]*nb_local
+            parafield.getField().setValues(value)
+            icocofield = ICoCoMEDField(mesh,parafield.getField())
+            dec.setMethod("P0")
+            dec.attachLocalField(icocofield)
+            pass
+        else:
+            filename = filename_xml2 + str(rank - nproc_source + 1) + ".med"
+            meshname = "Mesh_3_" + str(rank - nproc_source + 1)
+            mesh=MEDLoader.ReadUMeshFromFile(filename,meshname,0)
+            paramesh=ParaMESH(mesh,target_group,"target mesh")
+            comptopo = ComponentTopology()
+            parafield = ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo)
+            parafield.getField().setNature(ConservativeVolumic)
+            nb_local=mesh.getNumberOfCells()
+            value = [0.0]*nb_local
+            parafield.getField().setValues(value)
+            icocofield = ICoCoMEDField(mesh,parafield.getField())
+            dec.setMethod("P0")
+            dec.attachLocalField(icocofield)
+            pass
+        
+        if source_group.containsMyRank():
+            field_before_int = parafield.getVolumeIntegral(0,True)
+            dec.synchronize()
+            dec.setForcedRenormalization(False)
+            dec.sendData()
+            dec.recvData()
+            field_after_int=parafield.getVolumeIntegral(0,True);
+            self.failUnless(math.fabs(field_after_int-field_before_int)<1e-8)
+            pass
+        else:
+            dec.synchronize()
+            dec.setForcedRenormalization(False)
+            dec.recvData()
+            dec.sendData()
+            pass
+        ## end
+        interface = 0
+        target_group = 0
+        source_group = 0
+        dec = 0
+        mesh       =0
+        support    =0
+        paramesh   =0
+        parafield  =0
+        icocofield =0
+        MPI_Barrier(MPI_COMM_WORLD)
+        MPI_Finalize()
+        pass
+    pass
+
+unittest.main()
diff --git a/src/ParaMEDMEM_Swig/test_NonCoincidentDEC.py b/src/ParaMEDMEM_Swig/test_NonCoincidentDEC.py
new file mode 100755 (executable)
index 0000000..acf78aa
--- /dev/null
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#  -*- coding: iso-8859-1 -*-
+# Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+from ParaMEDMEM import *
+import sys, os
+
+MPI_Init(sys.argv)
+
+size = MPI_Comm_size(MPI_COMM_WORLD)
+rank = MPI_Comm_rank(MPI_COMM_WORLD)
+if size != 5:
+    raise RuntimeError, "Expect MPI_COMM_WORLD size == 5"
+
+nproc_source = 3
+procs_source = range( nproc_source )
+procs_target = range( size - nproc_source + 1, size)
+
+interface = CommInterface()
+
+target_group = MPIProcessorGroup(interface, procs_target)
+source_group = MPIProcessorGroup(interface, procs_source)
+
+source_mesh= 0
+target_mesh= 0
+parasupport= 0
+mesh       = 0
+support    = 0
+field      = 0
+paramesh   = 0
+parafield  = 0
+icocofield = 0
+
+dec = NonCoincidentDEC(source_group, target_group)
+
+data_dir = os.environ['MED_ROOT_DIR']
+tmp_dir  = os.environ['TMP']
+if tmp_dir == '':
+    tmp_dir = "/tmp"
+    pass
+
+filename_xml1 = data_dir + "/share/salome/resources/med/square1_split"
+filename_xml2 = data_dir + "/share/salome/resources/med/square2_split"
+
+MPI_Barrier(MPI_COMM_WORLD)
+    
+if source_group.containsMyRank():
+
+    filename = filename_xml1 + str(rank+1) + ".med"
+    meshname = "Mesh_2_" + str(rank+1)
+
+    mesh = MESH(MED_DRIVER, filename, meshname)
+    support = SUPPORT(mesh, "all elements", MED_CELL)
+    paramesh = ParaMESH(mesh, source_group, "source mesh")
+
+    parasupport = UnstructuredParaSUPPORT( support, source_group)
+    comptopo = ComponentTopology()
+
+    parafield = ParaFIELD(parasupport, comptopo)
+
+    nb_local = support.getNumberOfElements(MED_ALL_ELEMENTS);
+
+    value = [1.0]*nb_local
+
+    parafield.getField().setValue(value)
+    icocofield = ICoCo_MEDField(paramesh,parafield)
+    dec.attachLocalField(icocofield,'P0')
+    pass
+
+if target_group.containsMyRank():
+
+    filename = filename_xml2 + str(rank - nproc_source + 1) + ".med"
+    meshname = "Mesh_3_" + str(rank - nproc_source + 1)
+
+    mesh = MESH(MED_DRIVER, filename, meshname)
+    support = SUPPORT(mesh, "all elements", MED_CELL)
+    paramesh = ParaMESH(mesh, target_group, "target mesh")
+
+    parasupport = UnstructuredParaSUPPORT( support, target_group)
+    comptopo = ComponentTopology()
+    parafield = ParaFIELD(parasupport, comptopo)
+
+    nb_local = support.getNumberOfElements(MED_ALL_ELEMENTS)
+    value = [0.0]*nb_local
+
+    parafield.getField().setValue(value)
+    icocofield = ICoCo_MEDField(paramesh,parafield)
+
+    dec.attachLocalField(icocofield, 'P0')
+    pass
+
+field_before_int = [0.0]
+field_after_int = [0.0]
+
+if source_group.containsMyRank():
+
+    field_before_int = [parafield.getVolumeIntegral(1)]
+    MPI_Bcast(field_before_int, 1, MPI_DOUBLE, 0, MPI_COMM_WORLD);
+    dec.synchronize()
+    print "DEC usage"
+    dec.setForcedRenormalization(False)
+
+    dec.sendData()
+    pass
+    
+if target_group.containsMyRank():
+
+    MPI_Bcast(field_before_int, 1, MPI_DOUBLE, 0, MPI_COMM_WORLD)
+    dec.synchronize()
+    dec.setForcedRenormalization(False)
+    dec.recvData()
+    field_after_int = [parafield.getVolumeIntegral(1)]
+    pass
+
+MPI_Bcast(field_before_int, 1, MPI_DOUBLE, 0, MPI_COMM_WORLD)
+MPI_Bcast(field_after_int , 1, MPI_DOUBLE, size-1, MPI_COMM_WORLD)
+
+epsilon = 1e-6
+if abs(field_before_int[0] - field_after_int[0]) > epsilon:
+    print "Field before is not equal to field after: %s != %s"%\
+          (field_before_int[0],field_after_int[0])
+    pass
+
+
+MPI_Barrier(MPI_COMM_WORLD)
+MPI_Finalize()
+print "# End of testNonCoincidentDEC"
diff --git a/src/ParaMEDMEM_Swig/test_StructuredCoincidentDEC.py b/src/ParaMEDMEM_Swig/test_StructuredCoincidentDEC.py
new file mode 100755 (executable)
index 0000000..90c9aad
--- /dev/null
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+#  -*- coding: iso-8859-1 -*-
+# Copyright (C) 2007-2015  CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+from ParaMEDMEM import *
+import sys, os
+import unittest
+import math
+
+class ParaMEDMEMBasicsTest2(unittest.TestCase):
+    def testStructuredCoincidentDEC(self):
+        MPI_Init(sys.argv)
+        #
+        size = MPI_Comm_size(MPI_COMM_WORLD)
+        rank = MPI_Comm_rank(MPI_COMM_WORLD)
+        #
+        if size < 4:
+            raise RuntimeError, "Expect MPI_COMM_WORLD size >= 4"
+        #
+        interface = CommInterface()
+        #
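+        # ranks 0..2 form the source group, ranks 3..size-1 the target group, and each process has its own self group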
+        self_group   = MPIProcessorGroup(interface, rank, rank)
+        target_group = MPIProcessorGroup(interface, 3, size-1)
+        source_group = MPIProcessorGroup(interface, 0, 2)
+        #
+        mesh      = 0
+        support   = 0
+        paramesh  = 0
+        parafield = 0
+        comptopo  = 0
+        icocofield= 0
+        #
+        data_dir = os.environ['MED_ROOT_DIR']
+        tmp_dir  = os.environ['TMP']
+        if tmp_dir == '':
+            tmp_dir = "/tmp"
+            pass
+        
+        filename_xml1    = data_dir + "/share/salome/resources/med/square1_split"
+        filename_2       = data_dir + "/share/salome/resources/med/square1.med"
+        filename_seq_wr  = tmp_dir + "/"
+        filename_seq_med = tmp_dir + "/myWrField_seq_pointe221.med"
+        
+        dec = StructuredCoincidentDEC(source_group, target_group)
+        MPI_Barrier(MPI_COMM_WORLD)
+        if source_group.containsMyRank():
+            filename = filename_xml1 + str(rank+1) + ".med"
+            meshname = "Mesh_2_" + str(rank+1)
+            mesh=MEDLoader.ReadUMeshFromFile(filename,meshname,0)
+            paramesh=ParaMESH(mesh,source_group,"source mesh")
+            comptopo=ComponentTopology(6)
+            parafield=ParaFIELD(ON_CELLS,NO_TIME,paramesh,comptopo)
+            parafield.getField().setNature(ConservativeVolumic)
+            nb_local=mesh.getNumberOfCells()
+            global_numbering=paramesh.getGlobalNumberingCell2()
+            value = []
+            for ielem in range(nb_local):
+                for icomp in range(6):
+                    value.append(global_numbering[ielem]*6.0+icomp);
+                    pass
+                pass
+            parafield.getField().setValues(value)
+            icocofield = ICoCoMEDField(mesh,parafield.getField())
+            dec.setMethod("P0")  
+            dec.attachLocalField(parafield)      
+            dec.synchronize()
+            dec.sendData()
+            pass
+
+        if target_group.containsMyRank():
+            meshname2 = "Mesh_2"
+            mesh=MEDLoader.ReadUMeshFromFile(filename_2, meshname2,0)
+            paramesh=ParaMESH(mesh, self_group, "target mesh")
+            comptopo=ComponentTopology(6,target_group)
+            parafield=ParaFIELD(ON_CELLS,NO_TIME,paramesh, comptopo)
+            parafield.getField().setNature(ConservativeVolumic)
+            nb_local=mesh.getNumberOfCells()
+            value = [0.0]*(nb_local*comptopo.nbLocalComponents())
+            parafield.getField().setValues(value)
+            icocofield = ICoCoMEDField(mesh,parafield.getField())
+            dec.setMethod("P0")
+            dec.attachLocalField(parafield)
+            dec.synchronize()
+            dec.recvData()
+            recv_value = parafield.getField().getArray().getValues()
+            for i in range(nb_local):
+                first=comptopo.firstLocalComponent()
+                for icomp in range(comptopo.nbLocalComponents()):
+                    self.failUnless(math.fabs(recv_value[i*comptopo.nbLocalComponents()+icomp]-
+                                              float(i*6+icomp+first))<1e-12)
+                    pass
+                pass
+            pass
+        comptopo=0
+        interface = 0
+        mesh       =0
+        support    =0
+        paramesh   =0
+        parafield  =0
+        icocofield =0
+        dec=0
+        self_group =0
+        target_group = 0
+        source_group = 0
+        MPI_Barrier(MPI_COMM_WORLD)
+        MPI_Finalize()
+        print "End of test StructuredCoincidentDEC"
+        pass
+
+    
+unittest.main()
index c18ba6795b8a3870cd663bb9ae6dea0b31d30c4a..5454f1357a9aea34bbe2fe7b56369a49c2f4e9f5 100755 (executable)
@@ -189,21 +189,9 @@ class RenumberingTest(unittest.TestCase):
         pass
 
     def setUp(self):
-        srcdir   = os.getenv("srcdir")
-        med_root = os.getenv("MED_ROOT_DIR")
-        if srcdir:
-            # make test is being performed
-            self.dir_renumber="./renumber"
-            self.dir_mesh = os.path.join( srcdir, "../../resources")
-        elif med_root:
-            # hope renumber has been already installed
-            self.dir_renumber=os.path.join( med_root, "bin/salome/renumber")
-            self.dir_mesh = os.path.join( med_root, "share/salome/resources/med")
-        else:
-            # initial version
-            self.dir_renumber="../../../MED_INSTALL/bin/salome/renumber"
-            self.dir_mesh="../../resources"
-            pass
+        med_root_dir=os.getenv("MEDTOOL_ROOT_DIR")
+        self.dir_renumber=os.path.join(med_root_dir, "bin/renumber")
+        self.dir_mesh=os.path.join(med_root_dir, "share","resources","med")
         pass
     pass
 
index ca5363830a5bb104edd1304002b8650853210adb..afd126fcbe6f614c7d8e84f212333e8d0d236cd4 100644 (file)
@@ -66,9 +66,9 @@ INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/MEDRenumber.py DESTINATION ${MEDTOOL_I
 INSTALL(FILES MEDRenumber.i MEDRenumberCommon.i DESTINATION ${MEDTOOL_INSTALL_HEADERS})
 INSTALL(FILES MEDRenumberTest.py DESTINATION ${MEDTOOL_INSTALL_SCRIPT_PYTHON})
 
-#SALOME_GENERATE_TESTS_ENVIRONMENT(tests_env)
 ADD_TEST(MEDRenumberTest ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/MEDRenumberTest.py)
-SET_TESTS_PROPERTIES(MEDRenumberTest PROPERTIES ENVIRONMENT "${tests_env}")
+SET(MEDRenumberTest_PYTHONPATH "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:${CMAKE_CURRENT_BINARY_DIR}/../MEDCoupling_Swig")
+SET_TESTS_PROPERTIES(MEDRenumberTest PROPERTIES ENVIRONMENT "${MEDRenumberTest_PYTHONPATH}")
 
 # Application tests