5 Branche : BR_MEDPARA : MED_SRC
6 setenv CVSROOT :pserver:rahuel@cvs.opencascade.com:/home/server/cvs/MED
13 Sources : /home/rahuel/MEDPARAsynch
14 Construction sur awa : /data/tmpawa/rahuel/MEDPARAsynch/MED_Build
15 Installation sur awa : /data/tmpawa/rahuel/MEDPARAsynch/MED_Install
21 source /home/rahuel/MEDPARAsynch/env_products.csh
24 /data/tmpawa/vb144235/valgrind-3.2.1/valgrind_install/bin
25 /data/tmpawa/adam/Salome3/V3_2_7_AWA_OCC/Python-2.4.1
26 /data/tmpawa/vb144235/med_231_install
27 /data/tmpawa2/adam/omniORB/omniORB-4.0.7
28 /data/tmpawa/vb144235/lam_install
29 /data/tmpawa/vb144235/cppunit_install
30 /data/tmpawa/vb144235/fvm_install_lam
31 /data/tmpawa/vb144235/bft_install
32 /home/rahuel/MEDPARAsynch/ICoCo
33 /data/tmpawa2/adam/Salome3/V3_2_0_maintainance/KERNEL/KERNEL_INSTALL
36 Build_Configure et Configure :
37 ==============================
39 MEDMEM est en "stand-alone" sans KERNEL ni IHM.
42 ${MED_SRC_DIR}/build_configure --without-kernel --without-ihm
43 rm ${MED_SRC_DIR}/adm_local_without_kernel/adm_local_without_kernel
44 rm -fR $MED_BUILD_DIR/adm_local_without_kernel/adm_local_without_kernel
47 ${MED_SRC_DIR}/configure --without-kernel --without-ihm --with-lam=/data/tmpawa/vb144235/lam_install --prefix=${MED_ROOT_DIR} --with-med2=/data/tmpawa/vb144235/med_231_install --with-python=/data/tmpawa/adam/Salome3/V3_2_7_AWA_OCC/Python-2.4.1 --with-cppunit=/data/tmpawa/vb144235/cppunit_install --with-cppunit_inc=/data/tmpawa/vb144235/cppunit_install/include --with-fvm=/data/tmpawa/vb144235/fvm_install_lam
48 rm ${MED_SRC_DIR}/adm_local_without_kernel/adm_local_without_kernel
49 rm -fR $MED_BUILD_DIR/adm_local_without_kernel/adm_local_without_kernel
59 Problemes de construction :
60 ===========================
62 Liste des fichiers modifies et differents de la base CVS pour pouvoir
63 effectuer la construction et l'installation :
65 M MED_SRC/configure.in.base :
66 -----------------------------
69 CHECK_OPENMPI mis en commentaire (redefinit le resultat de CHECK_LAM)
70 CHECK_CPPUNIT a ete ajoute
72 M MED_SRC/adm_local_without_kernel/unix/config_files/check_lam.m4 :
73 -------------------------------------------------------------------
74 Debugs pour trouver la bonne configuration de LAM
76 M MED_SRC/src/INTERP_KERNEL/Makefile.in :
77 -----------------------------------------
78 Problemes de construction des tests
80 M MED_SRC/src/ParaMEDMEM/Makefile.in :
81 --------------------------------------
82 . Construction de libParaMEDMEM.a pour gcov (link statique)
83 . Ajout d'options de compilation : -fprofile-arcs -ftest-coverage -pg (gcov) ==>
84 instrumentation du code
86 C MED_SRC/src/ParaMEDMEM/Test/Makefile.in :
87 -------------------------------------------
88 . Construction de libParaMEDMEMTest.a pour gcov (link statique)
89 . Ajout d'options de compilation : -fprofile-arcs -ftest-coverage -pg (gcov) ==>
90 instrumentation du code
91 . Prise en compte de $(MED_WITH_KERNEL) avec :
92 ifeq ($(MED_WITH_KERNEL),yes)
93 LDFLAGSFORBIN += $(LDFLAGS) -lm $(MED3_LIBS) $(HDF5_LIBS) $(MPI_LIBS) \
94 -L$(CMAKE_BINARY_DIR)/lib@LIB_LOCATION_SUFFIX@/salome -lmed_V2_1 -lparamed -lmedmem \
95 ${KERNEL_LDFLAGS} -lSALOMELocalTrace -lSALOMEBasics \
99 ifeq ($(MED_WITH_KERNEL),no)
100 LDFLAGSFORBIN += $(LDFLAGS) -lm $(MED3_LIBS) $(HDF5_LIBS) $(MPI_LIBS) \
101 -L$(CMAKE_BINARY_DIR)/lib@LIB_LOCATION_SUFFIX@/salome -lmed_V2_1 -lparamed -linterpkernel -lmedmem \
102 ${KERNEL_LDFLAGS} ${FVM_LIBS} ${CPPUNIT_LIBS} -L/data/tmpawa/vb144235/bft_install/lib -lbft\
106 M MED_SRC/src/ParaMEDMEM/Test/ParaMEDMEMTest.hxx :
107 --------------------------------------------------
108 Mise en commentaire du test manquant :
109 CPPUNIT_TEST(testNonCoincidentDEC_3D);
111 U MED_SRC/src/ParaMEDMEM/Test/ParaMEDMEMTest_NonCoincidentDEC.cxx :
112 -------------------------------------------------------------------
115 Pour forcer la reconstruction des tests :
116 =========================================
120 rm src/ParaMEDMEM/*.la
121 rm src/ParaMEDMEM/test_*
122 rm src/ParaMEDMEM/.libs/*
123 rm src/ParaMEDMEM/Test/*o
124 rm src/ParaMEDMEM/Test/*.la
125 rm src/ParaMEDMEM/Test/.libs/*
128 cd $MED_BUILD_DIR/src/ParaMEDMEM/Test
137 jr[1175]> mpirun -np 5 -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
138 21508 TestParaMEDMEM running on n0 (o)
139 21509 TestParaMEDMEM running on n0 (o)
140 21510 TestParaMEDMEM running on n0 (o)
141 21511 TestParaMEDMEM running on n0 (o)
142 21512 TestParaMEDMEM running on n0 (o)
143 - Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
144 - Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
145 - Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
146 - Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
147 - Trace /home/rahuel/MEDPARAsynch/MED_SRC/src/MEDMEM/MEDMEM_Init.cxx [54] : Med Memory Initialization with $SALOME_trace = local
148 -----------------------------------------------------------------------------
149 The selected RPI failed to initialize during MPI_INIT. This is a
150 fatal error; I must abort.
152 This occurred on host awa (n0).
153 The PID of failed process was 21508 (MPI_COMM_WORLD rank: 0)
154 -----------------------------------------------------------------------------
155 -----------------------------------------------------------------------------
156 One of the processes started by mpirun has exited with a nonzero exit
157 code. This typically indicates that the process finished in error.
158 If your process did not finish in error, be sure to include a "return
159 0" or "exit(0)" in your C code before exiting the application.
161 PID 21510 failed on node n0 (127.0.0.1) with exit status 1.
162 -----------------------------------------------------------------------------
166 Contournement du probleme lam :
167 ===============================
169 mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
174 . Les tests avec valgrind indiquent des erreurs dans MPI_Init et
175 MPI_Finalize ainsi que dans des programmes appeles "below main".
176 . De plus on obtient un "Segmentation Violation" accompagne d'un
177 fichier "vgcore.*" (plantage de valgrind)
178 . Mais on a " All heap blocks were freed -- no leaks are possible."
179 et on n'a aucune erreur de malloc/free new/delete dans ParaMEDMEM et
182 . Cependant si on execute les tests sans valgrind, il n'y a pas
183 d'erreur ni de fichier "core.*".
186 Tests avec CPPUNIT de $MED_BUILD_DIR/src/ParaMEDMEM/Test :
187 ==========================================================
189 L'appel a MPI_Init n'est fait qu'une seule fois.
190 Il est suivi par l'execution de toute la suite des tests regroupes
191 dans les trois executables TestParaMEDMEM, TestMPIAccessDEC et
193 On a enfin un seul appel a MPI_Finalize.
195 Si un des tests d'une suite de tests comporte une anomalie cela
196 peut avoir des implications sur l'execution des tests suivants.
198 Lors de la mise au point de la suite de tests de TestMPIAccessDEC
199 cela etait le cas : il restait des messages postes dans lam mais
200 non lus. Le test suivant s'executait de plus en plus lentement
201 sans donner d'erreur (probleme difficile a identifier).
204 Lancement des tests de TestParaMEDMEM avec CPPUNIT et TotalView (option -tv) :
205 ==============================================================================
207 mpirun -np 5 -ssi rpi tcp C -tv -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
209 Il arrive qu'on ne puisse pas utiliser totalview par manque de
214 Lancement des tests de TestParaMEDMEM avec CPPUNIT et Valgrind avec "memory leaks" :
215 ====================================================================================
217 mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestParaMEDMEM
220 Lancement des tests fonctionnels de MPI_AccessDEC avec CPPUNIT :
221 ================================================================
223 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestMPIAccessDEC
226 Lancement des tests unitaires de MPI_Access avec CPPUNIT :
227 ==========================================================
229 mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full TestMPIAccess
232 TestMPIAccess/TestMPIAccessDEC/TestParaMEDMEM et gcov :
233 =======================================================
235 Les resultats sont dans les repertoires suivants de $MED_BUILD_DIR/src/ParaMEDMEM/Test :
239 TestMPIAccessDEC-gcov/
242 Je n'y ai pas trouve d'anomalies.
244 compilation : -fprofile-arcs -ftest-coverage
247 $MED_BUILD_DIR/src/ParaMEDMEM/makefile.in : LIB=libparamedar.a \
248 ------------------------------------------- libparamed.la
250 $MED_BUILD_DIR/src/ParaMEDMEM/Test/makefile.in : LIB = libParaMEDMEMTestar.a \
251 ------------------------------------------------ libParaMEDMEMTest.la
253 links statiques manuels :
254 -------------------------
256 g++ -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestMPIAccess TestMPIAccess.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
258 g++ -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestMPIAccessDEC TestMPIAccessDEC.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
260 g++ -g -D_DEBUG_ -Wno-deprecated -Wparentheses -Wreturn-type -Wunused -DPCLINUX -I/data/tmpawa/vb144235/cppunit_install/include -I/data/tmpawa/vb144235/lam_install/include -ftemplate-depth-42 -I/home/rahuel/MEDPARAsynch/MED_SRC/src/ParaMEDMEM -fprofile-arcs -ftest-coverage -o TestParaMEDMEM TestParaMEDMEM.lo -L../../../lib64/salome -lstdc++ -L../../../lib64/salome -lstdc++ -lm -L/data/tmpawa/vb144235/med_231_install/lib -lmed -lhdf5 -lhdf5 -L/data/tmpawa/vb144235/lam_install/lib -llam -lmpi -L../../../lib64/salome -lmed_V2_1 --whole-archive -linterpkernel -lmedmem -L/data/tmpawa/vb144235/fvm_install_lam/lib -lfvm -L/data/tmpawa/vb144235/cppunit_install/lib -lcppunit -L/data/tmpawa/vb144235/bft_install/lib -lbft -lutil -lm -lrt -ldl -Bstatic -L./ -lParaMEDMEMTestar -L../ -lparamedar -L./ -lParaMEDMEMTestar
262 Ne pas oublier le make install apres ...
267 Pour pouvoir traiter les .cxx de ${MED_BUILD_DIR}/src/ParaMEDMEM et de
268 ${MED_BUILD_DIR}/src/ParaMEDMEM/Test, on execute deux fois gcov.
270 cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
272 mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestMPIAccess
274 gcov TestMPIAccess.cxx test_MPI_Access_Send_Recv.cxx \
275 test_MPI_Access_Cyclic_Send_Recv.cxx \
276 test_MPI_Access_SendRecv.cxx \
277 test_MPI_Access_ISend_IRecv.cxx \
278 test_MPI_Access_Cyclic_ISend_IRecv.cxx \
279 test_MPI_Access_ISendRecv.cxx \
280 test_MPI_Access_Probe.cxx \
281 test_MPI_Access_IProbe.cxx \
282 test_MPI_Access_Cancel.cxx \
283 test_MPI_Access_Send_Recv_Length.cxx \
284 test_MPI_Access_ISend_IRecv_Length.cxx \
285 test_MPI_Access_ISend_IRecv_Length_1.cxx \
286 test_MPI_Access_Time.cxx \
287 test_MPI_Access_Time_0.cxx \
288 test_MPI_Access_ISend_IRecv_BottleNeck.cxx \
290 gcov -o ../ TestMPIAccess.cxx test_MPI_Access_Send_Recv.cxx \
291 test_MPI_Access_Cyclic_Send_Recv.cxx \
292 test_MPI_Access_SendRecv.cxx \
293 test_MPI_Access_ISend_IRecv.cxx \
294 test_MPI_Access_Cyclic_ISend_IRecv.cxx \
295 test_MPI_Access_ISendRecv.cxx \
296 test_MPI_Access_Probe.cxx \
297 test_MPI_Access_IProbe.cxx \
298 test_MPI_Access_Cancel.cxx \
299 test_MPI_Access_Send_Recv_Length.cxx \
300 test_MPI_Access_ISend_IRecv_Length.cxx \
301 test_MPI_Access_ISend_IRecv_Length_1.cxx \
302 test_MPI_Access_Time.cxx \
303 test_MPI_Access_Time_0.cxx \
304 test_MPI_Access_ISend_IRecv_BottleNeck.cxx \
308 cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
309 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestMPIAccessDEC
311 gcov TestMPIAccessDEC.cxx test_AllToAllDEC.cxx \
312 test_AllToAllvDEC.cxx \
313 test_AllToAllTimeDEC.cxx \
314 test_AllToAllvTimeDEC.cxx \
315 test_AllToAllvTimeDoubleDEC.cxx \
316 ../TimeInterpolator.cxx \
317 ../LinearTimeInterpolator.cxx \
320 gcov -o ../ TestMPIAccessDEC.cxx test_AllToAllDEC.cxx \
321 test_AllToAllvDEC.cxx \
322 test_AllToAllTimeDEC.cxx \
323 test_AllToAllvTimeDEC.cxx \
324 test_AllToAllvTimeDoubleDEC.cxx \
325 ../TimeInterpolator.cxx \
326 ../LinearTimeInterpolator.cxx \
330 cd ${MED_BUILD_DIR}/src/ParaMEDMEM/Test
331 mpirun -np 5 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} TestParaMEDMEM
333 gcov TestParaMEDMEM.cxx ParaMEDMEMTest.cxx \
334 ParaMEDMEMTest_MPIProcessorGroup.cxx \
335 ParaMEDMEMTest_BlockTopology.cxx \
336 ParaMEDMEMTest_InterpKernelDEC.cxx \
337 ../BlockTopology.cxx \
338 ../ComponentTopology.cxx \
340 ../ElementLocator.cxx \
341 ../InterpolationMatrix.cxx \
342 ../InterpKernelDEC.cxx \
343 ../MPIProcessorGroup.cxx \
348 ../ProcessorGroup.cxx \
349 ../TimeInterpolator.cxx \
350 ../LinearTimeInterpolator.cxx \
354 gcov -o ../ TestParaMEDMEM.cxx ParaMEDMEMTest.cxx \
355 ParaMEDMEMTest_MPIProcessorGroup.cxx \
356 ParaMEDMEMTest_BlockTopology.cxx \
357 ParaMEDMEMTest_InterpKernelDEC.cxx \
358 ../BlockTopology.cxx \
359 ../ComponentTopology.cxx \
361 ../ElementLocator.cxx \
362 ../InterpolationMatrix.cxx \
363 ../InterpKernelDEC.cxx \
364 ../MPIProcessorGroup.cxx \
369 ../ProcessorGroup.cxx \
370 ../TimeInterpolator.cxx \
371 ../LinearTimeInterpolator.cxx \
379 Lancement des tests unitaires sans CPPUNIT :
380 ============================================
382 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Send_Recv
384 mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cyclic_Send_Recv
386 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_SendRecv
388 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv
390 mpirun -np 3 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cyclic_ISend_IRecv
392 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISendRecv
394 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Probe
396 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_IProbe
398 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Cancel
400 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Send_Recv_Length
402 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_Length
404 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_Length_1
406 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Time
408 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_Time_0 2 1
412 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllDEC 0
414 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllDEC 1
418 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvDEC 0
420 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvDEC 1
424 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllTimeDEC 0
426 mpirun -np 4 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllTimeDEC 1
430 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 0 1
432 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 0
434 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDEC 1
438 #AllToAllvTimeDoubleDEC
439 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDoubleDEC 0
441 mpirun -np 11 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_AllToAllvTimeDoubleDEC 1
445 mpirun -np 2 -ssi rpi tcp C -v -x PATH=${PATH},LD_LIBRARY_PATH=${LD_LIBRARY_PATH} valgrind --leak-check=full test_MPI_Access_ISend_IRecv_BottleNeck