Salome HOME
[EDF28531] : non regression test linked to commit 5fb5c225
author Anthony Geay <anthony.geay@edf.fr>
Tue, 12 Sep 2023 07:56:32 +0000 (09:56 +0200)
committer Anthony Geay <anthony.geay@edf.fr>
Tue, 12 Sep 2023 07:56:32 +0000 (09:56 +0200)
src/yacsloader_swig/Test/CMakeLists.txt
src/yacsloader_swig/Test/CTestTestfileInstall.cmake
src/yacsloader_swig/Test/testYacsPerfTest0.py [new file with mode: 0644]

index 7cfb889c3572ff3ae9a1f9c7fbf80c8166e57585..e5b2b61612fff2c87e2c89a505ded915e75c5d69 100644 (file)
@@ -42,6 +42,7 @@ IF(NOT WIN32)
     testSave.py
     testSaveLoadRun.py
     testYacsProxy.py
+    testYacsPerfTest0.py
     testYacsLoaderSwig.py
     optim_plugin.py
     testValidationChecks.py
index a80c16935ed021b568a661652d7300ab3cd28099..a76bfb846621df63c708ca3901c2a0d43b917188 100644 (file)
@@ -79,6 +79,12 @@ IF(NOT WIN32)
   SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
                                     LABELS "${COMPONENT_NAME}"
                       )
+                      
+  SET(TEST_NAME ${COMPONENT_NAME}_PerfTest0_swig)
+  ADD_TEST(${TEST_NAME} testYacsPerfTest0.py)
+  SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
+                                    LABELS "${COMPONENT_NAME}"
+                      )
 
   SET(TEST_NAME ${COMPONENT_NAME}_ValidationChecks_swig)
   ADD_TEST(${TEST_NAME} testValidationChecks.py)
diff --git a/src/yacsloader_swig/Test/testYacsPerfTest0.py b/src/yacsloader_swig/Test/testYacsPerfTest0.py
new file mode 100644 (file)
index 0000000..296e436
--- /dev/null
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+# Copyright (C) 2023  CEA, EDF
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import unittest
+import tempfile
+import os
+
+import pilot
+import SALOMERuntime
+import loader
+import salome
+
+class TestYacsPerf0(unittest.TestCase):
+    def test0(self):
+        """
+        [EDF28531] : Perf to check when number of threads in charge scheduler side is lower than number of // tasks
+        """
+        NB_OF_PARALLEL_NODES = 100
+        NB_OF_PARALLEL_THREADS = 10
+        salome.salome_init()
+        SALOMERuntime.RuntimeSALOME.setRuntime()
+        r=SALOMERuntime.getSALOMERuntime()
+        p=r.createProc("PerfTest0")
+        p.setProperty("executor","workloadmanager") # important line here to avoid that gg container treat several tasks in //.
+        ti=p.createType("int","int")
+        td=p.createType("double","double")
+        tdd=p.createSequenceTc("seqdouble","seqdouble",td)
+        tddd=p.createSequenceTc("seqseqdouble","seqseqdouble",tdd)
+        tdddd=p.createSequenceTc("seqseqseqdouble","seqseqseqdouble",tddd)
+        pyobj=p.createInterfaceTc("python:obj:1.0","pyobj",[])
+        seqpyobj=p.createSequenceTc("list[pyobj]","list[pyobj]",pyobj)
+        cont=p.createContainer("gg","Salome")
+        cont.setProperty("nb_parallel_procs","1")
+        cont.setAttachOnCloningStatus(True)
+        cont.setProperty("attached_on_cloning","1")
+        cont.setProperty("type","multi")
+        cont.setProperty("container_name","gg")
+        ######## Level 0
+        startNode = r.createScriptNode("Salome","start")
+        startNode.setExecutionMode("local")
+        startNode.setScript("""o2 = list(range({}))""".format(NB_OF_PARALLEL_NODES))
+        po2 = startNode.edAddOutputPort("o2",seqpyobj)
+        p.edAddChild(startNode)
+        #
+        fe = r.createForEachLoopDyn("fe",pyobj)
+        p.edAddChild(fe)
+        p.edAddCFLink(startNode,fe)
+        p.edAddLink(po2,fe.edGetSeqOfSamplesPort())
+        internalNode = r.createScriptNode("Salome","internalNode")
+        internalNode.setExecutionMode("remote")
+        internalNode.setContainer(cont)
+        internalNode.setScript("""
+ret = 3*ppp
+""")
+        fe.edSetNode(internalNode)
+        ix = internalNode.edAddInputPort("ppp",pyobj)
+        oret = internalNode.edAddOutputPort("ret",pyobj)
+        p.edAddLink( fe.edGetSamplePort(), ix )
+        #
+        endNode = r.createScriptNode("Salome","end")
+        endNode.setExecutionMode("local")
+        endNode.setContainer(None)
+        ozeret = endNode.edAddOutputPort("ozeret",seqpyobj)
+        izeret = endNode.edAddInputPort("izeret",seqpyobj)
+        endNode.setScript("""ozeret = izeret""")
+        p.edAddChild(endNode)
+        p.edAddCFLink(fe,endNode)
+        p.edAddLink( oret, izeret )
+        if False:
+            fname = "PerfTest0.xml"
+            p.saveSchema(fname)
+            
+            import loader
+            l=loader.YACSLoader()
+            p=l.load(fname)
+        print("Start computation")
+        import datetime
+        st = datetime.datetime.now()
+        ex=pilot.ExecutorSwig()
+        ex.setMaxNbOfThreads(NB_OF_PARALLEL_THREADS)
+        ex.RunW(p,0)
+        salome.cm.ShutdownContainers()
+        print("End of computation {}".format( str(datetime.datetime.now()-st) ) )
+        if p.getChildByName("end").getOutputPort("ozeret").getPyObj() != [3*i for i in range(NB_OF_PARALLEL_NODES)]:
+            raise RuntimeError("Ooops")
+
+if __name__ == '__main__':
+  with tempfile.TemporaryDirectory() as dir_test:
+    file_test = os.path.join(dir_test,"UnitTestsResult")
+    with open(file_test, 'a') as f:
+        f.write("  --- TEST src/yacsloader: testYacsPerfTest0.py\n")
+        suite = unittest.makeSuite(TestYacsPerf0)
+        result=unittest.TextTestRunner(f, descriptions=1, verbosity=1).run(suite)
+        if not result.wasSuccessful():
+           raise RuntimeError("Test failed !")