From 82003807192e97411fa5d3ad24f9f6c88d1a551b Mon Sep 17 00:00:00 2001
From: Ovidiu Mircescu
Date: Wed, 3 Jun 2020 23:52:51 +0200
Subject: [PATCH] Test for workload manager.

---
 src/runtime/RuntimeSALOME.cxx              |  37 +++--
 src/yacsloader/samples/wlm_2foreach.xml    | 132 ++++++++++++++++++
 src/yacsloader_swig/Test/CMakeLists.txt    |   1 +
 .../Test/CTestTestfileInstall.cmake        |  10 +-
 .../Test/testPynodeWithCache.py            | 129 +++++++++++++++++
 .../Test/testWorkloadManager.py            |  60 +++-----
 6 files changed, 313 insertions(+), 56 deletions(-)
 create mode 100644 src/yacsloader/samples/wlm_2foreach.xml
 create mode 100755 src/yacsloader_swig/Test/testPynodeWithCache.py

diff --git a/src/runtime/RuntimeSALOME.cxx b/src/runtime/RuntimeSALOME.cxx
index 5bcca56fc..b23ac0b9a 100644
--- a/src/runtime/RuntimeSALOME.cxx
+++ b/src/runtime/RuntimeSALOME.cxx
@@ -456,14 +456,14 @@ std::vector< std::pair<std::string,int> > RuntimeSALOME::getCatalogOfComputeNode
   }
   catch(SALOME_Exception& e)
   {
-    throw Exception("SalomeContainerToolsSpreadOverTheResDecorator::getParameters : Unable to contact the SALOME Naming Service");
+    throw Exception("RuntimeSALOME::getCatalogOfComputeNodes : Unable to contact the SALOME Naming Service");
   }
   CORBA::Object_var obj(namingService.Resolve(SALOME_ResourcesManager::_ResourcesManagerNameInNS));
   if(CORBA::is_nil(obj))
-    throw Exception("SalomeContainerToolsSpreadOverTheResDecorator::getParameters : Unable to access the resource manager !");
+    throw Exception("RuntimeSALOME::getCatalogOfComputeNodes : Unable to access the resource manager !");
   Engines::ResourcesManager_var resManager(Engines::ResourcesManager::_narrow(obj));
   if(CORBA::is_nil(resManager))
-    throw Exception("SalomeContainerToolsSpreadOverTheResDecorator::getParameters : Internal error ! The entry attached to the res manager in NS does not have the right type !");
+    throw Exception("RuntimeSALOME::getCatalogOfComputeNodes : Internal error ! The entry attached to the res manager in NS does not have the right type !");
   std::vector< std::pair<std::string,int> > ret;
   Engines::ResourceParameters params;
   params.name = "";
@@ -478,18 +478,27 @@ std::vector< std::pair<std::string,int> > RuntimeSALOME::getCatalogOfComputeNode
   params.can_launch_batch_jobs = false;
   params.can_run_containers = true;
   params.componentList.length(0);
-  Engines::ResourceList_var resourceList;
-  resourceList = resManager->GetFittingResources(params);
-  ret.reserve(resourceList->length());
-  for(int i = 0; i < resourceList->length(); i++)
+  try
+  {
+    Engines::ResourceList_var resourceList;
+    resourceList = resManager->GetFittingResources(params);
+    ret.reserve(resourceList->length());
+    for(int i = 0; i < resourceList->length(); i++)
+    {
+      const char* resource_name = resourceList[i];
+      std::string std_resource_name = resource_name;
+      Engines::ResourceDefinition_var resource_definition
+                  = resManager->GetResourceDefinition(resource_name);
+      int nb_cores = resource_definition->nb_node *
+                     resource_definition->nb_proc_per_node;
+      ret.push_back(std::pair<std::string,int>(resource_name, nb_cores));
+    }
+  }
+  catch(SALOME::SALOME_Exception& e)
   {
-    const char* resource_name = resourceList[i];
-    std::string std_resource_name = resource_name;
-    Engines::ResourceDefinition_var resource_definition
-                  = resManager->GetResourceDefinition(resource_name);
-    int nb_cores = resource_definition->nb_node *
-                   resource_definition->nb_proc_per_node;
-    ret.push_back(std::pair<std::string,int>(resource_name, nb_cores));
+    std::string message;
+    message = e.details.text.in();
+    throw Exception(message);
   }
   return ret;
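The new catalog mirrors what the resource manager already exposes to Python. As a rough
sketch of the same query from a SALOME Python session, using only calls that also appear
in testWorkloadManager.py below ("localhost" is just an example resource name):

    import salome
    salome.salome_init()
    resourceManager = salome.lcc.getResourcesManager()
    # same arithmetic as getCatalogOfComputeNodes():
    # cores = nb_node * nb_proc_per_node
    resource_definition = resourceManager.GetResourceDefinition("localhost")
    print(resource_definition.nb_node * resource_definition.nb_proc_per_node)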
diff --git a/src/yacsloader/samples/wlm_2foreach.xml b/src/yacsloader/samples/wlm_2foreach.xml
new file mode 100644
index 000000000..02b1b2078
--- /dev/null
+++ b/src/yacsloader/samples/wlm_2foreach.xml
@@ -0,0 +1,132 @@
[The XML prologue and the type, container, and node definitions of this 132-line schema
were carried in markup and CDATA and did not survive extraction; only the link and
parameter section below is recoverable, re-rendered in standard YACS schema syntax.]
+   <control> <fromnode>Begin</fromnode> <tonode>ForEach1</tonode> </control>
+   <control> <fromnode>Begin</fromnode> <tonode>ForEach2</tonode> </control>
+   <control> <fromnode>Begin</fromnode> <tonode>End</tonode> </control>
+   <control> <fromnode>ForEach1</fromnode> <tonode>End</tonode> </control>
+   <control> <fromnode>ForEach2</fromnode> <tonode>End</tonode> </control>
+   <datalink control="false">
+      <fromnode>Begin</fromnode> <fromport>t0</fromport>
+      <tonode>End</tonode> <toport>t0</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>Begin</fromnode> <fromport>vals</fromport>
+      <tonode>ForEach2</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>Begin</fromnode> <fromport>vals</fromport>
+      <tonode>ForEach1</tonode> <toport>SmplsCollection</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>Begin</fromnode> <fromport>nbbranches</fromport>
+      <tonode>ForEach1</tonode> <toport>nbBranches</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>Begin</fromnode> <fromport>nbbranches</fromport>
+      <tonode>ForEach2</tonode> <toport>nbBranches</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEach1</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEach1.PyScript6</tonode> <toport>v</toport>
+   </datalink>
+   <datalink control="false">
+      <fromnode>ForEach2</fromnode> <fromport>evalSamples</fromport>
+      <tonode>ForEach2.PyScript7</tonode> <toport>v</toport>
+   </datalink>
+   <parameter>
+      <tonode>ForEach2</tonode> <toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+   <parameter>
+      <tonode>ForEach1</tonode> <toport>nbBranches</toport>
+      <value><int>1</int></value>
+   </parameter>
+</proc>
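To sanity-check the sample schema by hand, a minimal sketch, assuming a SALOME session
and the installed samples directory as current directory (the loader calls are the same
ones the tests below rely on):

    import SALOMERuntime, loader, pilot
    SALOMERuntime.RuntimeSALOME_setRuntime()
    proc = loader.YACSLoader().load("samples/wlm_2foreach.xml")
    # "End" gathers both foreach results and the measured execution time
    print(proc.getChildByName("End").getName())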
diff --git a/src/yacsloader_swig/Test/CMakeLists.txt b/src/yacsloader_swig/Test/CMakeLists.txt
index e33ec979c..aee19124c 100644
--- a/src/yacsloader_swig/Test/CMakeLists.txt
+++ b/src/yacsloader_swig/Test/CMakeLists.txt
@@ -47,6 +47,7 @@ IF(NOT WIN32)
     testExecForEachGeoMesh.py
     async_plugin.py
     testWorkloadManager.py
+    testPynodeWithCache.py
   )

   INSTALL(PROGRAMS ${LOCAL_TEST_FILES}
           DESTINATION ${LOCAL_TEST_DIR})
diff --git a/src/yacsloader_swig/Test/CTestTestfileInstall.cmake b/src/yacsloader_swig/Test/CTestTestfileInstall.cmake
index c8117f763..d1d5809f5 100644
--- a/src/yacsloader_swig/Test/CTestTestfileInstall.cmake
+++ b/src/yacsloader_swig/Test/CTestTestfileInstall.cmake
@@ -28,12 +28,18 @@ IF(NOT WIN32)
   ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} StdAloneYacsLoaderTest1.py)
   SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
                        LABELS "${COMPONENT_NAME}"
-                      )
+                       )
+
+  SET(TEST_NAME ${COMPONENT_NAME}_PyNodeWithCache_swig)
+  ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} testPynodeWithCache.py)
+  SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
+                       LABELS "${COMPONENT_NAME}"
+                       )

   SET(TEST_NAME ${COMPONENT_NAME}_WorkloadManager_swig)
   ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} testWorkloadManager.py)
   SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
                        LABELS "${COMPONENT_NAME}"
-                      )
+                       )
 ENDIF()
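Once installed, both new tests can be selected through CTest by name, for example with
`ctest -R "PyNodeWithCache_swig|WorkloadManager_swig" --output-on-failure` run from the
install's test directory (assuming the usual SALOME test deployment; the exact test name
prefix comes from COMPONENT_NAME).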
diff --git a/src/yacsloader_swig/Test/testPynodeWithCache.py b/src/yacsloader_swig/Test/testPynodeWithCache.py
new file mode 100755
index 000000000..0dd3f817c
--- /dev/null
+++ b/src/yacsloader_swig/Test/testPynodeWithCache.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+# Copyright (C) 2006-2020 CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import sys
+import pilot
+import SALOMERuntime
+import loader
+import unittest
+import tempfile
+import os
+
+dir_test = tempfile.mkdtemp(suffix=".yacstest")
+
+class TestEdit(unittest.TestCase):
+
+    def setUp(self):
+        SALOMERuntime.RuntimeSALOME_setRuntime()
+        self.r = pilot.getRuntime()
+        self.l = loader.YACSLoader()
+        self.e = pilot.ExecutorSwig()
+        pass
+
+    def test1(self):
+        """ Test the conservation of the python context between two nodes sharing
+            the same container.
+            Schema: n1 -> n2
+        """
+        runtime=self.r
+        executor=self.e
+        yacsloader=self.l
+        proc=runtime.createProc("MySchema")
+        ti=proc.createType("int","int")
+        cont=proc.createContainer("MyContainer","Salome")
+        # type "multi" : the workload manager chooses the resource
+        # type "mono"  : the resource is chosen by the kernel, using the old rules
+        cont.setProperty("type","multi")
+        # number of cores used by the container
+        cont.setProperty("nb_parallel_procs", "1")
+        n1=runtime.createScriptNode("","n1")
+        n2=runtime.createScriptNode("","n2")
+        n1.setExecutionMode("remote")
+        n2.setExecutionMode("remote")
+        n1.setContainer(cont)
+        n2.setContainer(cont)
+        n1.setScript("v=42")
+        res_port=n2.edAddOutputPort("v", ti)
+        proc.edAddChild(n1)
+        proc.edAddChild(n2)
+        proc.edAddCFLink(n1,n2)
+        # set the default execution mode using the workload manager;
+        # if no property is set, the old executor is used
+        proc.setProperty("executor", "workloadmanager")
+        # reuse the same python context for every execution
+        cont.setStoreContext(True)
+        # save & reload
+        schema_file = os.path.join(dir_test,"pynode_with_cache1.xml")
+        proc.saveSchema(schema_file)
+        reloaded_proc = yacsloader.load(schema_file)
+        # default run method of the executor, which uses the property "executor"
+        # in order to choose the actual run method
+        executor.RunW(reloaded_proc,0)
+        # you can also directly call the executor you wish, ignoring the property
+        #executor.RunB(proc,0)   # always use the "old" executor
+        #executor.runWlm(proc,0) # always use the workload manager based executor
+        reloaded_res_port = reloaded_proc.getChildByName("n2").getOutputPort("v")
+        self.assertEqual(reloaded_res_port.getPyObj(), 42)
+
+    def test2(self):
+        """ Same as test1, but using the old executor instead of the workload
+            manager.
+        """
+        runtime=self.r
+        executor=self.e
+        yacsloader=self.l
+        proc=runtime.createProc("MySchema")
+        ti=proc.createType("int","int")
+        cont=proc.createContainer("MyContainer","Salome")
+        # With the old executor the type "multi" imposes the creation of a new
+        # container for every node. We need the type "mono" in order to have
+        # the same container used for both YACS nodes.
+        cont.setProperty("type","mono")
+        n1=runtime.createScriptNode("","n1")
+        n2=runtime.createScriptNode("","n2")
+        n1.setExecutionMode("remote")
+        n2.setExecutionMode("remote")
+        n1.setContainer(cont)
+        n2.setContainer(cont)
+        n1.setScript("v=42")
+        res_port=n2.edAddOutputPort("v", ti)
+        proc.edAddChild(n1)
+        proc.edAddChild(n2)
+        proc.edAddCFLink(n1,n2)
+        # reuse the same python context for every execution
+        cont.setStoreContext(True)
+        # save & reload
+        schema_file = os.path.join(dir_test,"pynode_with_cache2.xml")
+        proc.saveSchema(schema_file)
+        reloaded_proc = yacsloader.load(schema_file)
+        # the proc has no "executor" property here, so the default run method
+        # falls back to the old executor
+        executor.RunW(reloaded_proc,0)
+        # you can also directly call the executor you wish, ignoring the property
+        #executor.RunB(proc,0) # always use the "old" executor
+        reloaded_res_port = reloaded_proc.getChildByName("n2").getOutputPort("v")
+        self.assertEqual(reloaded_res_port.getPyObj(), 42)
+
+if __name__ == '__main__':
+    file_test = os.path.join(dir_test,"UnitTestsResult")
+    with open(file_test, 'a') as f:
+        f.write("  --- TEST src/yacsloader: testPynodeWithCache.py\n")
+        suite = unittest.makeSuite(TestEdit)
+        result=unittest.TextTestRunner(f, descriptions=1, verbosity=1).run(suite)
+    sys.exit(not result.wasSuccessful())
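A point worth noting in both tests above: n2 never assigns v itself. Its output port is
satisfied only because setStoreContext(True) keeps the container's python context alive
after n1 has run. A condensed sketch of the mechanism, reusing the names from the tests:

    n1.setScript("v=42")                   # n1 leaves v in the shared context
    res_port = n2.edAddOutputPort("v", ti) # n2 has no script of its own
    cont.setStoreContext(True)             # without this, n2 cannot see v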
diff --git a/src/yacsloader_swig/Test/testWorkloadManager.py b/src/yacsloader_swig/Test/testWorkloadManager.py
index ec9e8dbe3..6b522e741 100755
--- a/src/yacsloader_swig/Test/testWorkloadManager.py
+++ b/src/yacsloader_swig/Test/testWorkloadManager.py
@@ -25,6 +25,7 @@ import loader
 import unittest
 import tempfile
 import os
+import salome

 class TestEdit(unittest.TestCase):

@@ -33,48 +34,27 @@ class TestEdit(unittest.TestCase):
         self.r = pilot.getRuntime()
         self.l = loader.YACSLoader()
         self.e = pilot.ExecutorSwig()
-        pass
+        salome.salome_init()
+        resourceManager = salome.lcc.getResourcesManager()
+        resource_definition = resourceManager.GetResourceDefinition("localhost")
+        resource_definition.nb_node = 16
+        resourceManager.AddResource(resource_definition, False, "")
+        #resource_definition = resourceManager.GetResourceDefinition("localhost")
+        #self.assertEqual(resource_definition.nb_node, 16)

     def test1(self):
-        """ Test the conservation of the python context between two nodes sharing
-            the same container.
-            Schema: n1 -> n2
-        """
-        runtime=self.r
-        executor=self.e
-        proc=runtime.createProc("MySchema")
-        ti=proc.createType("int","int")
-        cont=proc.createContainer("MyContainer","Salome")
-        # type "multi" : the workload manager chooses the resource
-        # type "mono" : the workload manager does not choose the resource
-        cont.setProperty("type","multi")
-        # number of cores used by the container
-        cont.setProperty("nb_parallel_procs", "1")
-        n1=runtime.createScriptNode("","n1")
-        n2=runtime.createScriptNode("","n2")
-        n1.setExecutionMode("remote")
-        n2.setExecutionMode("remote")
-        n1.setContainer(cont)
-        n2.setContainer(cont)
-        n1.setScript("v=42")
-        res_port=n2.edAddOutputPort("v", ti)
-        proc.edAddChild(n1)
-        proc.edAddChild(n2)
-        proc.edAddCFLink(n1,n2)
-        # set the default execution mode using the workload manager
-        proc.setProperty("executor", "workloadmanager")
-        # reuse the same python context for every execution
-        cont.setStoreContext(True)
-        #proc.saveSchema("mini_wlm.xml")
-        executor=pilot.ExecutorSwig()
-        # default run method of the executor which uses the property "executor"
-        # in order to choose the actual run method
-        executor.RunW(proc,0)
-        # you can also impose the executor, ignoring the property "executor"
-        #executor.RunB(proc,0) # use the "historical" executor
-        #executor.runWlm(proc,0) # use the workload manager based executor
-
-        self.assertEqual(res_port.getPyObj(), 42)
+        """ Two parallel foreach loops with different containers.
+        """
+        proc = self.l.load("samples/wlm_2foreach.xml")
+        self.e.RunW(proc,0)
+        res_port = proc.getChildByName("End").getOutputPort("r")
+        # the theoretical execution time is 15s
+        execution_time = res_port.getPyObj()
+        # a lower time means some resources are overloaded
+        self.assertTrue(execution_time > 13)
+        # The containers need some time to be launched.
+        # We need some room for that.
+        self.assertTrue(execution_time < 20)

 if __name__ == '__main__':
     dir_test = tempfile.mkdtemp(suffix=".yacstest")
-- 
2.39.2