From e97386254712047f27f5554da6470fe3fb65d07a Mon Sep 17 00:00:00 2001
From: Yogesh Tyagi
Date: Thu, 27 Jun 2024 12:14:27 +0530
Subject: openvino-inference-engine: Remove openvino related recipes and tests

* Remove all openvino related recipes, tests and other data from the
  meta-intel layer, as a new layer (meta-openvino) specific to openvino
  has been created.
* Update openvino documentation

meta-openvino layer URL: https://github.com/intel/meta-openvino
(see the layer-setup sketch after the patch)

Signed-off-by: Yogesh Tyagi
Signed-off-by: Anuj Mittal
---
 lib/oeqa/runtime/cases/dldt_inference_engine.py | 109 ------------------------
 1 file changed, 109 deletions(-)
 delete mode 100644 lib/oeqa/runtime/cases/dldt_inference_engine.py

diff --git a/lib/oeqa/runtime/cases/dldt_inference_engine.py b/lib/oeqa/runtime/cases/dldt_inference_engine.py
deleted file mode 100644
index fb35d52f..00000000
--- a/lib/oeqa/runtime/cases/dldt_inference_engine.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from oeqa.runtime.case import OERuntimeTestCase
-from oeqa.runtime.decorator.package import OEHasPackage
-from oeqa.core.decorator.depends import OETestDepends
-from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
-from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
-from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
-from oeqa.runtime.miutils.tests.dldt_inference_engine_test import DldtInferenceEngineTest
-from oeqa.runtime.miutils.dldtutils import get_testdata_config
-
-class DldtInferenceEngine(OERuntimeTestCase):
-
-    @classmethod
-    def setUpClass(cls):
-        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/ie/md')
-        cls.sqn_download.setup()
-        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/ie/ir')
-        cls.dldt_mo.setup()
-        cls.dldt_ie = DldtInferenceEngineTest(OEQATarget(cls.tc.target), '/tmp/ie/inputs')
-        cls.dldt_ie.setup()
-        cls.ir_files_dir = cls.dldt_mo.work_dir
-
-    @classmethod
-    def tearDownClass(cls):
-        cls.dldt_ie.tear_down()
-        cls.dldt_mo.tear_down()
-        cls.sqn_download.tear_down()
-
-    @OEHasPackage(['dldt-model-optimizer'])
-    @OEHasPackage(['wget'])
-    def test_dldt_ie_can_create_ir_and_download_input(self):
-        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
-        if not proxy_port:
-            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
-        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
-        if not mo_exe_dir:
-            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
-        mo_files_dir = self.sqn_download.work_dir
-        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-        (status, output) = self.dldt_ie.test_can_download_input_file(proxy_port)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-samples'])
-    def test_dldt_ie_classification_with_cpu(self):
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('CPU', self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-samples'])
-    @OEHasPackage(['intel-compute-runtime'])
-    @OEHasPackage(['ocl-icd'])
-    def test_dldt_ie_classification_with_gpu(self):
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('GPU', self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-samples'])
-    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
-    def test_dldt_ie_classification_with_myriad(self):
-        device = 'MYRIAD'
-        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
-        if not status:
-            self.skipTest('OpenVINO %s device not available on target machine(availalbe devices: %s)' % (device, output))
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device(device, self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-python3'])
-    @OEHasPackage(['python3-opencv'])
-    @OEHasPackage(['python3-numpy'])
-    def test_dldt_ie_classification_python_api_with_cpu(self):
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('CPU', self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-python3'])
-    @OEHasPackage(['intel-compute-runtime'])
-    @OEHasPackage(['ocl-icd'])
-    @OEHasPackage(['python3-opencv'])
-    @OEHasPackage(['python3-numpy'])
-    def test_dldt_ie_classification_python_api_with_gpu(self):
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('GPU', self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
-
-    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
-    @OEHasPackage(['dldt-inference-engine'])
-    @OEHasPackage(['dldt-inference-engine-python3'])
-    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
-    @OEHasPackage(['python3-opencv'])
-    @OEHasPackage(['python3-numpy'])
-    def test_dldt_ie_classification_python_api_with_myriad(self):
-        device = 'MYRIAD'
-        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
-        if not status:
-            self.skipTest('OpenVINO %s device not available on target machine(availalbe devices: %s)' % (device, output))
-        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device(device, self.ir_files_dir)
-        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
--
cgit v1.2.3-54-g00ecf
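
For builds that previously relied on the removed recipes and tests, a minimal sketch of pulling in the new meta-openvino layer might look like the following. This assumes a standard Poky build environment already initialised with oe-init-build-env; the clone path and branch are illustrative only, and the meta-openvino README at the URL above remains the authoritative reference for layer dependencies and supported releases:

    # Fetch the new layer alongside the other layers (path is illustrative).
    git clone https://github.com/intel/meta-openvino ../meta-openvino
    # Register the layer with the current build; this updates conf/bblayers.conf.
    bitbake-layers add-layer ../meta-openvino

After the layer is added, the OpenVINO recipes and runtime tests are provided by meta-openvino rather than meta-intel.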