author		Yogesh Tyagi <yogesh.tyagi@intel.com>	2024-06-27 12:14:27 +0530
committer	Anuj Mittal <anuj.mittal@intel.com>	2024-07-12 16:40:57 +0800
commit		e97386254712047f27f5554da6470fe3fb65d07a (patch)
tree		d6e49969da310c1ac1edfce0451099a7aa91281c /lib
parent		ea0a7de92f983fde57ae64d642fb7b534627cd82 (diff)
download	meta-intel-e97386254712047f27f5554da6470fe3fb65d07a.tar.gz
openvino-inference-engine: Remove openvino related recipes and tests

* Remove all openvino related recipes, tests and other data from the
  meta-intel layer, as a new layer (meta-openvino) specific to OpenVINO
  has been created.
* Update openvino documentation.

meta-openvino layer URL:
https://github.com/intel/meta-openvino
Signed-off-by: Yogesh Tyagi <yogesh.tyagi@intel.com>
Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
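
For builds that previously consumed these recipes from meta-intel, the OpenVINO
recipes now come from the meta-openvino layer instead. A minimal migration
sketch, assuming a standard poky checkout with the build environment already
initialized; the clone path is illustrative, not taken from this commit:

    # Fetch the replacement layer alongside the other layers (path is illustrative)
    git clone https://github.com/intel/meta-openvino ../meta-openvino
    # Register it with the build (assumes oe-init-build-env has been sourced)
    bitbake-layers add-layer ../meta-openvino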
Diffstat (limited to 'lib')
 lib/oeqa/runtime/cases/dldt_inference_engine.py                       | 109 -
 lib/oeqa/runtime/cases/dldt_model_optimizer.py                        |  38 -
 lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py | 135 -
 lib/oeqa/runtime/miutils/dldtutils.py                                 |   3 -
 lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py          |  56 -
 lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py           |  23 -
 lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py      |  25 -
 7 files changed, 0 insertions(+), 389 deletions(-)
diff --git a/lib/oeqa/runtime/cases/dldt_inference_engine.py b/lib/oeqa/runtime/cases/dldt_inference_engine.py
deleted file mode 100644
index fb35d52f..00000000
--- a/lib/oeqa/runtime/cases/dldt_inference_engine.py
+++ /dev/null
@@ -1,109 +0,0 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.tests.dldt_inference_engine_test import DldtInferenceEngineTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtInferenceEngine(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/ie/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/ie/ir')
        cls.dldt_mo.setup()
        cls.dldt_ie = DldtInferenceEngineTest(OEQATarget(cls.tc.target), '/tmp/ie/inputs')
        cls.dldt_ie.setup()
        cls.ir_files_dir = cls.dldt_mo.work_dir

    @classmethod
    def tearDownClass(cls):
        cls.dldt_ie.tear_down()
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_ie_can_create_ir_and_download_input(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        (status, output) = self.dldt_ie.test_can_download_input_file(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    def test_dldt_ie_classification_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('CPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['ocl-icd'])
    def test_dldt_ie_classification_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-samples'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    def test_dldt_ie_classification_with_myriad(self):
        device = 'MYRIAD'
        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
        if not status:
            self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
        (status, output) = self.dldt_ie.test_dldt_ie_classification_with_device(device, self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_cpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('CPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['intel-compute-runtime'])
    @OEHasPackage(['ocl-icd'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_gpu(self):
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device('GPU', self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

    @OETestDepends(['dldt_inference_engine.DldtInferenceEngine.test_dldt_ie_can_create_ir_and_download_input'])
    @OEHasPackage(['dldt-inference-engine'])
    @OEHasPackage(['dldt-inference-engine-python3'])
    @OEHasPackage(['dldt-inference-engine-vpu-firmware'])
    @OEHasPackage(['python3-opencv'])
    @OEHasPackage(['python3-numpy'])
    def test_dldt_ie_classification_python_api_with_myriad(self):
        device = 'MYRIAD'
        (status, output) = self.dldt_ie.test_check_if_openvino_device_available(device)
        if not status:
            self.skipTest('OpenVINO %s device not available on target machine (available devices: %s)' % (device, output))
        (status, output) = self.dldt_ie.test_dldt_ie_classification_python_api_with_device(device, self.ir_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
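
The skip paths in the removed test above read their settings from the bitbake
test data via get_testdata_config(), so the tests only ran when those variables
were set. A minimal local.conf sketch; the variable names come from the
skipTest messages above, while the values shown are placeholders:

    # Proxy the target uses to fetch models and inputs (placeholder value)
    DLDT_PIP_PROXY = "proxy.example.com:911"
    # Directory on the target containing mo.py (placeholder value)
    DLDT_MO_EXE_DIR = "/usr/share/openvino/model-optimizer"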
diff --git a/lib/oeqa/runtime/cases/dldt_model_optimizer.py b/lib/oeqa/runtime/cases/dldt_model_optimizer.py
deleted file mode 100644
index 736ea661..00000000
--- a/lib/oeqa/runtime/cases/dldt_model_optimizer.py
+++ /dev/null
@@ -1,38 +0,0 @@
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.runtime.miutils.targets.oeqatarget import OEQATarget
from oeqa.runtime.miutils.tests.squeezenet_model_download_test import SqueezenetModelDownloadTest
from oeqa.runtime.miutils.tests.dldt_model_optimizer_test import DldtModelOptimizerTest
from oeqa.runtime.miutils.dldtutils import get_testdata_config

class DldtModelOptimizer(OERuntimeTestCase):

    @classmethod
    def setUpClass(cls):
        cls.sqn_download = SqueezenetModelDownloadTest(OEQATarget(cls.tc.target), '/tmp/mo/md')
        cls.sqn_download.setup()
        cls.dldt_mo = DldtModelOptimizerTest(OEQATarget(cls.tc.target), '/tmp/mo/ir')
        cls.dldt_mo.setup()

    @classmethod
    def tearDownClass(cls):
        cls.dldt_mo.tear_down()
        cls.sqn_download.tear_down()

    @OEHasPackage(['dldt-model-optimizer'])
    @OEHasPackage(['wget'])
    def test_dldt_mo_can_create_ir(self):
        proxy_port = get_testdata_config(self.tc.td, 'DLDT_PIP_PROXY')
        if not proxy_port:
            self.skipTest('Need to configure bitbake configuration (DLDT_PIP_PROXY="proxy.server:port").')
        (status, output) = self.sqn_download.test_can_download_squeezenet_model(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
        (status, output) = self.sqn_download.test_can_download_squeezenet_prototxt(proxy_port)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))

        mo_exe_dir = get_testdata_config(self.tc.td, 'DLDT_MO_EXE_DIR')
        if not mo_exe_dir:
            self.skipTest('Need to configure bitbake configuration (DLDT_MO_EXE_DIR="directory_to_mo.py").')
        mo_files_dir = self.sqn_download.work_dir
        (status, output) = self.dldt_mo.test_dldt_mo_can_create_ir(mo_exe_dir, mo_files_dir)
        self.assertEqual(status, 0, msg='status and output: %s and %s' % (status, output))
diff --git a/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py b/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
deleted file mode 100644
index 1906e9fe..00000000
--- a/lib/oeqa/runtime/files/dldt-inference-engine/classification_sample.py
+++ /dev/null
@@ -1,135 +0,0 @@
#!/usr/bin/env python3
"""
Copyright (C) 2018-2019 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import sys
import os
from argparse import ArgumentParser, SUPPRESS
import cv2
import numpy as np
import logging as log
from time import time
from openvino.inference_engine import IENetwork, IECore


def build_argparser():
    parser = ArgumentParser(add_help=False)
    args = parser.add_argument_group('Options')
    args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Show this help message and exit.')
    args.add_argument("-m", "--model", help="Required. Path to an .xml file with a trained model.", required=True,
                      type=str)
    args.add_argument("-i", "--input", help="Required. Path to a folder with images or path to image files",
                      required=True,
                      type=str, nargs="+")
    args.add_argument("-l", "--cpu_extension",
                      help="Optional. Required for CPU custom layers. "
                           "MKLDNN (CPU)-targeted custom layers. Absolute path to a shared library with the"
                           " kernels implementations.", type=str, default=None)
    args.add_argument("-d", "--device",
                      help="Optional. Specify the target device to infer on; CPU, GPU, FPGA, HDDL, MYRIAD or HETERO is "
                           "acceptable. The sample will look for a suitable plugin for the device specified. Default "
                           "value is CPU",
                      default="CPU", type=str)
    args.add_argument("--labels", help="Optional. Path to a labels mapping file", default=None, type=str)
    args.add_argument("-nt", "--number_top", help="Optional. Number of top results", default=10, type=int)

    return parser


def main():
    log.basicConfig(format="[ %(levelname)s ] %(message)s", level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    model_xml = args.model
    model_bin = os.path.splitext(model_xml)[0] + ".bin"

    # Plugin initialization for specified device and load extensions library if specified
    log.info("Creating Inference Engine")
    ie = IECore()
    if args.cpu_extension and 'CPU' in args.device:
        ie.add_extension(args.cpu_extension, "CPU")
    # Read IR
    log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
    net = IENetwork(model=model_xml, weights=model_bin)

    if "CPU" in args.device:
        supported_layers = ie.query_network(net, "CPU")
        not_supported_layers = [l for l in net.layers.keys() if l not in supported_layers]
        if len(not_supported_layers) != 0:
            log.error("Following layers are not supported by the plugin for specified device {}:\n {}".
                      format(args.device, ', '.join(not_supported_layers)))
            log.error("Please try to specify cpu extensions library path in sample's command line parameters using -l "
                      "or --cpu_extension command line argument")
            sys.exit(1)

    assert len(net.inputs.keys()) == 1, "Sample supports only single input topologies"
    assert len(net.outputs) == 1, "Sample supports only single output topologies"

    log.info("Preparing input blobs")
    input_blob = next(iter(net.inputs))
    out_blob = next(iter(net.outputs))
    net.batch_size = len(args.input)

    # Read and pre-process input images
    n, c, h, w = net.inputs[input_blob].shape
    images = np.ndarray(shape=(n, c, h, w))
    for i in range(n):
        image = cv2.imread(args.input[i])
        if image.shape[:-1] != (h, w):
            log.warning("Image {} is resized from {} to {}".format(args.input[i], image.shape[:-1], (h, w)))
            image = cv2.resize(image, (w, h))
        image = image.transpose((2, 0, 1))  # Change data layout from HWC to CHW
        images[i] = image
    log.info("Batch size is {}".format(n))

    # Loading model to the plugin
    log.info("Loading model to the plugin")
    exec_net = ie.load_network(network=net, device_name=args.device)

    # Start sync inference
    log.info("Starting inference in synchronous mode")
    res = exec_net.infer(inputs={input_blob: images})

    # Processing output blob
    log.info("Processing output blob")
    res = res[out_blob]
    log.info("Top {} results: ".format(args.number_top))
    if args.labels:
        with open(args.labels, 'r') as f:
            labels_map = [x.split(sep=' ', maxsplit=1)[-1].strip() for x in f]
    else:
        labels_map = None
    classid_str = "classid"
    probability_str = "probability"
    for i, probs in enumerate(res):
        probs = np.squeeze(probs)
        top_ind = np.argsort(probs)[-args.number_top:][::-1]
        print("Image {}\n".format(args.input[i]))
        print(classid_str, probability_str)
        print("{} {}".format('-' * len(classid_str), '-' * len(probability_str)))
        for id in top_ind:
            det_label = labels_map[id] if labels_map else "{}".format(id)
            label_length = len(det_label)
            space_num_before = (len(classid_str) - label_length) // 2
            space_num_after = len(classid_str) - (space_num_before + label_length) + 2
            space_num_before_prob = (len(probability_str) - len(str(probs[id]))) // 2
            print("{}{}{}{}{:.7f}".format(' ' * space_num_before, det_label,
                                          ' ' * space_num_after, ' ' * space_num_before_prob,
                                          probs[id]))
        print("\n")
    log.info("This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool\n")


if __name__ == '__main__':
    sys.exit(main() or 0)
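
Per the argument parser above, -m and -i are the only required options. A usage
sketch mirroring how the runtime test drives this sample on the target; the
file names match those in dldt_inference_engine_test.py below, while the
directories are the scratch paths those tests happen to use:

    # Classify one image on CPU and print the top 5 results
    python3 /tmp/ie/inputs/classification_sample.py -d CPU \
        -i /tmp/ie/inputs/chicky_512.png \
        -m /tmp/ie/ir/squeezenet_v1.1.xml -nt 5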
diff --git a/lib/oeqa/runtime/miutils/dldtutils.py b/lib/oeqa/runtime/miutils/dldtutils.py
deleted file mode 100644
index 45bf2e12..00000000
--- a/lib/oeqa/runtime/miutils/dldtutils.py
+++ /dev/null
@@ -1,3 +0,0 @@

def get_testdata_config(testdata, config):
    return testdata.get(config)
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py b/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
deleted file mode 100644
index 31bfb539..00000000
--- a/lib/oeqa/runtime/miutils/tests/dldt_inference_engine_test.py
+++ /dev/null
@@ -1,56 +0,0 @@
import os
script_path = os.path.dirname(os.path.realpath(__file__))
files_path = os.path.join(script_path, '../../files/')

class DldtInferenceEngineTest(object):
    ie_input_files = {'ie_python_sample': 'classification_sample.py',
                      'input': 'chicky_512.png',
                      'input_download': 'https://raw.githubusercontent.com/opencv/opencv/master/samples/data/chicky_512.png',
                      'model': 'squeezenet_v1.1.xml'}

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)
        self.target.copy_to(os.path.join(files_path, 'dldt-inference-engine', self.ie_input_files['ie_python_sample']),
                            self.work_dir)
        python_cmd = 'from openvino.inference_engine import IENetwork, IECore; ie = IECore(); print(ie.available_devices)'
        __, output = self.target.run('python3 -c "%s"' % python_cmd)
        self.available_devices = output

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_check_if_openvino_device_available(self, device):
        if device not in self.available_devices:
            return False, self.available_devices
        return True, self.available_devices

    def test_can_download_input_file(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.ie_input_files['input_download'],
                                proxy_port))

    def test_dldt_ie_classification_with_device(self, device, ir_files_dir):
        return self.target.run('classification_sample_async -d %s -i %s -m %s' %
                               (device,
                                os.path.join(self.work_dir, self.ie_input_files['input']),
                                os.path.join(ir_files_dir, self.ie_input_files['model'])))

    def test_dldt_ie_classification_python_api_with_device(self, device, ir_files_dir, extension=''):
        if extension:
            return self.target.run('python3 %s -d %s -i %s -m %s -l %s' %
                                   (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
                                    device,
                                    os.path.join(self.work_dir, self.ie_input_files['input']),
                                    os.path.join(ir_files_dir, self.ie_input_files['model']),
                                    extension))
        else:
            return self.target.run('python3 %s -d %s -i %s -m %s' %
                                   (os.path.join(self.work_dir, self.ie_input_files['ie_python_sample']),
                                    device,
                                    os.path.join(self.work_dir, self.ie_input_files['input']),
                                    os.path.join(ir_files_dir, self.ie_input_files['model'])))
diff --git a/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py b/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
deleted file mode 100644
index 7d3db15b..00000000
--- a/lib/oeqa/runtime/miutils/tests/dldt_model_optimizer_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
import os

class DldtModelOptimizerTest(object):
    mo_input_files = {'model': 'squeezenet_v1.1.caffemodel',
                      'prototxt': 'deploy.prototxt'}
    mo_exe = 'mo.py'

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_dldt_mo_can_create_ir(self, mo_exe_dir, mo_files_dir):
        return self.target.run('python3 %s --input_model %s --input_proto %s --output_dir %s --data_type FP16' %
                               (os.path.join(mo_exe_dir, self.mo_exe),
                                os.path.join(mo_files_dir, self.mo_input_files['model']),
                                os.path.join(mo_files_dir, self.mo_input_files['prototxt']),
                                self.work_dir))
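
Expanded with the values used elsewhere in this test suite, the Model Optimizer
command the helper above builds looks as follows; the file names come from
mo_input_files, the directories from the DldtModelOptimizer test case, and
mo.py's location is whatever DLDT_MO_EXE_DIR points at:

    # Convert the Caffe model to FP16 OpenVINO IR (mo.py path depends on DLDT_MO_EXE_DIR)
    python3 ${DLDT_MO_EXE_DIR}/mo.py --input_model /tmp/mo/md/squeezenet_v1.1.caffemodel \
        --input_proto /tmp/mo/md/deploy.prototxt \
        --output_dir /tmp/mo/ir --data_type FP16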
diff --git a/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py b/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
deleted file mode 100644
index a3e46a0a..00000000
--- a/lib/oeqa/runtime/miutils/tests/squeezenet_model_download_test.py
+++ /dev/null
@@ -1,25 +0,0 @@
class SqueezenetModelDownloadTest(object):
    download_files = {'squeezenet1.1.prototxt': 'https://raw.githubusercontent.com/DeepScale/SqueezeNet/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/deploy.prototxt',
                      'squeezenet1.1.caffemodel': 'https://github.com/DeepScale/SqueezeNet/raw/a47b6f13d30985279789d08053d37013d67d131b/SqueezeNet_v1.1/squeezenet_v1.1.caffemodel'}

    def __init__(self, target, work_dir):
        self.target = target
        self.work_dir = work_dir

    def setup(self):
        self.target.run('mkdir -p %s' % self.work_dir)

    def tear_down(self):
        self.target.run('rm -rf %s' % self.work_dir)

    def test_can_download_squeezenet_model(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.download_files['squeezenet1.1.caffemodel'],
                                proxy_port))

    def test_can_download_squeezenet_prototxt(self, proxy_port):
        return self.target.run('cd %s; wget %s -e https_proxy=%s' %
                               (self.work_dir,
                                self.download_files['squeezenet1.1.prototxt'],
                                proxy_port))