2 Copyright (c) 2019 Intel Corporation
4 Licensed under the Apache License, Version 2.0 (the "License");
5 you may not use this file except in compliance with the License.
6 You may obtain a copy of the License at
8 http://www.apache.org/licenses/LICENSE-2.0
10 Unless required by applicable law or agreed to in writing, software
11 distributed under the License is distributed on an "AS IS" BASIS,
12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 See the License for the specific language governing permissions and
14 limitations under the License.
19 from pathlib import Path
20 from typing import Union
21 from ..utils import get_path, format_key
def convert_model(topology_name, model=None, weights=None,
                  framework='caffe', mo_search_paths=None, mo_params=None, mo_flags=None,
                  tf_custom_op_config_dir=None, tf_object_detection_api_config_dir=None):
    """
    Convert an original framework model to IE IR using the Model Optimizer.

    Args:
        topology_name: name for converted model files.
        model: path to the topology file.
        weights: path to the weights file.
        framework: framework name for original model.
        mo_search_paths: paths where ModelOptimizer may be found. If None only default paths is used.
        mo_params: value parameters for ModelOptimizer execution.
        mo_flags: flags parameters for ModelOptimizer execution.
        tf_custom_op_config_dir: path to Tensor Flow custom operations directory.
        tf_object_detection_api_config_dir: path to Tensor Flow directory with config for object detection API.
    Returns:
        paths to converted to IE IR model and weights.
    Raises:
        EnvironmentError: if the Model Optimizer executable cannot be located.
        RuntimeError: if conversion fails or the converted IR files are not found.
    """

    mo_params = mo_params or {}
    mo_flags = mo_flags or []

    # Fill in 'model_name' unless the caller already provided one.
    set_topology_name(mo_params, topology_name)

    model_optimizer_executable = find_mo(mo_search_paths)
    if not model_optimizer_executable:
        raise EnvironmentError(
            'Model optimizer not found. Please set MO_DIR environment variable to model optimizer folder '
            'installation or refer to help for command line options for providing Model optimizer'
        )

    # Each framework expects the input files under different MO option names.
    framework_specific_options = {
        'caffe': {'input_model': weights, 'input_proto': model},
        'mxnet': {'input_model': weights},
        'tf': {'input_model': model},
        'onnx': {'input_model': model},
        'kaldi': {'input_model': model}
    }

    mo_params['framework'] = framework
    mo_params.update(framework_specific_options.get(framework, {}))

    # TF-specific option post-processing (no-ops for other frameworks).
    set_path_to_custom_operation_configs(mo_params, framework, tf_custom_op_config_dir, model_optimizer_executable)
    set_path_to_object_detection_api_pipeline_config(mo_params, framework, tf_object_detection_api_config_dir)
    args = prepare_args(str(model_optimizer_executable), flag_options=mo_flags, value_options=mo_params)

    code = exec_mo_binary(args)

    if code.returncode != 0:
        raise RuntimeError("Model optimizer conversion failed: ModelOptimizer returned non-zero code")

    # MO writes the IR next to 'output_dir' (or the current directory by default).
    model_file, bin_file = find_dlsdk_ir(
        get_path(mo_params.get('output_dir', Path.cwd()), is_directory=True), mo_params['model_name']
    )
    if not bin_file or not model_file:
        raise RuntimeError("Model optimizer finished correctly, but converted model is not found.")

    return model_file, bin_file
def find_dlsdk_ir(search_path: Path, model_name):
    """
    Locate the IE IR files (<model_name>.xml / <model_name>.bin) for a converted model.

    Args:
        search_path: path with IE IR of model.
        model_name: name of the model.
    Returns:
        paths to IE IR of model.
    """

    xml_file = search_path / '{}.xml'.format(model_name)
    bin_file = search_path / '{}.bin'.format(model_name)

    # get_path validates existence and raises if either file is missing.
    return get_path(xml_file), get_path(bin_file)
def find_mo(search_paths=None) -> Union[Path, None]:
    """
    Find the Model Optimizer entry point (mo.py).

    Args:
        search_paths: paths where ModelOptimizer may be found. If None only default paths is used.
    Returns:
        path to the ModelOptimizer or None if it wasn't found.
    """

    default_mo_path = ('intel', 'computer_vision_sdk', 'deployment_tools', 'model_optimizer')
    default_paths = [Path.home().joinpath(*default_mo_path), Path('/opt').joinpath(*default_mo_path)]

    executable = 'mo.py'
    for path in search_paths or default_paths:
        path = Path(path)
        if not path.is_dir():
            continue

        mo = path / executable
        if mo.is_file():
            return mo

    return None
def prepare_args(executable, flag_options=None, value_options=None):
    """
    Build the command line for running the Model Optimizer script.

    Args:
        executable: path to the executable.
        flag_options: positional arguments for executable.
        value_options: keyword arguments for executable.
    Returns:
        list with command-line entries.
    """

    # MO is a Python script, so it is launched through the current interpreter.
    result = [sys.executable, executable]

    for flag_option in flag_options or []:
        result.append(str(format_key(flag_option)))

    for key, value in (value_options or {}).items():
        result.append(str(format_key(key)))
        result.append(str(value))

    return result
def exec_mo_binary(args, timeout=None):
    """
    Run the Model Optimizer command line and wait for completion.

    Args:
        args: command-line entries.
        timeout: timeout for execution.
    Returns:
        subprocess.CompletedProcess with the return code of the run.
    """

    # check=False: the caller inspects returncode instead of catching an exception.
    return subprocess.run(args, check=False, timeout=timeout)
def set_path_to_custom_operation_configs(mo_params, framework, tf_custom_op_config_dir, mo_path):
    """
    Resolve the TF custom operations config option in mo_params to an absolute path.

    Mutates mo_params in place; no-op for non-TF frameworks or when the option is unset.

    Args:
        mo_params: value parameters for ModelOptimizer execution (updated in place).
        framework: framework name for original model.
        tf_custom_op_config_dir: directory with TF custom operation configs, if provided.
        mo_path: path to the Model Optimizer executable, used to derive the default config dir.
    Returns:
        mo_params (for call chaining).
    """
    if framework != 'tf':
        return mo_params

    config_path = mo_params.get('tensorflow_use_custom_operations_config')
    if not config_path:
        return mo_params

    if tf_custom_op_config_dir:
        tf_custom_op_config_dir = Path(tf_custom_op_config_dir)
    else:
        # Default location: <mo_dir>/extensions/front/tf next to the MO executable.
        tf_custom_op_config_dir = Path('/').joinpath(*mo_path.parts[:-1]) / 'extensions' / 'front' / 'tf'

    config_path = Path(config_path)
    if not config_path.is_absolute():
        config_path = tf_custom_op_config_dir / config_path

    # get_path validates that the resolved config file exists.
    mo_params['tensorflow_use_custom_operations_config'] = str(get_path(config_path))

    return mo_params
def set_path_to_object_detection_api_pipeline_config(mo_params, framework, object_detection_api_config_dir=None):
    """
    Resolve the TF Object Detection API pipeline config option in mo_params to an absolute path.

    Mutates mo_params in place; no-op for non-TF frameworks or when the option is unset.

    Args:
        mo_params: value parameters for ModelOptimizer execution (updated in place).
        framework: framework name for original model.
        object_detection_api_config_dir: directory with pipeline configs; defaults to the
            input model's parent directory when not provided.
    Returns:
        mo_params (for call chaining).
    """
    object_detection_api_config = mo_params.get('tensorflow_object_detection_api_pipeline_config')
    if framework != 'tf' or not object_detection_api_config:
        return mo_params

    object_detection_api_config_dir = Path(object_detection_api_config_dir or get_path(mo_params['input_model']).parent)
    config_path = object_detection_api_config_dir / object_detection_api_config
    # get_path validates that the resolved config file exists.
    mo_params['tensorflow_object_detection_api_pipeline_config'] = str(get_path(config_path))

    return mo_params
def set_topology_name(mo_params, topology_name):
    # Default the MO 'model_name' option to the topology name unless the caller
    # already set a non-empty one (mutates mo_params in place).
    if not mo_params.get('model_name'):
        mo_params['model_name'] = topology_name