1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
 * @brief A header for advanced hardware-related properties for Inference Engine plugins.
 *        To be used in the SetConfig() method of plugins and in the
 *        LoadNetwork() method overloads that accept a config map as a parameter.
 * @file ie_plugin_config.hpp
namespace InferenceEngine {

/**
 * @brief Generic plugin configuration keys and values.
 * Keys/values declared here are plain string constants: each DECLARE_* macro
 * expands to a `static constexpr auto NAME = "NAME"` declaration, so both the
 * key and its value travel through the config map as strings.
 */
namespace PluginConfigParams {

/**
 * @brief Shortcut for defining configuration keys.
 * CONFIG_KEY(FOO) names the fully-qualified constant
 * InferenceEngine::PluginConfigParams::KEY_FOO; DECLARE_CONFIG_KEY(FOO)
 * declares that constant with the string value "FOO" (via `#name` stringizing
 * and `KEY_##name` token pasting).
 */
#define CONFIG_KEY(name) InferenceEngine::PluginConfigParams::_CONFIG_KEY(name)
#define _CONFIG_KEY(name) KEY_##name
#define DECLARE_CONFIG_KEY(name) static constexpr auto _CONFIG_KEY(name) = #name

/**
 * @brief Shortcut for defining configuration values.
 * DECLARE_CONFIG_VALUE(FOO) declares a constant FOO whose string value is "FOO".
 */
#define CONFIG_VALUE(name) InferenceEngine::PluginConfigParams::name
#define DECLARE_CONFIG_VALUE(name) static constexpr auto name = #name

/**
 * @brief Generic boolean values used by the YES/NO-style keys below.
 */
DECLARE_CONFIG_VALUE(YES);
DECLARE_CONFIG_VALUE(NO);

/**
 * @brief Limits the number of threads that the Inference Engine uses for inference on the CPU.
 */
DECLARE_CONFIG_KEY(CPU_THREADS_NUM);

/**
 * @brief The name for the setting of the CPU-affinity-per-thread option.
 * It is passed to IInferencePlugin::SetConfig(); this option should be used with values
 * PluginConfigParams::YES or PluginConfigParams::NO.
 * Ignored if OpenVINO is compiled with OpenMP threading and any affinity-related
 * OpenMP environment variable is set.
 */
DECLARE_CONFIG_KEY(CPU_BIND_THREAD);

/**
 * @brief Optimizes CPU execution to maximize throughput.
 * It is passed to IInferencePlugin::SetConfig(); this option should be used with values:
 * - KEY_CPU_THROUGHPUT_NUMA creates as many streams as needed to accommodate NUMA and avoid
 *   the associated penalties
 * - KEY_CPU_THROUGHPUT_AUTO creates the bare minimum of streams needed to improve performance;
 *   this is the most portable option if you have no insight into how many cores your target
 *   machine will have (and what the optimal number of streams is)
 * - finally, specifying a positive integer value creates the requested number of streams
 */
DECLARE_CONFIG_VALUE(CPU_THROUGHPUT_NUMA);
DECLARE_CONFIG_VALUE(CPU_THROUGHPUT_AUTO);
DECLARE_CONFIG_KEY(CPU_THROUGHPUT_STREAMS);

/**
 * @brief The name for the setting of the performance-counters option.
 * It is passed to IInferencePlugin::SetConfig(); this option should be used with values
 * PluginConfigParams::YES or PluginConfigParams::NO.
 */
DECLARE_CONFIG_KEY(PERF_COUNT);

/**
 * @brief The key defines a dynamic limit of batch processing.
 * The specified value is applied to all following Infer() calls. The Inference Engine processes
 * the first min(batch_limit, original_batch_size) pictures from the input blob. For example, if the
 * input blob has sizes 32x3x224x224, then after plugin.SetConfig({KEY_DYN_BATCH_LIMIT, 10}) is
 * applied the Inference Engine primitives process only the beginning sub-blobs of size 10x3x224x224.
 * This value can be changed before any Infer() call to specify a new batch limit.
 *
 * The paired parameter value should be convertible to an integer number. Acceptable values:
 * -1 - Do not limit batch processing
 * >0 - Direct value of the limit. The batch size to process is min(new batch_limit, original_batch)
 */
DECLARE_CONFIG_KEY(DYN_BATCH_LIMIT);

// NOTE(review): undocumented here — presumably toggles dynamic-batch support (YES/NO)
// so that KEY_DYN_BATCH_LIMIT takes effect; confirm against the plugin documentation.
DECLARE_CONFIG_KEY(DYN_BATCH_ENABLED);

/**
 * @brief The key controls threading inside the Inference Engine.
 * It is passed to IInferencePlugin::SetConfig(); this option should be used with values
 * PluginConfigParams::YES or PluginConfigParams::NO.
 */
DECLARE_CONFIG_KEY(SINGLE_THREAD);

/**
 * @brief This key directs the plugin to load a configuration file.
 * The value should be a file name with the plugin-specific configuration.
 */
DECLARE_CONFIG_KEY(CONFIG_FILE);

/**
 * @brief This key enables dumping of the kernels used by the plugin for custom layers.
 * This option should be used with values PluginConfigParams::YES or PluginConfigParams::NO (default).
 */
DECLARE_CONFIG_KEY(DUMP_KERNELS);

/**
 * @brief This key controls the performance tuning done or used by the plugin.
 * This option should be used with values PluginConfigParams::TUNING_CREATE,
 * PluginConfigParams::TUNING_USE_EXISTING or PluginConfigParams::TUNING_DISABLED (default).
 */
DECLARE_CONFIG_KEY(TUNING_MODE);

// Values for KEY_TUNING_MODE (see above).
DECLARE_CONFIG_VALUE(TUNING_CREATE);
DECLARE_CONFIG_VALUE(TUNING_USE_EXISTING);
DECLARE_CONFIG_VALUE(TUNING_DISABLED);

/**
 * @brief This key defines the tuning-data file name to be created/used.
 */
DECLARE_CONFIG_KEY(TUNING_FILE);

/**
 * @brief The key for setting the desired log level.
 * This option should be used with values PluginConfigParams::LOG_NONE (default),
 * PluginConfigParams::LOG_WARNING, PluginConfigParams::LOG_INFO, PluginConfigParams::LOG_DEBUG.
 */
DECLARE_CONFIG_KEY(LOG_LEVEL);

// Values for KEY_LOG_LEVEL, from least to most verbose.
DECLARE_CONFIG_VALUE(LOG_NONE);
DECLARE_CONFIG_VALUE(LOG_WARNING);
DECLARE_CONFIG_VALUE(LOG_INFO);
DECLARE_CONFIG_VALUE(LOG_DEBUG);

/**
 * @brief The key for setting the required device to execute on.
 * Values: device id, starting from "0" - first device, "1" - second device, etc.
 */
DECLARE_CONFIG_KEY(DEVICE_ID);

/**
 * @brief The key for enabling exclusive mode for async requests of different executable networks
 * and the same plugin.
 * Sometimes it is necessary to avoid oversubscription by requests that share the same device
 * in parallel. E.g. there are 2 task executors for the CPU device: one in the Hetero plugin,
 * another in the pure CPU plugin. Parallel execution of both of them might lead to
 * oversubscription and non-optimal CPU usage; it is more efficient to run the corresponding
 * tasks one by one via a single executor.
 * By default, the option is set to YES for hetero cases, and to NO for conventional
 * (single-plugin) cases.
 * Notice that setting YES disables the CPU-streams feature (see another config key in this file).
 */
DECLARE_CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS);

/**
 * @brief This key enables dumping of the internal primitive graph.
 * Should be passed into the LoadNetwork method to enable dumping of the internal graph of
 * primitives and the corresponding configuration information. The value is the name of the
 * output dot file without extension.
 * Files <dot_file_name>_init.dot and <dot_file_name>_perf.dot will be produced.
 */
DECLARE_CONFIG_KEY(DUMP_EXEC_GRAPH_AS_DOT);

}  // namespace PluginConfigParams
}  // namespace InferenceEngine