/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <json/json.h>
// This function parses a JSON array of alternating keys and values and returns it as a map,
// e.g., [0, [1, 2, 3, 4], 3, 40, 4, []] in JSON is converted to
//   {0: [1, 2, 3, 4], 3: 40, 4: []}
// in std::unordered_map. Note that the value type is still Json::Value.
std::unordered_map<uint32_t, Json::Value> argArrayToMap(const Json::Value &jsonval)
{
  if (!jsonval.isArray() || (jsonval.size() % 2 != 0))
  {
    std::cerr << "JSON argument must be an even-sized array\n";
    exit(1);
  }

  std::unordered_map<uint32_t, Json::Value> ret;
  for (uint32_t i = 0; i < jsonval.size(); i += 2)
  {
    if (!jsonval[i].isUInt())
    {
      std::cerr << "Key values (values at even indices) must be unsigned integers\n";
      exit(1);
    }
    uint32_t key = jsonval[i].asUInt();
    Json::Value val = jsonval[i + 1];
    ret[key] = val;
  }
  return ret;
}
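
// Illustrative usage sketch (not part of the original source); it assumes jsoncpp's
// Json::Reader, which the option handlers below also use:
//
//   Json::Value root;
//   Json::Reader reader;
//   reader.parse("[0, [1, 2, 3, 4], 3, 40, 4, []]", root, false);
//   auto arg_map = argArrayToMap(root);
//   // arg_map[0] == [1, 2, 3, 4], arg_map[3] == 40, arg_map[4] == []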

// param shape_str is a string such as "[1, [2, 3], 3, []]" or "h5"
void handleShapeJsonParam(onert_run::TensorShapeMap &shape_map, const std::string &shape_str)
{
  Json::Value root;
  Json::Reader reader;
  if (!reader.parse(shape_str, root, false))
  {
    std::cerr << "Invalid JSON format for shape string \"" << shape_str << "\"\n";
    exit(1);
  }

  auto arg_map = argArrayToMap(root);
  for (auto &pair : arg_map)
  {
    uint32_t key = pair.first;
    Json::Value &shape_json = pair.second;
    if (!shape_json.isArray())
    {
      std::cerr << "All the values must be lists: " << shape_str << "\n";
      exit(1);
    }

    std::vector<int> shape;
    for (auto &dim_json : shape_json)
    {
      if (!dim_json.isUInt())
      {
        std::cerr << "All the dims must be >= 0: " << shape_str << "\n";
        exit(1);
      }
      shape.emplace_back(dim_json.asUInt64());
    }
    shape_map[key] = shape;
  }
}
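
// Illustrative example (not part of the original source); the shape values are made up:
//
//   onert_run::TensorShapeMap shape_map;
//   handleShapeJsonParam(shape_map, "[0, [1, 224, 224, 3], 2, []]");
//   // shape_map[0] == {1, 224, 224, 3}, shape_map[2] == {} (a scalar shape)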

void checkModelfile(const std::string &model_filename)
{
  if (model_filename.empty())
  {
    // TODO Print usage instead of the message below
    std::cerr << "Please specify a model file. Run with `--help` for usage." << std::endl;
    exit(1);
  }
  if (access(model_filename.c_str(), F_OK) == -1)
  {
    std::cerr << "Model file not found: " << model_filename << "\n";
    exit(1);
  }
}

void checkPackage(const std::string &package_filename)
{
  if (package_filename.empty())
  {
    // TODO Print usage instead of the message below
    std::cerr << "Please specify an nnpackage file. Run with `--help` for usage." << std::endl;
    exit(1);
  }
  if (access(package_filename.c_str(), F_OK) == -1)
  {
    std::cerr << "nnpackage not found: " << package_filename << "\n";
    exit(1);
  }
}

namespace onert_run
{

Args::Args(const int argc, char **argv) { Initialize(); Parse(argc, argv); }

void Args::Initialize(void)
{
  auto process_nnpackage = [&](const std::string &package_filename) {
    _package_filename = package_filename;
    std::cerr << "Package Filename " << _package_filename << std::endl;
    checkPackage(package_filename);
  };

  auto process_modelfile = [&](const std::string &model_filename) {
    _model_filename = model_filename;
    std::cerr << "Model Filename " << _model_filename << std::endl;
    checkModelfile(model_filename);
    _use_single_model = true;
  };

  auto process_path = [&](const std::string &path) {
    struct stat sb;
    if (stat(path.c_str(), &sb) == 0)
    {
      if (S_ISDIR(sb.st_mode))
      {
        _package_filename = path;
        checkPackage(path);
        std::cerr << "Package Filename " << path << std::endl;
      }
      else
      {
        _model_filename = path;
        checkModelfile(path);
        std::cerr << "Model Filename " << path << std::endl;
        _use_single_model = true;
      }
    }
    else
    {
      std::cerr << "Cannot find: " << path << "\n";
      exit(1);
    }
  };

  auto process_output_sizes = [&](const std::string &output_sizes_json_str) {
    Json::Value root;
    Json::Reader reader;
    if (!reader.parse(output_sizes_json_str, root, false))
    {
      std::cerr << "Invalid JSON format for output_sizes \"" << output_sizes_json_str << "\"\n";
      exit(1);
    }

    auto arg_map = argArrayToMap(root);
    for (auto &pair : arg_map)
    {
      uint32_t key = pair.first;
      Json::Value &val_json = pair.second;
      if (!val_json.isUInt())
      {
        std::cerr << "All the values in `output_sizes` must be unsigned integers\n";
        exit(1);
      }
      uint32_t val = val_json.asUInt();
      _output_sizes[key] = val;
    }
  };
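
  // Illustrative example (not part of the original source): passing
  // --output_sizes '[0, 40, 2, 80]' fills _output_sizes with {0: 40, 2: 80},
  // i.e. buffer size 40 for output 0 and buffer size 80 for output 2.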

  auto process_shape_prepare = [&](const std::string &shape_str) {
#if defined(ONERT_HAVE_HDF5) && ONERT_HAVE_HDF5 == 1
    if (shape_str == "H5" || shape_str == "h5")
    {
      _when_to_use_h5_shape = WhenToUseH5Shape::PREPARE;
      return;
    }
#endif
    try
    {
      handleShapeJsonParam(_shape_prepare, shape_str);
    }
    catch (const std::exception &e)
    {
      std::cerr << "error with '--shape_prepare' option: " << shape_str << std::endl;
      exit(1);
    }
  };

  auto process_shape_run = [&](const std::string &shape_str) {
#if defined(ONERT_HAVE_HDF5) && ONERT_HAVE_HDF5 == 1
    if (shape_str == "H5" || shape_str == "h5")
    {
      _when_to_use_h5_shape = WhenToUseH5Shape::RUN;
      return;
    }
#endif
    try
    {
      handleShapeJsonParam(_shape_run, shape_str);
    }
    catch (const std::exception &e)
    {
      std::cerr << "error with '--shape_run' option: " << shape_str << std::endl;
      exit(1);
    }
  };
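
  // Illustrative examples of accepted values (see the option descriptions below):
  //   --shape_prepare '[0, [1, 2], 2, []]'  -> shapes applied before nnfw_prepare()
  //   --shape_run '[0, [1, 2], 2, []]'      -> shapes applied before nnfw_run()
  //   --shape_run h5                        -> read shapes from the '--load' H5 file (HDF5 builds only)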

  po::options_description general("General options", 100);

  general.add_options()
    ("help,h", "Print available options")
    ("version", "Print version and exit immediately")
    ("nnpackage", po::value<std::string>()->notifier(process_nnpackage), "NN Package file (directory) name")
    ("modelfile", po::value<std::string>()->notifier(process_modelfile), "NN Model filename")
    ("path", po::value<std::string>()->notifier(process_path), "NN Package or NN Modelfile path")
#if defined(ONERT_HAVE_HDF5) && ONERT_HAVE_HDF5 == 1
    ("dump,d", po::value<std::string>()->default_value("")->notifier([&](const auto &v) { _dump_filename = v; }), "Output filename")
    ("load,l", po::value<std::string>()->default_value("")->notifier([&](const auto &v) { _load_filename = v; }), "Input filename")
#endif
    ("dump:raw", po::value<std::string>()->default_value("")->notifier([&](const auto &v) { _dump_raw_filename = v; }), "Raw Output filename")
    ("load:raw", po::value<std::string>()->default_value("")->notifier([&](const auto &v) { _load_raw_filename = v; }), "Raw Input filename")
269 ("output_sizes", po::value<std::string>()->notifier(process_output_sizes),
270 "The output buffer size in JSON 1D array\n"
271 "If not given, the model's output sizes are used\n"
272 "e.g. '[0, 40, 2, 80]' to set 0th tensor to 40 and 2nd tensor to 80.\n")
273 ("num_runs,r", po::value<int>()->default_value(1)->notifier([&](const auto &v) { _num_runs = v; }), "The number of runs")
274 ("warmup_runs,w", po::value<int>()->default_value(0)->notifier([&](const auto &v) { _warmup_runs = v; }), "The number of warmup runs")
275 ("run_delay,t", po::value<int>()->default_value(-1)->notifier([&](const auto &v) { _run_delay = v; }), "Delay time(us) between runs (as default no delay")
276 ("gpumem_poll,g", po::value<bool>()->default_value(false)->notifier([&](const auto &v) { _gpumem_poll = v; }), "Check gpu memory polling separately")
277 ("mem_poll,m", po::value<bool>()->default_value(false)->notifier([&](const auto &v) { _mem_poll = v; }), "Check memory polling")
278 ("write_report,p", po::value<bool>()->default_value(false)->notifier([&](const auto &v) { _write_report = v; }),
280 "{exec}-{nnpkg|modelfile}-{backend}.csv will be generated.\n"
281 "e.g. onert_run-UNIT_Add_000-acl_cl.csv.\n"
282 "{nnpkg|modelfile} name may be changed to realpath if you use symbolic-link.")
283 ("shape_prepare", po::value<std::string>()->default_value("[]")->notifier(process_shape_prepare),
284 "Please refer to the description of 'shape_run'")
285 ("shape_run", po::value<std::string>()->default_value("[]")->notifier(process_shape_run),
286 "'--shape_prepare: set shape of tensors before compilation (before calling nnfw_prepare()).\n"
287 "'--shape_run: set shape of tensors before running (before calling nnfw_run()).\n"
289 "'[0, [1, 2], 2, []]': set 0th tensor to [1, 2] and 2nd tensor to [] (scalar).\n"
290 #if defined(ONERT_HAVE_HDF5) && ONERT_HAVE_HDF5 == 1
291 "'h5': read shape(s) from H5 input file. '--load' should also be provided.\n"
292 "if '--load' option is provided but '--shape_prepare' or '--shape_run' is not provided,\n"
293 "'--shape_run h5' will be used by default.\n"
295 "For detailed description, please consutl the description of nnfw_set_input_tensorinfo()\n"
297 ("verbose_level,v", po::value<int>()->default_value(0)->notifier([&](const auto &v) { _verbose_level = v; }),
299 "0: prints the only result. Messages btw run don't print\n"
300 "1: prints result and message btw run\n"
301 "2: prints all of messages to print\n")
305 _options.add(general);
306 _positional.add("path", -1);
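
// Illustrative invocation (not part of the original source); the path and values are made up:
//
//   onert_run path/to/nnpackage --num_runs 5 --warmup_runs 2 \
//             --output_sizes '[0, 40, 2, 80]' --shape_run '[0, [1, 224, 224, 3]]'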

void Args::Parse(const int argc, char **argv)
{
  po::variables_map vm;
  po::store(po::command_line_parser(argc, argv).options(_options).positional(_positional).run(),
            vm);

  if (vm.count("help"))
  {
    std::cout << "onert_run\n\n";
    std::cout << "Usage: " << argv[0] << " <path to nnpackage root directory> [<options>]\n\n";
    std::cout << _options;
    exit(0);
  }

  if (vm.count("version"))
  {
    _print_version = true;
    return;
  }

  auto conflicting_options = [&](const std::string &o1, const std::string &o2) {
    if ((vm.count(o1) && !vm[o1].defaulted()) && (vm.count(o2) && !vm[o2].defaulted()))
    {
      throw boost::program_options::error(std::string("Two options '") + o1 + "' and '" + o2 +
                                          "' cannot be given at once.");
    }
  };

  // Calling, e.g., "onert_run .. --shape_prepare .. --shape_run .." should theoretically
  // work, but allowing both options together on the command line makes the usage and
  // implementation of onert_run too complicated. Therefore let's not allow those options together.
  conflicting_options("shape_prepare", "shape_run");

  // Cannot use both a single model file and an nnpackage at once
  conflicting_options("modelfile", "nnpackage");

  // Require modelfile, nnpackage, or path
  if (!vm.count("modelfile") && !vm.count("nnpackage") && !vm.count("path"))
    throw boost::program_options::error(
      std::string("Require one of options modelfile, nnpackage, or path."));

  try
  {
    po::notify(vm);
  }
  catch (const std::bad_cast &e)
  {
    std::cerr << "Bad cast error - " << e.what() << '\n';
    exit(1);
  }

  // This must be run after `notify` as `_warmup_runs` must have been processed before.
  if (vm.count("mem_poll"))
  {
    // Memory polling runs on WARMUP instead of EXECUTE to avoid overhead
    if (_mem_poll && _warmup_runs == 0)
      _warmup_runs = 1;
  }
}

bool Args::shapeParamProvided()
{
  bool provided = false;
#if defined(ONERT_HAVE_HDF5) && ONERT_HAVE_HDF5 == 1
  // "--shape_run h5" or "--shape_prepare h5" was provided
  provided = (getWhenToUseH5Shape() != WhenToUseH5Shape::NOT_PROVIDED);
#endif
  // A specific shape was provided,
  // e.g., "--shape_run '[0, [10, 1]]'" or "--shape_prepare '[0, [10, 1]]'"
  provided |= (!getShapeMapForPrepare().empty()) || (!getShapeMapForRun().empty());

  return provided;
}

} // end of namespace onert_run