/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #include "nnfw_api_internal.h"
18 #include "CustomKernelRegistry.h"
19 #include "compiler/CompilerFactory.h"
20 #include "util/ConfigSource.h"
21 #include "util/Exceptions.h"
22 #include "util/logging.h"
23 #include "exec/Execution.h"
24 #include "circle_loader.h"
25 #include "tflite_loader.h"
26 #include "trix_loader.h"
27 #include "json/json.h"
29 #include "ir/OpCode.h"
30 #include "util/TracingCtx.h"
37 #include <misc/string_helpers.h>
/*
 * API does not accept string argument longer than max length below
 */
42 #define MAX_BACKEND_NAME_LENGTH 32
43 #define MAX_OP_NAME_LENGTH 64
44 #define MAX_PATH_LENGTH 1024
45 #define MAX_TENSOR_NAME_LENGTH 64
50 // Is null-terminating in length ?
// Is null-terminating in length ?
// Returns true iff a '\0' terminator appears within the first `length` characters.
bool null_terminating(const char *str, uint32_t length)
{
  for (uint32_t pos = 0; pos < length; ++pos)
  {
    if (str[pos] == '\0')
      return true;
  }
  return false;
}
63 onert::ir::Layout convertLayout(NNFW_LAYOUT layout)
65 if (layout == NNFW_LAYOUT_CHANNELS_LAST)
67 return onert::ir::Layout::NHWC;
69 else if (layout == NNFW_LAYOUT_CHANNELS_FIRST)
71 return onert::ir::Layout::NCHW;
73 return onert::ir::Layout::UNKNOWN;
76 NNFW_STATUS getTensorIndexImpl(const onert::ir::Graph &graph, const char *tensorname,
77 uint32_t *index, bool is_input)
79 if (!tensorname || !index)
80 return NNFW_STATUS_UNEXPECTED_NULL;
82 if (!null_terminating(tensorname, MAX_TENSOR_NAME_LENGTH))
84 std::cerr << "nnpackage path is too long" << std::endl;
85 return NNFW_STATUS_ERROR;
88 auto ind_found = is_input ? graph.getInputIndex(tensorname) : graph.getOutputIndex(tensorname);
90 if (ind_found.undefined())
93 return NNFW_STATUS_ERROR;
97 *index = ind_found.value();
98 return NNFW_STATUS_NO_ERROR;
// Strip leading and trailing spaces/tabs; returns "" for all-whitespace input.
std::string trim(const std::string &value)
{
  static const char *const kWhitespace = " \t";
  const auto first = value.find_first_not_of(kWhitespace);
  if (first == std::string::npos)
    return ""; // no content
  const auto last = value.find_last_not_of(kWhitespace);
  return value.substr(first, last - first + 1);
}
114 bool loadConfigure(const std::string cfgfile, onert::util::CfgKeyValues &keyValues)
116 std::ifstream ifs(cfgfile);
120 while (std::getline(ifs, line))
122 auto cmtpos = line.find('#');
123 if (cmtpos != std::string::npos)
125 line = line.substr(0, cmtpos);
127 std::istringstream isline(line);
129 if (std::getline(isline, key, '='))
132 if (std::getline(isline, value))
135 keyValues[key] = trim(value);
145 NNFW_TYPE datatype_to_nnfw_dtype(onert::ir::DataType dt)
147 using onert::ir::DataType;
150 case DataType::FLOAT32:
151 return NNFW_TYPE_TENSOR_FLOAT32;
152 case DataType::INT32:
153 return NNFW_TYPE_TENSOR_INT32;
154 case DataType::QUANT_UINT8_ASYMM:
155 return NNFW_TYPE_TENSOR_QUANT8_ASYMM;
156 case DataType::BOOL8:
157 return NNFW_TYPE_TENSOR_BOOL;
158 case DataType::UINT8:
159 return NNFW_TYPE_TENSOR_UINT8;
160 case DataType::INT64:
161 return NNFW_TYPE_TENSOR_INT64;
162 case DataType::QUANT_INT8_ASYMM:
163 return NNFW_TYPE_TENSOR_QUANT8_ASYMM_SIGNED;
164 case DataType::QUANT_INT16_SYMM:
165 return NNFW_TYPE_TENSOR_QUANT16_SYMM_SIGNED;
166 case DataType::UINT32:
167 case DataType::QUANT_INT8_SYMM:
169 throw std::runtime_error("Error: Model has type that runtime API does not support.");
173 void fillTensorInfo(nnfw_tensorinfo *ti, const onert::ir::Shape &shape,
174 const onert::ir::DataType &dtype)
176 ti->rank = shape.rank();
177 for (int j = 0; j < ti->rank; ++j)
179 ti->dims[j] = shape.dim(j);
181 ti->dtype = datatype_to_nnfw_dtype(dtype);
184 std::unique_ptr<onert::ir::Model> loadModel(const std::string filename,
185 const std::string model_type)
187 if (model_type == "tflite")
188 return onert::tflite_loader::loadModel(filename.c_str());
189 if (model_type == "circle")
190 return onert::circle_loader::loadModel(filename.c_str());
191 if (model_type == "tvn")
192 return onert::trix_loader::loadModel(filename.c_str());
194 std::cerr << "Unsupported model type" << std::endl;
195 return std::unique_ptr<onert::ir::Model>(nullptr);
200 nnfw_session::nnfw_session()
201 : _nnpkg{nullptr}, _coptions{}, _compiler_artifact{nullptr}, _execution{nullptr},
202 _kernel_registry{nullptr}
207 NNFW_STATUS nnfw_session::create(nnfw_session **session)
209 if (session == nullptr)
210 return NNFW_STATUS_UNEXPECTED_NULL;
213 auto new_session = std::unique_ptr<nnfw_session>(new nnfw_session());
214 new_session->_kernel_registry = std::make_shared<onert::api::CustomKernelRegistry>();
215 *session = new_session.release();
217 catch (const std::bad_alloc &e)
219 std::cerr << "Error during session creation" << std::endl;
220 *session = nullptr; // Set nullptr on error to keep the old behavior
221 return NNFW_STATUS_OUT_OF_MEMORY;
223 catch (const std::exception &e)
225 std::cerr << "Error during session initialization : " << e.what() << std::endl;
226 *session = nullptr; // Set nullptr on error to keep the old behavior
227 return NNFW_STATUS_ERROR;
229 return NNFW_STATUS_NO_ERROR;
// Out-of-line default destructor: members are RAII-managed (smart pointers),
// so there is nothing to release manually.
nnfw_session::~nnfw_session() = default;
234 NNFW_STATUS nnfw_session::load_circle_from_buffer(uint8_t *buffer, size_t size)
236 if (!isStateInitialized())
237 return NNFW_STATUS_INVALID_STATE;
240 return NNFW_STATUS_UNEXPECTED_NULL;
243 return NNFW_STATUS_ERROR;
247 auto model = onert::circle_loader::loadModel(buffer, size);
248 _nnpkg = std::make_shared<onert::ir::NNPkg>(std::move(model));
249 _coptions.push_back(onert::compiler::CompilerOptions::fromGlobalConfig());
250 _state = State::MODEL_LOADED;
252 catch (const std::exception &e)
254 std::cerr << "Error during model loading : " << e.what() << std::endl;
255 return NNFW_STATUS_ERROR;
257 return NNFW_STATUS_NO_ERROR;
260 NNFW_STATUS nnfw_session::load_model_from_modelfile(const char *model_file_path)
262 if (!isStateInitialized())
263 return NNFW_STATUS_INVALID_STATE;
265 if (!model_file_path)
267 std::cerr << "Model file path is null." << std::endl;
268 return NNFW_STATUS_UNEXPECTED_NULL;
271 std::string filename{model_file_path};
272 // TODO: Use std::filesystem::path when we can use c++17.
273 auto dotidx = filename.find_last_of('.');
274 if (dotidx == std::string::npos)
276 std::cerr << "Invalid model file path. Please use file with extension." << std::endl;
277 return NNFW_STATUS_ERROR;
279 std::string model_type = filename.substr(dotidx + 1); // + 1 to exclude dot
282 auto model = loadModel(filename, model_type);
283 if (model == nullptr)
284 return NNFW_STATUS_ERROR;
285 _nnpkg = std::make_shared<onert::ir::NNPkg>(std::move(model));
286 _coptions.push_back(onert::compiler::CompilerOptions::fromGlobalConfig());
287 _state = State::MODEL_LOADED;
289 catch (const std::exception &e)
291 std::cerr << "Error during model loading : " << e.what() << std::endl;
292 return NNFW_STATUS_ERROR;
294 return NNFW_STATUS_NO_ERROR;
297 NNFW_STATUS nnfw_session::load_model_from_nnpackage(const char *package_dir)
299 if (!isStateInitialized())
300 return NNFW_STATUS_INVALID_STATE;
304 std::cerr << "package_dir is null." << std::endl;
305 return NNFW_STATUS_UNEXPECTED_NULL;
308 if (!null_terminating(package_dir, MAX_PATH_LENGTH))
310 std::cerr << "nnpackage path is too long" << std::endl;
311 return NNFW_STATUS_ERROR;
314 // TODO : add support for zipped package file load
316 if (!(dir = opendir(package_dir)))
318 std::cerr << "invalid nnpackge directory: " << package_dir << std::endl;
319 return NNFW_STATUS_ERROR;
325 std::string package_path(package_dir);
326 std::string manifest_file_name = package_path + "/metadata/MANIFEST";
327 std::ifstream mfs(manifest_file_name);
329 // extract the filename of the first(index 0) model
330 // e.g. In MANIFEST file, { "models" : [ "firstmodel.tflite", "2nd.tflite" ] }
333 const Json::Value &models = root["models"];
334 const Json::Value &model_types = root["model-types"];
335 const Json::Value &configs = root["configs"];
337 if (!configs.empty() && !configs[0].empty())
339 auto filepath = package_path + std::string("/metadata/") + configs[0].asString();
341 onert::util::CfgKeyValues keyValues;
342 if (loadConfigure(filepath, keyValues))
344 onert::util::setConfigKeyValues(keyValues);
347 _nnpkg = std::make_shared<onert::ir::NNPkg>();
348 auto num_models = models.size();
349 if (num_models == 0 || (num_models - 1) > onert::ir::ModelIndex::max())
351 std::cerr << "Invalid model size - " << std::to_string(num_models) << std::endl;
352 return NNFW_STATUS_ERROR;
355 for (uint16_t i = 0; i < num_models; ++i)
357 auto model_file_path = package_path + std::string("/") + models[i].asString();
358 auto model_type = model_types[i].asString();
359 auto model = loadModel(model_file_path, model_type);
360 if (model == nullptr)
361 return NNFW_STATUS_ERROR;
362 model->primary_subgraph()->bindKernelBuilder(_kernel_registry->getBuilder());
363 _nnpkg->push(onert::ir::ModelIndex{i}, std::move(model));
364 _coptions.push_back(onert::compiler::CompilerOptions::fromGlobalConfig());
367 auto toIODesc = [](std::string str) {
368 auto indices = nnfw::misc::split(str, ':');
369 if (indices.size() != 3)
371 std::cerr << "IODesc should be 3-tuple." << std::endl;
372 return onert::ir::IODesc{};
374 auto model_idx = static_cast<uint32_t>(std::stoi(indices.at(0)));
375 auto subgraph_idx = static_cast<uint32_t>(std::stoi(indices.at(1)));
376 auto operand_idx = static_cast<uint32_t>(std::stoi(indices.at(2)));
377 return onert::ir::IODesc{model_idx, subgraph_idx, operand_idx};
379 // read pkg-inputs and pkg-outputs
380 const Json::Value &pkg_inputs = root["pkg-inputs"];
381 for (uint32_t i = 0; i < pkg_inputs.size(); ++i)
382 _nnpkg->addInput(toIODesc(pkg_inputs[i].asString()));
383 const Json::Value &pkg_outputs = root["pkg-outputs"];
384 for (uint32_t i = 0; i < pkg_outputs.size(); ++i)
385 _nnpkg->addOutput(toIODesc(pkg_outputs[i].asString()));
386 // read model-connect
387 const Json::Value &fromtos = root["model-connect"];
388 for (uint32_t i = 0; i < fromtos.size(); ++i)
390 const Json::Value &tos = fromtos[i]["to"];
391 for (uint32_t j = 0; j < tos.size(); ++j)
392 _nnpkg->addEdge(toIODesc(fromtos[i]["from"].asString()), toIODesc(tos[j].asString()));
396 _state = State::MODEL_LOADED;
398 catch (const std::exception &e)
400 std::cerr << "Error during model loading : " << e.what() << std::endl;
401 return NNFW_STATUS_ERROR;
403 return NNFW_STATUS_NO_ERROR;
406 NNFW_STATUS nnfw_session::prepare()
408 // NOTE. If users want to run prepare() more than one time, this could be removed.
409 if (!isStateModelLoaded())
411 std::cerr << "Error during model prepare : ";
412 if (isStateInitialized())
414 std::cerr << "prepare should be run once";
418 std::cerr << "invalid state";
420 std::cerr << std::endl;
421 return NNFW_STATUS_INVALID_STATE;
426 auto compiler = onert::compiler::CompilerFactory::get().create(_nnpkg, _coptions);
428 _compiler_artifact = compiler->compile();
429 _execution = std::make_unique<onert::exec::Execution>(_compiler_artifact->_executors);
431 catch (const std::exception &e)
433 std::cerr << "Error during model prepare : " << e.what() << std::endl;
434 return NNFW_STATUS_ERROR;
437 _state = State::PREPARED;
438 return NNFW_STATUS_NO_ERROR;
441 NNFW_STATUS nnfw_session::prepare_pipeline(const char *)
443 std::cerr << "Pipeline prepare_pipeline: deprecated feature " << std::endl;
444 return NNFW_STATUS_ERROR;
447 NNFW_STATUS nnfw_session::run()
449 if (!isStatePreparedOrFinishedRun())
451 std::cerr << "Error during nnfw_session::run : "
452 << "run should be run after prepare" << std::endl;
453 return NNFW_STATUS_INVALID_STATE;
458 _execution->execute();
460 catch (const onert::InsufficientBufferSizeException &e)
462 // Currently insufficient buffer always means output buffer.
463 std::cerr << "Error during nnfw_session::run : " << e.what() << std::endl;
464 return NNFW_STATUS_INSUFFICIENT_OUTPUT_SIZE;
466 catch (const std::exception &e)
468 std::cerr << "Error during nnfw_session::run : " << e.what() << std::endl;
469 return NNFW_STATUS_ERROR;
472 _state = State::FINISHED_RUN;
473 return NNFW_STATUS_NO_ERROR;
476 NNFW_STATUS nnfw_session::run_async()
478 if (!isStatePreparedOrFinishedRun())
480 std::cerr << "Error during nnfw_session::run_async : "
481 << "run_async should be run after prepare" << std::endl;
482 return NNFW_STATUS_INVALID_STATE;
485 _execution->startExecute();
487 _state = State::RUNNING;
488 return NNFW_STATUS_NO_ERROR;
491 NNFW_STATUS nnfw_session::await()
493 if (!isStateRunning())
495 std::cerr << "Error during nnfw_session::run_await : "
496 << "run_await should be run after run_async" << std::endl;
497 return NNFW_STATUS_ERROR;
500 _execution->waitFinish();
502 _state = State::FINISHED_RUN;
503 return NNFW_STATUS_NO_ERROR;
506 NNFW_STATUS nnfw_session::set_input(uint32_t index, NNFW_TYPE /*type*/, const void *buffer,
509 if (!isStatePreparedOrFinishedRun())
511 std::cerr << "Error during nnfw_session::set_input : invalid state" << std::endl;
512 return NNFW_STATUS_INVALID_STATE;
515 if (!buffer && length != 0)
518 << "Error during nnfw_session::set_input : given buffer is NULL but the length is not 0"
520 return NNFW_STATUS_ERROR;
525 _execution->setInput(onert::ir::IOIndex(index), buffer, length);
527 catch (const std::exception &e)
529 std::cerr << "Error during nnfw_session::set_input : " << e.what() << std::endl;
530 return NNFW_STATUS_ERROR;
532 return NNFW_STATUS_NO_ERROR;
535 NNFW_STATUS nnfw_session::set_output(uint32_t index, NNFW_TYPE /*type*/, void *buffer,
538 if (!isStatePreparedOrFinishedRun())
540 std::cerr << "Error during nnfw_session::set_output : invalid state" << std::endl;
541 return NNFW_STATUS_INVALID_STATE;
544 if (!buffer && length != 0)
547 << "Error during nnfw_session::set_output : given buffer is NULL but the length is not 0"
549 return NNFW_STATUS_ERROR;
554 _execution->setOutput(onert::ir::IOIndex(index), buffer, length);
556 catch (const std::exception &e)
558 std::cerr << "Error during nnfw_session::set_output : " << e.what() << std::endl;
559 return NNFW_STATUS_ERROR;
561 return NNFW_STATUS_NO_ERROR;
564 NNFW_STATUS nnfw_session::input_size(uint32_t *number)
566 if (isStateInitialized()) // Model is not loaded
567 return NNFW_STATUS_INVALID_STATE;
571 if (number == nullptr)
573 std::cerr << "Error during nnfw_session::input_size, number is null pointer." << std::endl;
574 return NNFW_STATUS_UNEXPECTED_NULL;
576 *number = getInputSize();
578 catch (const std::exception &e)
580 std::cerr << "Error during nnfw_session::input_size : " << e.what() << std::endl;
581 return NNFW_STATUS_ERROR;
583 return NNFW_STATUS_NO_ERROR;
586 NNFW_STATUS nnfw_session::output_size(uint32_t *number)
588 if (isStateInitialized()) // Model is not loaded
589 return NNFW_STATUS_INVALID_STATE;
593 if (number == nullptr)
595 std::cerr << "Error during nnfw_session::output_size, number is null pointer." << std::endl;
596 return NNFW_STATUS_UNEXPECTED_NULL;
598 *number = getOutputSize();
600 catch (const std::exception &e)
602 std::cerr << "Error during nnfw_session::output_size" << e.what() << std::endl;
603 return NNFW_STATUS_ERROR;
605 return NNFW_STATUS_NO_ERROR;
608 NNFW_STATUS nnfw_session::set_input_layout(uint32_t index, NNFW_LAYOUT layout)
610 if (!isStatePreparedOrFinishedRun())
612 std::cerr << "Error during nnfw_session::set_input_layout : "
613 << "run should be run after prepare" << std::endl;
614 return NNFW_STATUS_INVALID_STATE;
619 if (layout != NNFW_LAYOUT_NONE && layout != NNFW_LAYOUT_CHANNELS_FIRST &&
620 layout != NNFW_LAYOUT_CHANNELS_LAST)
622 std::cerr << "Error during nnfw_session::set_input_layout, not supported layout" << std::endl;
623 return NNFW_STATUS_ERROR;
626 _execution->setInputLayout(onert::ir::IOIndex(index), convertLayout(layout));
628 catch (const std::exception &e)
630 std::cerr << "Error during nnfw_session::set_input_layout : " << e.what() << std::endl;
631 return NNFW_STATUS_ERROR;
633 return NNFW_STATUS_NO_ERROR;
636 NNFW_STATUS nnfw_session::set_output_layout(uint32_t index, NNFW_LAYOUT layout)
638 if (!isStatePreparedOrFinishedRun())
640 std::cerr << "Error during nnfw_session::set_output_layout : "
641 << "run should be run after prepare" << std::endl;
642 return NNFW_STATUS_INVALID_STATE;
647 if (layout != NNFW_LAYOUT_NONE && layout != NNFW_LAYOUT_CHANNELS_FIRST &&
648 layout != NNFW_LAYOUT_CHANNELS_LAST)
650 std::cerr << "Error during nnfw_session::set_output_layout, not supported layout"
652 return NNFW_STATUS_ERROR;
655 _execution->setOutputLayout(onert::ir::IOIndex(index), convertLayout(layout));
657 catch (const std::exception &e)
659 std::cerr << "Error during nnfw_session::set_output_layout : " << e.what() << std::endl;
660 return NNFW_STATUS_ERROR;
662 return NNFW_STATUS_NO_ERROR;
665 NNFW_STATUS nnfw_session::apply_tensorinfo(uint32_t index, nnfw_tensorinfo ti)
669 if (isStateInitialized())
671 std::cerr << "Error during set_input_tensorinfo : should be run after load_model"
673 return NNFW_STATUS_INVALID_STATE;
676 if (ti.rank <= 0 || ti.rank > NNFW_MAX_RANK)
678 std::cerr << "unsupported rank: " << ti.rank << std::endl;
679 return NNFW_STATUS_ERROR;
682 for (int32_t i = 0; i < ti.rank; ++i)
686 std::cerr << "dim must be positive integer but was " << ti.dims[i] << std::endl;
687 return NNFW_STATUS_ERROR;
692 onert::ir::Shape new_shape(ti.rank);
693 for (int32_t i = 0; i < ti.rank; i++)
694 new_shape.dim(i) = ti.dims[i];
696 if (!isStatePreparedOrFinishedRun())
699 // In this case, if we apply input shape, it will propagate after compilation and excution
700 auto &info = _nnpkg->inputInfo(index);
701 info.shape(new_shape);
703 else // when called after nnfw_session::prepare()
704 _execution->changeInputShape(onert::ir::IOIndex(index), new_shape);
706 return NNFW_STATUS_NO_ERROR;
709 NNFW_STATUS nnfw_session::set_input_tensorinfo(uint32_t index, const nnfw_tensorinfo *ti)
711 nnfw_tensorinfo ti_copy = *ti;
712 return apply_tensorinfo(index, ti_copy);
715 NNFW_STATUS nnfw_session::input_tensorinfo(uint32_t index, nnfw_tensorinfo *ti)
717 if (isStateInitialized())
718 return NNFW_STATUS_INVALID_STATE;
724 std::cerr << "Error during nnfw_session::input_tensorinfo, tensorinfo is null pointer."
726 return NNFW_STATUS_UNEXPECTED_NULL;
729 if (index >= getInputSize())
731 std::cerr << "Error during nnfw_session::input_tensorinfo, index is out of range."
733 return NNFW_STATUS_ERROR;
736 if (isStateModelLoaded())
738 auto info = _nnpkg->inputInfo(index);
739 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
743 auto io_index = onert::ir::IOIndex{index};
744 auto shape = _execution->getInputShape(io_index);
745 auto dtype = _compiler_artifact->_executors->inputInfo(io_index).typeInfo().type();
746 fillTensorInfo(ti, shape, dtype);
749 catch (const std::exception &e)
751 std::cerr << "Error during nnfw_session::input_tensorinfo : " << e.what() << std::endl;
752 return NNFW_STATUS_ERROR;
754 return NNFW_STATUS_NO_ERROR;
757 NNFW_STATUS nnfw_session::output_tensorinfo(uint32_t index, nnfw_tensorinfo *ti)
759 if (isStateInitialized())
760 return NNFW_STATUS_INVALID_STATE;
764 std::cerr << "Error during nnfw_session::output_tensorinfo, tensorinfo is null pointer."
766 return NNFW_STATUS_UNEXPECTED_NULL;
771 if (index >= getOutputSize())
773 std::cerr << "Error during nnfw_session::output_tensorinfo, index is out of range."
775 return NNFW_STATUS_ERROR;
778 if (isStateModelLoaded())
780 auto info = _nnpkg->outputInfo(index);
781 fillTensorInfo(ti, info.shape(), info.typeInfo().type());
785 auto io_index = onert::ir::IOIndex{index};
786 auto shape = _execution->getOutputShape(io_index);
787 auto dtype = _compiler_artifact->_executors->outputInfo(io_index).typeInfo().type();
788 fillTensorInfo(ti, shape, dtype);
791 catch (const std::exception &e)
793 std::cerr << "Error during nnfw_session::output_tensorinfo : " << e.what() << std::endl;
794 return NNFW_STATUS_ERROR;
797 return NNFW_STATUS_NO_ERROR;
800 NNFW_STATUS nnfw_session::push_pipeline_input(std::vector<void *> *, std::vector<uint32_t> *)
802 std::cerr << "Pipeline push_pipeline_input: deprecated feature " << std::endl;
803 return NNFW_STATUS_ERROR;
806 NNFW_STATUS nnfw_session::pop_pipeline_output(std::vector<void *> *)
808 std::cerr << "Pipeline pop_pipeline_output: deprecated feature " << std::endl;
809 return NNFW_STATUS_ERROR;
812 NNFW_STATUS nnfw_session::register_custom_operation(const std::string &id,
813 nnfw_custom_eval eval_func)
815 _kernel_registry->registerKernel(id, eval_func);
816 return NNFW_STATUS_NO_ERROR;
819 static std::string get_op_backend_string(std::string op)
821 #define MAP_MACRO(CircleName, OneRTName) {#CircleName, #OneRTName},
823 static std::unordered_map<std::string, std::string> operation_map = {
829 auto n = operation_map.find(op);
831 if (n == operation_map.end())
833 // this return value is handled by a caller to return error code
834 return std::string("");
842 NNFW_STATUS nnfw_session::set_available_backends(const char *backends)
844 if (!isStateModelLoaded())
845 return NNFW_STATUS_INVALID_STATE;
850 return NNFW_STATUS_UNEXPECTED_NULL;
851 if (null_terminating(backends, MAX_BACKEND_NAME_LENGTH) == false)
852 return NNFW_STATUS_ERROR;
854 auto &options = *_coptions[0];
856 using namespace onert::util;
858 options.backend_list = nnfw::misc::split(std::string{backends}, ';');
860 catch (const std::exception &e)
862 std::cerr << "Error during nnfw_session::set_available_backends : " << e.what() << std::endl;
863 return NNFW_STATUS_ERROR;
865 return NNFW_STATUS_NO_ERROR;
868 NNFW_STATUS nnfw_session::set_op_backend(const char *op, const char *backend)
870 if (!isStateModelLoaded())
871 return NNFW_STATUS_INVALID_STATE;
876 return NNFW_STATUS_UNEXPECTED_NULL;
877 if (!null_terminating(op, MAX_OP_NAME_LENGTH) ||
878 !null_terminating(backend, MAX_BACKEND_NAME_LENGTH))
879 return NNFW_STATUS_ERROR;
881 auto key = get_op_backend_string(op);
885 return NNFW_STATUS_ERROR;
888 auto &opcode_to_backend = _coptions[0]->manual_scheduler_options.opcode_to_backend;
889 opcode_to_backend.emplace(onert::ir::toOpCode(key), backend);
891 catch (const std::exception &e)
893 std::cerr << "Error during nnfw_session::set_op_backend : " << e.what() << std::endl;
894 return NNFW_STATUS_ERROR;
896 return NNFW_STATUS_NO_ERROR;
899 NNFW_STATUS nnfw_session::set_config(const char *key, const char *value)
901 if (!isStateModelLoaded())
902 return NNFW_STATUS_INVALID_STATE;
905 return NNFW_STATUS_UNEXPECTED_NULL;
907 auto &options = *_coptions[0];
909 using namespace onert::util;
911 const std::string skey = key;
913 if (skey == config::TRACE_FILEPATH)
915 options.trace_filepath = value;
917 else if (skey == config::GRAPH_DOT_DUMP)
919 options.graph_dump_level = toInt(value);
921 else if (skey == config::EXECUTOR)
923 options.executor = value;
925 else if (skey == config::OP_BACKEND_ALLOPS)
927 options.manual_scheduler_options.backend_for_all = value;
929 else if (skey == config::USE_SCHEDULER)
931 options.he_scheduler = toBool(value);
933 else if (skey == config::PROFILING_MODE)
935 options.he_profiling_mode = toBool(value);
939 return NNFW_STATUS_ERROR;
941 return NNFW_STATUS_NO_ERROR;
944 const onert::ir::Graph *nnfw_session::primary_subgraph()
946 if (_nnpkg != nullptr)
948 assert(_execution == nullptr);
949 return _nnpkg->primary_model()->primary_subgraph().get();
953 assert(_execution != nullptr);
954 // We assumed the graph will not change after compilation, but shape could change
955 return &_execution->primary_subgraph();
959 uint32_t nnfw_session::getInputSize()
961 if (isStateInitialized())
962 throw std::runtime_error{"Model is not loaded yet"};
964 if (isStateModelLoaded())
965 return _nnpkg->inputSize();
967 // Session is prepared (general inference)
968 return _compiler_artifact->_executors->inputSize();
971 uint32_t nnfw_session::getOutputSize()
973 if (isStateInitialized())
974 throw std::runtime_error{"Model is not loaded yet"};
976 if (isStateModelLoaded())
977 return _nnpkg->outputSize();
979 // Session is prepared (general inference)
980 return _compiler_artifact->_executors->outputSize();
983 NNFW_STATUS nnfw_session::get_config(const char *key, char *value, size_t value_size)
985 if (!isStateModelLoaded())
986 return NNFW_STATUS_INVALID_STATE;
989 return NNFW_STATUS_UNEXPECTED_NULL;
991 auto &options = *_coptions[0];
993 auto check_boundary = [](size_t dest_size, std::string &src) {
994 if (dest_size < src.length() + 1 /* for '\0' */)
996 std::cerr << "buffer is small to copy config value." << std::endl;
1002 const std::string skey = key;
1004 if (skey == onert::util::config::BACKENDS)
1006 if (options.backend_list.size() == 0)
1007 return NNFW_STATUS_NO_ERROR; // no setting backend is not an error of get_config_str()
1009 auto str = nnfw::misc::join(options.backend_list.begin(), options.backend_list.end(), ";");
1011 if (!check_boundary(value_size, str))
1012 return NNFW_STATUS_ERROR;
1014 strncpy(value, str.c_str(), value_size);
1016 else if (skey == onert::util::config::EXECUTOR)
1018 if (!check_boundary(value_size, options.executor))
1019 return NNFW_STATUS_ERROR;
1021 strncpy(value, options.executor.c_str(), options.executor.length());
1025 return NNFW_STATUS_ERROR;
1028 return NNFW_STATUS_NO_ERROR;
1031 bool nnfw_session::isStateInitialized()
1033 if (_state == State::INITIALIZED)
1035 assert(_nnpkg == nullptr);
1036 assert(_coptions.empty());
1037 assert(_execution == nullptr);
1046 bool nnfw_session::isStateModelLoaded()
1048 if (_state == State::MODEL_LOADED)
1050 assert(_nnpkg != nullptr);
1051 assert(!_coptions.empty());
1052 assert(_execution == nullptr);
1061 bool nnfw_session::isStatePrepared()
1063 if (_state == State::PREPARED)
1065 assert(_nnpkg == nullptr);
1066 assert(!_coptions.empty());
1067 assert(_execution != nullptr);
1076 bool nnfw_session::isStateRunning()
1078 if (_state == State::RUNNING)
1080 assert(_nnpkg == nullptr);
1081 assert(!_coptions.empty());
1082 assert(_execution != nullptr);
1088 bool nnfw_session::isStateFinishedRun()
1090 if (_state == State::FINISHED_RUN)
1092 assert(_nnpkg == nullptr);
1093 assert(!_coptions.empty());
1094 assert(_execution != nullptr);
1103 bool nnfw_session::isStatePreparedOrFinishedRun()
1105 return isStatePrepared() || isStateFinishedRun();
1108 NNFW_STATUS nnfw_session::input_tensorindex(const char *tensorname, uint32_t *index)
1110 return getTensorIndexImpl(*primary_subgraph(), tensorname, index, true);
1113 NNFW_STATUS nnfw_session::output_tensorindex(const char *tensorname, uint32_t *index)
1115 return getTensorIndexImpl(*primary_subgraph(), tensorname, index, false);
1118 NNFW_STATUS nnfw_session::set_backends_per_operation(const char *backend_settings)
1120 if (backend_settings == NULL)
1121 return NNFW_STATUS_ERROR;
1123 if (!isStateModelLoaded())
1124 return NNFW_STATUS_INVALID_STATE;
1127 auto &ms_options = _coptions[0]->manual_scheduler_options;
1128 ms_options.setBackendMap(std::string{backend_settings});
1130 return NNFW_STATUS_NO_ERROR;