From d7f9c9efc296460a1ae23470cd71ffe713f61bdb Mon Sep 17 00:00:00 2001 From: MyungJoo Ham Date: Fri, 29 Mar 2019 10:45:41 +0900 Subject: [PATCH] [Filter/TF-Lite] Enable NNAPI with configurations This rephrases #1013 with nnstreamer configuration support. Set "enable_nnapi" to TRUE, 1, or ON in the .ini file, or use NNSTREAMER_tensorflowlite_enable_nnapi=1 as an environment variable, to enable NNAPI for tensorflow-lite. CC: @daeinki Signed-off-by: MyungJoo Ham --- ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.cc | 5 +++++ ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.h | 1 + nnstreamer.ini.in | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.cc b/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.cc index cc32503..beaf2f6 100644 --- a/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.cc +++ b/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.cc @@ -25,6 +25,7 @@ #include #include +#include <nnstreamer_conf.h> #include "tensor_filter_tensorflow_lite_core.h" /** @@ -43,6 +44,7 @@ TFLiteCore::TFLiteCore (const char * _model_path) { model_path = _model_path; + use_nnapi = nnsconf_get_custom_value_bool ("tensorflowlite", "enable_nnapi", FALSE); gst_tensors_info_init (&inputTensorMeta); gst_tensors_info_init (&outputTensorMeta); @@ -127,6 +129,9 @@ TFLiteCore::loadModel () return -2; } + /* Set inference path of tensorflow-lite */ + interpreter->UseNNAPI (use_nnapi); + /** set allocation type to dynamic for in/out tensors */ int tensor_idx; diff --git a/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.h b/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.h index 6b8304d..78cea50 100644 --- a/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.h +++ b/ext/nnstreamer/tensor_filter/tensor_filter_tensorflow_lite_core.h @@ -54,6 +54,7 @@ public: private: const char *model_path; + bool use_nnapi; GstTensorsInfo 
inputTensorMeta; /**< The tensor info of input tensors */ GstTensorsInfo outputTensorMeta; /**< The tensor info of output tensors */ diff --git a/nnstreamer.ini.in b/nnstreamer.ini.in index 71caaa5..308f2ef 100644 --- a/nnstreamer.ini.in +++ b/nnstreamer.ini.in @@ -9,3 +9,7 @@ decoders=@SUBPLUGIN_INSTALL_PREFIX@/decoders/ # It may break in some special cases (running tensorflow & nnstreamer in a chroot of a AWS VM); in such a case, keep it 0 or FALSE. [tensorflow] mem_optmz=@TF_MEM_OPTMZ@ + +# Set 1 or TRUE if you want to use NNAPI with tensorflow-lite; this enables the NNAPI backend, which may use GPU or NPU/TPU. +[tensorflowlite] +enable_nnapi=FALSE -- 2.7.4