Add tflite 2.3 gpu delegate support 94/250894/1
author: Inki Dae <inki.dae@samsung.com>
Tue, 3 Nov 2020 09:03:46 +0000 (18:03 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Tue, 5 Jan 2021 08:47:42 +0000 (17:47 +0900)
Change-Id: I289316756e3d6a4c266810c57db68a0596b1733b
Signed-off-by: Inki Dae <inki.dae@samsung.com>
CMakeLists.txt
packaging/inference-engine-tflite.spec
src/inference_engine_tflite.cpp
src/inference_engine_tflite_private.h

index 49e6c16205badfe71683d158e2a25a1fee2863e4..9b705486edde67085b24c6fe4409c15945bb7bdd 100644 (file)
@@ -18,7 +18,7 @@ FOREACH(flag ${${fw_name}_CFLAGS})
 ENDFOREACH(flag)
 
 FOREACH(flag ${${fw_name}_LDFLAGS})
-    SET(EXTRA_LDFLAGS "${EXTRA_LDFLAGS} ${flag}")
+    SET(EXTRA_LDFLAGS "${EXTRA_LDFLAGS} ${flag} -lEGL -lGLESv2 -ltensorflowlite -ltensorflowlite_gpu_delegate")
 ENDFOREACH(flag)
 #Remove leading whitespace POLICY CMP0004
 STRING(REGEX REPLACE "^ " "" EXTRA_LDFLAGS ${EXTRA_LDFLAGS})
@@ -26,7 +26,7 @@ STRING(REGEX REPLACE "^ " "" EXTRA_LDFLAGS ${EXTRA_LDFLAGS})
 SET(CMAKE_C_FLAGS "-I./include -I./include/headers ${CMAKE_C_FLAGS} ${EXTRA_CFLAGS} -fPIC -Wall -w")
 SET(CMAKE_C_FLAGS_DEBUG "-O0 -g")
 
-SET(CMAKE_CXX_FLAGS "-I./include -I./include/headers ${CMAKE_CXX_FLAGS} ${EXTRA_CXXFLAGS} -fPIC")
+SET(CMAKE_CXX_FLAGS "-I./include -I./include/headers -I/usr/include/tensorflow2/tensorflow ${CMAKE_CXX_FLAGS} ${EXTRA_CXXFLAGS} -fPIC")
 SET(CMAKE_CXX_FLAGS_DEBUG "-O0 -g --w")
 
 ADD_DEFINITIONS("-DPREFIX=\"${CMAKE_INSTALL_PREFIX}\"")
index ab41e13f60446f3abae921970e5bb382f7906e3f..086ced929019303044fc933a5f83bf9834fb6bcd 100644 (file)
@@ -11,7 +11,9 @@ BuildRequires: cmake
 BuildRequires: python
 BuildRequires: pkgconfig(dlog)
 BuildRequires: pkgconfig(inference-engine-interface-common)
+BuildRequires: coregl-devel
 BuildRequires: tensorflow-lite-devel
+BuildRequires: tensorflow2-lite-devel
 
 %description
 Tensorflow-Lite based implementation of inference-engine-interface
index 78e4f64cc60114a500d0c616e2836809eb8d6c4d..4c265fd9912e5f573dd146c8c8610e6f23f14643 100644 (file)
@@ -52,6 +52,21 @@ namespace TFLiteImpl
        {
                LOGI("ENTER");
 
+               switch (types) {
+               case INFERENCE_TARGET_CPU:
+                       LOGI("Device type is CPU.");
+                       break;
+               case INFERENCE_TARGET_GPU:
+                       LOGI("Device type is GPU.");
+                       break;
+               case INFERENCE_TARGET_CUSTOM:
+               case INFERENCE_TARGET_NONE:
+               default:
+                       LOGW("Not supported device type [%d], Set CPU mode",
+                                (int) mTargetTypes);
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
                mTargetTypes = types;
 
                LOGI("LEAVE");
@@ -89,18 +104,18 @@ namespace TFLiteImpl
 
                LOGI("Inferece targets are: [%d]", mTargetTypes);
 
-               switch (mTargetTypes) {
-               case INFERENCE_TARGET_CPU:
-                       mInterpreter->UseNNAPI(false);
-                       break;
-               case INFERENCE_TARGET_GPU:
-                       mInterpreter->UseNNAPI(true);
-                       break;
-               case INFERENCE_TARGET_CUSTOM:
-               case INFERENCE_TARGET_NONE:
-               default:
-                       LOGW("Not supported device type [%d], Set CPU mode",
-                                (int) mTargetTypes);
+               if (mTargetTypes == INFERENCE_TARGET_GPU) {
+                       TfLiteDelegate *delegate = TfLiteGpuDelegateV2Create(nullptr);
+                       if (!delegate){
+                               LOGE("Failed to GPU delegate");
+                               return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+                       }
+
+                       if (mInterpreter->ModifyGraphWithDelegate(delegate) != kTfLiteOk)
+                       {
+                               LOGE("Failed to construct GPU delegate");
+                               return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+                       }
                }
 
                mInterpreter->SetNumThreads(MV_INFERENCE_TFLITE_MAX_THREAD_NUM);
index 0c665e000d280ad23fac0a140c7c0a4ba0e18179..ce92ee934edc045d07e1833d59ec0d0aeb6ea51b 100644 (file)
 
 #include <inference_engine_common.h>
 
-#include "tensorflow/contrib/lite/string.h"
-#include "tensorflow/contrib/lite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/context.h"
+#include "tensorflow1/contrib/lite/string.h"
+#include "tensorflow1/contrib/lite/kernels/register.h"
+#include "tensorflow1/contrib/lite/model.h"
+#include "tensorflow1/contrib/lite/context.h"
+
+#include "tensorflow2/lite/delegates/gpu/delegate.h"
+#include "tensorflow2/lite/kernels/register.h"
+#include "tensorflow2/lite/model.h"
+#include "tensorflow2/lite/optional_debug_tools.h"
 
 #include <memory>
 #include <dlog.h>