Fix UNINIT.CTOR issue and remove useless call
37/232737/4 accepted/tizen/unified/20200510.220413 submit/tizen/20200508.021019
author Hyunsoo Park <hance.park@samsung.com>
Thu, 7 May 2020 09:59:57 +0000 (18:59 +0900)
committer Hyunsoo Park <hance.park@samsung.com>
Fri, 8 May 2020 01:14:42 +0000 (10:14 +0900)
Change-Id: I695f9a89e0d89e0ffb776c0f4ef2a77dc5a4afa1
Signed-off-by: Hyunsoo Park <hance.park@samsung.com>
packaging/inference-engine-tflite.spec
src/inference_engine_tflite.cpp

packaging/inference-engine-tflite.spec
index 971f8c6a43333a5f86deb9ac3c28d8fb44d2fa6b..9f2a99abcc9e1e60bbed680e85dab7604fbdff20 100644
@@ -1,7 +1,7 @@
 Name:       inference-engine-tflite
 Summary:    Tensorflow-Lite based implementation of inference-engine-interface
 Version:    0.0.1
-Release:    9
+Release:    10
 Group:      Multimedia/Libraries
 License:    Apache-2.0
 Source0:    %{name}-%{version}.tar.gz
src/inference_engine_tflite.cpp
index 83a5f7a28a17a5c9b28d0ce70bfec73ff8305784..9316f1678b095ea5b92dc3954ca375a2a79787ca 100644
@@ -29,7 +29,8 @@
 namespace InferenceEngineImpl {
 namespace TFLiteImpl {
 
-InferenceTFLite::InferenceTFLite(void)
+InferenceTFLite::InferenceTFLite(void) :
+    mTargetTypes(INFERENCE_TARGET_NONE)
 {
     LOGI("ENTER");
     LOGI("LEAVE");
@@ -64,8 +65,6 @@ int InferenceTFLite::Load(std::vector<std::string> model_paths, inference_model_
         return INFERENCE_ENGINE_ERROR_INVALID_DATA;
     }
 
-    mFlatBuffModel->error_reporter();
-
     tflite::ops::builtin::BuiltinOpResolver resolver;
 
     tflite::InterpreterBuilder(*mFlatBuffModel, resolver)(&mInterpreter);
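
For context, UNINIT.CTOR is a static-analysis report (from checkers such as Klocwork or SVACE) flagging a data member that the constructor leaves uninitialized, so it holds an indeterminate value until some later assignment. Initializing the member in the constructor initializer list, as the diff does for mTargetTypes, resolves the report. Below is a minimal sketch of that pattern using hypothetical names (Engine, mTargetTypes, TARGET_NONE are illustrative, not the real project types):

    #include <cstdio>

    // Hypothetical enum standing in for the real target-type constants.
    enum TargetType { TARGET_NONE = 0, TARGET_CPU = 1 };

    class Engine {
    public:
        // Initializing every member in the initializer list means no code
        // path can observe an indeterminate value (the UNINIT.CTOR pattern).
        Engine() : mTargetTypes(TARGET_NONE)
        {
        }

        bool HasTarget() const { return mTargetTypes != TARGET_NONE; }

    private:
        int mTargetTypes;  // previously left uninitialized by the constructor
    };

    int main()
    {
        Engine e;
        std::printf("has target: %d\n", e.HasTarget());  // deterministic: prints 0
        return 0;
    }

The removed mFlatBuffModel->error_reporter() statement is the "useless call" from the subject line: error_reporter() is an accessor whose only effect is its return value, and that value was discarded, so dropping the call does not change behavior.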