[tensorflow/hw] Added supported accelerators
authorParichay Kapoor <pk.kapoor@samsung.com>
Tue, 11 Feb 2020 10:16:58 +0000 (19:16 +0900)
committerMyungJoo Ham <myungjoo.ham@samsung.com>
Wed, 4 Mar 2020 04:22:08 +0000 (13:22 +0900)
Added the list of supported hardware accelerators for the tensorflow tensor filter extension
Added corresponding unit tests using the single API

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
ext/nnstreamer/tensor_filter/tensor_filter_tensorflow.cc
tests/tizen_capi/unittest_tizen_capi.cc
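
As a caller-side illustration of what this change enables (and what the new unit tests below exercise), here is a minimal sketch that queries accelerator availability for the tensorflow backend through the ML C API. It is not part of this commit; the <nnstreamer.h> header name and the printf-based reporting are assumptions, while ml_check_nnfw_availability and the enum values are taken from the tests in this diff.

#include <stdio.h>
#include <stdbool.h>
#include <nnstreamer.h>   /* assumed header exposing ml_check_nnfw_availability () */

int
main (void)
{
  bool available = false;
  int status;

  /* HW_AUTO is expected to be accepted: this commit whitelists ACCL_AUTO_STR */
  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW,
      ML_NNFW_HW_AUTO, &available);
  if (status != ML_ERROR_NONE)
    return status;
  printf ("tensorflow / AUTO: %s\n", available ? "supported" : "not supported");

  /* HW_GPU is not in tf_accl_support, so this is expected to report false */
  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW,
      ML_NNFW_HW_GPU, &available);
  if (status != ML_ERROR_NONE)
    return status;
  printf ("tensorflow / GPU: %s\n", available ? "supported" : "not supported");

  return 0;
}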

index 332d9b0..3049adb 100644 (file)
 #define DBG FALSE
 #endif
 
+static const gchar *tf_accl_support[] = {
+  ACCL_AUTO_STR,
+  ACCL_DEFAULT_STR,
+  NULL
+};
+
 /**
  * @brief      Internal data structure for tensorflow
  */
@@ -741,6 +747,19 @@ tf_destroyNotify (void **private_data, void *data)
   }
 }
 
+/**
+ * @brief Check whether the given hardware backend is supported
+ * @param[in] hw hardware backend to check for support
+ */
+static int
+tf_checkAvailability (accl_hw hw)
+{
+  if (g_strv_contains (tf_accl_support, get_accl_hw_str (hw)))
+    return 0;
+
+  return -ENOENT;
+}
+
 static gchar filter_subplugin_tensorflow[] = "tensorflow";
 
 static GstTensorFilterFramework NNS_support_tensorflow = {
@@ -762,6 +781,7 @@ init_filter_tf (void)
   NNS_support_tensorflow.getInputDimension = tf_getInputDim;
   NNS_support_tensorflow.getOutputDimension = tf_getOutputDim;
   NNS_support_tensorflow.destroyNotify = tf_destroyNotify;
+  NNS_support_tensorflow.checkAvailability = tf_checkAvailability;
 
   nnstreamer_filter_probe (&NNS_support_tensorflow);
 }
index 5650392..0ff76ea 100644 (file)
@@ -1318,7 +1318,7 @@ TEST (nnstreamer_capi_util, availability_01)
 /**
  * @brief Test NNStreamer Utility for checking availability of Tensorflow-lite backend
  */
-TEST (nnstreamer_capi_util, availability_fail_n)
+TEST (nnstreamer_capi_util, availability_fail_01_n)
 {
   bool result;
   int status;
@@ -1345,6 +1345,42 @@ TEST (nnstreamer_capi_util, availability_fail_n)
 }
 #endif /* ENABLE_TENSORFLOW_LITE */
 
+#ifdef ENABLE_TENSORFLOW
+/**
+ * @brief Test NNStreamer Utility for checking availability of Tensorflow backend
+ */
+TEST (nnstreamer_capi_util, availability_02)
+{
+  bool result;
+  int status;
+
+  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW, ML_NNFW_HW_ANY, &result);
+  EXPECT_EQ (status, ML_ERROR_NONE);
+  EXPECT_EQ (result, true);
+
+  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW, ML_NNFW_HW_AUTO, &result);
+  EXPECT_EQ (status, ML_ERROR_NONE);
+  EXPECT_EQ (result, true);
+}
+
+/**
+ * @brief Test NNStreamer Utility for checking non-availability of unsupported hw backends for Tensorflow
+ */
+TEST (nnstreamer_capi_util, availability_fail_02_n)
+{
+  bool result;
+  int status;
+
+  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW, ML_NNFW_HW_CPU, &result);
+  EXPECT_EQ (status, ML_ERROR_NONE);
+  EXPECT_EQ (result, false);
+
+  status = ml_check_nnfw_availability (ML_NNFW_TYPE_TENSORFLOW, ML_NNFW_HW_GPU, &result);
+  EXPECT_EQ (status, ML_ERROR_NONE);
+  EXPECT_EQ (result, false);
+}
+#endif /* ENABLE_TENSORFLOW */
+
 /**
  * @brief Test NNStreamer Utility for checking tensors info handle
  */