#define TAG_NAME "nnstreamer-capi"
+#define ML_INF_FEATURE_PATH "tizen.org/feature/machine_learning.inference"
+
+#define check_feature_state() \
+ if (ML_ERROR_NONE != ml_get_feature_enabled()) \
+ return ML_ERROR_NOT_SUPPORTED;
+
#if defined(__TIZEN__)
#include <dlog.h>
*/
GstCaps * ml_tensors_info_get_caps (const ml_tensors_info_s *info);
+/**
+ * @brief Checks whether machine_learning.inference feature is enabled or not.
+ */
+int ml_get_feature_enabled (void);
+
+/**
+ * @brief Set the feature status of machine_learning.inference.
+ * This is used only for unit tests.
+ */
+int ml_set_feature_status (int status);
+
#ifdef __cplusplus
}
#endif /* __cplusplus */
message('CAPI is in Tizen mode')
tizen_deps = [
dependency('capi-base-common'),
+ dependency('capi-system-info'),
dependency('dlog')
]
else
ml_pipeline *pipe_h;
+ check_feature_state ();
+
if (pipe == NULL)
return ML_ERROR_INVALID_PARAMETER;
GstStateChangeReturn scret;
GstState state;
+ check_feature_state ();
+
if (p == NULL)
return ML_ERROR_INVALID_PARAMETER;
GstState _state;
GstStateChangeReturn scret;
+ check_feature_state ();
+
if (p == NULL || state == NULL)
return ML_ERROR_INVALID_PARAMETER;
ml_pipeline *p = pipe;
GstStateChangeReturn scret;
+ check_feature_state ();
+
if (p == NULL)
return ML_ERROR_INVALID_PARAMETER;
ml_pipeline *p = pipe;
GstStateChangeReturn scret;
+ check_feature_state ();
+
if (p == NULL)
return ML_ERROR_INVALID_PARAMETER;
ml_pipeline_sink *sink;
int ret = ML_ERROR_NONE;
+ check_feature_state ();
+
if (h == NULL) {
ml_loge ("The argument sink handle is not valid.");
return ML_ERROR_INVALID_PARAMETER;
{
handle_init (sink, sink, h);
+ check_feature_state ();
+
if (elem->handle_id > 0) {
g_signal_handler_disconnect (elem->element, elem->handle_id);
elem->handle_id = 0;
ml_pipeline_src *src;
int ret = ML_ERROR_NONE;
+ check_feature_state ();
+
if (h == NULL) {
ml_loge ("The argument source handle is not valid.");
return ML_ERROR_INVALID_PARAMETER;
{
handle_init (src, src, h);
+ check_feature_state ();
+
elem->handles = g_list_remove (elem->handles, src);
g_free (src);
handle_init (src, src, h);
+ check_feature_state ();
+
_data = (ml_tensors_data_s *) data;
if (!_data) {
ml_loge ("The given param data is invalid.");
{
handle_init (src, src, h);
+ check_feature_state ();
+
if (info == NULL) {
ret = ML_ERROR_INVALID_PARAMETER;
goto unlock_return;
ml_pipeline_switch *swtc;
int ret = ML_ERROR_NONE;
+ check_feature_state ();
+
if (h == NULL) {
ml_loge ("The argument switch handle is not valid.");
return ML_ERROR_INVALID_PARAMETER;
{
handle_init (switch, swtc, h);
+ check_feature_state ();
+
elem->handles = g_list_remove (elem->handles, swtc);
g_free (swtc);
handle_init (switch, swtc, h);
+ check_feature_state ();
+
if (pad_name == NULL) {
ml_loge ("The second argument, pad name, is not valid.");
ret = ML_ERROR_INVALID_PARAMETER;
handle_init (switch, swtc, h);
+ check_feature_state ();
+
if (list == NULL) {
ml_loge ("The second argument, list, is not valid.");
ret = ML_ERROR_INVALID_PARAMETER;
ml_pipeline_valve *valve;
int ret = ML_ERROR_NONE;
+ check_feature_state ();
+
if (h == NULL) {
ml_loge ("The argument valve handle is not valid.");
return ML_ERROR_INVALID_PARAMETER;
{
handle_init (valve, valve, h);
+ check_feature_state ();
+
elem->handles = g_list_remove (elem->handles, valve);
g_free (valve);
gboolean drop = FALSE;
handle_init (valve, valve, h);
+ check_feature_state ();
+
g_object_get (G_OBJECT (elem->element), "drop", &drop, NULL);
if ((open != false) != (drop != FALSE)) {
bool available = false;
bool valid = false;
+ check_feature_state ();
+
/* Validate the params */
if (!single) {
ml_loge ("The given param, single is invalid.");
ml_single *single_h;
int status;
+ check_feature_state ();
+
if (!single) {
ml_loge ("The given param, single is invalid.");
return ML_ERROR_INVALID_PARAMETER;
GstFlowReturn ret;
int i, status = ML_ERROR_NONE;
+ check_feature_state ();
+
if (!single || !input || !output) {
ml_loge ("The given param is invalid.");
return ML_ERROR_INVALID_PARAMETER;
gchar *val;
guint rank;
+ check_feature_state ();
+
if (!single || !info)
return ML_ERROR_INVALID_PARAMETER;
gchar *val;
guint rank;
+ check_feature_state ();
+
if (!single || !info)
return ML_ERROR_INVALID_PARAMETER;
#include "nnstreamer.h"
#include "nnstreamer-capi-private.h"
+#if defined(__TIZEN__)
+ #include <system_info.h>
+#endif
+
+/** -1: Not checked yet, 0: Not supported, 1: Supported */
+static int feature_enabled = -1;
+
/**
* @brief Allocates a tensors information handle with default value.
*/
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
tensors_info = (ml_tensors_info_s *) info;
if (!tensors_info)
ml_tensors_info_s *tensors_info;
guint i;
+ check_feature_state ();
+
if (!valid)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info || count > ML_TENSOR_SIZE_LIMIT)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info || !count)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info || !name)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_info_s *tensors_info;
+ check_feature_state ();
+
if (!info || !type)
return ML_ERROR_INVALID_PARAMETER;
ml_tensors_info_s *tensors_info;
guint i;
+ check_feature_state ();
+
if (!info)
return ML_ERROR_INVALID_PARAMETER;
ml_tensors_info_s *tensors_info;
guint i;
+ check_feature_state ();
+
if (!info)
return ML_ERROR_INVALID_PARAMETER;
ml_tensors_data_s *_data;
guint i;
+ check_feature_state ();
+
if (!data)
return ML_ERROR_INVALID_PARAMETER;
ml_tensors_info_s *tensors_info;
gint i;
+ check_feature_state ();
+
if (!info || !data)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_data_s *_data;
+ check_feature_state ();
+
if (!data)
return ML_ERROR_INVALID_PARAMETER;
{
ml_tensors_data_s *_data;
+ check_feature_state ();
+
if (!data)
return ML_ERROR_INVALID_PARAMETER;
ml_tensors_info_s *dest_info, *src_info;
guint i, j;
+ check_feature_state ();
+
dest_info = (ml_tensors_info_s *) dest;
src_info = (ml_tensors_info_s *) src;
ml_check_nnfw_availability (ml_nnfw_type_e nnfw, ml_nnfw_hw_e hw,
bool * available)
{
+ check_feature_state ();
+
if (!available)
return ML_ERROR_INVALID_PARAMETER;
done:
return ML_ERROR_NONE;
}
+
+/**
+ * @brief Checks whether machine_learning.inference feature is enabled or not.
+ *        On non-Tizen builds this always reports success; on Tizen the
+ *        platform feature key is queried once and cached in feature_enabled.
+ * @return ML_ERROR_NONE if the feature is available,
+ *         ML_ERROR_NOT_SUPPORTED otherwise.
+ */
+int ml_get_feature_enabled (void)
+{
+#if defined(__TIZEN__)
+  /* feature_enabled cache: -1 = not checked yet, 0 = not supported, 1 = supported */
+  if (0 == feature_enabled) {
+    ml_loge ("machine_learning.inference NOT supported");
+    return ML_ERROR_NOT_SUPPORTED;
+  } else if (-1 == feature_enabled) {
+    bool ml_inf_supported = false;
+    /* First call: ask system-info for the platform feature flag and cache it. */
+    if (0 == system_info_get_platform_bool(ML_INF_FEATURE_PATH, &ml_inf_supported)) {
+      if (false == ml_inf_supported) {
+        ml_loge ("machine_learning.inference NOT supported");
+        ml_set_feature_status (0);
+        return ML_ERROR_NOT_SUPPORTED;
+      }
+
+      ml_set_feature_status (1);
+    } else {
+      /* Query itself failed: report not-supported but leave the cache at -1
+       * so the next call retries instead of pinning a possibly wrong answer. */
+      ml_loge ("failed to get feature value of machine_learning.inference");
+      return ML_ERROR_NOT_SUPPORTED;
+    }
+  }
+#endif
+  return ML_ERROR_NONE;
+}
+
+/**
+ * @brief Set the feature status of machine_learning.inference.
+ *        Intended for unit tests to force the cached feature state.
+ * @param[in] status -1: Not checked yet, 0: Not supported, 1: Supported
+ * @return ML_ERROR_NONE always.
+ */
+int ml_set_feature_status (int status)
+{
+  /* BUGFIX: the mutex must be shared across callers. A function-local
+   * GMutex is a fresh object per invocation (init/lock/unlock/clear on a
+   * stack variable), so concurrent callers never contended on the same
+   * lock and the update was effectively unsynchronized. A static GMutex
+   * is zero-initialized and needs no g_mutex_init/g_mutex_clear. */
+  static GMutex mutex;
+
+  g_mutex_lock (&mutex);
+
+  /** Update feature status
+   * -1: Not checked yet, 0: Not supported, 1: Supported
+   */
+  feature_enabled = status;
+
+  g_mutex_unlock (&mutex);
+  return ML_ERROR_NONE;
+}
override_dh_auto_configure:
mkdir -p build
- meson --buildtype=plain --prefix=/usr --sysconfdir=/etc --libdir=lib/$(DEB_HOST_MULTIARCH) --bindir=lib/nnstreamer/bin --includedir=include -Dinstall-example=true -Denable-tensorflow=$(enable_tf) -Denable-capi=true build
+ meson --buildtype=plain --prefix=/usr --sysconfdir=/etc --libdir=lib/$(DEB_HOST_MULTIARCH) --bindir=lib/nnstreamer/bin --includedir=include -Dinstall-example=true -Denable-tensorflow=$(enable_tf) -Denable-capi=true -Denable-tizen=false build
override_dh_auto_build:
ninja -C build
# BuildRequires: taos-ci-unittest-coverage-assessment
%endif
%if %{with tizen}
+BuildRequires: pkgconfig(capi-system-info)
BuildRequires: pkgconfig(capi-base-common)
BuildRequires: pkgconfig(dlog)
BuildRequires: gst-plugins-bad-devel
#include <gtest/gtest.h>
#include <glib.h>
#include <glib/gstdio.h> /* GStatBuf */
+#include <nnstreamer-capi-private.h>
/**
* @brief Struct to check the pipeline state changes.
{
testing::InitGoogleTest (&argc, argv);
+ ml_set_feature_status(1);
+
return RUN_ALL_TESTS ();
}