From: Yongjoo Ahn
Date: Fri, 8 Jul 2022 06:01:22 +0000 (+0900)
Subject: [ml-service] Add service APIs using pipeline d-bus interface
X-Git-Tag: accepted/tizen/unified/20220819.122510~2
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=4e6251c113b390fa923e0e54e49530994e67e667;p=platform%2Fcore%2Fapi%2Fmachine-learning.git

[ml-service] Add service APIs using pipeline d-bus interface

- Implement some service APIs using pipeline d-bus interface

Signed-off-by: Yongjoo Ahn
---

diff --git a/c/meson.build b/c/meson.build
index 23adfbd..38991cd 100644
--- a/c/meson.build
+++ b/c/meson.build
@@ -17,6 +17,10 @@ nns_capi_common_srcs += join_paths(meson.current_source_dir(), 'src', 'ml-api-in
 nns_capi_single_srcs += join_paths(meson.current_source_dir(), 'src', 'ml-api-inference-single.c')
 nns_capi_pipeline_srcs += join_paths(meson.current_source_dir(), 'src', 'ml-api-inference-pipeline.c')
 
+if get_option('enable-machine-learning-agent')
+  nns_capi_service_srcs += join_paths(meson.current_source_dir(), 'src', 'ml-api-service-agent-client.c')
+endif
+
 if get_option('enable-tizen')
   if get_option('enable-tizen-feature-check')
     nns_capi_common_srcs += join_paths(meson.current_source_dir(), 'src', 'ml-api-common-tizen-feature-check.c')
@@ -43,7 +47,7 @@ endif
 
 # Dependencies
 nns_capi_common_deps = [glib_dep, gmodule_dep, nnstreamer_single_dep]
-nns_capi_deps = [nnstreamer_dep, glib_dep, gmodule_dep, gst_dep, gst_app_dep, leveldb_dep]
+nns_capi_deps = [nnstreamer_dep, glib_dep, gmodule_dep, gio_dep, gst_dep, gst_app_dep, leveldb_dep]
 
 if (get_option('enable-tizen'))
   message('C-API is in Tizen mode')
@@ -97,6 +101,9 @@ if (get_option('enable-tizen'))
   nns_capi_deps += [dependency('dlog')]
 endif
 
+if (get_option('enable-machine-learning-agent'))
+  nns_capi_deps += [gio_unix_dep, gdbus_gen_header_dep]
+endif
 
 # Single-shot API.
 nns_capi_single_shared_lib = shared_library ('capi-ml-inference-single',
diff --git a/c/src/ml-api-service-agent-client.c b/c/src/ml-api-service-agent-client.c
new file mode 100644
index 0000000..bb5cc4c
--- /dev/null
+++ b/c/src/ml-api-service-agent-client.c
@@ -0,0 +1,292 @@
+/* SPDX-License-Identifier: Apache-2.0 */
+/**
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd. All Rights Reserved.
+ *
+ * @file ml-api-service-agent-client.c
+ * @date 20 Jul 2022
+ * @brief agent (d-bus) implementation of NNStreamer/Service C-API
+ * @see https://github.com/nnstreamer/nnstreamer
+ * @author Yongjoo Ahn
+ * @bug No known bugs except for NYI items
+ */
+
+#include <gio/gio.h>
+
+#include "ml-api-internal.h"
+#include "ml-api-service.h"
+#include "pipeline-dbus.h"
+
+/**
+ * @brief Structure for ml_service_h
+ */
+typedef struct
+{
+  gint64 id;              /**< Pipeline id issued by the ml-service daemon */
+  gchar *service_name;    /**< Name used to launch this pipeline */
+} ml_service_s;
+
+/**
+ * @brief Internal function to get a proxy of the pipeline d-bus interface.
+ * @return A new proxy instance (caller must g_object_unref it), or NULL on failure.
+ */
+static MachinelearningServicePipeline *
+_get_proxy_new_for_bus_sync (void)
+{
+  MachinelearningServicePipeline *mlsp;
+  GError *err = NULL;
+
+  mlsp = machinelearning_service_pipeline_proxy_new_for_bus_sync
+      (G_BUS_TYPE_SESSION, G_DBUS_PROXY_FLAGS_NONE,
+      "org.tizen.machinelearning.service",
+      "/Org/Tizen/MachineLearning/Service/Pipeline", NULL, &err);
+
+  if (!mlsp) {
+    _ml_loge ("Failed to get the proxy of the ml-service d-bus interface: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+  }
+
+  return mlsp;
+}
+
+/**
+ * @brief Launch the pipeline registered with the given name and get its handle.
+ */
+int
+ml_service_launch_pipeline (const char *name, ml_service_h * h)
+{
+  ml_service_s *server;
+  MachinelearningServicePipeline *mlsp;
+  GError *err = NULL;
+  gint out_return_code = ML_ERROR_UNKNOWN;
+  gint64 out_id = -1;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!name)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'name' is NULL. It should be a valid string.");
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  if (!machinelearning_service_pipeline_call_launch_pipeline_sync (mlsp, name,
+          &out_return_code, &out_id, NULL, &err)) {
+    g_object_unref (mlsp);
+    _ml_loge ("launch_pipeline d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to invoke the d-bus method to launch the pipeline.");
+  }
+  g_object_unref (mlsp);
+
+  if (out_return_code != ML_ERROR_NONE)
+    _ml_error_report_return (out_return_code,
+        "Failed to launch pipeline, please check its integrity.");
+
+  /* Allocate the handle only after the daemon reported success.
+   * Note: g_new0 aborts on OOM, so no NULL check is needed. */
+  server = g_new0 (ml_service_s, 1);
+  server->id = out_id;
+  server->service_name = g_strdup (name);
+  *h = (ml_service_h) server;
+
+  return ML_ERROR_NONE;
+}
+
+/**
+ * @brief Start the pipeline of given ml_service_h
+ */
+int
+ml_service_start_pipeline (ml_service_h h)
+{
+  gint out_result = ML_ERROR_UNKNOWN;
+  GError *err = NULL;
+  ml_service_s *server = (ml_service_s *) h;
+  MachinelearningServicePipeline *mlsp;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  if (!machinelearning_service_pipeline_call_start_pipeline_sync (mlsp,
+          server->id, &out_result, NULL, &err)) {
+    _ml_loge ("start_pipeline d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    out_result = ML_ERROR_STREAMS_PIPE;
+  }
+
+  g_object_unref (mlsp);
+
+  /* Propagate the result code reported by the daemon. */
+  return out_result;
+}
+
+/**
+ * @brief Stop the pipeline of given ml_service_h
+ */
+int
+ml_service_stop_pipeline (ml_service_h h)
+{
+  gint out_result = ML_ERROR_UNKNOWN;
+  GError *err = NULL;
+  ml_service_s *server = (ml_service_s *) h;
+  MachinelearningServicePipeline *mlsp;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  if (!machinelearning_service_pipeline_call_stop_pipeline_sync (mlsp,
+          server->id, &out_result, NULL, &err)) {
+    _ml_loge ("stop_pipeline d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    out_result = ML_ERROR_STREAMS_PIPE;
+  }
+
+  g_object_unref (mlsp);
+
+  return out_result;
+}
+
+/**
+ * @brief Destroy the pipeline of given ml_service_h
+ */
+int
+ml_service_destroy_pipeline (ml_service_h h)
+{
+  gint out_result = ML_ERROR_UNKNOWN;
+  GError *err = NULL;
+  ml_service_s *server = (ml_service_s *) h;
+  MachinelearningServicePipeline *mlsp;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  if (!machinelearning_service_pipeline_call_destroy_pipeline_sync (mlsp,
+          server->id, &out_result, NULL, &err)) {
+    _ml_loge ("destroy_pipeline d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    out_result = ML_ERROR_STREAMS_PIPE;
+  }
+
+  g_object_unref (mlsp);
+
+  /* Release the handle only when the daemon actually destroyed the pipeline,
+   * so the caller can retry with a still-valid handle on failure. */
+  if (out_result == ML_ERROR_NONE) {
+    g_free (server->service_name);
+    g_free (server);
+  }
+
+  return out_result;
+}
+
+/**
+ * @brief Return state of given ml_service_h
+ */
+int
+ml_service_getstate_pipeline (ml_service_h h, ml_pipeline_state_e * state)
+{
+  gint out_result = ML_ERROR_UNKNOWN;
+  gint _state = ML_PIPELINE_STATE_UNKNOWN;
+  GError *err = NULL;
+  ml_service_s *server = (ml_service_s *) h;
+  MachinelearningServicePipeline *mlsp;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  if (!state)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'state' is NULL. It should be a valid ml_pipeline_state_e pointer");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  if (!machinelearning_service_pipeline_call_get_state_sync (mlsp,
+          server->id, &out_result, &_state, NULL, &err)) {
+    _ml_loge ("get_state d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    out_result = ML_ERROR_STREAMS_PIPE;
+  }
+
+  g_object_unref (mlsp);
+
+  /* Write the out parameter only on success. */
+  if (out_result == ML_ERROR_NONE)
+    *state = (ml_pipeline_state_e) _state;
+
+  return out_result;
+}
+
+/**
+ * @brief Return the pipeline description of given ml_service_h
+ */
+int
+ml_service_getdesc_pipeline (ml_service_h h, char **desc)
+{
+  gint out_result = ML_ERROR_UNKNOWN;
+  GError *err = NULL;
+  ml_service_s *server = (ml_service_s *) h;
+  MachinelearningServicePipeline *mlsp;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+
+  if (!h)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'h' is NULL. It should be a valid ml_service_h");
+
+  if (!desc)
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'desc' is NULL. It should be a valid char** pointer");
+
+  mlsp = _get_proxy_new_for_bus_sync ();
+  if (!mlsp)
+    _ml_error_report_return (ML_ERROR_STREAMS_PIPE,
+        "Failed to connect to the ml-service d-bus interface.");
+
+  /* On success, *desc is a newly-allocated string owned by the caller. */
+  if (!machinelearning_service_pipeline_call_get_description_sync (mlsp,
+          server->id, &out_result, desc, NULL, &err)) {
+    _ml_loge ("get_description d-bus call failed: %s",
+        err ? err->message : "unknown error");
+    g_clear_error (&err);
+    out_result = ML_ERROR_STREAMS_PIPE;
+  }
+
+  g_object_unref (mlsp);
+
+  return out_result;
+}
diff --git a/meson.build b/meson.build
index b9dadc3..458bf09 100644
--- a/meson.build
+++ b/meson.build
@@ -155,13 +155,13 @@ api_conf.set('EXEC_PREFIX', api_install_bindir)
 api_conf.set('LIB_INSTALL_DIR', api_install_libdir)
 api_conf.set('INCLUDE_INSTALL_DIR', api_install_includedir)
 
-# Build C-API
-subdir('c')
-
 if get_option('enable-machine-learning-agent')
   subdir('daemon')
 endif
 
+# Build C-API
+subdir('c')
+
 # Build JNI wrapper when developer sets java-home
 # (e.g., -Djava-home=$JAVA_HOME from environment variables)
 java_home = get_option('java-home').strip()