From: Changyeon Lee Date: Thu, 18 Jul 2024 09:03:28 +0000 (+0900) Subject: e_blur: Add Blur Video Capture X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F84%2F315384%2F1;p=platform%2Fupstream%2Fenlightenment.git e_blur: Add Blur Video Capture Blur Video Capture is a feature for targets that cannot use the video buffer directly as a texture, so the video content can only be obtained through capture. Change-Id: I2f227619e2efcbad3df66b0c22cf11583e5f049e --- diff --git a/src/bin/Makefile.mk b/src/bin/Makefile.mk index 5610289..8b142d6 100644 --- a/src/bin/Makefile.mk +++ b/src/bin/Makefile.mk @@ -130,8 +130,8 @@ src/include/e_view.h \ src/include/e_view_rect.h \ src/include/e_view_image.h \ src/include/e_view_client.h \ -src/include/e_view_edje.h - +src/include/e_view_edje.h \ +src/include/e_blur_video_capture.h enlightenment_src = \ src/bin/e_comp_screen.c \ @@ -185,6 +185,7 @@ src/bin/server/e_eom.c \ src/bin/server/e_dpms.c \ src/bin/server/e_blender.c \ src/bin/server/e_blur.c \ +src/bin/server/e_blur_video_capture.c \ src/bin/server/e_linux_dmabuf.c \ src/bin/server/e_explicit_sync.c \ src/bin/server/e_presentation_time.c \ diff --git a/src/bin/core/e_comp_cfdata.c b/src/bin/core/e_comp_cfdata.c index 7f181a5..02c7245 100644 --- a/src/bin/core/e_comp_cfdata.c +++ b/src/bin/core/e_comp_cfdata.c @@ -94,6 +94,7 @@ e_comp_cfdata_edd_init(E_Config_DD **conf_edd, E_Config_DD **match_edd) E_CONFIG_VAL(D, T, e_wheel_click_angle, INT); E_CONFIG_VAL(D, T, input_output_assign_policy, INT); E_CONFIG_VAL(D, T, touch_block_on_palm, INT); + E_CONFIG_VAL(D, T, blur_video_capture, UCHAR); } EINTERN E_Comp_Config * @@ -268,6 +269,8 @@ e_comp_cfdata_config_new(void) cfg->touch_block_on_palm = 0; + cfg->blur_video_capture = 0; + return cfg; error: diff --git a/src/bin/server/e_blur.c b/src/bin/server/e_blur.c index 593deb7..bb17fce 100644 --- a/src/bin/server/e_blur.c +++ b/src/bin/server/e_blur.c @@ -1,5 +1,6 @@ #include "e.h" #include "e_blur_intern.h" +#include "e_blur_video_capture_intern.h" #include #include @@ -25,6 +26,7 @@ struct _E_Blur_Manager struct wl_listener new_blur; } listener; + Eina_Bool video_capture_enabled; Eina_Bool blur_enabled; Eina_List *blurs; @@ -133,7 +135,8 @@ _e_blur_manager_blur_enabled_changed(Eina_Bool set) if (blur_manager->blur_enabled == set) return; - e_video_debug_display_primary_plane_set(EINA_TRUE); + if (!blur_manager->video_capture_enabled) + e_video_debug_display_primary_plane_set(EINA_TRUE); } else { @@ -146,7 +149,8 @@ _e_blur_manager_blur_enabled_changed(Eina_Bool set) if (blur_manager->blur_enabled == set) return; - e_video_debug_display_primary_plane_set(EINA_FALSE); + if (!blur_manager->video_capture_enabled) + e_video_debug_display_primary_plane_set(EINA_FALSE); } blur_manager->blur_enabled = set; @@ -587,6 +591,12 @@ e_blur_manager_init(void) ds_tizen_blur_manager_add_new_blur_listener(blur_manager->tizen_blur_manager, &blur_manager->listener.new_blur); + if (conf->blur_video_capture) + { + e_blur_video_capture_init(); + blur_manager->video_capture_enabled = EINA_TRUE; + } + _blur_manager = blur_manager; return EINA_TRUE; @@ -601,6 +611,7 @@ fail: EINTERN void e_blur_manager_shutdown(void) { + e_blur_video_capture_deinit(); } EINTERN E_Blur_Hook * @@ -671,4 +682,5 @@ e_blur_trace_debug(Eina_Bool onoff) if (onoff == blur_trace) return; blur_trace = onoff; INF("Blur Debug is %s", onoff?"ON":"OFF"); + e_blur_video_capture_trace_debug(onoff); } diff --git a/src/bin/server/e_blur_video_capture.c b/src/bin/server/e_blur_video_capture.c new file mode 100644
index 0000000..8a699d2 --- /dev/null +++ b/src/bin/server/e_blur_video_capture.c @@ -0,0 +1,1015 @@ +#include "e.h" +#include "e_comp_intern.h" +#include "e_blur_intern.h" +#include "e_hwc_window_intern.h" +#include "e_blur_video_capture.h" + +#define DEQUEUE_TIMEOUT_MS 10000 + +#define BLUR_VC_TRACE(f, ec, x... ) \ + do \ + { \ + if (blur_video_capture_trace) \ + { \ + ELOGF("BLUR_VC", f, \ + (ec), ##x); \ + } \ + } \ + while (0) + +typedef struct _E_Blur_Video_Capture E_Blur_Video_Capture; +typedef struct _E_Blur_Video_Capture_Client E_Blur_Video_Capture_Client; +typedef struct _E_Blur_Video_Capture_Object E_Blur_Video_Capture_Object; + +struct _E_Blur_Video_Capture +{ + int width, height, fps; + tbm_format format; + + Ecore_Thread *thread; + tbm_surface_queue_h tqueue; + tbm_surface_h current_tsurface; + + Evas_Object *source_obj; + + Eina_List *clients; + + E_Blur_Video_Capture_Funcs *funcs; +}; + +struct _E_Blur_Video_Capture_Client +{ + E_Client *ec; + E_Object_Delfn *ec_delfn; + + Eina_Bool visible; + Eina_Bool presentation_visible; + + Eina_List *capture_objs; + + E_Hwc_Presentation_Callback *presentation_callback; +}; + +struct _E_Blur_Video_Capture_Object +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Rectangle *blur_rectangle; + + Evas_Object *blur_rectangle_obj; + + Evas_Object *blur_clip_obj; + Evas_Object *source_clip_obj; + Evas_Object *source_obj; + + Eina_Bool visible; +}; + +typedef struct { + tbm_format tbm_format; + int evas_colorspace; +} tbm_format_data; + +static const tbm_format_data tbm_formats[] = { + {TBM_FORMAT_ARGB8888, EVAS_COLORSPACE_ARGB8888}, + {TBM_FORMAT_YUV422, EMILE_COLORSPACE_YCBCR422P601_PL}, + {TBM_FORMAT_NV12, EVAS_COLORSPACE_YCBCR420NV12601_PL}, + {TBM_FORMAT_NV12MT, EMILE_COLORSPACE_YCBCR420TM12601_PL}, +}; + +#define C(b,m) (((b) >> (m)) & 0xFF) +#define FOURCC_STR(id) C(id,0), C(id,8), C(id,16), C(id,24) +#define ROUNDUP(s,c) (((s) + (c-1)) & ~(c-1)) + +#define NUM_TBM_FORMATS (sizeof(tbm_formats) / sizeof(tbm_formats[0])) + +static E_Blur_Video_Capture *_video_capture = NULL; +static Eina_List *_e_blur_video_capture_hwc_window_hooks = NULL; +static Eina_List *_e_blur_video_capture_blur_hooks = NULL; +static Eina_Bool blur_video_capture_trace = EINA_FALSE; + +static Eina_Bool _e_blur_video_capture_start(void); +static Eina_Bool _e_blur_video_capture_stop(void); + +static Eina_Bool +_e_blur_video_capture_source_object_visible_get(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + + if (!video_capture->source_obj) return EINA_FALSE; + + return evas_object_visible_get(video_capture->source_obj); +} + +static void +_e_blur_video_capture_source_object_deinit(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l, *l2; + + if (!video_capture->source_obj) return; + + EINA_LIST_FOREACH(video_capture->clients, l, client) + { + EINA_LIST_FOREACH(client->capture_objs, l2, object) + evas_object_image_source_set(object->source_obj, NULL); + } + + evas_object_image_native_surface_set(video_capture->source_obj, NULL); + evas_object_del(video_capture->source_obj); + video_capture->source_obj = NULL; +} + +static Eina_Bool +_e_blur_video_capture_source_object_init(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l, *l2; + + if (video_capture->source_obj) return EINA_TRUE; + + video_capture->source_obj = 
evas_object_image_filled_add(e_comp->evas); + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture->source_obj, EINA_FALSE); + + evas_object_name_set(video_capture->source_obj, "blur_video_capture"); + evas_object_image_size_set(video_capture->source_obj, 1, 1); + evas_object_layer_set(video_capture->source_obj, E_LAYER_BOTTOM); + evas_object_lower(video_capture->source_obj); + + EINA_LIST_FOREACH(video_capture->clients, l, client) + { + EINA_LIST_FOREACH(client->capture_objs, l2, object) + evas_object_image_source_set(object->source_obj, video_capture->source_obj); + } + + return EINA_TRUE; +} + +static void +_e_blur_video_capture_object_show(E_Blur_Video_Capture_Object *object) +{ + evas_object_show(object->source_obj); + evas_object_show(object->source_clip_obj); + evas_object_show(object->blur_clip_obj); + + object->visible = EINA_TRUE; + + ELOGF("E_BLUR", "Show Video_Capture_Object:%p", object->client->ec, object); +} + +static void +_e_blur_video_capture_object_hide(E_Blur_Video_Capture_Object *object) +{ + evas_object_hide(object->source_obj); + evas_object_hide(object->source_clip_obj); + evas_object_hide(object->blur_clip_obj); + + object->visible = EINA_FALSE; + + ELOGF("E_BLUR", "Hide Video_Capture_Object:%p", object->client->ec, object); +} + +static void +_e_blur_video_capture_object_destroy(E_Blur_Video_Capture_Object *object) +{ + E_Blur_Video_Capture_Client *client = object->client; + + ELOGF("E_BLUR", "Destroy Video_Capture_Object:%p", client->ec, object); + + evas_object_del(object->blur_clip_obj); + evas_object_del(object->source_clip_obj); + evas_object_del(object->source_obj); + + client->capture_objs = eina_list_remove(client->capture_objs, object); + + free(object); +} + +static void +_e_blur_video_capture_object_evas_cb_show(void *data, Evas *evas EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event EINA_UNUSED) +{ + E_Blur_Video_Capture_Object *object; + + if (!(object = data)) return; + + if (!object->client->presentation_visible) return; + + if (_e_blur_video_capture_source_object_visible_get()) + _e_blur_video_capture_object_show(object); + + _e_blur_video_capture_start(); +} + +static void +_e_blur_video_capture_object_evas_cb_hide(void *data, Evas *evas EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event EINA_UNUSED) +{ + E_Blur_Video_Capture_Object *object; + + if (!(object = data)) return; + + _e_blur_video_capture_object_hide(object); + _e_blur_video_capture_stop(); +} + +static void +_e_blur_video_capture_object_evas_cb_resize(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Object *object; + int x, y, w, h; + + if (!(object = data)) return; + + evas_object_geometry_get(obj, &x, &y, &w, &h); + evas_object_geometry_set(object->blur_clip_obj, x, y, w, h); +} + +static void +_e_blur_video_capture_object_evas_cb_move(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Object *object; + int x, y, w, h; + + if (!(object = data)) return; + + evas_object_geometry_get(obj, &x, &y, &w, &h); + evas_object_geometry_set(object->blur_clip_obj, x, y, w, h); +} + +static void +_e_blur_video_capture_object_evas_cb_restack(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Object *object; + + if (!(object = data)) return; + + /* TODO:restack capture object */ +} + +static void +_e_blur_video_capture_object_evas_cb_del(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + 
E_Blur_Video_Capture_Object *object; + + if (!(object = data)) return; + + _e_blur_video_capture_object_destroy(object); + _e_blur_video_capture_stop(); +} + +static E_Blur_Video_Capture_Object * +_e_blur_video_capture_object_get(E_Blur_Video_Capture_Client *client, E_Blur_Rectangle *blur_rectangle) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + Evas_Object *obj; + E_Blur_Video_Capture_Object *object; + const char *program, *name; + int x, y, w, h; + Eina_List *l; + + if (!blur_rectangle) return NULL; + + EINA_LIST_FOREACH(client->capture_objs, l, object) + { + if (object->blur_rectangle == blur_rectangle) + return object; + } + + obj = e_blur_rectangle_object_get(blur_rectangle); + if (!obj) return NULL;; + + object = E_NEW(E_Blur_Video_Capture_Object, 1); + EINA_SAFETY_ON_NULL_RETURN_VAL(object, NULL); + + object->blur_clip_obj = evas_object_rectangle_add(e_comp->evas); + EINA_SAFETY_ON_NULL_GOTO(object->blur_clip_obj, fail); + evas_object_name_set(object->blur_clip_obj, "blur_video_capture_blur_clip_obj"); + evas_object_pass_events_set(object->blur_clip_obj, EINA_TRUE); + + object->source_clip_obj = evas_object_rectangle_add(e_comp->evas); + EINA_SAFETY_ON_NULL_GOTO(object->source_clip_obj, fail); + evas_object_name_set(object->source_clip_obj, "blur_video_capture_source_clip_obj"); + evas_object_pass_events_set(object->source_clip_obj, EINA_TRUE); + + object->source_obj = evas_object_image_filled_add(e_comp->evas); + EINA_SAFETY_ON_NULL_GOTO(object->source_obj, fail); + evas_object_name_set(object->source_obj, "blur_video_capture_source_obj"); + evas_object_pass_events_set(object->source_obj, EINA_TRUE); + + if (_e_blur_video_capture_source_object_visible_get()) + evas_object_image_source_set(object->source_obj, video_capture->source_obj); + + object->blur_rectangle_obj = obj; + + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_SHOW, + _e_blur_video_capture_object_evas_cb_show, object); + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_HIDE, + _e_blur_video_capture_object_evas_cb_hide, object); + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_RESIZE, + _e_blur_video_capture_object_evas_cb_resize, object); + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_MOVE, + _e_blur_video_capture_object_evas_cb_move, object); + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_RESTACK, + _e_blur_video_capture_object_evas_cb_restack, object); + evas_object_event_callback_add(object->blur_rectangle_obj, EVAS_CALLBACK_DEL, + _e_blur_video_capture_object_evas_cb_del, object); + + efl_gfx_filter_program_get(object->blur_rectangle_obj, &program, &name); + efl_gfx_filter_program_set(object->source_obj, program, name); + + evas_object_layer_set(object->blur_clip_obj, evas_object_layer_get(client->ec->frame)); + evas_object_stack_above(object->blur_clip_obj, client->ec->frame); + + evas_object_layer_set(object->source_clip_obj, evas_object_layer_get(client->ec->frame)); + evas_object_stack_below(object->source_clip_obj, object->blur_clip_obj); + + evas_object_layer_set(object->source_obj, evas_object_layer_get(client->ec->frame)); + evas_object_stack_below(object->source_obj, object->source_clip_obj); + + evas_object_geometry_get(object->blur_rectangle_obj, &x, &y, &w, &h); + evas_object_geometry_set(object->blur_clip_obj, x, y, w, h); + + e_client_geometry_get(client->ec, &x, &y, &w, &h); + evas_object_geometry_set(object->source_clip_obj, x, y, w, h); + + 
evas_object_geometry_set(object->source_obj, 0, 0, e_comp->w, e_comp->h); + + evas_object_clip_set(object->source_obj, object->source_clip_obj); + evas_object_clip_set(object->source_clip_obj, object->blur_clip_obj); + + object->client = client; + client->capture_objs = eina_list_append(client->capture_objs, object); + + ELOGF("E_BLUR", "Create Video_Capture_Object:%p", client->ec, object); + + return object; + +fail: + if (object->blur_clip_obj) + evas_object_del(object->blur_clip_obj); + + if (object->source_clip_obj) + evas_object_del(object->source_clip_obj); + + if (object->source_obj) + evas_object_del(object->source_obj); + + free(object); + + return NULL; +} + +static int +_convert_to_colorspace_from_tbm_format(int tbm_format) +{ + int i; + + for (i = 0; i < NUM_TBM_FORMATS; i++) + if (tbm_formats[i].tbm_format == tbm_format) + return tbm_formats[i].evas_colorspace; + + ERR("fail to find evas_colorspace with tbm_format(%c%c%c%c)", FOURCC_STR(tbm_format)); + + return 0; +} + +static void +_e_blur_video_capture_thread_run_cb(void *data, Ecore_Thread *thread) +{ + E_Blur_Video_Capture *video_capture = (E_Blur_Video_Capture *)data; + tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE; + tbm_surface_h tsurface; + static double last_time = 0, current_time = 0, frame_delta = 0; + E_Blur_Video_Capture_Error ret; + + while(1) + { + if (thread) + { + if (ecore_thread_check(thread)) + return; + } + + current_time = ecore_time_get(); + frame_delta = current_time - last_time; + + if ((last_time == 0) || ((frame_delta) >= (1.0 / video_capture->fps))) + { + tsq_err = tbm_surface_queue_can_dequeue_wait_timeout(video_capture->tqueue, + DEQUEUE_TIMEOUT_MS); + EINA_SAFETY_ON_FALSE_RETURN(tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE); + + tsq_err = tbm_surface_queue_dequeue(video_capture->tqueue, &tsurface); + EINA_SAFETY_ON_FALSE_RETURN(tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE); + + last_time = ecore_time_get(); + + BLUR_VC_TRACE("Blur Video Capture Start", NULL); + ret = video_capture->funcs->video_capture(tsurface); + if (ret == E_BLUR_VIDEO_CAPTURE_ERROR_NONE) + { + BLUR_VC_TRACE("Blur Video Capture End", NULL); + tsq_err = tbm_surface_queue_enqueue(video_capture->tqueue, tsurface); + if (tsq_err != TBM_SURFACE_QUEUE_ERROR_NONE) + { + ERR("fail to tbm_surface_queue_enqueue tsq_err:%d", tsq_err); + tbm_surface_queue_release(video_capture->tqueue, tsurface); + } + + if (thread) + ecore_thread_feedback(thread, NULL); + } + else + { + BLUR_VC_TRACE("Fail to Blur Video Capture ret:%d", NULL, ret); + tbm_surface_queue_release(video_capture->tqueue, tsurface); + } + + + if (!thread) break; + } + else + { + usleep(((1.0 / video_capture->fps) - (frame_delta)) * 1000000); + } + } +} + +static void +_e_blur_video_capture_thread_run_notify_cb(void *data, Ecore_Thread *thread, void *msg_data) +{ + E_Blur_Video_Capture *video_capture = (E_Blur_Video_Capture *)data; + Evas_Native_Surface ns; + tbm_surface_queue_error_e tsq_err = TBM_SURFACE_QUEUE_ERROR_NONE; + tbm_surface_h tsurface; + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l, *l2; + + if (!tbm_surface_queue_can_acquire(video_capture->tqueue, 0)) return; + + tsq_err = tbm_surface_queue_acquire(video_capture->tqueue, &tsurface); + EINA_SAFETY_ON_FALSE_RETURN(tsq_err == TBM_SURFACE_QUEUE_ERROR_NONE); + + EINA_SAFETY_ON_NULL_GOTO(video_capture->source_obj, update); + + memset(&ns, 0, sizeof(Evas_Native_Surface)); + ns.type = EVAS_NATIVE_SURFACE_TBM; + ns.version = EVAS_NATIVE_SURFACE_VERSION; + 
ns.data.tbm.buffer = tsurface; + + BLUR_VC_TRACE("Blur Video Capture Source Update Start", NULL); + evas_object_image_size_set(video_capture->source_obj, video_capture->width, video_capture->height); + evas_object_image_colorspace_set(video_capture->source_obj, + _convert_to_colorspace_from_tbm_format(video_capture->format)); + evas_object_image_native_surface_set(video_capture->source_obj, &ns); + evas_object_image_data_update_add(video_capture->source_obj, 0, 0, + video_capture->width, video_capture->height); + evas_object_geometry_set(video_capture->source_obj, -e_comp->w, -e_comp->h, e_comp->w, e_comp->h); + BLUR_VC_TRACE("Blur Video Capture Source Update End", NULL); + + if (!_e_blur_video_capture_source_object_visible_get()) + { + EINA_LIST_FOREACH(video_capture->clients, l, client) + { + if (!client->presentation_visible) continue; + + EINA_LIST_FOREACH(client->capture_objs, l2, object) + { + if (!evas_object_visible_get(object->blur_rectangle_obj)) continue; + + _e_blur_video_capture_object_show(object); + } + } + + evas_object_show(video_capture->source_obj); + } + +update: + if (video_capture->current_tsurface) + { + tbm_surface_internal_unref(video_capture->current_tsurface); + tbm_surface_queue_release(video_capture->tqueue, video_capture->current_tsurface); + } + + video_capture->current_tsurface = tsurface; + tbm_surface_internal_ref(video_capture->current_tsurface); +} + +static void +_e_blur_video_capture_thread_run_end_cb(void *data, Ecore_Thread *thread) +{ + E_Blur_Video_Capture *video_capture = (E_Blur_Video_Capture *)data; + + video_capture->thread = NULL; +} + +static void +_e_blur_video_capture_thread_run_cancel_cb(void *data, Ecore_Thread *thread) +{ + E_Blur_Video_Capture *video_capture = (E_Blur_Video_Capture *)data; + + video_capture->thread = NULL; +} + +static Eina_Bool +_e_blur_video_capture_visible_get() +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l, *l2; + + EINA_LIST_FOREACH(video_capture->clients, l, client) + { + if (!client->presentation_visible) continue; + + EINA_LIST_FOREACH(client->capture_objs, l2, object) + { + if (evas_object_visible_get(object->blur_rectangle_obj)) + return EINA_TRUE; + } + } + + return EINA_FALSE; +} + +static Eina_Bool +_e_blur_video_capture_start(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture, EINA_FALSE); + + if (video_capture->thread) return EINA_TRUE; + if (!_e_blur_video_capture_visible_get()) return EINA_TRUE; + + video_capture->tqueue = tbm_surface_queue_create(2, video_capture->width, video_capture->height, + video_capture->format, TBM_BO_DEFAULT); + EINA_SAFETY_ON_NULL_GOTO(video_capture->tqueue, fail); + + if (!_e_blur_video_capture_source_object_init()) + { + ERR("fail to _e_blur_video_capture_source_object_init"); + goto fail; + } + + video_capture->thread = ecore_thread_feedback_run(_e_blur_video_capture_thread_run_cb, + _e_blur_video_capture_thread_run_notify_cb, + _e_blur_video_capture_thread_run_end_cb, + _e_blur_video_capture_thread_run_cancel_cb, + video_capture, + EINA_TRUE); + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture->thread, EINA_FALSE); + + ELOGF("E_BLUR", "Run video capture", NULL); + + return EINA_TRUE; + +fail: + _e_blur_video_capture_source_object_deinit(); + + if (video_capture->tqueue) + { + tbm_surface_queue_destroy(video_capture->tqueue); + video_capture->tqueue = NULL; + } + + return EINA_FALSE; +} + +static Eina_Bool 
+_e_blur_video_capture_stop(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture, EINA_FALSE); + + if (!video_capture->thread) return EINA_TRUE; + if (_e_blur_video_capture_visible_get()) return EINA_TRUE; + + ecore_thread_cancel(video_capture->thread); + ecore_thread_wait(video_capture->thread, 10); + video_capture->thread = NULL; + + _e_blur_video_capture_source_object_deinit(); + + tbm_surface_queue_destroy(video_capture->tqueue); + video_capture->tqueue = NULL; + + if (video_capture->current_tsurface) + { + tbm_surface_internal_unref(video_capture->current_tsurface); + video_capture->current_tsurface = NULL; + } + + ELOGF("E_BLUR", "Stop video capture", NULL); + + return EINA_TRUE; +} + +static void +_e_blur_video_capture_cb_rectangle_create(void *data, E_Blur *blur) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + E_Blur_Rectangle *blur_rectangle; + Eina_List *blur_rectangles; + Eina_List *l, *l2; + + EINA_SAFETY_ON_NULL_RETURN(video_capture->funcs); + + blur_rectangles = e_blur_rectangles_get(blur); + if (!blur_rectangles) return; + if (!video_capture->clients) return; + + EINA_LIST_FOREACH(blur_rectangles, l, blur_rectangle) + { + EINA_LIST_FOREACH(video_capture->clients, l2, client) + { + object = _e_blur_video_capture_object_get(client, blur_rectangle); + + if (!client->presentation_visible) continue; + if (!evas_object_visible_get(object->blur_rectangle_obj)) continue; + if (!_e_blur_video_capture_source_object_visible_get()) continue; + + _e_blur_video_capture_object_show(object); + } + } + + _e_blur_video_capture_start(); +} + +static void +_e_blur_video_capture_client_cb_presentation_callback(void *data, E_Hwc_Presentation_Callback *callback) +{ + E_Blur_Video_Capture_Client *client = (E_Blur_Video_Capture_Client *)data; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + + e_hwc_presentation_callback_del(client->presentation_callback); + client->presentation_callback = NULL; + + client->presentation_visible = EINA_TRUE; + + ELOGF("E_BLUR", "Presentation Show Video_Capture_Client:%p", client->ec, client); + + if (_e_blur_video_capture_source_object_visible_get()) + { + EINA_LIST_FOREACH(client->capture_objs, l, object) + { + if (!evas_object_visible_get(object->blur_rectangle_obj)) + continue; + + _e_blur_video_capture_object_show(object); + } + } + + _e_blur_video_capture_start(); +} + +static void +_e_blur_video_capture_client_evas_cb_show(void *data, Evas *evas EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event EINA_UNUSED) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + + if (!(client = data)) return; + + client->visible = EINA_TRUE; + client->presentation_visible = e_comp_is_on_overlay(client->ec); + + ELOGF("E_BLUR", "Show Video_Capture_Client:%p presentation:%d", client->ec, client, + client->presentation_visible); + + if (client->presentation_visible) + { + if (_e_blur_video_capture_source_object_visible_get()) + { + EINA_LIST_FOREACH(client->capture_objs, l, object) + { + if (!evas_object_visible_get(object->blur_rectangle_obj)) + continue; + + _e_blur_video_capture_object_show(object); + } + } + + _e_blur_video_capture_start(); + } + else + { + if (client->presentation_callback) + e_hwc_presentation_callback_del(client->presentation_callback); + + client->presentation_callback = e_client_hwc_presentation_callback_add(client->ec, + 
_e_blur_video_capture_client_cb_presentation_callback, + client); + EINA_SAFETY_ON_NULL_RETURN(client->presentation_callback); + } +} + +static void +_e_blur_video_capture_client_evas_cb_hide(void *data, Evas *evas EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event EINA_UNUSED) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + + if (!(client = data)) return; + + client->visible = EINA_FALSE; + client->presentation_visible = EINA_FALSE; + + if (client->presentation_callback) + e_hwc_presentation_callback_del(client->presentation_callback); + + EINA_LIST_FOREACH(client->capture_objs, l, object) + _e_blur_video_capture_object_hide(object); + + _e_blur_video_capture_stop(); + + ELOGF("E_BLUR", "Hide Video_Capture_Client:%p", client->ec, client); +} + +static void +_e_blur_video_capture_source_clip_object_geometry_set(Evas_Object *source_clip, E_Client *ec) +{ + int x, y, w, h; + + e_client_geometry_get(ec, &x, &y, &w, &h); + evas_object_geometry_set(source_clip, x, y, w, h); +} + +static void +_e_blur_video_capture_client_evas_cb_resize(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + E_Client *ec; + + if (!(client = data)) return; + + ec = client->ec; + if (!ec) return; + + EINA_LIST_FOREACH(client->capture_objs, l, object) + _e_blur_video_capture_source_clip_object_geometry_set(object->source_clip_obj, ec); +} + +static void +_e_blur_video_capture_client_evas_cb_move(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + E_Client *ec; + + if (!(client = data)) return; + + ec = client->ec; + if (!ec) return; + + EINA_LIST_FOREACH(client->capture_objs, l, object) + _e_blur_video_capture_source_clip_object_geometry_set(object->source_clip_obj, ec); +} + +static void +_e_blur_video_capture_client_evas_cb_restack(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture_Object *object; + Eina_List *l; + short layer; + + if (!(client = data)) return; + + layer = evas_object_layer_get(obj); + + EINA_LIST_FOREACH(client->capture_objs, l, object) + { + evas_object_layer_set(object->blur_clip_obj, layer); + evas_object_stack_above(object->blur_clip_obj, obj); + evas_object_layer_set(object->source_clip_obj, layer); + evas_object_stack_below(object->source_clip_obj, object->blur_clip_obj); + evas_object_layer_set(object->source_obj, layer); + evas_object_stack_below(object->source_obj, object->source_clip_obj); + } +} + +static void +_e_blur_video_capture_client_cb_ec_free(void *data, void *obj) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Client *client = (E_Blur_Video_Capture_Client *)data; + E_Blur_Video_Capture_Object *object; + Eina_List *l, *ll; + + EINA_SAFETY_ON_NULL_RETURN(client); + + ELOGF("E_BLUR", "Destroy Video_Capture_Client:%p", client->ec, client); + + if (client->presentation_callback) + e_hwc_presentation_callback_del(client->presentation_callback); + + EINA_LIST_FOREACH_SAFE(client->capture_objs, l, ll, object) + { + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_SHOW, + _e_blur_video_capture_object_evas_cb_show); + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_HIDE, + 
_e_blur_video_capture_object_evas_cb_hide); + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_RESIZE, + _e_blur_video_capture_object_evas_cb_resize); + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_MOVE, + _e_blur_video_capture_object_evas_cb_move); + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_RESTACK, + _e_blur_video_capture_object_evas_cb_restack); + evas_object_event_callback_del(object->blur_rectangle_obj, EVAS_CALLBACK_DEL, + _e_blur_video_capture_object_evas_cb_del); + + _e_blur_video_capture_object_destroy(object); + } + + client->ec_delfn = NULL; + + evas_object_event_callback_del(client->ec->frame, EVAS_CALLBACK_SHOW, + _e_blur_video_capture_client_evas_cb_show); + evas_object_event_callback_del(client->ec->frame, EVAS_CALLBACK_HIDE, + _e_blur_video_capture_client_evas_cb_hide); + evas_object_event_callback_del(client->ec->frame, EVAS_CALLBACK_RESIZE, + _e_blur_video_capture_client_evas_cb_resize); + evas_object_event_callback_del(client->ec->frame, EVAS_CALLBACK_MOVE, + _e_blur_video_capture_client_evas_cb_move); + + video_capture->clients = eina_list_remove(video_capture->clients, client); + + free(client); + + _e_blur_video_capture_stop(); +} + +static void +_e_blur_video_capture_cb_hwc_window_video_set(void *data, E_Hwc_Window *hwc_window) +{ + E_Blur_Video_Capture_Client *client; + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Object *object; + Eina_List *blurs, *blur_rectangles, *l, *l2; + E_Blur *blur; + E_Blur_Rectangle *blur_rectangle; + + EINA_SAFETY_ON_NULL_RETURN(video_capture->funcs); + + if (!hwc_window->ec) return; + if (e_object_is_del(E_OBJECT(hwc_window->ec))) return; + + client = E_NEW(E_Blur_Video_Capture_Client, 1); + EINA_SAFETY_ON_NULL_RETURN(client); + + client->ec = hwc_window->ec; + client->ec_delfn = e_object_delfn_add(E_OBJECT(hwc_window->ec), + _e_blur_video_capture_client_cb_ec_free, + client); + + client->visible = evas_object_visible_get(client->ec->frame); + client->presentation_visible = e_comp_is_on_overlay(client->ec); + + evas_object_event_callback_add(client->ec->frame, EVAS_CALLBACK_SHOW, + _e_blur_video_capture_client_evas_cb_show, client); + evas_object_event_callback_add(client->ec->frame, EVAS_CALLBACK_HIDE, + _e_blur_video_capture_client_evas_cb_hide, client); + evas_object_event_callback_add(client->ec->frame, EVAS_CALLBACK_RESIZE, + _e_blur_video_capture_client_evas_cb_resize, client); + evas_object_event_callback_add(client->ec->frame, EVAS_CALLBACK_MOVE, + _e_blur_video_capture_client_evas_cb_move, client); + evas_object_event_callback_add(client->ec->frame, EVAS_CALLBACK_RESTACK, + _e_blur_video_capture_client_evas_cb_restack, client); + + video_capture->clients = eina_list_append(video_capture->clients, client); + + ELOGF("E_BLUR", "Create Video_Capture_Client:%p", client->ec, client); + + blurs = e_blur_manager_blurs_get(); + if (!blurs) return; + + EINA_LIST_FOREACH(blurs, l, blur) + { + blur_rectangles = e_blur_rectangles_get(blur); + if (!blur_rectangles) return; + + EINA_LIST_FOREACH(blur_rectangles, l2, blur_rectangle) + { + object = _e_blur_video_capture_object_get(client, blur_rectangle); + + if (!client->presentation_visible) continue; + if (!evas_object_visible_get(object->blur_rectangle_obj)) continue; + if (_e_blur_video_capture_source_object_visible_get()) continue; + + _e_blur_video_capture_object_show(object); + } + } + + _e_blur_video_capture_start(); +} + +EINTERN Eina_Bool 
+e_blur_video_capture_init(void) +{ + E_Blur_Video_Capture *video_capture; + + if (_video_capture) return EINA_TRUE; + + video_capture = E_NEW(E_Blur_Video_Capture, 1); + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture, EINA_FALSE); + + E_HWC_WINDOW_HOOK_APPEND(_e_blur_video_capture_hwc_window_hooks, E_HWC_WINDOW_HOOK_VIDEO_SET, + _e_blur_video_capture_cb_hwc_window_video_set, NULL); + E_BLUR_HOOK_APPEND(_e_blur_video_capture_blur_hooks, E_BLUR_HOOK_RECTANGLE_CREATE, + _e_blur_video_capture_cb_rectangle_create, NULL); + + _video_capture = video_capture; + + return EINA_TRUE; +} + +EINTERN void +e_blur_video_capture_deinit(void) +{ + E_Blur_Video_Capture *video_capture; + + video_capture = _video_capture; + if (!video_capture) return; + + _e_blur_video_capture_stop(); + + if (video_capture->funcs) + e_blur_video_capture_module_func_unset(); + + E_FREE_LIST(_e_blur_video_capture_hwc_window_hooks, e_hwc_window_hook_del); + E_FREE_LIST(_e_blur_video_capture_blur_hooks, e_blur_hook_del); + E_FREE(video_capture); +} + +E_API Eina_Bool +e_blur_video_capture_module_func_set(E_Blur_Video_Capture_Funcs *fn) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + E_Blur_Video_Capture_Funcs *funcs; + + EINA_SAFETY_ON_NULL_RETURN_VAL(fn, EINA_FALSE); + EINA_SAFETY_ON_NULL_RETURN_VAL(fn->module_name, EINA_FALSE); + EINA_SAFETY_ON_NULL_RETURN_VAL(fn->video_capture_config_get, EINA_FALSE); + EINA_SAFETY_ON_NULL_RETURN_VAL(fn->video_capture, EINA_FALSE); + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture, EINA_FALSE); + EINA_SAFETY_ON_TRUE_RETURN_VAL(video_capture->funcs != NULL, EINA_FALSE); + + funcs = E_NEW(E_Blur_Video_Capture_Funcs, 1); + EINA_SAFETY_ON_NULL_RETURN_VAL(funcs, EINA_FALSE); + + funcs->module_name = strdup(fn->module_name); + funcs->video_capture_config_get = fn->video_capture_config_get; + funcs->video_capture = fn->video_capture; + + video_capture->funcs = funcs; + + video_capture->funcs->video_capture_config_get(&video_capture->width, + &video_capture->height, + &video_capture->format, + &video_capture->fps); + + ELOGF("E_BLUR", "Video Capture Config module_name:%s width:%d height:%d format:%x fps:%d", + NULL, video_capture->funcs->module_name, + video_capture->width, video_capture->height, + video_capture->format, video_capture->fps); + + return EINA_TRUE; +} + +E_API Eina_Bool +e_blur_video_capture_module_func_unset(void) +{ + E_Blur_Video_Capture *video_capture = _video_capture; + + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture, EINA_FALSE); + EINA_SAFETY_ON_NULL_RETURN_VAL(video_capture->funcs, EINA_FALSE); + + free(video_capture->funcs->module_name); + free(video_capture->funcs); + + video_capture->funcs = NULL; + + return EINA_TRUE; +} + +EINTERN void +e_blur_video_capture_trace_debug(Eina_Bool onoff) +{ + if (onoff == blur_video_capture_trace) return; + blur_video_capture_trace = onoff; + INF("Blur Video Capture Debug is %s", onoff?"ON":"OFF"); +} diff --git a/src/bin/server/e_blur_video_capture_intern.h b/src/bin/server/e_blur_video_capture_intern.h new file mode 100644 index 0000000..ce8d7f4 --- /dev/null +++ b/src/bin/server/e_blur_video_capture_intern.h @@ -0,0 +1,10 @@ +#ifndef E_BLUR_VIDEO_CAPTURE_INTERN_H +#define E_BLUR_VIDEO_CAPTURE_INTERN_H + +#include "e_intern.h" + +EINTERN Eina_Bool e_blur_video_capture_init(void); +EINTERN void e_blur_video_capture_deinit(void); +EINTERN void e_blur_video_capture_trace_debug(Eina_Bool onoff); + +#endif // E_BLUR_VIDEO_CAPTURE_INTERN_H diff --git a/src/include/e_blur_video_capture.h b/src/include/e_blur_video_capture.h new file mode 
100644 index 0000000..655729c --- /dev/null +++ b/src/include/e_blur_video_capture.h @@ -0,0 +1,31 @@ +#ifndef E_BLUR_VIDEO_CAPTURE_H +#define E_BLUR_VIDEO_CAPTURE_H + +#include +#include + +typedef struct _E_Blur_Video_Capture_Funcs E_Blur_Video_Capture_Funcs; + +typedef enum +{ + E_BLUR_VIDEO_CAPTURE_ERROR_NONE = 0, + E_BLUR_VIDEO_CAPTURE_ERROR_UNKNOWN = -1, + E_BLUR_VIDEO_CAPTURE_ERROR_AGAIN = -2, + E_BLUR_VIDEO_CAPTURE_ERROR_SOURCE_NOT_READY = -3, + E_BLUR_VIDEO_CAPTURE_ERROR_COPY_PROTECTION = -4, + E_BLUR_VIDEO_CAPTURE_ERROR_MEMORY = -5, +} E_Blur_Video_Capture_Error; + +struct _E_Blur_Video_Capture_Funcs +{ + char *module_name; + + Eina_Bool (*video_capture_config_get)(int *width, int *height, tbm_format *format, int *fps); + /* capture_video func will be called in thread */ + E_Blur_Video_Capture_Error (*video_capture)(tbm_surface_h tsurface); +}; + +E_API Eina_Bool e_blur_video_capture_module_func_set(E_Blur_Video_Capture_Funcs *fn); +E_API Eina_Bool e_blur_video_capture_module_func_unset(void); + +#endif diff --git a/src/include/e_comp_cfdata.h b/src/include/e_comp_cfdata.h index 94fe875..37a84fe 100644 --- a/src/include/e_comp_cfdata.h +++ b/src/include/e_comp_cfdata.h @@ -85,6 +85,7 @@ struct _E_Comp_Config int e_wheel_click_angle; int input_output_assign_policy; int touch_block_on_palm; + unsigned char blur_video_capture; }; struct _E_Comp_Match diff --git a/src/include/e_includes.h b/src/include/e_includes.h index d691ab0..8205a95 100644 --- a/src/include/e_includes.h +++ b/src/include/e_includes.h @@ -64,4 +64,5 @@ #include "e_desk_area.h" #include "e_comp_wl_capture.h" #include "e_map.h" -#include "e_input_thread_client.h" \ No newline at end of file +#include "e_input_thread_client.h" +#include "e_blur_video_capture.h" \ No newline at end of file
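
For reference, the capture path added above is only initialized when the new blur_video_capture flag in E_Comp_Config is set, and the actual frame copy is delegated to a backend module registered through e_blur_video_capture_module_func_set(). The code below is a minimal, hypothetical module-side registration sketch, not part of this patch: the module name, the fixed 1920x1080/ARGB8888/30fps configuration, and the _capture() body are illustrative assumptions; only E_Blur_Video_Capture_Funcs, the E_Blur_Video_Capture_Error values, and the two E_API entry points come from e_blur_video_capture.h. Note that video_capture() is invoked from the capture thread (see _e_blur_video_capture_thread_run_cb), so it must be safe to run off the main loop.

/* Hypothetical backend-module registration sketch; everything here except the
 * e_blur_video_capture.h API is an assumed example. */
#include <Eina.h>
#include <tbm_surface.h>
#include "e_blur_video_capture.h"

static Eina_Bool
_capture_config_get(int *width, int *height, tbm_format *format, int *fps)
{
   /* Illustrative fixed configuration; a real backend would query its device. */
   *width  = 1920;
   *height = 1080;
   *format = TBM_FORMAT_ARGB8888;
   *fps    = 30;

   return EINA_TRUE;
}

static E_Blur_Video_Capture_Error
_capture(tbm_surface_h tsurface)
{
   /* Called from the capture thread; copy the current video frame into tsurface. */
   if (!tsurface) return E_BLUR_VIDEO_CAPTURE_ERROR_UNKNOWN;

   /* ... device-specific capture into tsurface ... */

   return E_BLUR_VIDEO_CAPTURE_ERROR_NONE;
}

static E_Blur_Video_Capture_Funcs _capture_funcs =
{
   .module_name              = "example-video-capture",
   .video_capture_config_get = _capture_config_get,
   .video_capture            = _capture,
};

/* e.g. called from the backend module's init/shutdown hooks */
Eina_Bool
example_blur_video_capture_register(void)
{
   return e_blur_video_capture_module_func_set(&_capture_funcs);
}

void
example_blur_video_capture_unregister(void)
{
   e_blur_video_capture_module_func_unset();
}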