[Tizen][WebRTC] Appsink implementation with fimcconvert
author sonal.g1@samsung.com <sonal.g1@samsung.com>
Wed, 25 Feb 2015 15:37:58 +0000 (21:07 +0530)
committer Youngsoo Choi <kenshin.choi@samsung.com>
Tue, 10 Jul 2018 06:57:09 +0000 (06:57 +0000)
Appsink has been implemented together with fimcconvert; this path is used
when pixmap (zero-copy) rendering is not available. It works by registering
a callback with appsink: once a decoded buffer is received, it is sent to
the renderer process via shared memory.
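Roughly, the registration and callback follow the standard GStreamer appsink
pattern. The following is a simplified GStreamer 1.x sketch with illustrative
names such as OnNewSample (not the exact code in this patch); the
Chromium-specific shared-memory handoff lives in DeliverVideoFrame below:

  // Requires <gst/gst.h> and <gst/app/gstappsink.h>.
  static GstFlowReturn OnNewSample(GstAppSink* sink, gpointer user_data) {
    // Pull the decoded sample and map its buffer read-only.
    GstSample* sample = gst_app_sink_pull_sample(sink);
    if (!sample)
      return GST_FLOW_ERROR;
    GstBuffer* buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if (buffer && gst_buffer_map(buffer, &map, GST_MAP_READ)) {
      // Copy map.data / map.size into shared memory and notify the
      // renderer process (DeliverVideoFrame below performs this copy).
      gst_buffer_unmap(buffer, &map);
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
  }

  // Registration against the appsink element:
  //   GstAppSinkCallbacks callbacks = {NULL, NULL, &OnNewSample, NULL};
  //   gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, self, NULL);
  //   gst_app_sink_set_max_buffers(GST_APP_SINK(sink), 1);
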
Reviewed by: Min-Soo Koo, Sharath Kamath

Change-Id: I270ad24f5ebd6ccfd8d9609a6a3259db68b25a25
Signed-off-by: sonal.g1@samsung.com <sonal.g1@samsung.com>
tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.cc

index d83aa96..d79e7e6 100644
@@ -17,6 +17,8 @@
 #include "base/time/time.h"
 #if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
 #include "ui/gl/efl_pixmap.h"
+#else
+#include "base/process/process.h"
 #endif
 
 #if GST_VERSION_MAJOR == 1
@@ -55,9 +57,7 @@ namespace content {
 enum {
   MAX_BITRATE = 2000000,                 // bps.
   INPUT_BUFFER_SIZE = MAX_BITRATE / 8,   // bytes. 1 sec for H.264 HD video.
-#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
   ID_LAST = 0x3FFFFFFF,                  // wrap round ID after this
-#endif
 };
 
 media::VideoDecodeAccelerator* CreateTizenVideoDecodeAccelerator() {
@@ -113,15 +113,18 @@ struct TizenVideoDecodeAccelerator::Impl {
         sink_(NULL),
         appsrc_(NULL),
         io_message_loop_proxy_(base::MessageLoopProxy::current()),
+        bitstream_buffer_id_(0),
         gst_thread_("TizenDecoderThreadGst")
 #if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
-        ,bitstream_buffer_id_(0),
-        pixmap_id_(0),
+        ,pixmap_id_(0),
         gst_width_(0),
         gst_height_(0),
         damage_(0),
         damage_handler_(NULL),
         is_x_window_handle_set_(false)
+#else
+        ,caps_width_(0),
+        caps_height_(0)
 #endif
 {
 #if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
@@ -129,6 +132,16 @@ struct TizenVideoDecodeAccelerator::Impl {
 #endif
 }
 
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  static GstFlowReturn OnDecoded(GstAppSink* sink, gpointer app_data);
+  void DeliverVideoFrame(GstBuffer* buffer,
+                         int32 bitstream_buffer_id,
+                         gfx::Rect frame_size);
+  void CreateAppSinkElement();
+  static void OnSinkCapChanged(
+      GstPad* sink_pad, GParamSpec* gparamspec, void* user_data);
+#endif
+
   static GstBusSyncReply OnBusMessage(
       GstBus* bus, GstMessage* msg, gpointer data) {
     switch (GST_MESSAGE_TYPE(msg)) {
@@ -208,8 +221,8 @@ struct TizenVideoDecodeAccelerator::Impl {
   scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy_;
   scoped_ptr<base::WeakPtrFactory<Client> > io_client_weak_factory_;
   base::Thread gst_thread_;
-#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
   int bitstream_buffer_id_;
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
   scoped_refptr<gfx::EflPixmap> pixmap_surface_;
   int pixmap_id_;
   gint gst_width_;
@@ -219,6 +232,9 @@ struct TizenVideoDecodeAccelerator::Impl {
   bool is_x_window_handle_set_;
   typedef std::map<int16, scoped_refptr<gfx::EflPixmap> > PixmapSurfaceTizenMap;
   PixmapSurfaceTizenMap xpixmap_buffer_map_;
+#else
+  int caps_width_;
+  int caps_height_;
 #endif
 };
 
@@ -238,6 +254,10 @@ bool TizenVideoDecodeAccelerator::Initialize(
   GstElement* gst_parser = NULL;
   GstBus* gst_bus = NULL;
   GstPad* video_sink_pad = NULL;
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  GstElement* video_filter_ = NULL;
+  GstElement* gst_converter = NULL;
+#endif
   scoped_ptr<GstElement, GstElementDeleter> gst_pipeline;
   static GstAppSrcCallbacks appsrc_callbacks =
       {&Impl::StartFeed, &Impl::StopFeed, NULL};
@@ -331,7 +351,7 @@ bool TizenVideoDecodeAccelerator::Initialize(
     gst_object_unref(gst_decoder);
     return false;
   }
-
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
   // sink initialization.
   if (!(impl_->sink_ = gst_element_factory_make("xvimagesink", "xvimagesink"))) {
     LOG(ERROR) << __FUNCTION__ << " cannot create xvimagesink.";
@@ -343,15 +363,15 @@ bool TizenVideoDecodeAccelerator::Initialize(
     return false;
   }
   g_object_set(impl_->sink_, "rotate", 0, NULL);
-#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+
   if (!(video_sink_pad = gst_element_get_static_pad(impl_->sink_, "sink"))) {
     return false;
   }
-  g_signal_connect(
-      video_sink_pad, "notify::caps", G_CALLBACK(impl_->OnSinkCapChanged), impl_);
+  g_signal_connect(video_sink_pad, "notify::caps",
+                   G_CALLBACK(impl_->OnSinkCapChanged),
+                   impl_);
   impl_->SetXWindowHandle(false);
   gst_object_unref(video_sink_pad);
-#endif
 
   // linking the elements.
   if (!gst_element_link(impl_->appsrc_, gst_parser)) {
@@ -367,7 +387,62 @@ bool TizenVideoDecodeAccelerator::Initialize(
     LOG(ERROR) << __FUNCTION__ << " Decoder and Sink could not be linked";
     return false;
   }
+#else
+  impl_->CreateAppSinkElement();
+  if (!impl_->sink_) {
+    LOG(ERROR) << "Could not create and add appsink element";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->sink_)) {
+    gst_object_unref(impl_->sink_);
+    impl_->sink_ = NULL;
+    return false;
+  }
+  if (!(gst_converter = gst_element_factory_make("fimcconvert", "cvt"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create fimcconvert.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_converter)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add fimcconvert into pipeline.";
+    gst_object_unref(gst_converter);
+    return false;
+  }
+  if (!(video_filter_ = gst_element_factory_make("capsfilter", "VideoFilter"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create capsfilter.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), video_filter_)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add capsfilter into pipeline.";
+    gst_object_unref(video_filter_);
+    return false;
+  }
 
+  // FIXME: SONAL
+  // The OnSinkCapChanged callback is not being invoked for the appsink
+  // implementation.
+  if (!(video_sink_pad =
+          gst_element_get_static_pad(impl_->sink_, "sink"))) {
+    LOG(ERROR) << "Could not create video sink pad";
+    return false;
+  }
+  g_signal_connect(
+      video_sink_pad, "notify::caps",
+      G_CALLBACK(&Impl::OnSinkCapChanged), impl_);
+  gst_object_unref(video_sink_pad);
+  if (!gst_element_link_many(impl_->appsrc_,
+                             gst_parser,
+                             gst_decoder,
+                             gst_converter,
+                             video_filter_,
+                             impl_->sink_,
+                             NULL)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot link some elements in decode pipeline";
+    return false;
+  }
+  g_object_set(G_OBJECT(video_filter_),
+               "caps",
+               gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL),
+               NULL);
+#endif
 #else
   DVLOG(1) << "######################################";
   DVLOG(1) << "      USING ffdec_h264 DECODER";
@@ -511,6 +586,173 @@ void TizenVideoDecodeAccelerator::StartDecoder() {
   gst_element_set_state(impl_->pipeline_, GST_STATE_PLAYING);
 };
 
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+void TizenVideoDecodeAccelerator::Impl::OnSinkCapChanged(
+      GstPad* sink_pad, GParamSpec* gparamspec, void* user_data) {
+  content::TizenVideoDecodeAccelerator::Impl* impl =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(user_data);
+  int width = 0, height = 0;
+#if GST_VERSION_MAJOR == 1
+  GstCaps* caps = gst_pad_get_current_caps(GST_PAD(sink_pad));
+  if (caps) {
+    GstVideoInfo info;
+    gst_video_info_init(&info);
+    if (gst_video_info_from_caps(&info, caps)) {
+      if ((impl->caps_width_ != info.width) ||
+          (impl->caps_height_ != info.height)) {
+        impl->caps_width_ = info.width;
+        impl->caps_height_ = info.height;
+      }
+    }
+    gst_caps_unref(caps);
+  }
+#else
+  if (gst_video_get_size(sink_pad, &width, &height)) {
+    if ((impl->caps_width_ != width) || (impl->caps_height_ != height)) {
+      impl->caps_width_ = width;
+      impl->caps_height_ = height;
+    }
+  }
+#endif
+}
+
+GstFlowReturn TizenVideoDecodeAccelerator::Impl::OnDecoded(
+    GstAppSink* sink, gpointer app_data) {
+  GstBuffer* gst_output_buf = NULL;
+  content::TizenVideoDecodeAccelerator::Impl* self =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(app_data);
+  // FIXME: SONAL
+  // Once the OnSinkCapChanged callback starts arriving, stop querying the
+  // width and height for every buffer; move this code under the if block.
+#if GST_VERSION_MAJOR == 1
+  GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
+  if (!sample)
+    return GST_FLOW_ERROR;
+  gst_output_buf = gst_sample_get_buffer(sample);
+  GstMapInfo map;
+  if (!gst_buffer_map(gst_output_buf, &map, GST_MAP_READ)) {
+    LOG(ERROR) << "Decoded buffer contains invalid or no info!";
+    gst_sample_unref(sample);
+    return GST_FLOW_ERROR;
+  }
+  GstCaps* caps = gst_sample_get_caps(sample);
+#else
+  gst_output_buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
+  GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(gst_output_buf));
+#endif
+  if (!self->caps_width_ || !self->caps_height_) {
+    // No need to unref |GstStructure|; it is owned by |caps|.
+    const GstStructure* str = caps ? gst_caps_get_structure(caps, 0) : NULL;
+    if (!str ||
+        !gst_structure_get_int(str, "width", &self->caps_width_) ||
+        !gst_structure_get_int(str, "height", &self->caps_height_)) {
+      LOG(ERROR) << __FUNCTION__ << " Buffer information could not be obtained";
+#if GST_VERSION_MAJOR == 1
+      // |caps| and |gst_output_buf| are owned by |sample|; only unmap the
+      // buffer and drop the sample here.
+      gst_buffer_unmap(gst_output_buf, &map);
+      gst_sample_unref(sample);
+#else
+      if (caps)
+        gst_caps_unref(caps);
+      gst_buffer_unref(gst_output_buf);
+#endif
+      return GST_FLOW_ERROR;
+    }
+#if GST_VERSION_MAJOR != 1
+    gst_caps_unref(caps);
+#endif
+  }
+
+  if (gst_output_buf) {
+#if GST_VERSION_MAJOR == 1
+    if (map.data) {
+#else
+    if (gst_output_buf->data) {
+#endif
+      gfx::Rect frame_size =
+          gfx::Rect(self->caps_width_, self->caps_height_);
+#if GST_VERSION_MAJOR == 1
+      // DeliverVideoFrame unrefs the buffer; take an extra reference because
+      // |sample| (unreffed below) owns the original one.
+      gst_buffer_ref(gst_output_buf);
+#endif
+      self->gst_thread_.message_loop()->PostTask(
+          FROM_HERE,
+          base::Bind(&TizenVideoDecodeAccelerator::Impl::DeliverVideoFrame,
+                     base::Unretained(self),
+                     gst_output_buf,
+                     self->bitstream_buffer_id_,
+                     frame_size));
+      self->bitstream_buffer_id_ = (self->bitstream_buffer_id_ + 1) & ID_LAST;
+    }
+  } else {
+    LOG(ERROR) << __FUNCTION__
+               << " DECODING FRAME FAILED : frame_id "
+               << self->bitstream_buffer_id_;
+  }
+#if GST_VERSION_MAJOR == 1
+  gst_buffer_unmap(gst_output_buf, &map);
+  gst_sample_unref(sample);
+#endif
+  return GST_FLOW_OK;
+}
+
+
+void TizenVideoDecodeAccelerator::Impl::CreateAppSinkElement() {
+  // Callbacks: {eos, new_preroll, new_sample (1.0) / new_buffer (0.10), ...}.
+  GstAppSinkCallbacks appsink_callbacks =
+      {NULL, NULL, &OnDecoded, NULL};
+
+  if (!(sink_ = gst_element_factory_make("appsink", "sink"))) {
+    LOG(ERROR) << __FUNCTION__ << " Appsink could not be created";
+    return;
+  }
+  gst_app_sink_set_callbacks(GST_APP_SINK(sink_),
+                             &appsink_callbacks,
+                             static_cast<gpointer>(this),
+                             NULL);
+  gst_app_sink_set_max_buffers(GST_APP_SINK(sink_), 1);
+}
+
+void TizenVideoDecodeAccelerator::Impl::DeliverVideoFrame(
+    GstBuffer* buffer,
+    int32 bitstream_buffer_id,
+    gfx::Rect frame_size) {
+  base::SharedMemory shared_memory;
+  base::SharedMemoryHandle shared_memory_handle;
+
+#if GST_VERSION_MAJOR == 1
+  GstMapInfo map;
+  if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
+    LOG(ERROR) << "Decoded buffer contains invalid or no info!";
+    gst_buffer_unref(buffer);
+    return;
+  }
+  uint32 buffer_size = map.size;
+#else
+  uint32 buffer_size = buffer->size;
+#endif
+  if (!shared_memory.CreateAndMapAnonymous(buffer_size)) {
+    LOG(ERROR) << "Shared memory creation failed.";
+  } else {
+    if (!shared_memory.ShareToProcess(base::GetCurrentProcessHandle(),
+                                      &shared_memory_handle)) {
+      LOG(ERROR) << __FUNCTION__ << " Could not get handle of shared memory";
+    } else {
+      memcpy(shared_memory.memory(),
+#if GST_VERSION_MAJOR == 1
+             map.data,
+#else
+             GST_BUFFER_DATA(buffer),
+#endif
+             buffer_size);
+      io_message_loop_proxy_->PostTask(
+          FROM_HERE,
+          base::Bind(&media::VideoDecodeAccelerator::Client::NotifyDecodeDone,
+                     io_client_weak_factory_->GetWeakPtr(),
+                     shared_memory_handle,
+                     bitstream_buffer_id,
+                     buffer_size,
+                     frame_size));
+    }
+  }
+#if GST_VERSION_MAJOR == 1
+  gst_buffer_unmap(buffer, &map);
+#endif
+  gst_buffer_unref(buffer);
+}
+#endif
+
 #if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
 void TizenVideoDecodeAccelerator::Impl::OnXWindowIdPrepared(
     GstMessage* message) {