[Tizen][WebRTC] Appsink implementation with fimcconvert
[platform/framework/web/chromium-efl.git] / tizen_src / impl / content / common / gpu / media / tizen / tizen_video_decode_accelerator.cc
index 72b521d..d79e7e6 100644 (file)
@@ -1,12 +1,12 @@
 // Copyright 2014 Samsung Electronics Inc. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
+
 #include "content/common/gpu/media/tizen/tizen_video_decode_accelerator.h"
 
-#include <gst/gst.h>
 #include <gst/app/gstappsink.h>
 #include <gst/app/gstappsrc.h>
-#include <gst/interfaces/xoverlay.h>
+#include <gst/gst.h>
 #include <gst/video/gstvideosink.h>
 #include <gst/video/video.h>
 
 #include "base/message_loop/message_loop_proxy.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/time/time.h"
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+#include "ui/gl/efl_pixmap.h"
+#else
+#include "base/process/process.h"
+#endif
+
+#if GST_VERSION_MAJOR == 1
+#include <gst/video/videooverlay.h>
+#else
+#include <gst/interfaces/xoverlay.h>
+#endif
 
 using media::VideoFrame;
 
@@ -27,19 +38,28 @@ struct GstElementDeleter {
   }
 };
 
+// GStreamer element names.
+const char* kDecoderName = "decoder";
+#if GST_VERSION_MAJOR == 1
+const char* kDecoderGstElement = "omxh264dec";
+#else
+const char* kDecoderGstElement = "omx_h264dec";
+#endif
+
+// Generating Unique Key from given width and height.
+int32 ConvertWidthAndHeightToKey(int width, int height) {
+  return ((width << 16) | height);
+}
 } // namespace
 
 namespace content {
 
 enum {
-  INPUT_BUFFER_SIZE = 319488,   // bytes. 1 sec for H.264 HD video.
+  MAX_BITRATE = 2000000,                 // bps.
+  INPUT_BUFFER_SIZE = MAX_BITRATE / 8,   // bytes. 1 sec for H.264 HD video.
+  ID_LAST = 0x3FFFFFFF,                  // wrap round ID after this
 };
 
-// Generating Unique Key from given width and height.
-int32 ConvertWidthAndHeightToKey(int width, int height) {
-  return ((width << 16) | height);
-}
-
 media::VideoDecodeAccelerator* CreateTizenVideoDecodeAccelerator() {
   return new TizenVideoDecodeAccelerator();
 }
@@ -87,30 +107,81 @@ struct TizenVideoDecodeAccelerator::BitstreamBufferRef {
 
 struct TizenVideoDecodeAccelerator::Impl {
   Impl()
-  : can_feed_(true),
-    is_destroying_(false),
-    pipeline_(NULL),
-    sink_(NULL),
-    appsrc_(NULL),
-    io_message_loop_proxy_(base::MessageLoopProxy::current()),
-    gst_thread_("TizenDecoderThreadGst") {}
+      : can_feed_(true),
+        is_destroying_(false),
+        pipeline_(NULL),
+        sink_(NULL),
+        appsrc_(NULL),
+        io_message_loop_proxy_(base::MessageLoopProxy::current()),
+        bitstream_buffer_id_(0),
+        gst_thread_("TizenDecoderThreadGst")
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+        , pixmap_id_(0),
+        gst_width_(0),
+        gst_height_(0),
+        damage_(0),
+        damage_handler_(NULL),
+        is_x_window_handle_set_(false)
+#else
+        , caps_width_(0),
+        caps_height_(0)
+#endif
+  {
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+    xpixmap_buffer_map_.clear();
+#endif
+  }
+
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  static GstFlowReturn OnDecoded(GstAppSink* sink, gpointer app_data);
+  void DeliverVideoFrame(GstBuffer* buffer,
+                         int32 bitstream_buffer_id,
+                         gfx::Rect frame_size);
+  void CreateAppSinkElement();
+  static void OnSinkCapChanged(
+      GstPad* sink_pad, GParamSpec* gparamspec, void* user_data);
+#endif
 
   static GstBusSyncReply OnBusMessage(
-      GstBus *bus, GstMessage *msg, gpointer data) {
+      GstBus* bus, GstMessage* msg, gpointer data) {
     switch (GST_MESSAGE_TYPE(msg)) {
-      case GST_MESSAGE_EOS:
-        break;
       case GST_MESSAGE_ERROR: {
-        gchar *debug = NULL;
-        GError *error = NULL;
+        gchar* debug = NULL;
+        GError* error = NULL;
         gst_message_parse_error(msg, &error, &debug);
-        g_free(debug);
         LOG(ERROR) << __FUNCTION__
-                   << "Error Message Received in Gst Decoder Bus: "
-                   << error->message;
+                   << " GSTError happens from bus at "
+                   << GST_OBJECT_NAME(msg->src)
+                   << ":" << error->message;
+        LOG(ERROR) << __FUNCTION__
+                   << " Debugging Info: "
+                   << (debug != NULL ? debug : "none");
         g_error_free(error);
+        g_free(debug);
         break;
       }
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
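+      // The sink asks for an output window via an element message
+      // (prepare-window-handle / prepare-xid); answer with an X pixmap.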
+      case GST_MESSAGE_ELEMENT: {
+        TizenVideoDecodeAccelerator::Impl* obj_impl =
+            static_cast<TizenVideoDecodeAccelerator::Impl*>(data);
+        if (obj_impl) {
+          if (!obj_impl->IsXWindowHandleSet()) {
+#if GST_VERSION_MAJOR == 1
+            if (gst_is_video_overlay_prepare_window_handle_message(msg)) {
+#else
+            if (gst_structure_has_name(msg->structure, "prepare-xid")) {
+#endif
+              obj_impl->OnXWindowIdPrepared(msg);
+              gst_message_unref(msg);
+              return GST_BUS_DROP;
+            }
+          }
+        } else {
+          LOG(ERROR) << __FUNCTION__ << " Accelerator is NULL";
+        }
+        break;
+      }
+#endif
       default:
         break;
     }
@@ -118,17 +189,30 @@ struct TizenVideoDecodeAccelerator::Impl {
   }
 
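+  // appsrc feed-control callbacks (need-data / enough-data); they toggle
+  // |can_feed_|, which gates whether new input buffers are accepted.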
   static void StartFeed(GstAppSrc *source, guint size, gpointer app) {
+    DCHECK(source);
     content::TizenVideoDecodeAccelerator::Impl* impl =
         static_cast<content::TizenVideoDecodeAccelerator::Impl*>(app);
     impl->can_feed_ = true;
   }
 
   static void StopFeed(GstAppSrc *source, gpointer app) {
+    DCHECK(source);
     content::TizenVideoDecodeAccelerator::Impl* impl =
         static_cast<content::TizenVideoDecodeAccelerator::Impl*>(app);
     impl->can_feed_ = false;
   }
 
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
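+  // X pixmap rendering path: the sink renders into an X pixmap and ecore
+  // damage events drive PictureReady notifications to the client.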
+  bool IsXWindowHandleSet() const { return is_x_window_handle_set_; }
+  void OnXWindowIdPrepared(GstMessage* message);
+  void SetXWindowHandle(bool handle_set);
+  void SetPixmap(const int32& gst_width, const int32& gst_height);
+  void DeregisterDamageHandler();
+  static Eina_Bool OnSurfaceChanged(void* ptr_acc, int type, void* event);
+  static void OnSinkCapChanged(
+      GstPad* sink_pad, GParamSpec* gparamspec, void* user_data);
+#endif
+
   volatile bool can_feed_;
   volatile bool is_destroying_;
   GstElement* pipeline_;
@@ -137,6 +221,21 @@ struct TizenVideoDecodeAccelerator::Impl {
   scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy_;
   scoped_ptr<base::WeakPtrFactory<Client> > io_client_weak_factory_;
   base::Thread gst_thread_;
+  int bitstream_buffer_id_;
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  scoped_refptr<gfx::EflPixmap> pixmap_surface_;
+  int pixmap_id_;
+  gint gst_width_;
+  gint gst_height_;
+  Ecore_X_Damage damage_;
+  Ecore_Event_Handler* damage_handler_;
+  bool is_x_window_handle_set_;
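+  // Pixmap surfaces cached by ConvertWidthAndHeightToKey(width, height) so
+  // a resolution change can reuse an already created surface.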
+  typedef std::map<int32, scoped_refptr<gfx::EflPixmap> > PixmapSurfaceTizenMap;
+  PixmapSurfaceTizenMap xpixmap_buffer_map_;
+#else
+  int caps_width_;
+  int caps_height_;
+#endif
 };
 
 TizenVideoDecodeAccelerator::TizenVideoDecodeAccelerator()
@@ -152,11 +251,17 @@ bool TizenVideoDecodeAccelerator::Initialize(
   GError* error = NULL;
   GstCaps* video_caps = NULL;
   GstElement* gst_decoder = NULL;
+  GstElement* gst_parser = NULL;
   GstBus* gst_bus = NULL;
+  GstPad* video_sink_pad = NULL;
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  GstElement* video_filter_ = NULL;
+  GstElement* gst_converter = NULL;
+#endif
   scoped_ptr<GstElement, GstElementDeleter> gst_pipeline;
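+  // {need-data, enough-data, seek-data}; seeking is not used for this
+  // push-based source.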
   static GstAppSrcCallbacks appsrc_callbacks =
       {&Impl::StartFeed, &Impl::StopFeed, NULL};
-  CHECK(impl_ == NULL );
+  CHECK(impl_ == NULL);
   impl_ = new Impl();
   impl_->io_client_weak_factory_.reset(
       new base::WeakPtrFactory<Client>(client));
@@ -179,20 +284,23 @@ bool TizenVideoDecodeAccelerator::Initialize(
     return false;
   }
 
-  // pipeline
+  // pipeline initialization.
   gst_pipeline.reset(gst_pipeline_new("h264_decode"));
   if (!gst_pipeline) {
     LOG(ERROR) << __FUNCTION__ << "cannot initialize gst pipeline.";
     return false;
   }
-  // Add a message handler
   if (!(gst_bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline.get())))) {
     return false;
   }
+#if GST_VERSION_MAJOR == 1
+  gst_bus_set_sync_handler(gst_bus, Impl::OnBusMessage, impl_, NULL);
+#else
   gst_bus_set_sync_handler(gst_bus, Impl::OnBusMessage, impl_);
+#endif
   gst_object_unref(gst_bus);
 
-  // appsrc
+  // appsrc initialization.
   if (!(impl_->appsrc_ = gst_element_factory_make("appsrc", "src"))) {
     LOG(ERROR) << __FUNCTION__ << "cannot initialize gst appsrc.";
     return false;
@@ -214,24 +322,37 @@ bool TizenVideoDecodeAccelerator::Initialize(
   if (!(video_caps = gst_caps_from_string("video/x-h264,framerate=30/1"))) {
     return false;
   }
-  g_object_set(G_OBJECT(impl_->appsrc_), "caps", video_caps, NULL);
+  gst_app_src_set_caps(GST_APP_SRC(impl_->appsrc_), video_caps);
   gst_caps_unref(video_caps);
 
 #if defined(OS_TIZEN)
   DVLOG(1) << "######################################";
   DVLOG(1) << "      USING omx_h264dec DECODER " << (unsigned int)this;
   DVLOG(1) << "######################################";
-  // decoder
-  if (!(gst_decoder = gst_element_factory_make("omx_h264dec", "decoder"))) {
-    LOG(ERROR) << __FUNCTION__ << " cannot create omx_h264dec.";
+
+  // parser initialization
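+  // h264parse converts the incoming byte-stream into the stream-format and
+  // alignment expected by the OMX decoder.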
+  if (!(gst_parser = gst_element_factory_make("h264parse", "h264parse"))) {
+    LOG(ERROR) << " cannot create h264parse.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_parser)) {
+    LOG(ERROR) << " cannot add h264parse into decoder pipeline.";
+    gst_object_unref(gst_parser);
+    return false;
+  }
+
+  // decoder initialization.
+  if (!(gst_decoder = gst_element_factory_make(kDecoderGstElement, kDecoderName))) {
+    LOG(ERROR) << " cannot create " << kDecoderGstElement << ".";
     return false;
   }
   if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_decoder)) {
+    LOG(ERROR) << " cannot add " << kDecoderGstElement << " to pipeline.";
     gst_object_unref(gst_decoder);
     return false;
   }
-
-  // sink
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+  // sink initialization.
   if (!(impl_->sink_ = gst_element_factory_make("xvimagesink", "xvimagesink"))) {
     LOG(ERROR) << __FUNCTION__ << " cannot create xvimagesink.";
     return false;
@@ -243,23 +364,92 @@ bool TizenVideoDecodeAccelerator::Initialize(
   }
   g_object_set(impl_->sink_, "rotate", 0, NULL);
 
-  // linking
-  if (!gst_element_link(impl_->appsrc_, gst_decoder)) {
-    LOG(ERROR) << __FUNCTION__ << " Source and Decoder could not be linked";
+  if (!(video_sink_pad = gst_element_get_static_pad(impl_->sink_, "sink"))) {
+    return false;
+  }
+  g_signal_connect(video_sink_pad, "notify::caps",
+                   G_CALLBACK(impl_->OnSinkCapChanged),
+                   impl_);
+  impl_->SetXWindowHandle(false);
+  gst_object_unref(video_sink_pad);
+
+  // linking the elements.
+  if (!gst_element_link(impl_->appsrc_, gst_parser)) {
+    LOG(ERROR) << " Source and gst_parser could not be linked";
+    return false;
+  }
+
+  if (!gst_element_link(gst_parser, gst_decoder)) {
+    LOG(ERROR) << " gst_parser and Decoder could not be linked";
     return false;
   }
   if (!gst_element_link(gst_decoder, impl_->sink_)) {
     LOG(ERROR) << __FUNCTION__ << " Decoder and Sink could not be linked";
     return false;
   }
+#else
+  impl_->CreateAppSinkElement();
+  if (!impl_->sink_) {
+    LOG(ERROR) << "Could not create and add appsink element";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->sink_)) {
+    gst_object_unref(impl_->sink_);
+    impl_->sink_ = NULL;
+    return false;
+  }
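+  // fimcconvert (Exynos FIMC) converts the decoder's native output into the
+  // I420 format enforced by the capsfilter below.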
+  if (!(gst_converter = gst_element_factory_make("fimcconvert", "cvt"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create fimcconvert.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_converter)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add fimcconvert into pipeline.";
+    gst_object_unref(gst_converter);
+    return false;
+  }
+  if (!(video_filter_ = gst_element_factory_make("capsfilter", "VideoFilter"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create capsfilter.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), video_filter_)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add videoFilter into pipeline.";
+    gst_object_unref(video_filter_);
+    return false;
+  }
 
+  // FIXME: SONAL
+  // The OnSinkCapChanged callback is not invoked for the appsink
+  // implementation.
+  if (!(video_sink_pad =
+          gst_element_get_static_pad(impl_->sink_, "sink"))) {
+    LOG(ERROR) << "Could not create video sink pad";
+    return false;
+  }
+  g_signal_connect(
+      video_sink_pad, "notify::caps",
+      G_CALLBACK(&Impl::OnSinkCapChanged), impl_);
+  gst_object_unref(video_sink_pad);
+  if (!gst_element_link_many(impl_->appsrc_,
+                             gst_parser,
+                             gst_decoder,
+                             gst_converter,
+                             video_filter_,
+                             impl_->sink_,
+                             NULL)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot link some elements in decode pipeline";
+    return false;
+  }
+  GstCaps* filter_caps = gst_caps_new_simple(
+      "video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
+  g_object_set(G_OBJECT(video_filter_), "caps", filter_caps, NULL);
+  gst_caps_unref(filter_caps);
+#endif
 #else
   DVLOG(1) << "######################################";
   DVLOG(1) << "      USING ffdec_h264 DECODER";
   DVLOG(1) << "######################################";
   GstElement* gst_colorspace = NULL;
 
-  // decoder
+  // decoder initialization
   if (!(gst_decoder = gst_element_factory_make("ffdec_h264", "H264-decoder"))) {
     LOG(ERROR) << __FUNCTION__ << " cannot create ffdec_h264.";
     return false;
@@ -269,7 +459,7 @@ bool TizenVideoDecodeAccelerator::Initialize(
     return false;
   }
 
-  // colorspace
+  // colorspace initialization
   if (!(gst_colorspace = gst_element_factory_make("ffmpegcolorspace", "cs"))) {
     LOG(ERROR) << __FUNCTION__ << " cannot create ffmpegcolorspace.";
     return false;
@@ -305,8 +495,6 @@ bool TizenVideoDecodeAccelerator::Initialize(
       base::Bind(&TizenVideoDecodeAccelerator::StartDecoder,
       base::Unretained(this)));
 
-  // To enable the gst pipeline graph, set environment variable.
-  // $ export GST_DEBUG_DUMP_DOT_DIR=/opt/usr
   GST_DEBUG_BIN_TO_DOT_FILE(
       GST_BIN(gst_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "decoder_graph.dot");
 
@@ -343,8 +531,7 @@ void TizenVideoDecodeAccelerator::Decode(
         base::Bind(&TizenVideoDecodeAccelerator::OnDecode,
                    base::Unretained(this),
                    base::Passed(&buffer_ref)));
-  }
-  else {
+  } else {
     DVLOG(2) << __FUNCTION__
              << " Frame drop on decoder:"
              << " INPUT Q is FULL";
@@ -370,18 +557,23 @@ void TizenVideoDecodeAccelerator::Reset() {
 }
 
 void TizenVideoDecodeAccelerator::Destroy() {
-  if (impl_) {
+  if (impl_ != NULL) {
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+    impl_->SetXWindowHandle(false);
+    impl_->DeregisterDamageHandler();
+    impl_->xpixmap_buffer_map_.clear();
+#endif
     if (impl_->gst_thread_.IsRunning()) {
       impl_->gst_thread_.Stop();
     }
     gst_app_src_end_of_stream(GST_APP_SRC(impl_->appsrc_));
     impl_->is_destroying_ = true;
-
     if (impl_->pipeline_) {
       gst_element_set_state(impl_->pipeline_, GST_STATE_NULL);
       gst_object_unref(GST_OBJECT(impl_->pipeline_));
     }
     delete impl_;
+    impl_ = NULL;
   }
   delete this;
 }
@@ -394,12 +586,317 @@ void TizenVideoDecodeAccelerator::StartDecoder() {
   gst_element_set_state(impl_->pipeline_, GST_STATE_PLAYING);
 };
 
+#if !defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+void TizenVideoDecodeAccelerator::Impl::OnSinkCapChanged(
+      GstPad* sink_pad, GParamSpec* gparamspec, void* user_data) {
+  content::TizenVideoDecodeAccelerator::Impl* impl =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(user_data);
+  int width = 0, height = 0;
+#if GST_VERSION_MAJOR == 1
+  GstCaps* caps = gst_pad_get_current_caps(GST_PAD(sink_pad));
+  if (caps) {
+    GstVideoInfo info;
+    gst_video_info_init(&info);
+    if (gst_video_info_from_caps(&info, caps)) {
+      width = info.width;
+      height = info.height;
+      if ((impl->caps_width_ != width) || (impl->caps_height_ != height)) {
+        impl->caps_width_ = width;
+        impl->caps_height_ = height;
+      }
+    }
+    gst_caps_unref(caps);
+  }
+#else
+  if (gst_video_get_size(sink_pad, &width, &height)) {
+    if ((impl->caps_width_ != width) || (impl->caps_height_ != height)) {
+      impl->caps_width_ = width;
+      impl->caps_height_ = height;
+    }
+  }
+#endif
+}
+
+GstFlowReturn TizenVideoDecodeAccelerator::Impl::OnDecoded(
+    GstAppSink* sink, gpointer app_data) {
+  GstBuffer* gst_output_buf = NULL;
+  content::TizenVideoDecodeAccelerator::Impl* self =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(app_data);
+  // FIXME: SONAL
+  // Once the OnSinkCapChanged callback starts arriving, stop querying the
+  // width and height for every buffer and move this code under the if block.
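+  // GStreamer 1.0 delivers GstSamples; map the contained buffer to read the
+  // decoded data. GStreamer 0.10 hands out the buffer directly.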
+#if GST_VERSION_MAJOR == 1
+  GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
+  gst_output_buf = gst_sample_get_buffer(sample);
+  GstMapInfo map;
+  if (!gst_buffer_map(gst_output_buf, &map, GST_MAP_READ)) {
+    LOG(ERROR) << "Decoded buffer contains invalid or no info!";
+    gst_sample_unref(sample);
+    return GST_FLOW_ERROR;
+  }
+  GstCaps* caps = gst_sample_get_caps(sample);
+#else
+  gst_output_buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
+  GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(gst_output_buf));
+#endif
+  if (!self->caps_width_ || !self->caps_height_) {
+    if (!caps) {
+      LOG(ERROR) << __FUNCTION__ << " Could not fetch caps from buffer";
+      gst_buffer_unref(gst_output_buf);
+      return GST_FLOW_ERROR;
+    } else {
+      // No need to unref |GstStructure|
+      const GstStructure* str = gst_caps_get_structure(caps, 0);
+      if (!str) {
+        gst_buffer_unref(gst_output_buf);
+        gst_caps_unref(caps);
+        return GST_FLOW_ERROR;
+      }
+      if (!gst_structure_get_int(str, "width", &self->caps_width_) ||
+          !gst_structure_get_int(str, "height", &self->caps_height_)) {
+        LOG(ERROR) << "Buffer information could not be obtained";
+        gst_buffer_unref(gst_output_buf);
+        gst_caps_unref(caps);
+        return GST_FLOW_ERROR;
+      }
+      gst_caps_unref(caps);
+    }
+  }
+
+  if (gst_output_buf) {
+#if GST_VERSION_MAJOR == 1
+    if (map.data) {
+#else
+    if (gst_output_buf->data) {
+#endif
+      gfx::Rect frame_size =
+          gfx::Rect(self->caps_width_, self->caps_height_);
+      self->gst_thread_.message_loop()->PostTask(
+          FROM_HERE,
+          base::Bind(&TizenVideoDecodeAccelerator::Impl::DeliverVideoFrame,
+                      base::Unretained(self),
+                      gst_output_buf,
+                      self->bitstream_buffer_id_,
+                      frame_size));
+      self->bitstream_buffer_id_ = (self->bitstream_buffer_id_ + 1) & ID_LAST;
+    }
+  } else {
+    LOG(ERROR) << __FUNCTION__
+               << " DECODING FRAME FAILED : frame_id "
+               << self->bitstream_buffer_id_;
+  }
+#if GST_VERSION_MAJOR == 1
+  gst_buffer_unmap(gst_output_buf, &map);
+#endif
+  return GST_FLOW_OK;
+}
+
+void TizenVideoDecodeAccelerator::Impl::CreateAppSinkElement() {
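+  // {eos, new_preroll, new_sample/new_buffer}: only the decoded-frame
+  // callback (OnDecoded) is used.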
+  GstAppSinkCallbacks appsink_callbacks =
+      {NULL, NULL, &OnDecoded, NULL};
+
+  if (!(sink_ = gst_element_factory_make("appsink", "sink"))) {
+    LOG(ERROR) << __FUNCTION__ << " Appsink could not be created";
+    return;
+  }
+  gst_app_sink_set_callbacks(GST_APP_SINK(sink_),
+                             &appsink_callbacks,
+                             static_cast<gpointer>(this),
+                             NULL);
+  gst_app_sink_set_max_buffers(GST_APP_SINK(sink_), 1);
+}
+
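+// Copies the decoded frame out of |buffer| into anonymous shared memory and
+// notifies the client on the IO message loop via NotifyDecodeDone.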
+void TizenVideoDecodeAccelerator::Impl::DeliverVideoFrame(
+    GstBuffer* buffer,
+    int32 bitstream_buffer_id,
+    gfx::Rect frame_size) {
+  base::SharedMemory shared_memory;
+  base::SharedMemoryHandle shared_memory_handle;
+
+#if GST_VERSION_MAJOR == 1
+  GstMapInfo map;
+  if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
+    LOG(ERROR) << "Decoded buffer contains invalid or no info!";
+    gst_buffer_unref(buffer);
+    return;
+  }
+  uint32 buffer_size = map.size;
+#else
+  uint32 buffer_size = buffer->size;
+#endif
+  if (!shared_memory.CreateAndMapAnonymous(buffer_size)) {
+    LOG(ERROR) << "Shared Memory creation failed.";
+  } else {
+    if (!shared_memory.ShareToProcess(base::GetCurrentProcessHandle(),
+                                      &shared_memory_handle)) {
+      LOG(ERROR) << __FUNCTION__ << " Could not get handle of Shared Memory";
+    } else {
+      memcpy(shared_memory.memory(),
+#if GST_VERSION_MAJOR == 1
+             map.data,
+#else
+             GST_BUFFER_DATA(buffer),
+#endif
+             buffer_size);
+      io_message_loop_proxy_->PostTask(
+          FROM_HERE,
+          base::Bind(&media::VideoDecodeAccelerator::Client::NotifyDecodeDone,
+                      io_client_weak_factory_->GetWeakPtr(),
+                      shared_memory_handle,
+                      bitstream_buffer_id,
+                      buffer_size,
+                      frame_size));
+    }
+  }
+#if GST_VERSION_MAJOR == 1
+  gst_buffer_unmap(buffer, &map);
+#endif
+  gst_buffer_unref(buffer);
+}
+#endif
+
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
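+// Handles the sink's prepare-window-handle (prepare-xid) message: reads the
+// video size and points the overlay at a matching X pixmap.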
+void TizenVideoDecodeAccelerator::Impl::OnXWindowIdPrepared(
+    GstMessage* message) {
+#if GST_VERSION_MAJOR == 1
+  const GstStructure* structure = gst_message_get_structure(message);
+  gst_structure_get_int(structure, "video-width", &gst_width_);
+  gst_structure_get_int(structure, "video-height", &gst_height_);
+#else
+  gst_structure_get_int(message->structure, "video-width", &gst_width_);
+  gst_structure_get_int(message->structure, "video-height", &gst_height_);
+#endif
+  SetPixmap(gst_width_, gst_height_);
+  SetXWindowHandle(true);
+}
+
+void TizenVideoDecodeAccelerator::Impl::SetXWindowHandle(
+    bool handle_set) {
+  is_x_window_handle_set_ = handle_set;
+}
+
+void TizenVideoDecodeAccelerator::Impl::SetPixmap(
+    const int32& gst_width, const int32& gst_height) {
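+  // Reuse a cached pixmap for this resolution if one exists; otherwise
+  // create a new surface and start tracking damage events on it.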
+  int32 key_wh = ConvertWidthAndHeightToKey(gst_width, gst_height);
+  PixmapSurfaceTizenMap::iterator it = xpixmap_buffer_map_.find(key_wh);
+  if (it != xpixmap_buffer_map_.end()) {
+    pixmap_surface_ = it->second;
+    pixmap_id_ = pixmap_surface_->GetId();
+  } else {
+    pixmap_surface_ =
+        gfx::EflPixmap::Create(gfx::EflPixmapBase::UsageType::SURFACE,
+                               gfx::Size(gst_width, gst_height));
+    if (pixmap_surface_.get() == NULL) {
+      LOG(ERROR) << __FUNCTION__ << " Failed to create pixmap Surface";
+      return;
+    }
+    pixmap_id_ = pixmap_surface_->GetId();
+    xpixmap_buffer_map_[key_wh] = pixmap_surface_;
+  }
+  gst_width_ = gst_width;
+  gst_height_ = gst_height;
+  DeregisterDamageHandler();
+
+  // Register to get notification from ecore for damage updates.
+  damage_ = ecore_x_damage_new(pixmap_id_,
+                               ECORE_X_DAMAGE_REPORT_RAW_RECTANGLES);
+  damage_handler_ = ecore_event_handler_add(ECORE_X_EVENT_DAMAGE_NOTIFY,
+                                            OnSurfaceChanged,
+                                            this);
+#if GST_VERSION_MAJOR == 1
+  gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink_), pixmap_id_);
+#else
+  gst_x_overlay_set_window_handle(GST_X_OVERLAY(sink_), pixmap_id_);
+#endif
+}
+
+void TizenVideoDecodeAccelerator::Impl::DeregisterDamageHandler() {
+  if (damage_) {
+    ecore_x_damage_free(damage_);
+    damage_ = 0;
+  }
+  if (damage_handler_) {
+    ecore_event_handler_del(damage_handler_);
+    damage_handler_ = NULL;
+  }
+}
+
+// Callback received when pixmap surface is changed/damaged
+Eina_Bool TizenVideoDecodeAccelerator::Impl::OnSurfaceChanged(void* ptr_acc,
+                                                              int type,
+                                                              void* event) {
+  TizenVideoDecodeAccelerator::Impl* self =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(ptr_acc);
+
+  if (self) {
+    media::Picture picture(self->pixmap_id_,
+                           self->bitstream_buffer_id_,
+                           gfx::Rect(self->gst_width_, self->gst_height_));
+
+    self->io_message_loop_proxy_->PostTask(
+        FROM_HERE,
+        base::Bind(&media::VideoDecodeAccelerator::Client::PictureReady,
+                   self->io_client_weak_factory_->GetWeakPtr(),
+                   picture));
+    self->bitstream_buffer_id_ = (self->bitstream_buffer_id_ + 1) & ID_LAST;
+  } else {
+    LOG(ERROR) << __FUNCTION__ << " Accelerator is NULL";
+    return ECORE_CALLBACK_CANCEL;
+  }
+  return ECORE_CALLBACK_PASS_ON;
+}
+
+void TizenVideoDecodeAccelerator::Impl::OnSinkCapChanged(
+    GstPad* sink_pad, GParamSpec* gparamspec, void* user_data) {
+  TizenVideoDecodeAccelerator::Impl* self =
+      static_cast<TizenVideoDecodeAccelerator::Impl*>(user_data);
+  if (!self) {
+    LOG(ERROR) << __FUNCTION__ << " Accelerator is NULL";
+    return;
+  }
+
+  int width = 0, height = 0;
+#if GST_VERSION_MAJOR == 1
+  GstCaps* caps = gst_pad_get_current_caps(GST_PAD(sink_pad));
+  if (caps) {
+    GstVideoInfo info;
+    gst_video_info_init(&info);
+
+    if (gst_video_info_from_caps(&info, caps)) {
+      width = info.width;
+      height = info.height;
+      if ((self->gst_width_ != width) || (self->gst_height_ != height)) {
+        self->SetPixmap(width, height);
+      }
+    }
+    gst_caps_unref(caps);
+  }
+#else
+  if (gst_video_get_size(sink_pad, &width, &height)) {
+    if ((self->gst_width_ != width) || (self->gst_height_ != height)) {
+      self->SetPixmap(width, height);
+    }
+  }
+#endif
+}
+#endif
+
 void TizenVideoDecodeAccelerator::OnDecode(
     scoped_ptr<BitstreamBufferRef> buffer_ref) {
   if (!buffer_ref) {
     return;
   }
-
+#if GST_VERSION_MAJOR == 1
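+  // Wrap the shared-memory payload without copying; Destruct runs as the
+  // destroy notify when the wrapped buffer is released.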
+  buffer_ref->gst_buffer_ =
+      gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
+                                  static_cast<guint8*>(buffer_ref->shm_->memory()),
+                                  buffer_ref->size_,
+                                  0,
+                                  buffer_ref->size_,
+                                  reinterpret_cast<guint8*>(buffer_ref.get()),
+                                  BitstreamBufferRef::Destruct);
+  if (!buffer_ref->gst_buffer_ || !GST_IS_BUFFER(buffer_ref->gst_buffer_)) {
+    LOG(ERROR) << " gst_buffer_new_wrapped_full failed to allocate memory.!";
+    return;
+  }
+#else
   if (!(buffer_ref->gst_buffer_ = gst_buffer_new())) {
     return;
   }
@@ -410,7 +907,7 @@ void TizenVideoDecodeAccelerator::OnDecode(
   GST_BUFFER_SIZE(buffer_ref->gst_buffer_) = buffer_ref->size_;
   GST_BUFFER_DATA(buffer_ref->gst_buffer_) =
       static_cast<guint8*>(buffer_ref->shm_->memory());
-
+#endif
   if (GST_FLOW_OK !=
           gst_app_src_push_buffer(GST_APP_SRC(impl_->appsrc_),
                                   buffer_ref->gst_buffer_)) {
@@ -430,3 +927,4 @@ void TizenVideoDecodeAccelerator::NotifyError(
 }
 
 }  // namespace content
+