[Tizen][WebRTC] Enable H/W accelerated H.264 support for WebRTC on Tizen
authormsu.koo <msu.koo@samsung.com>
Wed, 12 Nov 2014 12:01:26 +0000 (21:01 +0900)
committerYoungsoo Choi <kenshin.choi@samsung.com>
Tue, 10 Jul 2018 06:57:09 +0000 (06:57 +0000)
- Implemented TizenVideoDecodeAccelerator.
- Implemented TizenVideoEncodeAccelerator.

Current implementation uses xvimagesink without linking with
Native Texture Surface.
Surface adaptation is in progress at
http://107.108.218.239/bugzilla/show_bug.cgi?id=9181

Together with: I292dabd6acc376fcbe6040d19a5dabbbc5ee3a0f

Bug: http://107.108.218.239/bugzilla/show_bug.cgi?id=8362
Reviewed by: SeungSeop Park, Viatcheslav Ostapenko
Reviewed by: Min-Soo Koo, Vinod Keshav

Change-Id: I2feae6d6929d64473e4be6a259ca4780609e4e3c
Signed-off-by: msu.koo <msu.koo@samsung.com>
tizen_src/impl/chromium-efl-mm.gypi
tizen_src/impl/command_line_efl.cc
tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.cc [new file with mode: 0644]
tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.h [new file with mode: 0644]
tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.cc [new file with mode: 0644]
tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.h [new file with mode: 0644]

index 5edee67..49ece1f 100644 (file)
@@ -2,6 +2,14 @@
   'include_dirs': [
     '<(chrome_src_dir)/v8/include',
   ],
+  'variables': {
+    'webrtc_sources': [
+      'content/common/gpu/media/tizen/tizen_video_decode_accelerator.h',
+      'content/common/gpu/media/tizen/tizen_video_decode_accelerator.cc',
+      'content/common/gpu/media/tizen/tizen_video_encode_accelerator.h',
+      'content/common/gpu/media/tizen/tizen_video_encode_accelerator.cc',
+    ],
+  },
   'sources': [
     'media/base/tizen/webaudio_media_codec_info_tizen.h',
     'content/browser/media/tizen/webaudio_decoder_browser_gstreamer.cc',
@@ -34,6 +42,7 @@
     'media/base/tizen/webmediaplayer_tizen.cc',
     'media/base/tizen/webmediaplayer_tizen.h',#ME and MSE
     'media/video/capture/tizen/video_capture_device_factory_tizen_helper.cc',
+    '<@(webrtc_sources)',
   ],
   'conditions': [
     ['building_for_tizen==1', {
@@ -64,4 +73,7 @@
       ],
     }],
   ],
+  'dependencies': [
+    '<(chrome_src_dir)/third_party/webrtc/modules/modules.gyp:video_processing',
+  ],
 }
index 0ebd50c..9731d55 100644 (file)
@@ -59,6 +59,10 @@ content::MainFunctionParams CommandLineEfl::GetDefaultPortParams() {
   // Enables glFinish call
   p_command_line->AppendSwitch(switches::kWaitForFrameComplete);
 
+#if defined(TIZEN_MULTIMEDIA_SUPPORT) && defined(ENABLE_WEBRTC)
+  p_command_line->AppendSwitch(switches::kEnableWebRtcHWH264Encoding);
+#endif
+
 #if defined(OS_TIZEN)
   p_command_line->AppendSwitch(switches::kEnableOverscrollNotifications);
 #if !defined(EWK_BRINGUP)
diff --git a/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.cc b/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.cc
new file mode 100644 (file)
index 0000000..94d2233
--- /dev/null
@@ -0,0 +1,427 @@
+// Copyright 2014 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/common/gpu/media/tizen/tizen_video_decode_accelerator.h"
+
+#include <gst/gst.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/video/gstvideosink.h>
+#include <gst/video/video.h>
+
+#include "base/bind.h"
+#include "base/memory/shared_memory.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/time/time.h"
+
+using media::VideoFrame;
+
+namespace {
+
+// Deleter for scoped_ptr<GstElement>: releases the creation ref of an
+// element we still own (i.e. one not yet handed to a GstBin).
+struct GstElementDeleter {
+  void operator()(GstElement* ptr) const {
+    DCHECK(ptr != NULL);
+    gst_object_unref(ptr);
+  }
+};
+
+} // namespace
+
+namespace content {
+
+// Tuning constants for the decode pipeline's appsrc input queue.
+enum {
+  MAX_BITRATE = 2000000,                 // bps.
+  INPUT_BUFFER_SIZE = MAX_BITRATE / 8,   // bytes. 1 sec for H.264 HD video.
+};
+
+// Factory entry point; the returned object deletes itself in Destroy().
+media::VideoDecodeAccelerator* CreateTizenVideoDecodeAccelerator() {
+  return new TizenVideoDecodeAccelerator();
+}
+
+// Holds one input bitstream buffer (and its shared-memory mapping) alive
+// while the pipeline consumes it. The destructor tells the client the
+// buffer id can be recycled; Destruct() is installed as the GstBuffer free
+// function in OnDecode(), so once the buffer is pushed, gstreamer drives
+// this object's lifetime.
+struct TizenVideoDecodeAccelerator::BitstreamBufferRef {
+  BitstreamBufferRef(
+      base::WeakPtr<media::VideoDecodeAccelerator::Client> client,
+      const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
+      base::SharedMemory* shm,
+      size_t size,
+      int32 input_id)
+      : client_(client),
+        client_message_loop_proxy_(client_message_loop_proxy),
+        shm_(shm),
+        size_(size),
+        bytes_used_(0),
+        input_id_(input_id),
+        gst_buffer_(NULL) {}
+
+  // Notifies end-of-buffer on the client's loop. input_id_ < 0 means
+  // "no client notification wanted".
+  ~BitstreamBufferRef() {
+    if (input_id_ >= 0) {
+      client_message_loop_proxy_->PostTask(
+          FROM_HERE,
+          base::Bind(
+              &media::VideoDecodeAccelerator::Client::NotifyEndOfBitstreamBuffer,
+              client_,
+              input_id_));
+    }
+  }
+
+  // GstBuffer free function: reclaims the ref handed to gstreamer via
+  // GST_BUFFER_MALLOCDATA in OnDecode().
+  static void Destruct(gpointer data) {
+    DCHECK(data != NULL);
+    BitstreamBufferRef* pRef = static_cast<BitstreamBufferRef*>(data);
+    delete pRef;
+  }
+
+  base::WeakPtr<media::VideoDecodeAccelerator::Client> client_;
+  scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy_;
+  scoped_ptr<base::SharedMemory> shm_;  // Owned mapping of the input data.
+  size_t size_;                         // Bytes of valid input data.
+  off_t bytes_used_;
+  int32 input_id_;                      // Client's bitstream buffer id.
+  GstBuffer* gst_buffer_;               // Set in OnDecode(); owned by gst.
+};
+
+// All pipeline state, collected so Destroy() can tear it down atomically.
+struct TizenVideoDecodeAccelerator::Impl {
+  Impl()
+      : can_feed_(true),
+        is_destroying_(false),
+        pipeline_(NULL),
+        sink_(NULL),
+        appsrc_(NULL),
+        io_message_loop_proxy_(base::MessageLoopProxy::current()),
+        gst_thread_("TizenDecoderThreadGst") {}
+
+  // Sync bus handler: only logs pipeline errors; all messages are passed on.
+  static GstBusSyncReply OnBusMessage(
+      GstBus* bus, GstMessage* msg, gpointer data) {
+    switch (GST_MESSAGE_TYPE(msg)) {
+      case GST_MESSAGE_ERROR: {
+        gchar* debug = NULL;
+        GError* error = NULL;
+        gst_message_parse_error(msg, &error, &debug);
+        LOG(ERROR) << __FUNCTION__
+                   << " GSTError happens from bus at "
+                   << GST_OBJECT_NAME(msg->src)
+                   << ":" << error->message;
+        LOG(ERROR) << __FUNCTION__
+                   << " Debugging Info: "
+                   << (debug != NULL ? debug : "none");
+        g_error_free(error);
+        g_free(debug);
+        break;
+      }
+    }
+    return GST_BUS_PASS;
+  }
+
+  // appsrc "need-data" callback: input queue drained below min-percent.
+  static void StartFeed(GstAppSrc *source, guint size, gpointer app) {
+    DCHECK(source);
+    content::TizenVideoDecodeAccelerator::Impl* impl =
+        static_cast<content::TizenVideoDecodeAccelerator::Impl*>(app);
+    impl->can_feed_ = true;
+  }
+
+  // appsrc "enough-data" callback: stop pushing until need-data fires.
+  static void StopFeed(GstAppSrc *source, gpointer app) {
+    DCHECK(source);
+    content::TizenVideoDecodeAccelerator::Impl* impl =
+        static_cast<content::TizenVideoDecodeAccelerator::Impl*>(app);
+    impl->can_feed_ = false;
+  }
+
+  // Written by gst callbacks, read by Decode(); plain volatile, no lock.
+  // NOTE(review): volatile is not a threading primitive — confirm the
+  // producer/consumer pattern here tolerates stale reads.
+  volatile bool can_feed_;
+  volatile bool is_destroying_;
+  GstElement* pipeline_;  // Owns the elements below once they are binned.
+  GstElement* sink_;
+  GstElement* appsrc_;
+  scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy_;
+  scoped_ptr<base::WeakPtrFactory<Client> > io_client_weak_factory_;
+  base::Thread gst_thread_;  // Runs StartDecoder() and OnDecode().
+};
+
+// impl_ is created in Initialize() and deleted (with |this|) in Destroy().
+TizenVideoDecodeAccelerator::TizenVideoDecodeAccelerator()
+    : impl_(NULL) {
+}
+
+TizenVideoDecodeAccelerator::~TizenVideoDecodeAccelerator() {
+}
+
+// Builds the decode pipeline: appsrc -> H.264 decoder -> video sink.
+// On Tizen the hardware omx_h264dec + xvimagesink path is used (frames are
+// rendered by the sink directly); elsewhere a software ffdec_h264 +
+// autovideosink path is used for development. Returns false on any setup
+// failure. NOTE(review): on failure paths impl_ stays allocated until
+// Destroy(); elements already added to the bin are released with the
+// pipeline via the scoped_ptr deleter.
+bool TizenVideoDecodeAccelerator::Initialize(
+    media::VideoCodecProfile profile,
+    Client* client) {
+  GError* error = NULL;
+  GstCaps* video_caps = NULL;
+  GstElement* gst_decoder = NULL;
+  GstBus* gst_bus = NULL;
+  scoped_ptr<GstElement, GstElementDeleter> gst_pipeline;
+  static GstAppSrcCallbacks appsrc_callbacks =
+      {&Impl::StartFeed, &Impl::StopFeed, NULL};
+  CHECK(impl_ == NULL);
+  impl_ = new Impl();
+  impl_->io_client_weak_factory_.reset(
+      new base::WeakPtrFactory<Client>(client));
+
+  // Only H.264 baseline/main are accepted; anything else is rejected here.
+  switch (profile) {
+    case media::H264PROFILE_BASELINE:
+      DVLOG(1) << "Initialize(): profile -> H264PROFILE_BASELINE";
+      break;
+    case media::H264PROFILE_MAIN:
+      DVLOG(1) << "Initialize(): profile -> H264PROFILE_MAIN";
+      break;
+    default:
+      LOG(ERROR) << "Initialize(): unsupported profile=" << profile;
+      return false;
+  };
+
+  if (!gst_is_initialized() && !gst_init_check(NULL, NULL, &error)) {
+    LOG(ERROR) << __FUNCTION__ << "cannot initialize gstreamer.";
+    g_error_free(error);
+    return false;
+  }
+
+  // pipeline initialization.
+  gst_pipeline.reset(gst_pipeline_new("h264_decode"));
+  if (!gst_pipeline) {
+    LOG(ERROR) << __FUNCTION__ << "cannot initialize gst pipeline.";
+    return false;
+  }
+  if (!(gst_bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline.get())))) {
+    return false;
+  }
+  gst_bus_set_sync_handler(gst_bus, Impl::OnBusMessage, impl_);
+  gst_object_unref(gst_bus);
+
+  // appsrc initialization.
+  if (!(impl_->appsrc_ = gst_element_factory_make("appsrc", "src"))) {
+    LOG(ERROR) << __FUNCTION__ << "cannot initialize gst appsrc.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->appsrc_)) {
+    gst_object_unref(impl_->appsrc_);
+    impl_->appsrc_ = NULL;
+    return false;
+  }
+  gst_app_src_set_max_bytes(GST_APP_SRC(impl_->appsrc_), INPUT_BUFFER_SIZE);
+  gst_app_src_set_callbacks(GST_APP_SRC(impl_->appsrc_), &appsrc_callbacks,
+                            static_cast<gpointer>(impl_), NULL);
+  g_object_set(G_OBJECT(impl_->appsrc_),
+               "is-live", TRUE,
+               "block", FALSE,
+               "min-percent", 80, // if buffer below 80%, need-data emits.
+               "stream-type", GST_APP_STREAM_TYPE_STREAM,
+               NULL);
+  if (!(video_caps = gst_caps_from_string("video/x-h264,framerate=30/1"))) {
+    return false;
+  }
+  g_object_set(G_OBJECT(impl_->appsrc_), "caps", video_caps, NULL);
+  gst_caps_unref(video_caps);
+
+#if defined(OS_TIZEN)
+  DVLOG(1) << "######################################";
+  DVLOG(1) << "      USING omx_h264dec DECODER " << (unsigned int)this;
+  DVLOG(1) << "######################################";
+  // decoder initialization.
+  if (!(gst_decoder = gst_element_factory_make("omx_h264dec", "decoder"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create omx_h264dec.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_decoder)) {
+    gst_object_unref(gst_decoder);
+    return false;
+  }
+
+  // sink initialization.
+  if (!(impl_->sink_ = gst_element_factory_make("xvimagesink", "xvimagesink"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create xvimagesink.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->sink_)) {
+    gst_object_unref(impl_->sink_);
+    impl_->sink_ = NULL;
+    return false;
+  }
+  g_object_set(impl_->sink_, "rotate", 0, NULL);
+
+  // linking the elements.
+  if (!gst_element_link(impl_->appsrc_, gst_decoder)) {
+    LOG(ERROR) << __FUNCTION__ << " Source and Decoder could not be linked";
+    return false;
+  }
+  if (!gst_element_link(gst_decoder, impl_->sink_)) {
+    LOG(ERROR) << __FUNCTION__ << " Decoder and Sink could not be linked";
+    return false;
+  }
+
+#else
+  DVLOG(1) << "######################################";
+  DVLOG(1) << "      USING ffdec_h264 DECODER";
+  DVLOG(1) << "######################################";
+  GstElement* gst_colorspace = NULL;
+
+  // decoder initialization
+  if (!(gst_decoder = gst_element_factory_make("ffdec_h264", "H264-decoder"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create ffdec_h264.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_decoder)) {
+    gst_object_unref(gst_decoder);
+    return false;
+  }
+
+  // colorspace initialization
+  if (!(gst_colorspace = gst_element_factory_make("ffmpegcolorspace", "cs"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create ffmpegcolorspace.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_colorspace)) {
+    gst_object_unref(gst_colorspace);
+    return false;
+  }
+
+  if (!(impl_->sink_ = gst_element_factory_make("autovideosink", "sink"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create autovideosink.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->sink_)) {
+    gst_object_unref(impl_->sink_);
+    impl_->sink_ = NULL;
+    return false;
+  }
+
+  if(!gst_element_link_many(impl_->appsrc_, gst_decoder, gst_colorspace,
+                            impl_->sink_, NULL)) {
+    LOG(ERROR) << __FUNCTION__ << " Some element could not be linked";
+    return false;
+  }
+#endif
+  if (!impl_->gst_thread_.Start()) {
+    LOG(ERROR) << __FUNCTION__ << " gst_thread_ failed to start";
+    return false;
+  }
+
+  // The pipeline is set to PLAYING asynchronously on the gst thread.
+  impl_->gst_thread_.message_loop()->PostTask(
+      FROM_HERE,
+      base::Bind(&TizenVideoDecodeAccelerator::StartDecoder,
+      base::Unretained(this)));
+
+  GST_DEBUG_BIN_TO_DOT_FILE(
+      GST_BIN(gst_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "decoder_graph.dot");
+
+  // Success: transfer pipeline ownership from the scoped_ptr to impl_.
+  impl_->pipeline_ = gst_pipeline.release();
+  return true;
+}
+
+void TizenVideoDecodeAccelerator::Decode(
+    const media::BitstreamBuffer& bitstream_buffer) {
+  scoped_ptr<BitstreamBufferRef> buffer_ref;
+  scoped_ptr<base::SharedMemory> shm(
+      new base::SharedMemory(bitstream_buffer.handle(), true));
+
+  if (!shm->Map(bitstream_buffer.size())) {
+    LOG(ERROR) << __FUNCTION__ << " could not map bitstream_buffer";
+    NotifyError(media::VideoDecodeAccelerator::UNREADABLE_INPUT);
+    return;
+  }
+
+  buffer_ref.reset(new BitstreamBufferRef(
+      impl_->io_client_weak_factory_->GetWeakPtr(),
+      base::MessageLoopProxy::current(),
+      shm.release(),
+      bitstream_buffer.size(),
+      bitstream_buffer.id()));
+
+  if (!buffer_ref) {
+    return;
+  }
+
+  if (impl_->can_feed_ && !impl_->is_destroying_) {
+    impl_->gst_thread_.message_loop()->PostTask(
+        FROM_HERE,
+        base::Bind(&TizenVideoDecodeAccelerator::OnDecode,
+                   base::Unretained(this),
+                   base::Passed(&buffer_ref)));
+  } else {
+    DVLOG(2) << __FUNCTION__
+             << " Frame drop on decoder:"
+             << " INPUT Q is FULL";
+  }
+}
+
+// Decoded frames are rendered directly by the sink element, so
+// texture/picture-buffer management is intentionally a no-op here.
+void TizenVideoDecodeAccelerator::AssignPictureBuffers(
+    const std::vector<media::PictureBuffer>& buffers) {
+  NOTIMPLEMENTED();
+}
+
+void TizenVideoDecodeAccelerator::ReusePictureBuffer(
+    int32 picture_buffer_id) {
+  NOTIMPLEMENTED();
+}
+
+// NOTE(review): Flush/Reset are part of the VDA contract (seek, error
+// recovery); confirm the WebRTC-only use case never invokes them.
+void TizenVideoDecodeAccelerator::Flush() {
+  NOTIMPLEMENTED();
+}
+
+void TizenVideoDecodeAccelerator::Reset() {
+  NOTIMPLEMENTED();
+}
+
+// Tears down the pipeline and deletes |this|. Mirrors the encoder's
+// shutdown order: set is_destroying_ first so Decode() stops queueing
+// tasks, send EOS only if the appsrc was actually created (Initialize()
+// may have failed before that point -- the old code dereferenced a NULL
+// appsrc_ here), then stop the gst thread and release the pipeline.
+void TizenVideoDecodeAccelerator::Destroy() {
+  if (impl_ != NULL) {
+    impl_->is_destroying_ = true;
+    if (impl_->appsrc_) {
+      gst_app_src_end_of_stream(GST_APP_SRC(impl_->appsrc_));
+    }
+    if (impl_->gst_thread_.IsRunning()) {
+      impl_->gst_thread_.Stop();
+    }
+    if (impl_->pipeline_) {
+      gst_element_set_state(impl_->pipeline_, GST_STATE_NULL);
+      gst_object_unref(GST_OBJECT(impl_->pipeline_));
+    }
+    delete impl_;
+    impl_ = NULL;
+  }
+  delete this;
+}
+
+// Decode() must run on the child thread so BitstreamBufferRef
+// notifications are posted back to the loop the client expects.
+bool TizenVideoDecodeAccelerator::CanDecodeOnIOThread(){
+  return false;
+}
+
+// Runs on gst_thread_: kicks the pipeline built by Initialize() into
+// PLAYING. (Also removes the stray ';' after the function body.)
+void TizenVideoDecodeAccelerator::StartDecoder() {
+  gst_element_set_state(impl_->pipeline_, GST_STATE_PLAYING);
+}
+
+void TizenVideoDecodeAccelerator::OnDecode(
+    scoped_ptr<BitstreamBufferRef> buffer_ref) {
+  if (!buffer_ref) {
+    return;
+  }
+
+  if (!(buffer_ref->gst_buffer_ = gst_buffer_new())) {
+    return;
+  }
+
+  GST_BUFFER_MALLOCDATA(buffer_ref->gst_buffer_) =
+      reinterpret_cast<guint8*>(buffer_ref.get());
+  GST_BUFFER_FREE_FUNC(buffer_ref->gst_buffer_) = BitstreamBufferRef::Destruct;
+  GST_BUFFER_SIZE(buffer_ref->gst_buffer_) = buffer_ref->size_;
+  GST_BUFFER_DATA(buffer_ref->gst_buffer_) =
+      static_cast<guint8*>(buffer_ref->shm_->memory());
+
+  if (GST_FLOW_OK !=
+          gst_app_src_push_buffer(GST_APP_SRC(impl_->appsrc_),
+                                  buffer_ref->gst_buffer_)) {
+    LOG(ERROR) << __FUNCTION__ << " fail to push buffer into decoder pipeline";
+    return;
+  }
+
+  // lifecycle of buffer_ref will be handled by gstreamer.
+  buffer_ref.release();
+}
+
+void TizenVideoDecodeAccelerator::NotifyError(
+    media::VideoDecodeAccelerator::Error error) {
+  if (impl_->io_client_weak_factory_->GetWeakPtr()) {
+    impl_->io_client_weak_factory_->GetWeakPtr()->NotifyError(error);
+  }
+}
+
+}  // namespace content
diff --git a/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.h b/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_decode_accelerator.h
new file mode 100644 (file)
index 0000000..10a42f5
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright 2014 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_DECODE_ACCELERATOR_H_
+#define CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_DECODE_ACCELERATOR_H_
+
+#include "base/synchronization/lock.h"
+#include "base/threading/thread.h"
+#include "content/common/content_export.h"
+#include "media/video/video_decode_accelerator.h"
+
+namespace content {
+
+// GStreamer-backed H.264 VideoDecodeAccelerator for Tizen. Builds an
+// appsrc -> decoder -> video sink pipeline; decoded frames are rendered by
+// the sink directly, so the picture-buffer methods are stubs.
+class CONTENT_EXPORT TizenVideoDecodeAccelerator
+    : public media::VideoDecodeAccelerator {
+ public:
+  TizenVideoDecodeAccelerator();
+  ~TizenVideoDecodeAccelerator() override;
+
+  bool Initialize(media::VideoCodecProfile profile, Client* client) override;
+  void Decode(const media::BitstreamBuffer& bitstream_buffer) override;
+  void AssignPictureBuffers(
+      const std::vector<media::PictureBuffer>& buffers) override;
+  void ReusePictureBuffer(int32 picture_buffer_id) override;
+  void Flush() override;
+  void Reset() override;
+  void Destroy() override;
+  bool CanDecodeOnIOThread() override;
+
+ private:
+  struct BitstreamBufferRef;
+  struct Impl;
+
+  // Runs on the internal gst thread.
+  void OnDecode(scoped_ptr<BitstreamBufferRef> buffer_ref);
+  void NotifyError(media::VideoDecodeAccelerator::Error error);
+  void StartDecoder();
+
+  // Owned. Created in Initialize(), deleted in Destroy().
+  Impl* impl_;
+
+  DISALLOW_COPY_AND_ASSIGN(TizenVideoDecodeAccelerator);
+};
+
+}  // namespace content
+
+#endif  // CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_DECODE_ACCELERATOR_H_
diff --git a/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.cc b/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.cc
new file mode 100644 (file)
index 0000000..891f2a2
--- /dev/null
@@ -0,0 +1,637 @@
+// Copyright 2014 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/common/gpu/media/tizen/tizen_video_encode_accelerator.h"
+
+#include <gst/gst.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/video/video.h>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "base/timer/timer.h"
+#include "gpu/command_buffer/service/gpu_switches.h"
+#include "media/base/bitstream_buffer.h"
+#include "third_party/webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+
+using media::VideoFrame;
+
+namespace content {
+
+// RAII deleters for scoped_ptr-held gst objects.
+struct GstBufferDeleter {
+  inline void operator()(GstBuffer* ptr) const {
+    DCHECK(ptr != NULL);
+    gst_buffer_unref(ptr);
+  }
+};
+
+struct GstElementDeleter {
+  inline void operator()(GstElement* ptr) const {
+    DCHECK(ptr != NULL);
+    gst_object_unref(ptr);
+  }
+};
+
+// Tuning constants for the encode pipeline.
+enum {
+  // Arbitrary choice.
+  INITIAL_FRAMERATE = 30,
+  // Until there are non-realtime users, no need for unrequested I-frames.
+  INPUT_BUFFER_COUNT = 5, // default input buffer counts of omx_h264enc
+  MAX_BUFFERING = 60,
+  MAX_FRAME_RATE = 30,
+  // Max bitrate in bps
+  MAX_BITRATE = 2000000
+};
+
+// Factory entry point; the returned object deletes itself in Destroy().
+media::VideoEncodeAccelerator* CreateTizenVideoEncodeAccelerator() {
+  return new TizenVideoEncodeAccelerator();
+}
+
+// Keeps one raw input VideoFrame alive while the pipeline encodes it.
+// Destruct() is installed as the GstBuffer free function in OnEncode(), so
+// gstreamer drives the lifetime once the buffer is pushed.
+struct TizenVideoEncodeAccelerator::BitstreamBufferRef {
+  BitstreamBufferRef(
+      const scoped_refptr<media::VideoFrame>& frame,
+      base::WeakPtr<media::VideoEncodeAccelerator::Client> client_delegate,
+      const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
+      size_t size)
+      : frame_(frame),
+        client_delegate_(client_delegate),
+        client_message_loop_proxy_(client_message_loop_proxy),
+        size_(size),
+        bytes_used_(0),
+        gst_buffer_(NULL) {}
+
+  ~BitstreamBufferRef() {}
+
+  // GstBuffer free function: reclaims the ref handed to gstreamer via
+  // GST_BUFFER_MALLOCDATA in OnEncode().
+  static void Destruct(gpointer data) {
+    DCHECK(data != NULL);
+    BitstreamBufferRef* pRef = static_cast<BitstreamBufferRef*>(data);
+    delete pRef;
+  }
+
+  scoped_refptr<media::VideoFrame> frame_;  // Pins the raw frame data.
+  base::WeakPtr<media::VideoEncodeAccelerator::Client> client_delegate_;
+  scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy_;
+  size_t size_;        // Allocation size of the I420 frame, in bytes.
+  off_t bytes_used_;
+  GstBuffer* gst_buffer_;  // Set in OnEncode(); owned by gst afterwards.
+};
+
+// All encoder pipeline state, torn down as a unit in Destroy().
+struct TizenVideoEncodeAccelerator::Impl {
+  Impl(media::VideoEncodeAccelerator::Client* client,
+       scoped_refptr<base::MessageLoopProxy> msg_loop)
+      : pipeline_(NULL),
+        encoder_(NULL),
+        appsrc_(NULL),
+        gst_thread_("GSTEncoder"),
+        enable_framedrop_(false),
+        io_client_weak_factory_(client),
+        child_message_loop_proxy_(msg_loop),
+        gst_bitrate_(0),
+        is_running_(false),
+        is_destroying_(false),
+        can_feed_(false) {}
+
+  // Copies one encoded buffer into a client output buffer (gst thread).
+  void DeliverVideoFrame(GstBuffer* buffer, bool key_frame);
+  // appsink new-buffer callback.
+  static GstFlowReturn OnEncoded(GstAppSink *sink,gpointer data);
+  // appsrc need-data / enough-data callbacks toggling can_feed_.
+  static void StartFeed(GstAppSrc *source, guint size, gpointer data);
+  static void StopFeed(GstAppSrc *source, gpointer data);
+
+  GstElement* pipeline_;  // Owns the elements below once they are binned.
+  GstElement* encoder_;
+  GstElement* appsrc_;
+  base::Thread gst_thread_;
+  // True after an output-queue underrun: delta frames are dropped until
+  // the next key frame so the client never sees a broken GOP.
+  bool enable_framedrop_;
+  std::vector<media::BitstreamBuffer> encoder_output_queue_;
+  base::WeakPtrFactory<media::VideoEncodeAccelerator::Client> io_client_weak_factory_;
+  const scoped_refptr<base::MessageLoopProxy> child_message_loop_proxy_;
+  base::Lock destroy_lock_;
+  base::ThreadChecker thread_checker_;
+  gfx::Size view_size_;
+  uint32 gst_bitrate_;
+  // NOTE(review): volatile is not a threading primitive — confirm these
+  // cross-thread flags tolerate stale reads.
+  volatile bool is_running_;
+  volatile bool is_destroying_;
+  volatile bool can_feed_;
+};
+
+void TizenVideoEncodeAccelerator::Impl::DeliverVideoFrame(GstBuffer* buffer,
+                                                          bool key_frame) {
+  media::BitstreamBuffer* bs_buffer = NULL;
+  scoped_ptr<base::SharedMemory> shm;
+  scoped_ptr<GstBuffer, GstBufferDeleter> gst_buffer(buffer);
+
+  if (gst_buffer == NULL) {
+    LOG(ERROR) << __FUNCTION__ << " Invalid buffer for delivering";
+    return;
+  }
+
+  if (enable_framedrop_ && key_frame) {
+    enable_framedrop_ = false;
+  }
+
+  if (enable_framedrop_) {
+    DVLOG(2) << "OUTPUT QUEUE IS EMPTY !!!!!!!! DELTA FRAME DROP.";
+    return;
+  }
+
+  if (encoder_output_queue_.empty()) {
+    enable_framedrop_ = true;
+    DVLOG(2) << "OUTPUT QUEUE IS EMPTY !!!!!!!! FRAME DROP.";
+    return;
+  }
+
+  bs_buffer = &encoder_output_queue_.back();
+  encoder_output_queue_.pop_back();
+
+  DVLOG(3) << __FUNCTION__
+           << " output buffer in use : buffer_id: " << bs_buffer->id()
+           << " queue size: " << encoder_output_queue_.size();
+
+  shm.reset(new base::SharedMemory(bs_buffer->handle(), false));
+  if (!shm->Map(bs_buffer->size())) {
+    LOG(ERROR) << "Failed to map SHM";
+    return;
+  }
+  if (gst_buffer->size > shm->mapped_size()) {
+    LOG(ERROR) << "Encoded buff too large: "
+               << gst_buffer->size << ">" << shm->mapped_size();
+    return;
+  }
+
+  //copying data to shared memory.
+  memcpy(static_cast<uint8*>(shm->memory()), gst_buffer->data, gst_buffer->size);
+
+  child_message_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&media::VideoEncodeAccelerator::Client::BitstreamBufferReady,
+                 io_client_weak_factory_.GetWeakPtr(),
+                 bs_buffer->id(),
+                 gst_buffer->size,
+                 key_frame));
+}
+
+// appsink callback (gst streaming thread): pulls one encoded buffer,
+// derives the key-frame flag from the DELTA_UNIT flag, and hands delivery
+// off to gst_thread_. Ownership of the pulled buffer passes to
+// DeliverVideoFrame (released there via GstBufferDeleter).
+GstFlowReturn TizenVideoEncodeAccelerator::Impl::OnEncoded(
+    GstAppSink *sink, gpointer data) {
+  bool key_frame = false;
+  GstBuffer* gst_output_buf = NULL;
+  TizenVideoEncodeAccelerator::Impl* impl =
+      static_cast<TizenVideoEncodeAccelerator::Impl*>(data);
+
+  gst_output_buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
+
+  if (gst_output_buf) {
+    if (!GST_BUFFER_FLAG_IS_SET(gst_output_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+      key_frame = true;
+    }
+    if (gst_output_buf->data) {
+      impl->gst_thread_.message_loop()->PostTask(
+          FROM_HERE,
+          base::Bind(&TizenVideoEncodeAccelerator::Impl::DeliverVideoFrame,
+                     base::Unretained(impl),
+                     gst_output_buf,
+                     key_frame));
+    }
+  } else {
+    LOG(ERROR) << __FUNCTION__ << " ENCODING FRAME FAILED";
+  }
+
+  return GST_FLOW_OK;
+}
+
+// appsrc "need-data" callback: input queue has room again.
+void TizenVideoEncodeAccelerator::Impl::StartFeed(
+    GstAppSrc *source, guint size, gpointer data) {
+  TizenVideoEncodeAccelerator::Impl* impl =
+      static_cast<TizenVideoEncodeAccelerator::Impl*>(data);
+  DCHECK(impl);
+  impl->can_feed_ = true;
+}
+
+// appsrc "enough-data" callback: stop pushing until need-data fires.
+void TizenVideoEncodeAccelerator::Impl::StopFeed(
+    GstAppSrc *source, gpointer data) {
+  TizenVideoEncodeAccelerator::Impl* impl =
+      static_cast<TizenVideoEncodeAccelerator::Impl*>(data);
+  DCHECK(impl);
+  impl->can_feed_ = false;
+}
+
+// impl_ is created in Initialize() and deleted (with |this|) in Destroy().
+TizenVideoEncodeAccelerator::TizenVideoEncodeAccelerator()
+    : impl_(NULL) {}
+
+TizenVideoEncodeAccelerator::~TizenVideoEncodeAccelerator() {}
+
+// Reports the single configuration supported here: H.264 baseline up to
+// 720p at 30 fps.
+std::vector<media::VideoEncodeAccelerator::SupportedProfile>
+TizenVideoEncodeAccelerator::GetSupportedProfiles() {
+  media::VideoEncodeAccelerator::SupportedProfile baseline_720p;
+  baseline_720p.profile = media::H264PROFILE_BASELINE;
+  baseline_720p.max_resolution.SetSize(1280, 720);
+  baseline_720p.max_framerate_numerator = 30;
+  baseline_720p.max_framerate_denominator = 1;
+
+  return std::vector<media::VideoEncodeAccelerator::SupportedProfile>(
+      1, baseline_720p);
+}
+
+bool TizenVideoEncodeAccelerator::Initialize(
+    media::VideoFrame::Format input_format,
+    const gfx::Size& input_visible_size,
+    media::VideoCodecProfile output_profile,
+    uint32 initial_bitrate,
+    Client* client) {
+  DVLOG(1) << __FUNCTION__
+           << " size :" << input_visible_size.ToString()
+           << " max bitrate :" << MAX_BITRATE << "bps";
+  DCHECK(impl_ == NULL);
+  if (media::H264PROFILE_MIN > output_profile ||
+      media::H264PROFILE_MAX < output_profile) {
+    NOTREACHED();
+    return false;
+  }
+
+  impl_ = new Impl(client, base::MessageLoopProxy::current());
+  impl_->gst_bitrate_ = initial_bitrate;
+  impl_->view_size_ = input_visible_size;
+  impl_->gst_thread_.Start();
+
+  if (!StartEncoder()) {
+    delete impl_;
+    impl_ = NULL;
+    return false;
+  }
+
+  base::MessageLoop::current()->PostTask(
+      FROM_HERE,
+      base::Bind(&media::VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
+                 impl_->io_client_weak_factory_.GetWeakPtr(),
+                 (unsigned int)INPUT_BUFFER_COUNT,
+                 input_visible_size,
+                 MAX_BITRATE / 8)); // Maximum bytes for a frame by MAX_BITRATE.
+  return true;
+}
+
+void TizenVideoEncodeAccelerator::Encode(
+    const scoped_refptr<media::VideoFrame>& frame,
+    bool force_keyframe) {
+  size_t frame_size = VideoFrame::AllocationSize(VideoFrame::I420,
+                                                 frame->coded_size());
+  DVLOG(3) << __FUNCTION__
+           << " coded_size :" << frame->coded_size().ToString()
+           << " natural_size :" << frame->natural_size().ToString();
+
+  scoped_ptr<BitstreamBufferRef> buffer_ref;
+
+  buffer_ref.reset(
+      new BitstreamBufferRef(frame,
+                             impl_->io_client_weak_factory_.GetWeakPtr(),
+                             impl_->child_message_loop_proxy_,
+                             frame_size));
+
+  if (!buffer_ref) {
+    LOG(ERROR) << __FUNCTION__ << "malloc failed";
+    return;
+  }
+
+  if (impl_->can_feed_ && !impl_->is_destroying_) {
+    impl_->gst_thread_.message_loop()->PostTask(
+        FROM_HERE,
+        base::Bind(&TizenVideoEncodeAccelerator::OnEncode,
+                   base::Unretained(this),
+                   base::Passed(&buffer_ref),
+                   force_keyframe));
+  } else {
+    DVLOG(2) << __FUNCTION__ << " [WEBRTC] . FRAME DROP :"
+             << " can_feed_:" << impl_->can_feed_
+             << " is_destroying_:" << impl_->is_destroying_;
+  }
+}
+
+// Hands a client output buffer to the gst thread, which owns the output
+// queue (see OnUseOutputBitstreamBuffer).
+void TizenVideoEncodeAccelerator::UseOutputBitstreamBuffer(
+    const media::BitstreamBuffer& buffer) {
+  impl_->gst_thread_.message_loop()->PostTask(
+      FROM_HERE,
+      base::Bind(&TizenVideoEncodeAccelerator::OnUseOutputBitstreamBuffer,
+                 base::Unretained(this),
+                 buffer));
+}
+
+// Applies a new target bitrate, clamped to MAX_BITRATE. |framerate| is
+// currently ignored. NOTE(review): called on the child thread while the
+// encoder element runs on the gst pipeline -- g_object_set is assumed safe
+// here; confirm against the element's docs.
+void TizenVideoEncodeAccelerator::RequestEncodingParametersChange(
+    uint32 bitrate, uint32 framerate) {
+  DVLOG(2) << __FUNCTION__
+           << " bitrate: " << bitrate
+           << " framerate: " << framerate;
+  if (bitrate > 0 && bitrate != impl_->gst_bitrate_) {
+    impl_->gst_bitrate_ = bitrate;
+    // Omx Encoder expects bitrate in bps whereas ffenc expects bitrate in kbps
+    // Information can be gained by gst-inspect
+#if defined(OS_TIZEN)
+    g_object_set(G_OBJECT(impl_->encoder_),
+                 "bitrate",
+                 std::min(bitrate, static_cast<uint32>(MAX_BITRATE)),
+                 NULL);
+#else
+    g_object_set(G_OBJECT(impl_->encoder_),
+                 "bitrate",
+                 std::min(bitrate, static_cast<uint32>(MAX_BITRATE)) / 1000,
+                 NULL);
+#endif
+  }
+}
+
+// Tears down the pipeline and deletes |this|. Order matters: send EOS,
+// mark is_destroying_ under the lock so Encode() stops queueing work, stop
+// the gst thread, drop the pipeline, then unmap any output buffers still
+// queued (creating a SharedMemory per handle releases the handle when it
+// goes out of scope).
+void TizenVideoEncodeAccelerator::Destroy() {
+  if (impl_) {
+    DCHECK(impl_->thread_checker_.CalledOnValidThread());
+    if(impl_->appsrc_)
+      gst_app_src_end_of_stream(GST_APP_SRC(impl_->appsrc_));
+
+    {
+      base::AutoLock auto_lock(impl_->destroy_lock_);
+      impl_->is_destroying_ = true;
+    }
+
+    if (impl_->gst_thread_.IsRunning())
+      impl_->gst_thread_.Stop();
+    if (impl_->pipeline_) {
+      gst_element_set_state(impl_->pipeline_, GST_STATE_NULL);
+      gst_object_unref(GST_OBJECT(impl_->pipeline_));
+    }
+
+    DVLOG(1) << __FUNCTION__
+             << " queue size: "<< impl_->encoder_output_queue_.size();
+
+    while (!impl_->encoder_output_queue_.empty()) {
+      media::BitstreamBuffer bitstream_buffer = impl_->encoder_output_queue_.back();
+      // created shm and let it go out of scope automatically.
+      scoped_ptr<base::SharedMemory> shm(
+          new base::SharedMemory(bitstream_buffer.handle(), false));
+      impl_->encoder_output_queue_.pop_back();
+    }
+    delete impl_;
+  }
+  delete this;
+}
+
+void TizenVideoEncodeAccelerator::OnEncode(
+    scoped_ptr<BitstreamBufferRef> buffer_ref, bool force_keyframe) {
+
+  BitstreamBufferRef* bufref = buffer_ref.release();
+
+  if (bufref == NULL) {
+    return;
+  }
+
+#if defined(OS_TIZEN)
+  g_object_set(impl_->encoder_,
+               "force-i-frame",
+               (force_keyframe || impl_->enable_framedrop_) ? TRUE : FALSE,
+               NULL);
+#endif
+
+  if (!(bufref->gst_buffer_ = gst_buffer_new())) {
+    LOG(ERROR) << __FUNCTION__ << " malloc failed";
+    return;
+  }
+
+  GST_BUFFER_MALLOCDATA(bufref->gst_buffer_) = reinterpret_cast<uint8*>(bufref);
+  GST_BUFFER_FREE_FUNC(bufref->gst_buffer_) = BitstreamBufferRef::Destruct;
+  GST_BUFFER_SIZE(bufref->gst_buffer_) = bufref->size_;
+  GST_BUFFER_DATA(bufref->gst_buffer_) =
+      static_cast<guint8*>(bufref->frame_->data(VideoFrame::kYPlane));
+
+  if (GST_FLOW_OK !=
+         gst_app_src_push_buffer(GST_APP_SRC(impl_->appsrc_),
+                                 bufref->gst_buffer_)) {
+    LOG(ERROR) << __FUNCTION__
+               << " error while pushing buffer int appsrc on encoder.";
+    gst_buffer_unref(bufref->gst_buffer_);
+  }
+
+  return;
+}
+
+// Queues a client-provided shared-memory output buffer; encoded bitstream
+// chunks produced by the pipeline are later copied into these in order.
+void TizenVideoEncodeAccelerator::OnUseOutputBitstreamBuffer(
+    const media::BitstreamBuffer& buffer) {
+  // NOTE(review): no lock is taken around |encoder_output_queue_| here,
+  // while the GStreamer thread presumably drains it from OnEncoded() --
+  // confirm both accesses are serialized on the same thread.
+  impl_->encoder_output_queue_.push_back(buffer);
+
+  DVLOG(2) << __FUNCTION__
+           << " output buffer is ready to use: " << buffer.id()
+           << " out queue size: " << impl_->encoder_output_queue_.size();
+}
+
+bool TizenVideoEncodeAccelerator::StartEncoder() {
+  GError* error = NULL;
+  GstCaps* appsrc_caps = NULL, *converter_caps = NULL;
+  GstElement* gst_converter = NULL, *gst_appsink = NULL;
+  gboolean retval = FALSE;
+  scoped_ptr<GstElement, GstElementDeleter> gst_pipeline;
+
+  guint64 max_input_buffer =
+      INPUT_BUFFER_COUNT * VideoFrame::AllocationSize(VideoFrame::I420,
+                                                      impl_->view_size_);
+  GstAppSrcCallbacks appsrc_callbacks =
+      { &TizenVideoEncodeAccelerator::Impl::StartFeed,
+        &TizenVideoEncodeAccelerator::Impl::StopFeed,
+        NULL };
+  GstAppSinkCallbacks appsink_callbacks =
+      { NULL, NULL, &TizenVideoEncodeAccelerator::Impl::OnEncoded, NULL };
+
+  if (impl_->pipeline_ != NULL) {
+    return false;
+  }
+
+  if (!gst_is_initialized() && !gst_init_check(NULL, NULL, &error)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot initialize gstreamer.";
+    g_error_free(error);
+    return false;
+  }
+
+  // pipeline initialization
+  gst_pipeline.reset(gst_pipeline_new("h264_encode"));
+  if (!gst_pipeline) {
+    LOG(ERROR) << __FUNCTION__ << " cannot initialize gst pipeline.";
+    return false;
+  }
+
+  // appsrc initialization
+  if (!(impl_->appsrc_ = gst_element_factory_make ("appsrc", "src"))) {
+    LOG(ERROR) << __FUNCTION__ << "cannot initialize gst appsrc.";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->appsrc_)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add gst appsrc into encoder pipeline.";
+    gst_object_unref(impl_->appsrc_);
+    impl_->appsrc_ = NULL;
+    return false;
+  }
+  appsrc_caps = gst_caps_new_simple(
+      "video/x-raw-yuv",
+      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
+      "width", G_TYPE_INT, impl_->view_size_.width(),
+      "height", G_TYPE_INT, impl_->view_size_.height(),
+      "framerate", GST_TYPE_FRACTION, 30, 1,
+      NULL);
+  if (!appsrc_caps) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create appsrc caps";
+    return false;
+  }
+  gst_app_src_set_callbacks(GST_APP_SRC(impl_->appsrc_), &appsrc_callbacks,
+                            static_cast<gpointer>(impl_), NULL);
+  gst_app_src_set_max_bytes(GST_APP_SRC(impl_->appsrc_), max_input_buffer);
+  gst_app_src_set_caps(GST_APP_SRC(impl_->appsrc_), appsrc_caps);
+
+  // appsink initialization
+  if (!(gst_appsink = gst_element_factory_make("appsink", "sink"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create appsink";
+    return false;
+  }
+  if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_appsink)) {
+    LOG(ERROR) << __FUNCTION__ << "cannot add gst appsink into encoder pipeline.";
+    gst_object_unref(gst_appsink);
+    return false;
+  }
+  gst_app_sink_set_callbacks(GST_APP_SINK(gst_appsink), &appsink_callbacks,
+                             static_cast<gpointer>(impl_), NULL);
+  gst_app_sink_set_max_buffers(GST_APP_SINK(gst_appsink), 1);
+
+#ifdef OS_TIZEN
+  DVLOG(1) << "######################################";
+  DVLOG(1) << "      USING omx_h264enc ENCODER";
+  DVLOG(1) << "######################################";
+
+  // encoder initialization
+  if (!(impl_->encoder_ = gst_element_factory_make("omx_h264enc", "encoder"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create omx_h264enc encoder.";
+    return false;
+  }
+  if(!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->encoder_)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add omx_h264enc into encoder pipeline.";
+    gst_object_unref(impl_->encoder_);
+    impl_->encoder_ = NULL;
+    return false;
+  }
+  g_object_set(impl_->encoder_,
+               "byte-stream", TRUE,
+               "control-rate", 1, // 1:VBR_CFR  2:CBR_CFR  3:VBR_VFR  4:CBR_VFR
+               "encoder-profile", 1, // BASELINE_PROFILE
+               "bitrate", impl_->gst_bitrate_,
+               NULL);
+
+  // converter initialization
+  if (!(gst_converter = gst_element_factory_make("c2dconvert", "cvt"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create c2dconvert.";
+    return false;
+  }
+  if(!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_converter)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add c2dconvert into encoder pipeline.";
+    gst_object_unref(gst_converter);
+    return false;
+  }
+
+  g_object_set(G_OBJECT(impl_->appsrc_),
+               "is-live", TRUE,
+               "block", FALSE,
+               "do-timestamp", TRUE,
+               "min-latency", (gint64)(0),
+               "max-latency", (gint64)(0),
+               "min-percent", 80, // if buffer below 80%, need-data emits.
+               "stream-type", GST_APP_STREAM_TYPE_STREAM,
+               "format", GST_FORMAT_DEFAULT,
+               NULL);
+
+  g_object_set(gst_appsink, "sync", FALSE, NULL);
+
+  converter_caps = gst_caps_new_simple(
+      "video/x-raw-yuv",
+      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
+      "width", G_TYPE_INT, impl_->view_size_.width(),
+      "height", G_TYPE_INT, impl_->view_size_.height(),
+      "framerate", GST_TYPE_FRACTION, 30, 1,
+      NULL);
+  if (!converter_caps) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create converter_caps.";
+    return false;
+  }
+  retval = gst_element_link_filtered(impl_->appsrc_, gst_converter,
+                                     converter_caps);
+  if (retval == FALSE) {
+    LOG(ERROR) << __FUNCTION__ << " cannot link appsrc_ with converter_.";
+    return false;
+  }
+
+  if (!gst_element_link_many(gst_converter, impl_->encoder_,
+                             gst_appsink, NULL)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot link converter_ with encoder_.";
+    return false;
+  }
+#else
+  DVLOG(1) << "######################################";
+  DVLOG(1) << "      USING x264enc ENCODER";
+  DVLOG(1) << "######################################";
+  GstElement* gst_parser = NULL;
+
+  // parser initialization
+  if (!(gst_parser = gst_element_factory_make("videoparse", "parse"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create videoparse.";
+    return false;
+  }
+  if(!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_parser)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add videoparse into encoder pipeline.";
+    gst_object_unref(gst_parser);
+    return false;
+  }
+  g_object_set(gst_parser,
+               "format", GST_VIDEO_FORMAT_I420,
+               "width", impl_->view_size_.width(),
+               "height", impl_->view_size_.height(),
+               "framerate", INITIAL_FRAMERATE, 1,
+               NULL);
+
+  if (!(impl_->encoder_ = gst_element_factory_make ("x264enc","encoder"))) {
+    LOG(ERROR) << __FUNCTION__ << " cannot create x264enc encoder.";
+    return false;
+  }
+  if(!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->encoder_)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot add x264enc into encoder pipeline.";
+    gst_object_unref(impl_->encoder_);
+    return false;
+  }
+  g_object_set(impl_->encoder_,
+               "byte-stream", TRUE,
+               "bitrate", impl_->gst_bitrate_,
+               "tune",0x00000004,"profile", 1, NULL);
+
+  g_object_set(G_OBJECT(impl_->appsrc_),
+               "is-live", TRUE,
+               "block", FALSE,
+               "do-timestamp", TRUE,
+               "stream-type", 0,
+               "min-latency", (gint64)(0),
+               "max-latency", (gint64)(0),
+               "format", GST_FORMAT_TIME,
+               NULL);
+
+  g_object_set(gst_appsink, "sync", FALSE, NULL);
+
+  if (!gst_element_link_many(impl_->appsrc_, gst_parser,
+                             impl_->encoder_, gst_appsink, NULL)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot link for encoder pipeline.";
+    return false;
+  }
+#endif
+
+  if (GST_STATE_CHANGE_FAILURE ==
+         gst_element_set_state(gst_pipeline.get(), GST_STATE_PLAYING)) {
+    LOG(ERROR) << __FUNCTION__ << " cannot start encoder pipeline.";
+    return false;
+  }
+
+  GST_DEBUG_BIN_TO_DOT_FILE(
+      GST_BIN(gst_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "encoder_graph.dot");
+
+  impl_->pipeline_ = gst_pipeline.release();
+  return true;
+}
+
+}  // namespace content
diff --git a/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.h b/tizen_src/impl/content/common/gpu/media/tizen/tizen_video_encode_accelerator.h
new file mode 100644 (file)
index 0000000..92b769a
--- /dev/null
@@ -0,0 +1,51 @@
+// Copyright 2014 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_ENCODE_ACCELERATOR_H_
+#define CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_ENCODE_ACCELERATOR_H_
+
+#include "base/threading/thread.h"
+#include "base/synchronization/lock.h"
+#include "base/synchronization/waitable_event.h"
+#include "content/common/content_export.h"
+#include "media/video/video_encode_accelerator.h"
+
+namespace content {
+
+// GStreamer-backed H.264 video encode accelerator for Tizen. Raw I420
+// frames are fed through a GStreamer pipeline (omx_h264enc on Tizen H/W,
+// x264enc otherwise) and the encoded bitstream is returned to the client
+// through the media::VideoEncodeAccelerator interface.
+class CONTENT_EXPORT TizenVideoEncodeAccelerator
+    : public media::VideoEncodeAccelerator {
+ public:
+  TizenVideoEncodeAccelerator();
+  ~TizenVideoEncodeAccelerator() override;
+
+  // media::VideoEncodeAccelerator implementation.
+  std::vector<media::VideoEncodeAccelerator::SupportedProfile>
+      GetSupportedProfiles() override;
+  bool Initialize(media::VideoFrame::Format input_format,
+                  const gfx::Size& input_visible_size,
+                  media::VideoCodecProfile output_profile,
+                  uint32 initial_bitrate,
+                  Client* client) override;
+  void Encode(const scoped_refptr<media::VideoFrame>& frame,
+              bool force_keyframe) override;
+  void UseOutputBitstreamBuffer(const media::BitstreamBuffer& buffer) override;
+  void RequestEncodingParametersChange(uint32 bitrate,
+                                       uint32 framerate) override;
+  void Destroy() override;
+
+ private:
+  // Carries one input frame and its GstBuffer through the pipeline.
+  struct BitstreamBufferRef;
+  // Holds all encoder/pipeline state; heap-allocated and torn down in
+  // Destroy().
+  struct Impl;
+
+  // Internal counterparts of the public entry points above; defined in the
+  // .cc file.
+  void OnEncode(scoped_ptr<BitstreamBufferRef> buffer_ref, bool force_keyframe);
+  void OnUseOutputBitstreamBuffer(const media::BitstreamBuffer& buffer);
+  bool StartEncoder();
+
+  Impl* impl_;
+
+  DISALLOW_COPY_AND_ASSIGN(TizenVideoEncodeAccelerator);
+};
+
+}  // namespace content
+
+#endif  // CONTENT_COMMON_GPU_MEDIA_TIZEN_VIDEO_ENCODE_ACCELERATOR_H_