[MM] Remove unused gstreamer port.
author sm.venugopal <sm.venugopal@samsung.com>
Tue, 29 Dec 2015 05:37:23 +0000 (11:07 +0530)
committer Youngsoo Choi <kenshin.choi@samsung.com>
Tue, 10 Jul 2018 07:55:23 +0000 (07:55 +0000)
1) CAPI support has been added for ME / MSE / WebAudio, so the
GStreamer port is removed. On desktop, Chromium's own media pipeline
is used.
2) The Tizen MM flag is enabled only for Tizen 3.0.
3) Tizen MM files are compiled based on the "tizen_multimedia_support"
flag (see the sketch below).
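
For illustration, the gating from point 3 ends up looking roughly like
the abridged .gypi excerpt below (assembled from the
content_renderer_efl.gypi hunk in this change; surrounding variables
and other conditions are omitted):

  # Abridged from tizen_src/chromium_impl/content/content_renderer_efl.gypi
  'conditions': [
    ['tizen_multimedia_support==1', {
      'sources': [
        'renderer/media/efl/webmediaplayer_efl.cc',
        'renderer/media/efl/webmediaplayer_efl.h',
        'renderer/media/tizen/audio_decoder_capi.cc',
      ],
    }],  # tizen_multimedia_support==1
  ],

The same flag also gates the capi-media-player, capi-media-tool and
gstreamer dependencies in content_efl.gypi.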

Bug: http://107.108.218.239/bugzilla/show_bug.cgi?id=15468

Reviewed by: a1.gomes, sns.park

Change-Id: I1a397265f920c74c953b83c46bfe8f89eeae6dfc
Signed-off-by: sm.venugopal <sm.venugopal@samsung.com>
16 files changed:
tizen_src/build/gyp_chromiumefl.sh
tizen_src/chromium_impl/content/content_efl.gypi
tizen_src/chromium_impl/content/content_renderer_efl.gypi
tizen_src/chromium_impl/content/renderer/media/efl/audio_decoder_gstreamer.cc [deleted file]
tizen_src/chromium_impl/content/renderer/media/efl/webmediaplayer_efl.cc
tizen_src/chromium_impl/content/renderer/media/efl/webmediaplayer_efl.h
tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.cc [deleted file]
tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.h [deleted file]
tizen_src/chromium_impl/media/base/efl/media_player_util_efl.cc
tizen_src/chromium_impl/media/base/efl/media_player_util_efl.h
tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.cc [deleted file]
tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.h [deleted file]
tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.cc [deleted file]
tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.h [deleted file]
tizen_src/chromium_impl/media/media_efl.gypi
tizen_src/supplement.gypi

index f5ecc02..a21c8d4 100755 (executable)
@@ -79,7 +79,6 @@ COMMON_GYP_PARAMETERS="
                       -Duse_gnome_keyring=0
                       -Duse_pango=0
                       -Duse_cairo=0
-                      -Dtizen_multimedia_eme_support=0
                       -Denable_plugins=1
                       -Denable_extensions=1
                       -Duse_cups=0
@@ -101,7 +100,6 @@ add_desktop_flags() {
                               -Dclang=${USE_CLANG}
                               -Dwerror=
                               -Duse_wayland=0
-                              -Dtizen_multimedia_support=1
                              "
 }
 
@@ -123,6 +121,7 @@ add_tizen_flags() {
                               -Dlinux_use_bundled_gold=0
                               -Dlinux_use_bundled_binutils=0
                               -Ddisable_nacl=1
+                              -Dtizen_multimedia_eme_support=0
                              "
 
   # TODO(youngsoo): Once binutils-gold issue on tizen v3.0 ARM 64bit is fixed,
@@ -144,10 +143,10 @@ add_tizen_flags() {
   fi
 
   if [ "$tizen_version" == "3.0" ]; then
-    COMMON_GYP_PARAMETERS+="-Dtizen_multimedia_support=1
+    ADDITIONAL_GYP_PARAMETERS+="-Dtizen_multimedia_support=1
                            "
   else
-    COMMON_GYP_PARAMETERS+="-Dtizen_multimedia_support=0
+    ADDITIONAL_GYP_PARAMETERS+="-Dtizen_multimedia_support=0
                            "
   fi
 
index 98b474a..bdea8ac 100644 (file)
           '<(DEPTH)/tizen_src/build/system.gyp:ecore-wayland',
         ],
       }],
-      ['tizen_multimedia_use_capi_for_me==1', {
+      ['tizen_multimedia_support==1', {
         'external_content_browser_deps': [
           '<(DEPTH)/tizen_src/build/system.gyp:capi-media-player',
           '<(DEPTH)/tizen_src/build/system.gyp:capi-media-tool',
         ],
-      }],
-      ['tizen_multimedia_support==1', {
         'external_content_common_deps': [
           '<(DEPTH)/tizen_src/build/system.gyp:gstreamer',
         ],
-        'external_content_browser_deps': [
-          '<(DEPTH)/tizen_src/build/system.gyp:gstreamer',
-          '<(DEPTH)/ui/gl/gl.gyp:gl',
-        ],
-        'external_content_renderer_deps': [
-          '<(DEPTH)/tizen_src/build/system.gyp:gstreamer',
-          '<(DEPTH)/ui/gl/gl.gyp:gl',
-        ],
       }],
     ],
   },
index 81c624c..3af1be4 100644 (file)
@@ -20,7 +20,6 @@
       ],
       'sources': [
         'renderer/media/efl/audio_decoder_efl.h',
-        'renderer/media/efl/audio_decoder_gstreamer.cc',
         'renderer/media/efl/media_source_delegate_efl.cc',
         'renderer/media/efl/media_source_delegate_efl.h',
         'renderer/media/efl/renderer_demuxer_efl.cc',
         'renderer/media/efl/renderer_media_player_manager_efl.cc',
         'renderer/media/efl/renderer_media_player_manager_efl.h',
         'renderer/media/efl/webmediaplayer_efl.cc',
-        'renderer/media/efl/webmediaplayer_efl.h', # ME and MSE
-      ],
-      'conditions': [
-        ['tizen_multimedia_use_capi_for_me==1', {
-          'sources': [
-            'renderer/media/tizen/audio_decoder_capi.cc',
-          ],
-          'sources!': [
-            'renderer/media/efl/audio_decoder_gstreamer.cc',
-          ],
-        }],
+        'renderer/media/efl/webmediaplayer_efl.h',
+        'renderer/media/tizen/audio_decoder_capi.cc',
       ],
     }], # tizen_multimedia_support==1
   ],
diff --git a/tizen_src/chromium_impl/content/renderer/media/efl/audio_decoder_gstreamer.cc b/tizen_src/chromium_impl/content/renderer/media/efl/audio_decoder_gstreamer.cc
deleted file mode 100644 (file)
index 1e4cd15..0000000
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "content/renderer/media/efl/audio_decoder_efl.h"
-
-#include "base/basictypes.h"
-#include "base/memory/shared_memory.h"
-#include "base/posix/eintr_wrapper.h"
-#include "base/process/process.h"
-#include "base/strings/string_util.h"
-#include "base/time/time.h"
-#include "content/common/render_messages_efl.h"
-#include "media/base/audio_bus.h"
-#include "media/base/limits.h"
-#include "public/platform/Platform.h"
-#include "third_party/WebKit/public/platform/WebAudioBus.h"
-#include "media/base/efl/webaudio_media_codec_info_efl.h"
-
-namespace content {
-// This class is similar as AudioDecoderIO class of Android defined
-// in src/content/renderer/media/android/audio_decoder_android.cc
-class AudioDecoderIO {
- public:
-  AudioDecoderIO(const char* data, size_t data_size);
-  ~AudioDecoderIO();
-  bool ShareEncodedToProcess(base::SharedMemoryHandle* handle);
-
-  // Returns true if AudioDecoderIO was successfully created.
-  bool IsValid() const;
-
-  int read_fd() const { return read_fd_; }
-  int write_fd() const { return write_fd_; }
-
- private:
-  // Shared memory that will hold the encoded audio data.  This is
-  // used by MediaCodec for decoding.
-  base::SharedMemory encoded_shared_memory_;
-
-  // A pipe used to communicate with MediaCodec.  MediaCodec owns
-  // write_fd_ and writes to it.
-  int read_fd_;
-  int write_fd_;
-
-  DISALLOW_COPY_AND_ASSIGN(AudioDecoderIO);
-};
-
-AudioDecoderIO::AudioDecoderIO(const char* data, size_t data_size)
-    : read_fd_(-1),
-      write_fd_(-1) {
-
-  if (!data || !data_size || data_size > 0x80000000)
-    return;
-
-  // Create the shared memory and copy our data to it so that
-  // MediaCodec can access it.
-  if(!encoded_shared_memory_.CreateAndMapAnonymous(data_size)) {
-    LOG(ERROR) << __FUNCTION__ << " Creation of shared memory failed";
-    return;
-  }
-
-  if (!encoded_shared_memory_.memory())
-    return;
-
-  memcpy(encoded_shared_memory_.memory(), data, data_size);
-
-  // Create a pipe for reading/writing the decoded PCM data
-  int pipefd[2];
-
-  if (pipe(pipefd)) {
-    LOG(INFO) <<" Pipe is already created";
-    return;
-  }
-
-  read_fd_ = pipefd[0];
-  write_fd_ = pipefd[1];
-}
-
-AudioDecoderIO::~AudioDecoderIO() {
-  // Close the read end of the pipe.  The write end should have been
-  // closed by MediaCodec.
-  if (read_fd_ >= 0 && close(read_fd_)) {
-    LOG(WARNING) << "Cannot close read fd " << read_fd_
-             << ": " << strerror(errno);
-  }
-}
-
-bool AudioDecoderIO::IsValid() const {
-  return read_fd_ >= 0 && write_fd_ >= 0 &&
-      encoded_shared_memory_.memory();
-}
-
-bool AudioDecoderIO::ShareEncodedToProcess(base::SharedMemoryHandle* handle) {
-  return encoded_shared_memory_.ShareToProcess(base::GetCurrentProcessHandle(),
-                                               handle);
-}
-
-static float ConvertSampleToFloat(int16_t sample) {
-  const float kMaxScale = 1.0f / std::numeric_limits<int16_t>::max();
-  const float kMinScale = -1.0f / std::numeric_limits<int16_t>::min();
-
-  return sample * (sample < 0 ? kMinScale : kMaxScale);
-}
-
-static void CopyPcmDataToBus(int input_fd,
-                        blink::WebAudioBus* destination_bus,
-                        size_t num_of_frames,
-                        unsigned number_of_channels,
-                        double file_sample_rate) {
-
-  int16_t pipe_data[PIPE_BUF / sizeof(int16_t)];
-  ssize_t nread;
-  std::vector<int16_t> decoded_samples;
-
-  while ((nread = HANDLE_EINTR(read(input_fd, pipe_data, sizeof(pipe_data)))) > 0) {
-    size_t samples_in_pipe = nread / sizeof(int16_t);
-
-    if (decoded_samples.size() + samples_in_pipe > decoded_samples.capacity()) {
-      decoded_samples.reserve(std::max(samples_in_pipe,
-                                       2 * decoded_samples.capacity()));
-    }
-    std::copy(pipe_data,
-              pipe_data + samples_in_pipe,
-              back_inserter(decoded_samples));
-  }
-
-  size_t number_of_samples = decoded_samples.size();
-  size_t number_of_frames = decoded_samples.size() / number_of_channels;
-  size_t decoded_frames = 0;
-
-  destination_bus->initialize(number_of_channels,
-                              number_of_frames,
-                              file_sample_rate);
-
-  for (size_t m = 0; m < number_of_samples; m += number_of_channels) {
-    for (size_t k = 0; k < number_of_channels; ++k) {
-      int16_t sample = decoded_samples[m + k];
-      destination_bus->channelData(k)[decoded_frames] =
-        ConvertSampleToFloat(sample);
-    }
-    ++decoded_frames;
-  }
-
-  if (decoded_frames < number_of_frames)
-    destination_bus->resizeSmaller(decoded_frames);
-}
-
-static void BufferAndCopyPcmDataToBus(int input_fd,
-                                      blink::WebAudioBus* destination_bus,
-                                      unsigned number_of_channels,
-                                      double file_sample_rate) {
-  int16_t pipe_data[PIPE_BUF / sizeof(int16_t)];
-  std::vector<int16_t> decoded_samples;
-  ssize_t nread;
-
-  while ((nread = HANDLE_EINTR(read(input_fd, pipe_data, sizeof(pipe_data)))) > 0) {
-    size_t samples_in_pipe = nread / sizeof(int16_t);
-    if (decoded_samples.size() + samples_in_pipe > decoded_samples.capacity()) {
-      decoded_samples.reserve(std::max(samples_in_pipe,
-                                       2 * decoded_samples.capacity()));
-    }
-    std::copy(pipe_data,
-              pipe_data + samples_in_pipe,
-              back_inserter(decoded_samples));
-  }
-
-  // Convert the samples and save them in the audio bus.
-  size_t number_of_samples = decoded_samples.size();
-  size_t number_of_frames = decoded_samples.size() / number_of_channels;
-  size_t decoded_frames = 0;
-
-  destination_bus->initialize(number_of_channels,
-                              number_of_frames,
-                              file_sample_rate);
-
-  for (size_t m = 0; m < number_of_samples; m += number_of_channels) {
-    for (size_t k = 0; k < number_of_channels; ++k) {
-      int16_t sample = decoded_samples[m + k];
-      destination_bus->channelData(k)[decoded_frames] =
-          ConvertSampleToFloat(sample);
-    }
-    ++decoded_frames;
-  }
-
-  // number_of_frames is only an estimate.  Resize the buffer with the
-  // actual number of received frames.
-  if (decoded_frames < number_of_frames)
-    destination_bus->resizeSmaller(decoded_frames);
-}
-
-// Decode in-memory audio file data.
-bool DecodeAudioFileData(blink::WebAudioBus* destination_bus,
-                              const char* data,
-                              size_t data_size,
-                              scoped_refptr<ThreadSafeSender> sender) {
-  AudioDecoderIO audio_decoder(data, data_size);
-
-  if (!audio_decoder.IsValid()) {
-    LOG(ERROR) << "Invalid audio_decoder";
-    return false;
-  }
-
-  base::SharedMemoryHandle encoded_data_handle;
-  audio_decoder.ShareEncodedToProcess(&encoded_data_handle);
-  base::FileDescriptor fd(audio_decoder.write_fd(), true);
-
-  // Start Decoding in the browser which will read from
-  // encoded_data_handle for our shared memory and write the decoded
-  // PCM samples (16-bit integer) to our pipe.
-
-  sender->Send(new EflViewHostMsg_WebAudioDecode(encoded_data_handle,
-                                                 fd, data_size));
-
-  int input_fd = audio_decoder.read_fd();
-  struct media::WebAudioMediaCodecInfoEfl info;
-
-  ssize_t nread = HANDLE_EINTR(read(input_fd, &info, sizeof(info)));
-
-  if (nread != sizeof(info)) {
-    LOG(ERROR) << "Read Failed";
-    return false;
-  }
-
-  double file_sample_rate = static_cast<double>(info.sample_rate_);
-
-  // Sanity checks
-  if (!info.channel_count_ ||
-      info.channel_count_ > media::limits::kMaxChannels ||
-      file_sample_rate < media::limits::kMinSampleRate ||
-      file_sample_rate > media::limits::kMaxSampleRate) {
-    return false;
-  }
-
-  if (info.number_of_frames_ > 0) {
-    CopyPcmDataToBus(input_fd,
-                     destination_bus,
-                     info.number_of_frames_,
-                     info.channel_count_,
-                     file_sample_rate);
-  } else {
-    BufferAndCopyPcmDataToBus(input_fd,
-                              destination_bus,
-                              info.channel_count_,
-                              file_sample_rate);
-  }
-
-  return true;
-}
-
-}  // namespace content
index 6a12d36..ffc098d 100644 (file)
@@ -65,7 +65,8 @@ WebMediaPlayerEfl::WebMediaPlayerEfl(
           BIND_TO_RENDER_LOOP(&WebMediaPlayerEfl::OnNaturalSizeChanged),
           BIND_TO_RENDER_LOOP(&WebMediaPlayerEfl::OnOpacityChanged))),
       weak_factory_(this),
-      gst_video_format_(0),
+      video_width_(0),
+      video_height_(0),
       audio_(false),
       video_(false),
       current_time_(0),
@@ -132,8 +133,6 @@ void WebMediaPlayerEfl::load(LoadType load_type,
 void WebMediaPlayerEfl::DoLoad(LoadType load_type, const blink::WebURL& url) {
   int demuxer_client_id = 0;
   if (load_type == LoadTypeMediaSource) {
-    // EFL GST-package on desktop cannot handle AAC decoding.
-    // Disabling MSE for desktop.
     player_type_ = MEDIA_PLAYER_TYPE_MEDIA_SOURCE;
     RendererDemuxerEfl* demuxer =
         static_cast<RendererDemuxerEfl*>(
@@ -236,14 +235,14 @@ void WebMediaPlayerEfl::OnWaitingForDecryptionKey() {
 
 void WebMediaPlayerEfl::play() {
   manager_->Play(player_id_);
-  // Has to be updated from |MediaPlayerBridgeGstreamer| but IPC causes delay.
+  // Has to be updated from |MediaPlayerEfl| but IPC causes delay.
   // There are cases were play - pause are fired successively and would fail.
   is_paused_ = false;
 }
 
 void WebMediaPlayerEfl::pause() {
   manager_->Pause(player_id_, true);
-  // Has to be updated from |MediaPlayerBridgeGstreamer| but IPC causes delay.
+  // Has to be updated from |MediaPlayerEfl| but IPC causes delay.
   // There are cases were play - pause are fired successively and would fail.
   is_paused_ = true;
 }
@@ -303,7 +302,7 @@ void WebMediaPlayerEfl::seek(double seconds) {
   is_seeking_ = true;
   seek_time_ = seconds;
 
-  // Once Chunk demuxer seeks GST seek will be intiated.
+  // Once the chunk demuxer seeks, a |MediaPlayerEfl| seek will be initiated.
   if (media_source_delegate_)
     media_source_delegate_->StartWaitingForSeek(
         base::TimeDelta::FromSecondsD(seek_time_));
@@ -311,7 +310,7 @@ void WebMediaPlayerEfl::seek(double seconds) {
 
   // Draw empty frame during seek.
   if (video_) {
-    gfx::Size size(gst_width_, gst_height_);
+    gfx::Size size(video_width_, video_height_);
     scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateBlackFrame(size);
     FrameReady(video_frame);
   }
@@ -451,7 +450,7 @@ void WebMediaPlayerEfl::SetNetworkState(WebMediaPlayer::NetworkState state) {
 void WebMediaPlayerEfl::OnNewTbmBufferAvailable(
     const gfx::TbmBufferHandle& tbm_handle, base::TimeDelta timestamp,
     const base::Closure& cb) {
-  gfx::Size size(gst_width_, gst_height_);
+  gfx::Size size(video_width_, video_height_);
   scoped_refptr<VideoFrame> video_frame =
       VideoFrame::WrapTBMSurface(size, timestamp, tbm_handle);
   video_frame->AddDestructionObserver(cb);
@@ -462,117 +461,43 @@ void WebMediaPlayerEfl::OnNewTbmBufferAvailable(
 void WebMediaPlayerEfl::OnNewFrameAvailable(base::SharedMemoryHandle handle,
                                             uint32 yuv_size,
                                             base::TimeDelta timestamp) {
-
   base::SharedMemory shared_memory(handle, false);
   if (!shared_memory.Map(yuv_size)) {
     LOG(ERROR) << "Failed to map shared memory for size " << yuv_size;
     return;
   }
-  uint8* const yuv_buffer = static_cast<uint8*>(shared_memory.memory());
 
-  gfx::Size size(gst_width_, gst_height_);
+  uint8* const yuv_buffer = static_cast<uint8*>(shared_memory.memory());
+  gfx::Size size(video_width_, video_height_);
   scoped_refptr<VideoFrame> video_frame =
       VideoFrame::CreateFrame(
           media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, timestamp);
 
-  // decoded format is SN12 on Tizen device
-  // video format converted from SN12 to YV12
-  uint8* gst_buf = yuv_buffer;
-  switch (gst_video_format_) {
-    case media::GST_VIDEO_YU12:
-    case media::GST_VIDEO_I420: {
-      const uint c_frm_size = yuv_size / 6;
-      const uint y_frm_size = c_frm_size << 2;  // * 4;
-      // U Plane buffer.
-      uint8* gst_buf_u = gst_buf + y_frm_size;
-      // V Plane buffer.
-      uint8* gst_buf_v = gst_buf_u + c_frm_size;
-
-
-      // Get the videoframe stride size.
-      // Calculate the gstreamer buffer stride size.
-      const uint uv_rows = video_frame.get()->rows(VideoFrame::kUPlane);
-      const uint gst_stride = c_frm_size / uv_rows;
-
-      libyuv::I420Copy(gst_buf, 2 * gst_stride,
-                       gst_buf_u, gst_stride,
-                       gst_buf_v, gst_stride,
-                       video_frame.get()->data(VideoFrame::kYPlane),
-                       video_frame.get()->stride(VideoFrame::kYPlane),
-                       video_frame.get()->data(VideoFrame::kUPlane),
-                       video_frame.get()->stride(VideoFrame::kUPlane),
-                       video_frame.get()->data(VideoFrame::kVPlane),
-                       video_frame.get()->stride(VideoFrame::kVPlane),
-                       gst_width_, gst_height_);
-      break;
-    }
-    case media::GST_VIDEO_SN12: {
-      const uint tile_w_align =
-          ((gst_width_ - 1) / media::SN12_TILE_WIDTH + 2) & ~1;
-      const uint tile_luma_stride = tile_w_align * media::SN12_TILE_WIDTH;
-      uint luma_size = media::SN12_TILE_SIZE * tile_w_align
-                       * ((gst_height_-1) / media::SN12_TILE_HEIGHT + 1);
-
-      uint8* y_frm = video_frame.get()->data(VideoFrame::kYPlane);
-      const uint y_stride = video_frame.get()->stride(VideoFrame::kYPlane);
-
-      // Actually copy and convert luma buffer
-      for (int i = 0; i < gst_height_; i++) {
-        memcpy(y_frm, gst_buf, gst_width_);
-        y_frm += y_stride;
-        gst_buf += tile_luma_stride;
-      }
-
-      gst_buf = yuv_buffer + luma_size;
-      uint8* gst_buf2 = gst_buf + 1;
-      uint8* u_frm = video_frame.get()->data(VideoFrame::kUPlane);
-      uint8* v_frm = video_frame.get()->data(VideoFrame::kVPlane);
-      const uint uv_stride = video_frame.get()->stride(VideoFrame::kUPlane);
-      const uint uv_rows = video_frame.get()->rows(VideoFrame::kUPlane);
-
-      // Actually copy and convert chroma buffer
-      for (uint row = 0; row < uv_rows; ++row) {
-        for (uint i = 0; i < uv_stride; i++) {
-          u_frm[i] = gst_buf[i*2];
-          v_frm[i] = gst_buf2[i*2];
-        }
-
-        gst_buf += tile_luma_stride;
-        gst_buf2 += tile_luma_stride;
-        u_frm += uv_stride;
-        v_frm += uv_stride;
-      }
-      break;
-    }
-    case media::GST_VIDEO_NV12: {
-      // CAPI returns original width, original height and aligned size.
-      // However, aligned size doesn't passed into |webmediaplayer| class.
-      // So aligned width and height must be calculated to get
-      // exact size of planes.
-      unsigned int align_width = ALIGN(gst_width_, 16);
-      unsigned int align_height = ALIGN(gst_height_, 16);
-      const uint y_frm_size = align_width * align_height;
-
-      // UV plane buffer
-      uint8* gst_buf_uv = gst_buf + y_frm_size;
-
-      libyuv::NV12ToI420(gst_buf, align_width,
-                         gst_buf_uv, align_width,
-                         video_frame.get()->data(VideoFrame::kYPlane),
-                         video_frame.get()->stride(VideoFrame::kYPlane),
-                         video_frame.get()->data(VideoFrame::kUPlane),
-                         video_frame.get()->stride(VideoFrame::kUPlane),
-                         video_frame.get()->data(VideoFrame::kVPlane),
-                         video_frame.get()->stride(VideoFrame::kVPlane),
-                         gst_width_, gst_height_);
-      break;
-    }
-    default: {
-      LOG(ERROR) << "WebMediaPlayerEfl::" << __FUNCTION__
-          << " not supported video format";
-      break;
-    }
-  }
+  uint8* video_buf = yuv_buffer;
+  const uint c_frm_size = yuv_size / 6;
+  const uint y_frm_size = c_frm_size << 2;  // * 4;
+
+  // U Plane buffer.
+  uint8* video_buf_u = video_buf + y_frm_size;
+
+  // V Plane buffer.
+  uint8* video_buf_v = video_buf_u + c_frm_size;
+
+  // Get the videoframe stride size.
+  // Calculate the video buffer stride size.
+  const uint uv_rows = video_frame.get()->rows(VideoFrame::kUPlane);
+  const uint video_stride = c_frm_size / uv_rows;
+
+  libyuv::I420Copy(video_buf, 2 * video_stride,
+                   video_buf_u, video_stride,
+                   video_buf_v, video_stride,
+                   video_frame.get()->data(VideoFrame::kYPlane),
+                   video_frame.get()->stride(VideoFrame::kYPlane),
+                   video_frame.get()->data(VideoFrame::kUPlane),
+                   video_frame.get()->stride(VideoFrame::kUPlane),
+                   video_frame.get()->data(VideoFrame::kVPlane),
+                   video_frame.get()->stride(VideoFrame::kVPlane),
+                   video_width_, video_height_);
   FrameReady(video_frame);
 }
 
@@ -585,11 +510,11 @@ void WebMediaPlayerEfl::FrameReady(
                  frame));
 }
 
+// TODO(Venu): Remove |format| as an argument.
 void WebMediaPlayerEfl::OnMediaDataChange(int format, int height,
                                           int width, int media) {
-  gst_video_format_ = static_cast<uint32>(format);
-  gst_height_ = height;
-  gst_width_  = width;
+  video_height_ = height;
+  video_width_  = width;
   audio_ = media & media::MEDIA_AUDIO_MASK ? true : false;
   video_ = media & media::MEDIA_VIDEO_MASK ? true : false;
   natural_size_ = gfx::Size(width, height);
@@ -700,7 +625,7 @@ void WebMediaPlayerEfl::OnSeekStateChange(bool state) {
   is_seeking_ = state;
   // Draw empty frame during seek.
   if (video_ && is_seeking_) {
-    gfx::Size size(gst_width_, gst_height_);
+    gfx::Size size(video_width_, video_height_);
     scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateBlackFrame(size);
     FrameReady(video_frame);
   }
index cb79e06..d016b1c 100644 (file)
@@ -227,9 +227,8 @@ class WebMediaPlayerEfl
   // Player ID assigned by the |manager_|.
   int player_id_;
 
-  uint32 gst_video_format_;
-  int gst_width_;
-  int gst_height_;
+  int video_width_;
+  int video_height_;
 
   bool audio_;
   bool video_;
diff --git a/tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.cc b/tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.cc
deleted file mode 100644 (file)
index 9b09f49..0000000
+++ /dev/null
@@ -1,965 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/efl/media_player_bridge_gstreamer.h"
-
-#include <gst/app/gstappsink.h>
-#include <gst/pbutils/install-plugins.h>
-#include <gst/pbutils/missing-plugins.h>
-
-#include "base/basictypes.h"
-#include "base/thread_task_runner_handle.h"
-#include "media/base/efl/media_player_manager_efl.h"
-#include "media/base/efl/media_player_util_efl.h"
-#include "ui/gfx/geometry/size.h"
-
-namespace {
-
-// Element name
-const char* kPipelineName = "gst_pipeline";
-const char* kDecodebin = "gst_uridecodebin";
-const char* kVideoSink = "gst_video_sink";
-const char* kAudioConverter = "gst_audioconvert";
-const char* kVolume = "gst_volume";
-const char* kAudioSink = "gst_audiosink";
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-const char* kVideoConverter = "gst_videoconvert";
-#if defined(OS_TIZEN)
-const char* KVideoConvertElement = "fimcconvert";
-#else
-const char* KVideoConvertElement = "videoconvert";
-#endif
-#endif
-const char* kPropertyBufferSize = "buffer-size";
-const char* kPropertyVolume = "volume";
-const char* kPropertyUri = "uri";
-const char* kPropertyDownload = "download";
-
-// Update duration every 100ms.
-const int kDurationUpdateInterval = 100;
-const int kMaxBuffer = 1;
-const int kMaxBufPercent = 100;
-// Buffer size for audio/video buffering.
-const int kPreloadBufferSize = (3 * 1024 * 1024);  // 3MB
-
-#define GST_OBJECT_UNREF(obj) \
-    if (obj) { \
-      gst_object_unref(obj); \
-      obj = NULL; \
-    }
-
-struct GstElementDeleter {
-  void operator()(GstElement* ptr) const {
-    GST_OBJECT_UNREF(ptr);
-  }
-};
-
-}  // namespace
-
-namespace media {
-
-static GstBusSyncReply GstPipelineMessageCB(
-    GstBus* bus,
-    GstMessage* message,
-    gpointer user_data) {
-  MediaPlayerBridgeGstreamer* player =
-      static_cast<MediaPlayerBridgeGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_BUS_PASS;
-
-  player->HandleMessage(message);
-  gst_message_unref(message);
-  return GST_BUS_DROP;
-}
-
-static GstFlowReturn OnGstAppsinkPrerollCB(
-    GstAppSink* sink,
-    gpointer user_data) {
-  MediaPlayerBridgeGstreamer* player =
-      static_cast<MediaPlayerBridgeGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_FLOW_ERROR;
-  player->PrerollComplete();
-  return GST_FLOW_OK;
-}
-
-static GstFlowReturn OnGstAppsinkSampleCB(
-    GstAppSink* sink,
-    gpointer user_data) {
-  MediaPlayerBridgeGstreamer* player =
-      static_cast<MediaPlayerBridgeGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_FLOW_ERROR;
-
-  player->SampleReady(player->PullSample());
-  return GST_FLOW_OK;
-}
-
-static void GstPipelinePadAddedCB(GstElement* src,
-                                  GstPad* new_pad,
-                                  gpointer user_data) {
-  MediaPlayerBridgeGstreamer* player =
-      static_cast<MediaPlayerBridgeGstreamer*>(user_data);
-  DCHECK(player);
-  player->OnNewPadAdded(new_pad);
-}
-
-static void GstPipelineNoMorePadsCB(GstElement* src,
-                                    gpointer user_data) {
-  MediaPlayerBridgeGstreamer* player =
-      static_cast<MediaPlayerBridgeGstreamer*>(user_data);
-  DCHECK(player);
-  player->OnNoMorePadsAvailable();
-}
-
-#if !defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-static GstAutoplugSelectResult
-GstUribinFactorySelectCB(GstElement* /*bin*/, GstPad* /*pad*/,
-                         GstCaps* /*caps*/, GstElementFactory* factory,
-                         gpointer /*user_data*/) {
-
-  if (g_str_has_prefix(GST_OBJECT_NAME(factory), "omxh264") ||
-      g_str_has_prefix(GST_OBJECT_NAME(factory), "v4l2video6dec") ) {
-    LOG(INFO) << "Skipping omxh264/v4l2video6dec decoder to use S/W Path";
-    return GST_AUTOPLUG_SELECT_SKIP;
-  }
-  return GST_AUTOPLUG_SELECT_TRY;
-}
-#endif
-
-//static
-MediaPlayerEfl* MediaPlayerEfl::CreatePlayer(
-    int player_id, const GURL& url, double volume,
-    MediaPlayerManager* manager, const std::string& user_agent) {
-  LOG(INFO) << "MediaElement is using |GST| to play media";
-  return new MediaPlayerBridgeGstreamer(player_id, url, volume, manager);
-}
-
-MediaPlayerBridgeGstreamer::MediaPlayerBridgeGstreamer(
-    int player_id,
-    const GURL& url,
-    double volume,
-    MediaPlayerManager* manager_in)
-    : MediaPlayerEfl(player_id, manager_in),
-      main_loop_(base::ThreadTaskRunnerHandle::Get()),
-      pipeline_(NULL),
-      appsink_(NULL),
-      uridecodebinsrc_(NULL),
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-      video_convert_(NULL),
-#endif
-      audio_convert_(NULL),
-      audio_volume_(NULL),
-      audio_sink_(NULL),
-      audio_sinkpad_(NULL),
-      video_sinkpad_(NULL),
-      url_(url),
-      width_(0),
-      height_(0),
-      is_prerolled_(false),
-      is_paused_(true),
-      duration_(0),
-      playback_rate_(1.0f),
-      buffered_(0),
-      video_format_(0),
-      media_type_(0),
-      is_live_stream_(false),
-      is_file_url_(false),
-      is_end_reached_(false),
-      is_seeking_(false),
-      seek_duration_(0),
-      error_occured_(false) {
-  if (!gst_is_initialized()) {
-    if (!gst_init_check(NULL, NULL, 0)) {
-      LOG(ERROR) << "Unable to initialize GST";
-      HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-      return;
-    }
-  }
-
-  if (!InitPipeline()) {
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-    return;
-  }
-
-  if (gst_element_set_state(pipeline_, GST_STATE_PAUSED) ==
-         GST_STATE_CHANGE_FAILURE)
-    LOG(ERROR) << "GStreamer state change failed";
-
-  manager()->OnReadyStateChange(
-      GetPlayerId(), blink::WebMediaPlayer::ReadyStateHaveNothing);
-  manager()->OnNetworkStateChange(
-      GetPlayerId(), blink::WebMediaPlayer::NetworkStateLoading);
-
-  if (url_.SchemeIsFile())
-    is_file_url_ = true;
-}
-
-void MediaPlayerBridgeGstreamer::Destroy() {
-  if (IsPlayerDestructing())
-    return;
-
-  destructing_ = true;
-  Release();
-  main_loop_->DeleteSoon(FROM_HERE, this);
-}
-
-void MediaPlayerBridgeGstreamer::Play() {
-  if (!pipeline_ || error_occured_ || is_end_reached_)
-    return;
-
-  if (playback_rate_ == 0.0) {
-    is_paused_ = false;
-    return;
-  }
-  if (gst_element_set_state(pipeline_, GST_STATE_PLAYING) ==
-         GST_STATE_CHANGE_FAILURE) {
-    LOG(ERROR) << "GStreamer state change failed in PLAY";
-    return;
-  }
-
-#if defined(OS_TIZEN_MOBILE)
-  WakeUpDisplayAndAcquireDisplayLock();
-#endif
-
-  StartCurrentTimeUpdateTimer();
-  is_paused_ = false;
-  is_end_reached_ = false;
-}
-
-void MediaPlayerBridgeGstreamer::Pause(bool is_media_related_action) {
-  if (!pipeline_ || error_occured_)
-    return;
-
-  if (gst_element_set_state(pipeline_, GST_STATE_PAUSED) ==
-         GST_STATE_CHANGE_FAILURE) {
-    LOG(ERROR) << "GStreamer state change failed in PAUSE";
-    return;
-  }
-
-#if defined(OS_TIZEN_MOBILE)
-  ReleaseDisplayLock();
-#endif
-
-  StopCurrentTimeUpdateTimer();
-  is_paused_ = true;
-}
-
-void MediaPlayerBridgeGstreamer::SetRate(double rate) {
-  if (error_occured_ || is_live_stream_ || (playback_rate_ == rate))
-    return;
-
-  GstState state = GST_STATE_NULL;
-  GstState pending = GST_STATE_NULL;
-  gst_element_get_state(pipeline_, &state, &pending, 0);
-  if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
-      || (pending == GST_STATE_PAUSED))
-    return;
-  if (rate == 0.0) {
-    playback_rate_ = rate;
-    Pause(true);
-    return;
-  }
-
-  // If rate was zero and requested rate is non-zero, change player state
-  if (playback_rate_ == 0.0 && rate != 0.0) {
-    Play();
-    StartCurrentTimeUpdateTimer();
-  }
-
-  double current_position = GetCurrentTime() * GST_SECOND;
-  if (rate < 0 && current_position == 0.0f)
-    current_position = -1.0f;
-  if (SeekTo(ConvertToGstClockTime(current_position),
-             rate, GST_SEEK_FLAG_FLUSH)) {
-    playback_rate_ = rate;
-  } else {
-    LOG(ERROR) << "Setting Rate : " << rate << " failed";
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-  }
-}
-
-void MediaPlayerBridgeGstreamer::Seek(const double time) {
-  if (IsPlayerDestructing())
-    return;
-
-  if (!pipeline_ || error_occured_ || is_live_stream_ ||
-      time == GetCurrentTime()) {
-    main_loop_->PostTask(FROM_HERE,
-                         base::Bind(&MediaPlayerBridgeGstreamer::OnTimeChanged,
-                                    base::Unretained(this)));
-    return;
-  }
-
-  GstState state = GST_STATE_NULL;
-  GstStateChangeReturn ret = gst_element_get_state(
-      pipeline_, &state, NULL, 250 * GST_NSECOND);
-  if (ret == GST_STATE_CHANGE_FAILURE || ret == GST_STATE_CHANGE_NO_PREROLL) {
-    LOG(ERROR) << "Cannot seek in "
-               << gst_element_state_change_return_get_name(ret) << " state";
-    return;
-  }
-
-  StopCurrentTimeUpdateTimer();
-  if (SeekTo(ConvertToGstClockTime(time),
-             playback_rate_, static_cast<GstSeekFlags>(
-                GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
-    UpdateSeekState(true);
-    seek_duration_ = time;
-    is_end_reached_ = time != duration_ ? false : true;
-    manager()->OnTimeUpdate(GetPlayerId(), time);
-    if (!is_paused_)
-      StartCurrentTimeUpdateTimer();
-  } else {
-    LOG(ERROR) << "MediaPlayerBridgeGstreamer::" << __FUNCTION__ << " Failed!";
-    manager()->OnTimeUpdate(GetPlayerId(), GetCurrentTime());
-    manager()->OnTimeChanged(GetPlayerId());
-  }
-}
-
-bool MediaPlayerBridgeGstreamer::SeekTo(
-    gint64 position,
-    float rate,
-    GstSeekFlags seekType) {
-  gint64 startTime = 0, endTime = 0;
-  if (rate > 0) {
-    startTime = position;
-    endTime = GST_CLOCK_TIME_NONE;
-  } else {
-    startTime = 0;
-    // If we are at beginning of media, start from the end to avoid
-    // immediate EOS.
-    if (position < 0) {
-      endTime = static_cast<gint64>(GetCurrentTime() * GST_SECOND);
-    } else {
-      endTime = position;
-    }
-  }
-
-  return gst_element_seek(pipeline_, rate, GST_FORMAT_TIME, seekType,
-                          GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET,
-                          endTime);
-}
-
-bool MediaPlayerBridgeGstreamer::InitPipeline() {
-  scoped_ptr<GstElement, GstElementDeleter> pipeline;
-  pipeline.reset(gst_pipeline_new(kPipelineName));
-  appsink_ = GetVideoSink();
-  if (!gst_bin_add(GST_BIN(pipeline.get()), appsink_)) {
-    GST_OBJECT_UNREF(appsink_);
-    return false;
-  }
-  uridecodebinsrc_ = gst_element_factory_make("uridecodebin", kDecodebin);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), uridecodebinsrc_)) {
-    GST_OBJECT_UNREF(uridecodebinsrc_);
-    return false;
-  }
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-  video_convert_ =
-      gst_element_factory_make(KVideoConvertElement, kVideoConverter);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), video_convert_)) {
-    GST_OBJECT_UNREF(video_convert_);
-    return false;
-  }
-  video_sinkpad_ = gst_element_get_static_pad(video_convert_, "sink");
-#else
-  video_sinkpad_ = gst_element_get_static_pad(appsink_, "sink");
-#endif
-  audio_convert_ = gst_element_factory_make("audioconvert", kAudioConverter);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), audio_convert_)) {
-    GST_OBJECT_UNREF(audio_convert_);
-    return false;
-  }
-  audio_volume_ = gst_element_factory_make("volume", kVolume);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), audio_volume_)) {
-    GST_OBJECT_UNREF(audio_volume_);
-    return false;
-  }
-  g_object_set(G_OBJECT(audio_volume_), "mute", false, NULL);
-
-  audio_sink_ = gst_element_factory_make("autoaudiosink", kAudioSink);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), audio_sink_)) {
-    GST_OBJECT_UNREF(audio_sink_);
-    return false;
-  }
-  audio_sinkpad_ = gst_element_get_static_pad(audio_convert_, "sink");
-
-  g_signal_connect(uridecodebinsrc_, "pad-added",
-                   G_CALLBACK(GstPipelinePadAddedCB), this);
-  g_signal_connect(uridecodebinsrc_, "no-more-pads",
-                   G_CALLBACK(GstPipelineNoMorePadsCB), this);
-#if !defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-  g_signal_connect(uridecodebinsrc_, "autoplug-select",
-                   G_CALLBACK(GstUribinFactorySelectCB), this);
-#endif
-  // QoS property will enable quality-of-service features of basesink
-  // that gather statistics about real-time performance of the clock
-  // synchronisation. For each buffer received in sink, statistics are
-  // gathered and a QOS event is sent upstream. This information can
-  // then be used by upstream elements to reduce their processing rate.
-  if (!gst_base_sink_is_qos_enabled(GST_BASE_SINK(appsink_)))
-    gst_base_sink_set_qos_enabled(GST_BASE_SINK(appsink_), true);
-
-  GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline.get()));
-  if (!bus) {
-    LOG(ERROR) << "GStreamer bus creation failed";
-    return false;
-  }
-  gst_bus_set_sync_handler(
-      bus,
-      static_cast<GstBusSyncHandler>(GstPipelineMessageCB),
-      this,
-      NULL);
-  gst_object_unref(bus);
-
-  PrepareForVideoFrame();
-
-  g_object_set(G_OBJECT(uridecodebinsrc_),
-               kPropertyDownload, true, kPropertyBufferSize,
-               kPreloadBufferSize, kPropertyUri,
-               url_.spec().c_str(), NULL);
-  pipeline_ = pipeline.release();
-  return true;
-}
-
-void MediaPlayerBridgeGstreamer::Release() {
-  StopCurrentTimeUpdateTimer();
-  StopBufferingUpdateTimer();
-  if (pipeline_) {
-    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
-    if (bus) {
-      g_signal_handlers_disconnect_by_func(
-          bus, reinterpret_cast<gpointer>(GstPipelineMessageCB), this);
-      gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
-      gst_object_unref(bus);
-    }
-
-    manager()->OnReadyStateChange(
-        GetPlayerId(), blink::WebMediaPlayer::ReadyStateHaveNothing);
-    manager()->OnNetworkStateChange(
-        GetPlayerId(), blink::WebMediaPlayer::NetworkStateEmpty);
-
-    gst_element_set_state(pipeline_, GST_STATE_NULL);
-
-    if (uridecodebinsrc_) {
-      g_signal_handlers_disconnect_by_func(
-          uridecodebinsrc_,
-          reinterpret_cast<gpointer>(GstPipelinePadAddedCB),
-          this);
-      g_signal_handlers_disconnect_by_func(
-          uridecodebinsrc_,
-          reinterpret_cast<gpointer>(GstPipelineNoMorePadsCB),
-          this);
-#if !defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-      g_signal_handlers_disconnect_by_func(
-          uridecodebinsrc_,
-          reinterpret_cast<gpointer>(GstUribinFactorySelectCB),
-          this);
-#endif
-    }
-
-    gst_object_unref(pipeline_);
-    pipeline_ = NULL;
-    appsink_ = NULL;
-    uridecodebinsrc_ = NULL;
-    audio_sink_ = NULL;
-    audio_convert_ = NULL;
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-    video_convert_ = NULL;
-#endif
-    audio_volume_ = NULL;
-    audio_sinkpad_ = NULL;
-    video_sinkpad_ = NULL;
-  }
-}
-
-void MediaPlayerBridgeGstreamer::SetVolume(double volume) {
-  CHECK(volume >= 0.0f && volume <= 1.0f);
-  g_object_set(G_OBJECT(audio_volume_), kPropertyVolume, volume, NULL);
-}
-
-void MediaPlayerBridgeGstreamer::UpdateDuration() {
-  if (error_occured_)
-    return;
-
-  gint64 duration = 0;
-  GstFormat format = GST_FORMAT_TIME;
-  gst_element_query_duration(pipeline_, format, &duration);
-  duration_ = ConvertNanoSecondsToSeconds(duration);
-  manager()->OnDurationChange(GetPlayerId(), duration_);
-
-  // No need to buffer 'local file'. Update buffered percentage.
-  if (is_file_url_) {
-    std::vector<media::MediaPlayerEfl::TimeRanges> buffer_range;
-    media::MediaPlayerEfl::TimeRanges range;
-    range.start = 0;
-    range.end = duration_ * base::Time::kMicrosecondsPerSecond;
-    buffer_range.push_back(range);
-    manager()->OnBufferUpdate(GetPlayerId(), buffer_range);
-  }
-}
-
-double MediaPlayerBridgeGstreamer::GetCurrentTime() {
-  if (error_occured_)
-    return 0.0;
-
-  gint64 current_time = 0;
-  GstFormat format = GST_FORMAT_TIME;
-
-  if (is_end_reached_) {
-    if (is_seeking_)
-      return seek_duration_;
-    if (duration_)
-      return duration_;
-  }
-
-  gst_element_query_position(pipeline_, format, &current_time);
-  return ConvertNanoSecondsToSeconds(current_time);
-}
-
-void MediaPlayerBridgeGstreamer::OnCurrentTimeUpdateTimerFired() {
-  manager()->OnTimeUpdate(GetPlayerId(), GetCurrentTime());
-}
-
-void MediaPlayerBridgeGstreamer::StartCurrentTimeUpdateTimer() {
-  if (!current_time_update_timer_.IsRunning()) {
-    current_time_update_timer_.Start(
-        FROM_HERE,
-        base::TimeDelta::FromMilliseconds(kDurationUpdateInterval),
-        this, &MediaPlayerBridgeGstreamer::OnCurrentTimeUpdateTimerFired);
-  }
-}
-
-void MediaPlayerBridgeGstreamer::StopCurrentTimeUpdateTimer() {
-  if (current_time_update_timer_.IsRunning())
-    current_time_update_timer_.Stop();
-}
-
-void MediaPlayerBridgeGstreamer::OnBufferingUpdateTimerFired() {
-  if (IsPlayerDestructing())
-    return;
-  GetBufferedTimeRanges();
-}
-
-void MediaPlayerBridgeGstreamer::StartBufferingUpdateTimer() {
-  if (!buffering_update_timer_.IsRunning()) {
-    buffering_update_timer_.Start(
-        FROM_HERE,
-        base::TimeDelta::FromMilliseconds(kDurationUpdateInterval),
-        this, &MediaPlayerBridgeGstreamer::OnBufferingUpdateTimerFired);
-  }
-}
-
-void MediaPlayerBridgeGstreamer::StopBufferingUpdateTimer() {
-  if (buffering_update_timer_.IsRunning())
-    buffering_update_timer_.Stop();
-}
-
-GstSample* MediaPlayerBridgeGstreamer::PullSample() {
-  return gst_app_sink_pull_sample(GST_APP_SINK(appsink_));
-}
-
-void MediaPlayerBridgeGstreamer::SampleReady(GstSample* sample) {
-  GstMapInfo map;
-  GstBuffer* buffer = gst_sample_get_buffer(sample);
-
-  if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
-    LOG(ERROR) << "Sample contains invalid or no info!";
-    gst_sample_unref(sample);
-    return;
-  }
-
-  if (!width_ || !height_)
-    GetFrameDetails();
-
-  base::SharedMemory shared_memory;
-  uint32 shared_memory_size = 0;
-  base::SharedMemoryHandle foreign_memory_handle;
-
-  base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(
-      GST_BUFFER_TIMESTAMP(buffer) / base::Time::kNanosecondsPerMicrosecond);
-
-  if (video_format_ == GST_VIDEO_SN12)
-    shared_memory_size = GetSN12BufferSize(width_, height_);
-  else
-    shared_memory_size = (map.size);
-
-  if (!shared_memory.CreateAndMapAnonymous(shared_memory_size)) {
-    LOG(ERROR) << "Shared Memory creation failed.";
-    gst_buffer_unmap(buffer, &map);
-    gst_sample_unref(sample);
-    return;
-  }
-  if (!shared_memory.ShareToProcess(
-          base::Process::Current().Handle(), &foreign_memory_handle)) {
-    LOG(ERROR) << "Shared Memory handle could not be obtained";
-    gst_buffer_unmap(buffer, &map);
-    gst_sample_unref(sample);
-    return;
-  }
-
-  memcpy(shared_memory.memory(), map.data, shared_memory_size);
-  // FIMCCONVERT gives extra 12 bytes for yuv_size
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-  shared_memory_size -= 12;
-#endif
-
-  manager()->OnNewFrameAvailable(
-      GetPlayerId(), foreign_memory_handle, shared_memory_size, timestamp);
-
-  gst_buffer_unmap(buffer, &map);
-  gst_sample_unref(sample);
-}
-
-void MediaPlayerBridgeGstreamer::ProcessBufferingStats(GstMessage* message) {
-  if (IsPlayerDestructing())
-    return;
-
-  gst_message_parse_buffering(message, &buffered_);
-  main_loop_->PostTask(FROM_HERE,
-                       base::Bind(&MediaPlayerBridgeGstreamer::OnUpdateStates,
-                                  base::Unretained(this)));
-}
-
-void MediaPlayerBridgeGstreamer::GetBufferedTimeRanges() {
-  GstQuery* query = NULL;
-  gboolean result = false;
-  media::Ranges<base::TimeDelta> time_ranges;
-  std::vector<media::MediaPlayerEfl::TimeRanges> buffer_range;
-  query = gst_query_new_buffering(GST_FORMAT_PERCENT);
-  result = gst_element_query(pipeline_, query);
-  if (result) {
-    gint n_ranges = 0, range = 0;
-    n_ranges = gst_query_get_n_buffering_ranges(query);
-    for (range = 0; range < n_ranges; range++) {
-      gint64 start = 0, stop = 0;
-      gst_query_parse_nth_buffering_range(query, range, &start, &stop);
-      media::MediaPlayerEfl::TimeRanges b_range;
-      if ((start == 0 || is_end_reached_) &&
-          stop == GST_FORMAT_PERCENT_MAX)
-        StopBufferingUpdateTimer();
-
-      b_range.start = start * duration_;
-      b_range.end = stop * duration_;
-      buffer_range.push_back(b_range);
-    }
-    manager()->OnBufferUpdate(GetPlayerId(), buffer_range);
-  }
-}
-
-void MediaPlayerBridgeGstreamer::HandleMessage(GstMessage* message) {
-  if (IsPlayerDestructing())
-      return;
-
-  switch (GST_MESSAGE_TYPE(message)) {
-    case GST_MESSAGE_STREAM_STATUS:
-    case GST_MESSAGE_TAG:
-      break;
-    case GST_MESSAGE_ERROR: {
-      StopBufferingUpdateTimer();
-      GError* error = NULL;
-      gst_message_parse_error(message, &error, NULL);
-      blink::WebMediaPlayer::NetworkState network_state_error =
-          blink::WebMediaPlayer::NetworkStateEmpty;
-      if (error->code == GST_STREAM_ERROR_CODEC_NOT_FOUND
-          || error->code == GST_STREAM_ERROR_WRONG_TYPE
-          || error->code == GST_STREAM_ERROR_FAILED
-          || error->code == GST_CORE_ERROR_MISSING_PLUGIN
-          || error->code == GST_RESOURCE_ERROR_NOT_FOUND)
-        network_state_error = blink::WebMediaPlayer::NetworkStateFormatError;
-      else if (error->domain == GST_RESOURCE_ERROR)
-        network_state_error = blink::WebMediaPlayer::NetworkStateNetworkError;
-      else
-        network_state_error = blink::WebMediaPlayer::NetworkStateDecodeError;
-
-      LOG(ERROR) << "Error Message : " << error->message << " Recieved From : "
-                 << GST_MESSAGE_SRC_NAME(message)
-                 << ", and Blink Error Code  : " << network_state_error;
-      g_error_free(error);
-      HandleError(network_state_error);
-      break;
-    }
-    case GST_MESSAGE_EOS: {
-      main_loop_->PostTask(FROM_HERE,
-                           base::Bind(
-                               &MediaPlayerBridgeGstreamer::OnPlaybackComplete,
-                               base::Unretained(this)));
-      break;
-    }
-    case GST_MESSAGE_ASYNC_DONE: {
-      if (is_seeking_) {
-        UpdateSeekState(false);
-        main_loop_->PostTask(FROM_HERE,
-                             base::Bind(
-                                 &MediaPlayerBridgeGstreamer::OnTimeChanged,
-                                 base::Unretained(this)));
-      }
-
-      // Without |audio-sink| for audio tracks no preroll message is received.
-      // To update track details |PrerollComplete| is called. For Video tracks,
-      // preroll message is received long before control reaches here.
-      if (!is_prerolled_)
-        PrerollComplete();
-      break;
-    }
-    case GST_MESSAGE_QOS: {
-      LOG(INFO) << " QoS received from player : " << GetPlayerId();
-      break;
-    }
-    case GST_MESSAGE_STATE_CHANGED: {
-      if (strcmp(kPipelineName, GST_MESSAGE_SRC_NAME(message)))
-        break;
-
-      main_loop_->PostTask(FROM_HERE,
-                           base::Bind(
-                               &MediaPlayerBridgeGstreamer::OnUpdateStates,
-                               base::Unretained(this)));
-      break;
-    }
-    case GST_MESSAGE_BUFFERING: {
-      ProcessBufferingStats(message);
-      break;
-    }
-    case GST_MESSAGE_CLOCK_LOST: {
-      /* Get a new clock */
-      gst_element_set_state(pipeline_, GST_STATE_PAUSED);
-      gst_element_set_state(pipeline_, GST_STATE_PLAYING);
-      break;
-    }
-    default: {
-      LOG(ERROR) << "Unhandled GStreamer message type : "
-                 << GST_MESSAGE_TYPE_NAME(message);
-      break;
-    }
-  }
-}
-
-void MediaPlayerBridgeGstreamer::UpdateStates() {
-  if (IsPlayerDestructing())
-    return;
-  if (error_occured_)
-    return;
-
-  GstState state = GST_STATE_NULL;
-  GstState pending = GST_STATE_NULL;
-  GstStateChangeReturn ret = gst_element_get_state(
-      pipeline_, &state, &pending, 250 * GST_NSECOND);
-
-  switch (ret) {
-    case GST_STATE_CHANGE_SUCCESS: {
-      if (!duration_)
-        UpdateDuration();
-
-      switch (state) {
-        case GST_STATE_VOID_PENDING:
-        case GST_STATE_NULL:
-        case GST_STATE_READY:
-          break;
-        case GST_STATE_PAUSED:
-        case GST_STATE_PLAYING: {
-          if (!is_file_url_)
-            StartBufferingUpdateTimer();
-
-          if (buffered_ == kMaxBufPercent || is_file_url_) {
-            if (state == GST_STATE_PAUSED && !is_paused_) {
-              if (gst_element_set_state(pipeline_, GST_STATE_PLAYING) ==
-                     GST_STATE_CHANGE_FAILURE)
-                LOG(ERROR) << "GStreamer state change failed";
-            }
-
-            // On rapid playback on PIANO Widget, there are instences
-            // when 'ASYNC-DONE' message is not received on replay.
-            if (is_seeking_ && state == GST_STATE_PLAYING) {
-              UpdateSeekState(false);
-              main_loop_->PostTask(
-                  FROM_HERE,
-                  base::Bind(&MediaPlayerBridgeGstreamer::OnTimeChanged,
-                             base::Unretained(this)));
-            }
-            manager()->OnReadyStateChange(
-                GetPlayerId(), blink::WebMediaPlayer::ReadyStateHaveEnoughData);
-            manager()->OnNetworkStateChange(
-                GetPlayerId(), blink::WebMediaPlayer::NetworkStateLoaded);
-          } else {
-            if (state == GST_STATE_PLAYING) {
-              if (gst_element_set_state(pipeline_, GST_STATE_PAUSED) ==
-                     GST_STATE_CHANGE_FAILURE)
-                LOG(ERROR) << "GStreamer state change failed";
-              manager()->OnReadyStateChange(
-                  GetPlayerId(),
-                  blink::WebMediaPlayer::ReadyStateHaveCurrentData);
-              manager()->OnNetworkStateChange(
-                  GetPlayerId(), blink::WebMediaPlayer::NetworkStateLoading);
-            }
-          }
-          break;
-        }
-      }
-      break;
-    }
-    case GST_STATE_CHANGE_FAILURE: {
-      LOG(ERROR) << "Failure: State: "
-                 << gst_element_state_get_name(state)
-                 << " pending: "
-                 << gst_element_state_get_name(pending);
-      HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-      break;
-    }
-    case GST_STATE_CHANGE_NO_PREROLL: {
-      if (state == GST_STATE_READY) {
-        manager()->OnReadyStateChange(
-            GetPlayerId(), blink::WebMediaPlayer::ReadyStateHaveNothing);
-      } else if (state == GST_STATE_PAUSED) {
-        manager()->OnReadyStateChange(
-            GetPlayerId(), blink::WebMediaPlayer::ReadyStateHaveEnoughData);
-        is_paused_ = true;
-        is_live_stream_ = true;
-      } else if (state == GST_STATE_PLAYING) {
-        is_paused_ = false;
-      }
-      manager()->OnNetworkStateChange(
-          GetPlayerId(), blink::WebMediaPlayer::NetworkStateLoading);
-      break;
-    }
-    default:
-      break;
-  }
-}
-
-void MediaPlayerBridgeGstreamer::GetFrameDetails() {
-  GstSample* sample = gst_app_sink_pull_preroll(GST_APP_SINK(appsink_));
-  if (!sample)
-    return;
-
-  if (!GetGstVideoBufferMetaData(sample, &width_,
-                                 &height_, &video_format_)) {
-    gst_sample_unref(sample);
-    return;
-  }
-
-  // Need to update frame details before sending buffer.
-  manager()->OnMediaDataChange(
-      GetPlayerId(), static_cast<int>(video_format_),
-      height_, width_, media_type_);
-
-  SampleReady(sample);
-}
-
-void MediaPlayerBridgeGstreamer::PrerollComplete() {
-  manager()->OnMediaDataChange(
-      GetPlayerId(), static_cast<int>(video_format_),
-      height_, width_, media_type_);
-  is_prerolled_ = true;
-}
-
-void MediaPlayerBridgeGstreamer::OnPlaybackComplete() {
-#if defined(OS_TIZEN_MOBILE)
-  ReleaseDisplayLock();
-#endif
-
-  is_end_reached_ = true;
-  StopCurrentTimeUpdateTimer();
-  manager()->OnTimeUpdate(GetPlayerId(), GetCurrentTime());
-  manager()->OnTimeChanged(GetPlayerId());
-}
-
-void MediaPlayerBridgeGstreamer::UpdateSeekState(bool state) {
-  is_seeking_ = state;
-}
-
-void MediaPlayerBridgeGstreamer::OnTimeChanged() {
-  DCHECK(main_loop_->BelongsToCurrentThread());
-  manager()->OnTimeChanged(GetPlayerId());
-}
-
-void MediaPlayerBridgeGstreamer::OnUpdateStates() {
-  DCHECK(main_loop_->BelongsToCurrentThread());
-  UpdateStates();
-}
-
-GstElement* MediaPlayerBridgeGstreamer::GetVideoSink() {
-  return gst_element_factory_make("appsink", kVideoSink);
-}
-
-void MediaPlayerBridgeGstreamer::PrepareForVideoFrame() {
-  GstAppSinkCallbacks callbacks = {NULL, OnGstAppsinkPrerollCB,
-                                   OnGstAppsinkSampleCB, NULL};
-  gst_app_sink_set_callbacks(GST_APP_SINK(appsink_), &callbacks,
-                             this, NULL);
-}
-
-void MediaPlayerBridgeGstreamer::OnNewPadAdded(GstPad* new_pad) {
-  GstCaps* new_pad_caps = gst_pad_query_caps(new_pad, NULL);
-  const gchar* new_pad_type =
-      gst_structure_get_name(gst_caps_get_structure(new_pad_caps, 0));
-
-  if (g_str_has_prefix(new_pad_type, "audio/x-raw") &&
-      !gst_pad_is_linked(audio_sinkpad_)) {
-    if (!gst_element_link_many(audio_convert_, audio_volume_, audio_sink_,
-                               NULL)) {
-      LOG(ERROR) << "Could not link Audio Pipeline";
-      HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-      Release();
-    } else {
-      media_type_ |= MEDIA_AUDIO_MASK;
-      gst_pad_link(new_pad, audio_sinkpad_);
-    }
-  } else if (g_str_has_prefix(new_pad_type, "video/x-raw") &&
-             !gst_pad_is_linked(video_sinkpad_)) {
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-    if (!gst_element_link_filtered(video_convert_, appsink_,
-                                   gst_caps_new_simple("video/x-raw", "format",
-                                                       G_TYPE_STRING, "I420",
-                                                       NULL))) {
-      LOG(ERROR) << "Could not link Video Pipeline";
-      HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-      Release();
-    } else
-#endif
-    {
-      media_type_ |= MEDIA_VIDEO_MASK;
-      gst_pad_link(new_pad, video_sinkpad_);
-    }
-  }
-  gst_caps_unref(new_pad_caps);
-  new_pad_caps = NULL;
-}
-
-void MediaPlayerBridgeGstreamer::OnNoMorePadsAvailable() {
-  if ((media_type_ & MEDIA_VIDEO_MASK) == 0) {
-    gst_element_set_state(appsink_, GST_STATE_NULL);
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-    gst_element_set_state(video_convert_, GST_STATE_NULL);
-    gst_bin_remove_many(GST_BIN(pipeline_), video_convert_,
-                        appsink_, NULL);
-    video_convert_ = NULL;
-#else
-    gst_bin_remove(GST_BIN(pipeline_), appsink_);
-#endif
-    appsink_ = NULL;
-  }
-  if ((media_type_ & MEDIA_AUDIO_MASK) == 0) {
-    gst_element_set_state(audio_convert_, GST_STATE_NULL);
-    gst_element_set_state(audio_volume_, GST_STATE_NULL);
-    gst_element_set_state(audio_sink_, GST_STATE_NULL);
-    gst_bin_remove_many(GST_BIN(pipeline_), audio_convert_, audio_volume_,
-                        audio_sink_, NULL);
-    audio_convert_ = NULL;
-    audio_volume_ = NULL;
-    audio_sink_ = NULL;
-  }
-}
-
-void MediaPlayerBridgeGstreamer::HandleError(
-    blink::WebMediaPlayer::NetworkState state) {
-  LOG(ERROR) << "Error in MediaPlayerBridgeGstreamer::HandleError";
-#if defined(OS_TIZEN_MOBILE)
-  ReleaseDisplayLock();
-#endif
-
-  error_occured_ = true;
-  manager()->OnNetworkStateChange(GetPlayerId(), state);
-}
-
-}  // namespace media
diff --git a/tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.h b/tizen_src/chromium_impl/media/base/efl/media_player_bridge_gstreamer.h
deleted file mode 100644 (file)
index 2d493cb..0000000
+++ /dev/null
@@ -1,126 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_EFL_MEDIA_PLAYER_BRIDGE_GSTREMEAR_H_
-#define MEDIA_EFL_MEDIA_PLAYER_BRIDGE_GSTREMEAR_H_
-
-#include <gst/gst.h>
-#include <gst/pbutils/install-plugins.h>
-#include <gst/pbutils/missing-plugins.h>
-
-#include "base/memory/scoped_ptr.h"
-#include "base/memory/weak_ptr.h"
-#include "base/timer/timer.h"
-#include "content/public/browser/browser_message_filter.h"
-#include "media/base/ranges.h"
-#include "media/base/efl/media_player_efl.h"
-#include "media/base/video_frame.h"
-#include "third_party/WebKit/public/platform/WebMediaPlayer.h"
-
-namespace media {
-
-class MEDIA_EXPORT MediaPlayerBridgeGstreamer
-    : public MediaPlayerEfl {
- public:
-  MediaPlayerBridgeGstreamer(
-      int player_id,
-      const GURL& url,
-      double volume,
-      MediaPlayerManager* manager);
-  ~MediaPlayerBridgeGstreamer() override {}
-
-  // MediaPlayerEfl implementation.
-  void Play() override;
-  void Pause(bool is_media_related_action) override;
-  void SetRate(double rate) override;
-  void Seek(const double time) override;
-  void SetVolume(double volume) override;
-  double GetCurrentTime() override;
-  void Destroy() override;
-
-  // Error handling API
-  void HandleError(blink::WebMediaPlayer::NetworkState state);
-
-  void HandleMessage(GstMessage* message);
-  GstSample* PullSample();
-  void SampleReady(GstSample* buffer);
-  void PrerollComplete();
-
-  void OnNewPadAdded(GstPad* pad);
-  void OnNoMorePadsAvailable();
-
- protected:
-  bool InitPipeline();
-  void Release() override;
-
- private:
-  // |current_time_update_timer_| related
-  void OnCurrentTimeUpdateTimerFired();
-  void StartCurrentTimeUpdateTimer();
-  void StopCurrentTimeUpdateTimer();
-
-  // |buffering_update_timer_| related
-  void OnBufferingUpdateTimerFired();
-  void StartBufferingUpdateTimer();
-  void StopBufferingUpdateTimer();
-
-  void UpdateStates();
-  void OnUpdateStates();
-  void UpdateDuration();
-  void UpdateSeekState(bool state);
-
-  void OnPlaybackComplete();
-  void OnTimeChanged();
-
-  void GetFrameDetails();
-  void ProcessBufferingStats(GstMessage* message);
-  void GetBufferedTimeRanges();
-  bool SeekTo(gint64 position, float rate, GstSeekFlags seekType);
-
-  GstElement* GetVideoSink();
-  void PrepareForVideoFrame();
-
- private:
-  const scoped_refptr<base::SingleThreadTaskRunner> main_loop_;
-  GstElement* pipeline_;
-  GstElement* appsink_;
-  GstElement* uridecodebinsrc_;
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-  GstElement* video_convert_;
-#endif
-  GstElement* audio_convert_;
-  GstElement* audio_volume_;
-  GstElement* audio_sink_;
-  GstPad* audio_sinkpad_;
-  GstPad* video_sinkpad_;
-
-  GURL url_;
-  gint width_;
-  gint height_;
-
-  bool is_prerolled_;
-  bool is_paused_;
-  double duration_;
-  double playback_rate_;
-  int buffered_;
-  guint32 video_format_;
-
-  int media_type_;
-  bool is_live_stream_;
-  bool is_file_url_;
-  bool is_end_reached_;
-  bool is_seeking_;
-  double seek_duration_;
-
-  bool error_occured_;
-
-  base::RepeatingTimer current_time_update_timer_;
-  base::RepeatingTimer buffering_update_timer_;
-
-  DISALLOW_COPY_AND_ASSIGN(MediaPlayerBridgeGstreamer);
-};
-
-}  // namespace media
-
-#endif  // MEDIA_EFL_MEDIA_PLAYER_BRIDGE_GSTREAMER_H_
index d6a5730..457ac2e 100644 (file)
 
 namespace media {
 
-GstClockTime ConvertToGstClockTime(double time) {
-  if (time < 0) {
-    LOG(ERROR) << "Invalid time:" << time << " Reset to 0";
-    time = 0;
-  }
-
-  // Extract the integer part of the time (seconds) and the fractional part
-  // (microseconds). Attempt to round the microseconds so no floating point
-  // precision is lost and we can perform an accurate seek.
-  double seconds = 0;
-  double microSeconds = std::modf(time, &seconds) * 1000000;
-  GTimeVal timeValue;
-  timeValue.tv_sec = static_cast<glong>(seconds);
-  timeValue.tv_usec = static_cast<glong>(lround(microSeconds / 10) * 10);
-  return GST_TIMEVAL_TO_TIME(timeValue);
-}
-
 double ConvertNanoSecondsToSeconds(int64 time) {
   return base::TimeDelta::FromMicroseconds(
       time /
@@ -61,44 +44,6 @@ GURL GetCleanURL(std::string url) {
   return url_;
 }
 
-int GetSN12BufferSize(int video_width, int video_height) {
-  uint tile_w_align = ((video_width - 1) / SN12_TILE_WIDTH + 2) & ~1;
-  return (SN12_TILE_SIZE * tile_w_align *
-      ((video_height - 1) / SN12_TILE_HEIGHT + 1) +
-      (((video_height + 1) & ~1) / 2) * tile_w_align * SN12_TILE_WIDTH);
-}
-
-bool GetGstVideoBufferMetaData(GstSample* sample,
-                               gint* video_width,
-                               gint* video_height,
-                               guint32* video_format) {
-  GstCaps* caps = gst_sample_get_caps(sample);
-  if (!caps)
-    return false;
-
-  GstVideoInfo vi;
-  gst_video_info_from_caps(&vi, caps);
-
-  *video_width = GST_VIDEO_INFO_WIDTH(&vi);
-  *video_height = GST_VIDEO_INFO_HEIGHT(&vi);
-  if (video_format) {
-    switch(GST_VIDEO_INFO_FORMAT(&vi)) {
-      case GST_VIDEO_FORMAT_I420: {
-        *video_format = GST_MAKE_FOURCC('I','4','2','0');
-        break;
-      }
-      case GST_VIDEO_FORMAT_NV12: {
-        *video_format = GST_MAKE_FOURCC('N','V','1','2');
-        break;
-      }
-      default:
-        LOG(ERROR) << "Unknown format : " << GST_VIDEO_INFO_FORMAT(&vi);
-        break;
-    }
-  }
-  return true;
-}
-
 #if defined(OS_TIZEN_MOBILE)
 void WakeUpDisplayAndAcquireDisplayLock() {
   if (device_power_wakeup(false) != DEVICE_ERROR_NONE)
index f6c1e92..1e4a54e 100644 (file)
@@ -5,47 +5,12 @@
 #ifndef MEDIA_BASE_EFL_MEDIA_PLAYER_UTIL_EFL_H_
 #define MEDIA_BASE_EFL_MEDIA_PLAYER_UTIL_EFL_H_
 
-#include <gst/gst.h>
-#include <gst/video/video.h>
-
 #include "base/basictypes.h"
 #include "media/base/media_export.h"
 #include "url/gurl.h"
 
 namespace media {
 
-// Fourcc for gst-video-format.
-const uint32 GST_VIDEO_SN12 = GST_MAKE_FOURCC('S', 'N', '1', '2');
-const uint32 GST_VIDEO_I420 = GST_MAKE_FOURCC('I', '4', '2', '0');
-const uint32 GST_VIDEO_NV12 = GST_MAKE_FOURCC('N', 'V', '1', '2');
-const uint32 GST_VIDEO_YU12 = GST_MAKE_FOURCC('Y', 'U', '1', '2');
-
-// Tile size for SN12.
-const uint SN12_TILE_WIDTH = 64;
-const uint SN12_TILE_HEIGHT = 32;
-const uint SN12_TILE_SIZE = SN12_TILE_WIDTH * SN12_TILE_HEIGHT;
-
-// GstPlayFlags in |gstplay-enum.h|.
-typedef enum {
-  GST_PLAY_FLAG_VIDEO             = (1 << 0),
-  GST_PLAY_FLAG_AUDIO             = (1 << 1),
-  GST_PLAY_FLAG_TEXT              = (1 << 2),
-  GST_PLAY_FLAG_VIS               = (1 << 3),
-  GST_PLAY_FLAG_SOFT_VOLUME       = (1 << 4),
-  GST_PLAY_FLAG_NATIVE_AUDIO      = (1 << 5),
-  GST_PLAY_FLAG_NATIVE_VIDEO      = (1 << 6),
-  GST_PLAY_FLAG_DOWNLOAD          = (1 << 7),
-  GST_PLAY_FLAG_BUFFERING         = (1 << 8),
-  GST_PLAY_FLAG_DEINTERLACE       = (1 << 9),
-  GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10)
-} GstPlayFlags;
-
-typedef enum {
-  GST_AUTOPLUG_SELECT_TRY,
-  GST_AUTOPLUG_SELECT_EXPOSE,
-  GST_AUTOPLUG_SELECT_SKIP
-} GstAutoplugSelectResult;
-
 typedef enum {
   MEDIA_SEEK_NONE,  // No seek
   MEDIA_SEEK_DEMUXER,  // Demuxer seeking
@@ -53,18 +18,12 @@ typedef enum {
   MEDIA_SEEK_PLAYER,  // Player is seeking
 } MediaSeekState;
 
-GstClockTime ConvertToGstClockTime(double time);
 double ConvertNanoSecondsToSeconds(int64 time);
 double ConvertMilliSecondsToSeconds(int time);
 double ConvertSecondsToMilliSeconds(double time);
 
 // Removes query string from URI.
 MEDIA_EXPORT GURL GetCleanURL(std::string url);
-int GetSN12BufferSize(int video_width, int video_height);
-bool GetGstVideoBufferMetaData(GstSample* sample,
-                               gint* video_width,
-                               gint* video_height,
-                               guint32* video_format);
 
 #if defined(OS_TIZEN_MOBILE)
 void WakeUpDisplayAndAcquireDisplayLock();
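
The GetSN12BufferSize() helper removed from media_player_util_efl.cc sizes a buffer for the SN12 (tiled NV12) layout produced by the hardware decoder path. Working its formula through for a 1920x1080 frame (an illustrative calculation, not part of the patch) shows how little the tile alignment adds over plain NV12:

// Worked example of the removed formula, using SN12_TILE_WIDTH = 64,
// SN12_TILE_HEIGHT = 32, SN12_TILE_SIZE = 2048 and a 1920x1080 frame.
unsigned tile_w_align = ((1920 - 1) / 64 + 2) & ~1u;       // 31 & ~1 = 30
int luma = 2048 * tile_w_align * ((1080 - 1) / 32 + 1);    // 2048 * 30 * 34 = 2088960
int chroma = (((1080 + 1) & ~1) / 2) * tile_w_align * 64;  // 540 * 30 * 64 = 1036800
int total = luma + chroma;                                 // 3125760 bytes (~2.98 MiB)
// Plain NV12 needs 1920 * 1080 * 3 / 2 = 3110400 bytes, so the tiled layout
// adds only about 15 KB of alignment padding at this resolution.
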
diff --git a/tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.cc b/tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.cc
deleted file mode 100644 (file)
index 04e8298..0000000
+++ /dev/null
@@ -1,1251 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/efl/media_source_player_gstreamer.h"
-
-#include <gst/app/gstappsink.h>
-#include <gst/app/gstappsrc.h>
-#include <gst/video/video.h>
-#include <gst/video/videooverlay.h>
-
-#include "base/process/process.h"
-#include "base/thread_task_runner_handle.h"
-#include "content/browser/media/efl/browser_demuxer_efl.h"
-#include "media/base/efl/media_player_manager_efl.h"
-#include "media/base/efl/media_player_util_efl.h"
-
-namespace {
-
-// Pipeline element name
-const char* kPipelineName = "gst_pipeline";
-
-// Update duration every 100ms.
-const int kDurationUpdateInterval = 100;
-
-// For smooth playback, seeking is done to the I-frame position plus
-// kSixteenMilliSeconds (a nanosecond value): at 60 fps each video frame
-// lasts roughly 16 milliseconds.
-const int64 kSixteenMilliSeconds = 16000000;
-const int kMaxBufPercent = 100;
-
-const int kBytesPerMegabyte = 1048576;
-
-const char* h264elements[] = {
-    "h264parse",
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-    "omx_h264dec",
-#else
-    "avdec_h264"
-#endif
-};
-
-const char* aacelements[] = {
-    "aacparse",
-#if defined(TIZEN_MULTIMEDIA_USE_HW_CODEC)
-    "omx_aacdec", "autoaudiosink",
-#else
-    "avdec_aac", "autoaudiosink"
-#endif
-};
-
-const char* mp3elements[] = {
-    "mpegaudioparse",
-    "avdec_mp3",
-    "autoaudiosink"
-};
-
-const AudioCodecGstElementsMapping AudioMapping[] = {
-    {media::kCodecAAC, aacelements},
-    {media::kCodecMP3, mp3elements},
-    {media::kUnknownAudioCodec, NULL}
-};
-
-const VideoCodecGstElementsMapping VideoMapping[] = {
-    {media::kCodecH264, h264elements},
-    {media::kUnknownVideoCodec, NULL}
-};
-
-}  // namespace
-
-namespace media {
-
-static GstBusSyncReply GstPipelineMessageCB(
-    GstBus* bus,
-    GstMessage* message,
-    gpointer user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_BUS_PASS;
-
-  player->HandleMessage(message);
-  gst_message_unref(message);
-  return GST_BUS_DROP;
-}
-
-static void  OnGstStartVideoFeedCB(
-    GstAppSrc* pipeline,
-    guint size,
-    void* user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return;
-  player->OnReadDemuxedData(media::DemuxerStream::VIDEO);
-  return;
-}
-
-static void OnGstStopVideoFeedCB(GstAppSrc* pipeline, void* user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return;
-  player->OnStopDemuxedData(media::DemuxerStream::VIDEO);
-}
-
-static gboolean OnGstSeekVideoFeedCB(
-    GstAppSrc* pipeline,
-    guint64 offset,
-    void* user_data) {
-
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return FALSE;
-  player->UpdateVideoSeekOffset(offset);
-  return TRUE;
-}
-
-static GstFlowReturn OnGstAppsinkPreroll(
-    GstAppSink* sink,
-    gpointer user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_FLOW_ERROR;
-  player->GetFrameDetails();
-  return GST_FLOW_OK;
-}
-
-static GstFlowReturn OnGstAppsinkBuffer(GstAppSink* sink, gpointer user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return GST_FLOW_ERROR;
-  player->OnNewFrameAvailable(player->PullSample());
-  return GST_FLOW_OK;
-}
-
-gboolean OnGstSeekAudioFeedCB(
-    GstAppSrc* pipeline,
-    guint64 offset,
-    void* user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return FALSE;
-  player->UpdateAudioSeekOffset(offset);
-  return TRUE;
-}
-
-static void OnGstStartAudioFeedCB(
-    GstAppSrc* pipeline,
-    guint size,
-    void* user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return;
-  player->OnReadDemuxedData(media::DemuxerStream::AUDIO);
-}
-
-static void OnGstStopAudioFeedCB(GstAppSrc* pipeline, void* user_data) {
-  MediaSourcePlayerGstreamer* player =
-      static_cast<MediaSourcePlayerGstreamer*>(user_data);
-  if (!player || player->IsPlayerDestructing())
-    return;
-  player->OnStopDemuxedData(media::DemuxerStream::AUDIO);
-}
-
-MediaPlayerEfl* MediaPlayerEfl::CreatePlayer(
-    int player_id, content::BrowserDemuxerEfl* demuxer,
-    int demuxer_id, MediaPlayerManager* manager) {
-  LOG(INFO) << "MediaSourceElement is using |Gstreamer| to play media";
-  return new MediaSourcePlayerGstreamer(player_id,
-                                        demuxer->CreateDemuxer(demuxer_id),
-                                        manager);
-}
-
-MediaSourcePlayerGstreamer::MediaSourcePlayerGstreamer(
-    int player_id,
-    scoped_ptr<DemuxerEfl> demuxer,
-    MediaPlayerManager* manager)
-    : MediaPlayerEfl(player_id, manager),
-      demuxer_(demuxer.Pass()),
-      task_runner_(base::ThreadTaskRunnerHandle::Get()),
-      playing_(false),
-      weak_this_(this),
-      is_xwindow_handle_set_(false),
-      pipeline_(NULL),
-      video_appsrc_(NULL),
-      video_queue_(NULL),
-      video_sink_(NULL),
-      audio_appsrc_(NULL),
-      audio_queue_(NULL),
-      audio_volume_(NULL),
-      should_feed_audio_(true),
-      should_feed_video_(false),
-      width_(0),
-      height_(0),
-      video_format_(0),
-      media_type_(0),
-      play_rate_(1.0f),
-      duration_(0),
-      is_paused_due_underflow_(false),
-      buffered_(0),
-      is_paused_(false),
-      is_seeking_(false),
-      is_demuxer_seeking_(false),
-      audio_buffered_(0),
-      video_buffered_(0),
-      is_download_finished_(false),
-      is_end_reached_(false),
-      error_occured_(false),
-      raw_video_frame_size_(0),
-      video_seek_offset_(0),
-      audio_seek_offset_(0),
-      is_seeking_iframe_(false) {
-  demuxer_->Initialize(this);
-  audio_buffer_queue_.clear();
-  video_buffer_queue_.clear();
-}
-
-void MediaSourcePlayerGstreamer::Initialize() {
-  manager()->OnInitComplete(GetPlayerId(), true);
-}
-
-void MediaSourcePlayerGstreamer::Destroy() {
-  if (IsPlayerDestructing())
-    return;
-
-  destructing_ = true;
-  Release();
-  task_runner_->DeleteSoon(FROM_HERE, this);
-}
-
-void MediaSourcePlayerGstreamer::Play() {
-  if (!pipeline_ || error_occured_)
-    return;
-  if (play_rate_ == 0.0) {
-    playing_ = true;
-    return;
-  }
-#if defined(OS_TIZEN_MOBILE)
-  WakeUpDisplayAndAcquireDisplayLock();
-#endif
-
-  gst_element_set_state(pipeline_, GST_STATE_PLAYING);
-  StartCurrentTimeUpdateTimer();
-  playing_ = true;
-  is_paused_due_underflow_ = false;
-}
-
-void MediaSourcePlayerGstreamer::Pause(bool is_media_related_action) {
-  if (!pipeline_ || error_occured_)
-    return;
-
-  gst_element_set_state(pipeline_, GST_STATE_PAUSED);
-  StopCurrentTimeUpdateTimer();
-  if (!is_media_related_action) {
-#if defined(OS_TIZEN_MOBILE)
-    ReleaseDisplayLock();
-#endif
-    is_paused_due_underflow_ = false;
-    playing_ = false;
-  }
-}
-
-void MediaSourcePlayerGstreamer::SetRate(double rate) {
-  if (play_rate_ == rate)
-    return;
-  if (rate == 0.0) {
-    play_rate_ = rate;
-    Pause(true);
-    return;
-  }
-
-  // If rate was zero and requested rate is non-zero, change the paused state
-  if (play_rate_ == 0.0 && rate != 0.0) {
-    Play();
-    StartCurrentTimeUpdateTimer();
-  }
-
-  play_rate_ = rate;
-
-  RequestPlayerSeek(GetCurrentTime());
-}
-
-void MediaSourcePlayerGstreamer::RequestPlayerSeek(double seekTime) {
-  if (is_demuxer_seeking_)
-    return;
-  GstState state = GST_STATE_VOID_PENDING;
-  gst_element_get_state(pipeline_, &state, NULL, 250 * GST_NSECOND);
-  is_demuxer_seeking_ = true;
-  if (state == GST_STATE_PLAYING)
-    Pause(true);
-  manager()->OnRequestSeek(GetPlayerId(), seekTime);
-}
-
-void MediaSourcePlayerGstreamer::Seek(const double time) {
-  GstState state = GST_STATE_VOID_PENDING;
-  gst_element_get_state(pipeline_, &state, NULL, 250 * GST_NSECOND);
-
-  is_seeking_iframe_ = false;
-  is_demuxer_seeking_ = true;
-  if (state == GST_STATE_PLAYING)
-    Pause(true);
-
-  // Input to |FromMicroseconds| is |int64|. Additional multiplication
-  // is done to avoid data loss.
-  base::TimeDelta seek_time = base::TimeDelta::FromMicroseconds(
-      static_cast<int64>(time * base::Time::kMicrosecondsPerSecond));
-  demuxer_->RequestDemuxerSeek(seek_time);
-}
-
-void MediaSourcePlayerGstreamer::SeekInternal(const GstClockTime position) {
-  if (!pipeline_ || error_occured_)
-    return;
-
-  GstClockTime startTime = 0, endTime = position;
-
-  is_demuxer_seeking_ = false;
-
-  if (play_rate_ > 0) {
-    startTime = position;
-    endTime = GST_CLOCK_TIME_NONE;
-  }
-
-  UpdateSeekState(true);
-  audio_buffer_queue_.clear();
-  video_buffer_queue_.clear();
-  if (!gst_element_seek(pipeline_, play_rate_, GST_FORMAT_TIME,
-                        static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH |
-                                                  GST_SEEK_FLAG_ACCURATE),
-                        GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET,
-                        endTime)) {
-    LOG(ERROR) << "Seek to " << position << " failed";
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-  }
-}
-
-double MediaSourcePlayerGstreamer::GetCurrentTime() {
-  if (!pipeline_ || error_occured_)
-    return 0.0;
-
-  gint64 current_time = 0;
-  GstFormat format = GST_FORMAT_TIME;
-  gst_element_query_position(pipeline_, format, &current_time);
-  return ConvertNanoSecondsToSeconds(current_time);
-}
-
-void MediaSourcePlayerGstreamer::Release() {
-  DCHECK(IsPlayerDestructing());
-  playing_ = false;
-  StopCurrentTimeUpdateTimer();
-  audio_buffer_queue_.clear();
-  video_buffer_queue_.clear();
-
-  if (pipeline_) {
-    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
-    if (bus) {
-      g_signal_handlers_disconnect_by_func(
-          bus,
-          reinterpret_cast<gpointer>(GstPipelineMessageCB),
-          this);
-      gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
-      gst_object_unref(bus);
-    }
-
-    gst_element_set_state(pipeline_, GST_STATE_NULL);
-    gst_object_unref(pipeline_);
-    pipeline_ = NULL;
-  }
-}
-
-void MediaSourcePlayerGstreamer::SetVolume(double volume) {
-  if (audio_volume_)
-    g_object_set(G_OBJECT(audio_volume_), "volume", volume, NULL);
-}
-
-void MediaSourcePlayerGstreamer::OnDemuxerConfigsAvailable(
-    const DemuxerConfigs& configs) {
-  if (IsPlayerDestructing())
-    return;
-
-  // Video elements
-  GstElement* video_parse = NULL;
-  GstElement* video_decoder = NULL;
-
-  // Audio elements
-  GstElement* audio_decoder = NULL;
-  GstElement* audio_parse = NULL;
-  GstElement* audio_convert = NULL;
-  GstElement* audio_resampler = NULL;
-  GstElement* audio_sink = NULL;
-
-  if ((configs.video_codec == kUnknownVideoCodec ||
-      configs.video_codec != kCodecH264) &&
-      (configs.audio_codec == kUnknownAudioCodec ||
-      (configs.audio_codec != kCodecAAC &&
-      configs.audio_codec != kCodecMP3))) {
-    LOG(ERROR) << "Audio and Video codecs not supported for MediaSource";
-    HandleError(blink::WebMediaPlayer::NetworkStateFormatError);
-    return;
-  }
-
-  width_ = configs.video_size.width();
-  height_ = configs.video_size.height();
-
-  if (pipeline_ != NULL)
-    return;
-
-  if (!gst_is_initialized())
-    gst_init_check(NULL, NULL, 0);
-
-  if (!gst_is_initialized()) {
-    LOG(ERROR) << "Unable to initialize GST";
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-    return;
-  }
-
-  pipeline_ = gst_pipeline_new(kPipelineName);
-  if (!pipeline_) {
-    LOG(ERROR) << "Unable to Create |Pipeline|";
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-    return;
-  }
-
-  for (int i = 0; VideoMapping[i].codec != kUnknownVideoCodec; i++) {
-    if (configs.video_codec == VideoMapping[i].codec) {
-      media_type_ |= MEDIA_VIDEO_MASK;
-      video_appsrc_ = gst_element_factory_make("appsrc", "video-source");
-      if (video_appsrc_ && !gst_bin_add(GST_BIN(pipeline_), video_appsrc_)) {
-        gst_object_unref(video_appsrc_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-      video_parse = gst_element_factory_make(VideoMapping[i].elements[0],
-                                             "video-parse");
-      if (video_parse && !gst_bin_add(GST_BIN(pipeline_), video_parse)) {
-        gst_object_unref(video_parse);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-      video_queue_ = gst_element_factory_make("queue2", "video-queue");
-      if (video_queue_ && !gst_bin_add(GST_BIN(pipeline_), video_queue_)) {
-        gst_object_unref(video_queue_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-      video_decoder = gst_element_factory_make(VideoMapping[i].elements[1],
-                                               "video-decoder");
-      if (video_decoder && !gst_bin_add(GST_BIN(pipeline_), video_decoder)) {
-        gst_object_unref(video_decoder);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-      PrepareForVideoFrame();
-
-      if (video_sink_ && !gst_bin_add(GST_BIN(pipeline_), video_sink_)) {
-        gst_object_unref(video_sink_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      g_object_set(GST_OBJECT(video_appsrc_), "format", GST_FORMAT_TIME,
-                   NULL);
-      g_object_set(GST_OBJECT(video_appsrc_), "stream-type",
-                   GST_APP_STREAM_TYPE_SEEKABLE, NULL);
-      g_object_set(GST_OBJECT(video_appsrc_), "do-timestamp", false, NULL);
-
-      // Make the queue emit GST_MESSAGE_BUFFERING messages.
-      g_object_set(G_OBJECT(video_queue_), "use-buffering", true, NULL);
-
-      // Why is |video_queue_| placed after |video_appsrc_|?
-      // For illustration, consider the video at http://tinyurl.com/qos-iron.
-      // At 1080p, each decoded frame of that video is 2304000 bytes
-      // (~2.19 MB). If |video_queue_| were placed before |video_sink_|, the
-      // queue would buffer decoded frames, so holding two seconds' worth of
-      // data would need 2304000 * 24 (fps) * 2 bytes, on the order of
-      // 100 MB of queue. That limit can't be set for pixmap-backed playback,
-      // because the frame size isn't available for a pixmap-backed
-      // |video_sink_|, and it varies from video to video.
-      //
-      // If |video_queue_| is instead placed after |video_appsrc_|, the queue
-      // buffers encoded data. For the same 1080p video the largest encoded
-      // frame is 115398 bytes (~0.11 MB), so two seconds of data needs only
-      // about 5308308 bytes (~5 MB), and that limit can be set dynamically.
-      // See |OnDemuxerDataAvailable| for how the queue size is set.
-
-      if (!gst_element_link_many(video_appsrc_, video_queue_, video_parse,
-                                 video_decoder, video_sink_, NULL)) {
-        LOG(ERROR) << "Video pipeline couldn't be created / linked";
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      GstAppSrcCallbacks video_callbacks = {
-          OnGstStartVideoFeedCB,
-          OnGstStopVideoFeedCB,
-          OnGstSeekVideoFeedCB,
-          {NULL}};
-
-      gst_app_src_set_callbacks(GST_APP_SRC(video_appsrc_), &video_callbacks,
-                                this, NULL);
-      break;
-    }
-  }
-
-  for (int i = 0; AudioMapping[i].codec != kUnknownAudioCodec; i++) {
-    if (configs.audio_codec == AudioMapping[i].codec) {
-      media_type_ |= MEDIA_AUDIO_MASK;
-      audio_appsrc_ = gst_element_factory_make("appsrc", "audio-source");
-      if (audio_appsrc_ && !gst_bin_add(GST_BIN(pipeline_), audio_appsrc_)) {
-        gst_object_unref(audio_appsrc_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_queue_ = gst_element_factory_make("queue2", "audio-queue");
-      if (audio_queue_ && !gst_bin_add(GST_BIN(pipeline_), audio_queue_)) {
-        gst_object_unref(audio_queue_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_parse = gst_element_factory_make(AudioMapping[i].elements[0],
-                                             "audio-parse");
-      if (audio_parse && !gst_bin_add(GST_BIN(pipeline_), audio_parse)) {
-        gst_object_unref(audio_parse);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_decoder = gst_element_factory_make(AudioMapping[i].elements[1],
-                                               "audio-decoder");
-      if (audio_decoder && !gst_bin_add(GST_BIN(pipeline_), audio_decoder)) {
-        gst_object_unref(audio_decoder);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_convert = gst_element_factory_make("audioconvert",
-                                               "audio-convert");
-      if (audio_convert && !gst_bin_add(GST_BIN(pipeline_), audio_convert)) {
-        gst_object_unref(audio_convert);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_resampler = gst_element_factory_make("audioresample",
-                                                 "audio-resample");
-      if (audio_resampler &&
-          !gst_bin_add(GST_BIN(pipeline_), audio_resampler)) {
-        gst_object_unref(audio_resampler);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_volume_ = gst_element_factory_make("volume", "volume");
-      if (audio_volume_ && !gst_bin_add(GST_BIN(pipeline_), audio_volume_)) {
-        gst_object_unref(audio_volume_);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      audio_sink = gst_element_factory_make(AudioMapping[i].elements[2],
-                                            "audio-sink");
-      if (audio_sink && !gst_bin_add(GST_BIN(pipeline_), audio_sink)) {
-        gst_object_unref(audio_sink);
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-        return;
-      }
-
-      g_object_set(GST_OBJECT(audio_appsrc_), "format", GST_FORMAT_TIME,
-                   NULL);
-      g_object_set(GST_OBJECT(audio_appsrc_), "stream-type",
-                   GST_APP_STREAM_TYPE_SEEKABLE, NULL);
-      g_object_set(GST_OBJECT(audio_appsrc_), "do-timestamp", false, NULL);
-      g_object_set(G_OBJECT(audio_queue_), "use-buffering", true, NULL);
-      g_object_set(G_OBJECT(audio_volume_), "mute", false, NULL);
-
-      if (!gst_element_link_many(audio_appsrc_, audio_queue_, audio_parse,
-                                 audio_decoder, audio_convert,
-                                 audio_resampler, audio_volume_,
-                                 audio_sink, NULL)) {
-        LOG(ERROR) << "Not all elements of audio pipeline could be linked";
-        HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-      }
-
-      GstAppSrcCallbacks audio_callbacks = {
-          OnGstStartAudioFeedCB,
-          OnGstStopAudioFeedCB,
-          OnGstSeekAudioFeedCB,
-          {NULL}};
-      gst_app_src_set_callbacks(GST_APP_SRC(audio_appsrc_),
-                                &audio_callbacks, this, NULL);
-      break;
-    }
-  }
-
-  GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
-  if (!bus) {
-    LOG(ERROR) << "GStreamer bus creation failed";
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-    return;
-  }
-  gst_bus_set_sync_handler(
-      bus,
-      static_cast<GstBusSyncHandler>(GstPipelineMessageCB),
-      this,
-      NULL);
-  gst_object_unref(bus);
-
-  manager()->OnMediaDataChange(GetPlayerId(), video_format_, height_,
-                               width_, media_type_);
-  manager()->OnReadyStateChange(GetPlayerId(),
-                                blink::WebMediaPlayer::ReadyStateHaveMetadata);
-
-  if (gst_element_set_state(pipeline_, GST_STATE_PAUSED) ==
-          GST_STATE_CHANGE_FAILURE) {
-    LOG(ERROR) << "GStreamer state change failed";
-  }
-}
-
-void MediaSourcePlayerGstreamer::PrepareForVideoFrame() {
-  is_xwindow_handle_set_ = true;
-  video_sink_ = gst_element_factory_make("appsink", "sink");
-  GstAppSinkCallbacks callbacks = {
-      NULL,
-      OnGstAppsinkPreroll,
-      OnGstAppsinkBuffer};
-  gst_app_sink_set_callbacks(GST_APP_SINK(video_sink_),
-                             &callbacks, this, NULL);
-  g_object_set(G_OBJECT(video_sink_), "max-buffers",
-               static_cast<guint>(1), NULL);
-}
-
-void MediaSourcePlayerGstreamer::ReadDemuxedData(
-    media::DemuxerStream::Type type) {
-  if (IsPlayerDestructing())
-    return;
-
-  if (type == media::DemuxerStream::AUDIO) {
-    should_feed_audio_ = true;
-  } else if (type == media::DemuxerStream::VIDEO) {
-    should_feed_video_ = true;
-  } else {
-    LOG(ERROR) << "Unknown Media Type";
-    return;
-  }
-  demuxer_->RequestDemuxerData(type);
-}
-
-void MediaSourcePlayerGstreamer::OnReadDemuxedData(
-    media::DemuxerStream::Type type) {
-  if (IsPlayerDestructing()) {
-    LOG(ERROR) << "GST is deinitializing. Just return";
-    return;
-  }
-  task_runner_->PostTask(
-      FROM_HERE, base::Bind(&MediaSourcePlayerGstreamer::ReadDemuxedData,
-                            base::Unretained(this),
-                            type));
-}
-
-void MediaSourcePlayerGstreamer::OnStopDemuxedData(
-    media::DemuxerStream::Type type) {
-  if (type == media::DemuxerStream::AUDIO)
-    should_feed_audio_ = false;
-  else if (type == media::DemuxerStream::VIDEO)
-    should_feed_video_ = false;
-  else
-    LOG(ERROR) << "Unknown media stream!";
-}
-
-void MediaSourcePlayerGstreamer::OnDemuxerDataAvailable(
-    base::SharedMemoryHandle foreign_memory_handle,
-    const media::DemuxedBufferMetaData& meta_data) {
-  if (!pipeline_ || error_occured_) {
-    LOG(ERROR) << "Pipeline_ null or error occured";
-    return;
-  }
-  if (meta_data.status != media::DemuxerStream::kOk ||
-      meta_data.end_of_stream)
-    BufferMetaDataAvailable(meta_data);
-  if (meta_data.size <= 0) {
-    LOG(ERROR) << "ERROR : Size of shared memory is Zero";
-    return;
-  }
-
-  int64 metadata_timestamp_in_ms = meta_data.timestamp.InMicroseconds() * 1000;
-  if (is_seeking_ && !is_seeking_iframe_) {
-    if (meta_data.type == media::DemuxerStream::VIDEO) {
-      is_seeking_iframe_ = true;
-      if (video_seek_offset_ >
-              static_cast<guint64>(metadata_timestamp_in_ms)) {
-        int64 time = metadata_timestamp_in_ms + kSixteenMilliSeconds;
-        RequestPlayerSeek(ConvertNanoSecondsToSeconds(time));
-        return;
-      }
-    } else if (meta_data.type == media::DemuxerStream::AUDIO) {
-        if (audio_seek_offset_ >
-                static_cast<guint64>(metadata_timestamp_in_ms))
-          return;
-    }
-  }
-
-  ReadFromQueueIfAny(meta_data.type);
-  if (meta_data.type == media::DemuxerStream::VIDEO) {
-    if (meta_data.size != raw_video_frame_size_) {
-      // Dynamically change the video queue size for smooth playback.
-      // The default queue size limits are 100 buffers, 2 MB of data,
-      // or two seconds' worth of data, whichever is reached first.
-      // Adjust the queue to hold two seconds' worth of data, which means
-      // raising the number of buffers (max-size-buffers >= 2 * fps) and the
-      // maximum queue size (max-size-bytes >= 2 * fps * meta_data.size).
-      //
-      // 1000000 microseconds = 1 second.
-      // 2097152 bytes = 2 MB.
-      int no_frames_per_two_second, queue_size_for_two_sec;
-      raw_video_frame_size_ = meta_data.size;
-      no_frames_per_two_second = 2 * (1000000 /
-          (meta_data.time_duration.InMicroseconds()));
-      queue_size_for_two_sec =
-          raw_video_frame_size_ * no_frames_per_two_second;
-      if (no_frames_per_two_second > 100) {
-        g_object_set(G_OBJECT(video_queue_), "max-size-buffers",
-                     static_cast<guint>(no_frames_per_two_second), NULL);
-      }
-      if (queue_size_for_two_sec > 2 * kBytesPerMegabyte) {
-        g_object_set(G_OBJECT(video_queue_), "max-size-bytes",
-                     static_cast<guint>(queue_size_for_two_sec), NULL);
-      }
-    }
-  }
-  if (meta_data.type == media::DemuxerStream::AUDIO && !should_feed_audio_) {
-    // Why store the DecoderBuffer? We had already requested a buffer from
-    // the demuxer when GStreamer asked us to stop, so save the buffer and
-    // push it on the next |need_data| call.
-    SaveDecoderBuffer(foreign_memory_handle, meta_data);
-    return;
-  }
-  if (meta_data.type == media::DemuxerStream::VIDEO && !should_feed_video_) {
-    SaveDecoderBuffer(foreign_memory_handle, meta_data);
-    return;
-  }
-
-  // Wrapping each frame and deleting the shared memory from a callback
-  // does not work: GStreamer is likely to retain frames (such as I-frames),
-  // so accessing the already-released shared memory would crash. Instead,
-  // copy the frame and release the shared memory right away.
-
-  base::SharedMemory shared_memory(foreign_memory_handle, false);
-  if (!shared_memory.Map(meta_data.size)) {
-    LOG(ERROR) << "Failed to map shared memory of size " << meta_data.size;
-    return;
-  }
-  gint size = meta_data.size;
-  GstFlowReturn ret = GST_FLOW_OK;
-  GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
-  gst_buffer_fill(buffer, 0, shared_memory.memory(), size);
-
-  GST_BUFFER_TIMESTAMP(buffer) =
-      static_cast<guint64>(metadata_timestamp_in_ms);
-  GST_BUFFER_DURATION(buffer) =
-      static_cast<guint64>(meta_data.time_duration.InMicroseconds() * 1000);
-
-  if (meta_data.type == media::DemuxerStream::AUDIO)
-    ret = gst_app_src_push_buffer(GST_APP_SRC(audio_appsrc_),
-                                  buffer);
-  else if (meta_data.type == media::DemuxerStream::VIDEO)
-    ret = gst_app_src_push_buffer(GST_APP_SRC(video_appsrc_),
-                                  buffer);
-
-  // gst_app_src_push_buffer() takes ownership of the buffer.
-  // Hence no need to unref buffer.
-  if (ret != GST_FLOW_OK) {
-    LOG(ERROR) << "Gstreamer appsrc push failed : " << ret;
-    return;
-  }
-
-  if (meta_data.type == media::DemuxerStream::AUDIO && should_feed_audio_)
-    OnReadDemuxedData(media::DemuxerStream::AUDIO);
-  else if (meta_data.type == media::DemuxerStream::VIDEO && should_feed_video_)
-    OnReadDemuxedData(media::DemuxerStream::VIDEO);
-  return;
-}
-
-void MediaSourcePlayerGstreamer::BufferMetaDataAvailable(
-    const media::DemuxedBufferMetaData& meta_data) {
-  if (!pipeline_ || error_occured_) {
-    LOG(ERROR) << "Pipeline_ null or error occured";
-    return;
-  }
-
-  switch (meta_data.status) {
-    case media::DemuxerStream::kAborted:
-      if (meta_data.type == media::DemuxerStream::AUDIO && should_feed_audio_)
-        OnReadDemuxedData(media::DemuxerStream::AUDIO);
-      else if (meta_data.type == media::DemuxerStream::VIDEO &&
-               should_feed_video_)
-        OnReadDemuxedData(media::DemuxerStream::VIDEO);
-      break;
-
-    case media::DemuxerStream::kConfigChanged:
-      if (meta_data.type == media::DemuxerStream::AUDIO && should_feed_audio_)
-        OnReadDemuxedData(media::DemuxerStream::AUDIO);
-      else if (meta_data.type == media::DemuxerStream::VIDEO &&
-               should_feed_video_)
-        OnReadDemuxedData(media::DemuxerStream::VIDEO);
-      break;
-
-    case media::DemuxerStream::kOk:
-      if (meta_data.end_of_stream) {
-        ReadFromQueueIfAny(meta_data.type);
-        LOG(ERROR) <<"[BROWSER] : DemuxerStream::kOk but |end_of_stream|";
-        if (meta_data.type == media::DemuxerStream::AUDIO)
-          gst_app_src_end_of_stream(GST_APP_SRC(audio_appsrc_));
-        if (meta_data.type == media::DemuxerStream::VIDEO)
-          gst_app_src_end_of_stream(GST_APP_SRC(video_appsrc_));
-        if (playing_)
-          Play();
-      }
-      break;
-
-    default:
-      NOTREACHED();
-  }
-}
-
-void MediaSourcePlayerGstreamer::ReadFromQueueIfAny(
-    DemuxerStream::Type type) {
-  if (!pipeline_ || error_occured_) {
-    LOG(ERROR) << "Pipeline_ null or error occured";
-    return;
-  }
-
-  if (type == media::DemuxerStream::AUDIO) {
-    if (audio_buffer_queue_.empty() || !should_feed_audio_)
-      return;
-  }
-
-  if (type == media::DemuxerStream::VIDEO) {
-    if (video_buffer_queue_.empty() || !should_feed_video_)
-      return;
-  }
-
-  scoped_refptr<DecoderBuffer> decoder_buffer;
-  if (type == media::DemuxerStream::AUDIO) {
-    decoder_buffer = audio_buffer_queue_.front();
-    audio_buffer_queue_.pop_front();
-  } else {
-    decoder_buffer = video_buffer_queue_.front();
-    video_buffer_queue_.pop_front();
-  }
-
-  // Wrapping each frame and deleting the shared memory from a callback
-  // does not work: GStreamer is likely to retain frames (such as I-frames),
-  // so accessing the already-released shared memory would crash. Instead,
-  // copy the frame and release the shared memory right away.
-
-  GstFlowReturn ret;
-  gint size = decoder_buffer.get()->data_size();
-  GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
-  if (!buffer)
-    return;
-
-  gst_buffer_fill(buffer, 0, decoder_buffer.get()->writable_data(), size);
-
-  GST_BUFFER_TIMESTAMP(buffer) =
-      static_cast<guint64>(decoder_buffer.get()->timestamp().InMicroseconds() *
-                           1000);
-  GST_BUFFER_DURATION(buffer) =
-      static_cast<guint64>(decoder_buffer.get()->duration().InMicroseconds() *
-                           1000);
-
-  if (type == media::DemuxerStream::AUDIO)
-    ret = gst_app_src_push_buffer(GST_APP_SRC(audio_appsrc_), buffer);
-  else
-    ret = gst_app_src_push_buffer(GST_APP_SRC(video_appsrc_), buffer);
-  if (ret != GST_FLOW_OK)
-    return;
-
-  // Empty the Buffer before reading the new buffer from render process.
-  ReadFromQueueIfAny(type);
-  return;
-}
-
-void MediaSourcePlayerGstreamer::SaveDecoderBuffer(
-    base::SharedMemoryHandle foreign_memory_handle,
-    const media::DemuxedBufferMetaData& meta_data) {
-  if (!pipeline_ || error_occured_) {
-    LOG(ERROR) << "Pipeline_ null or error occured";
-    return;
-  }
-
-  base::SharedMemory shared_memory(foreign_memory_handle, false);
-  if (!shared_memory.Map(meta_data.size)) {
-    LOG(ERROR) << "Failed to map shared memory of size " << meta_data.size;
-    return;
-  }
-  scoped_refptr<DecoderBuffer> buffer;
-  buffer = DecoderBuffer::CopyFrom(static_cast<const uint8*> (
-      shared_memory.memory()), meta_data.size);
-
-  if (!buffer.get()) {
-    LOG(ERROR) << "DecoderBuffer::CopyFrom failed";
-    return;
-  }
-
-  buffer->set_timestamp(meta_data.timestamp);
-  buffer->set_duration(meta_data.time_duration);
-
-  if (meta_data.type == media::DemuxerStream::AUDIO)
-    audio_buffer_queue_.push_back(buffer);
-  else
-    video_buffer_queue_.push_back(buffer);
-}
-
-void MediaSourcePlayerGstreamer::GetFrameDetails() {
-  if (!pipeline_ || error_occured_)
-    return;
-
-  GstState state = GST_STATE_VOID_PENDING;
-  GstState pending = GST_STATE_VOID_PENDING;
-  gst_element_get_state(pipeline_, &state, &pending, 250 * GST_NSECOND);
-
-  // Get details only after prerolling.
-  if (pending >= GST_STATE_PAUSED)
-    task_runner_->PostTask(
-        FROM_HERE,
-        base::Bind(&MediaSourcePlayerGstreamer::OnGetFrameDetails,
-                   base::Unretained(this)));
-}
-
-void MediaSourcePlayerGstreamer::OnGetFrameDetails() {
-  if (!pipeline_ || IsPlayerDestructing() || error_occured_)
-    return;
-
-  GstSample* sample = gst_app_sink_pull_preroll(GST_APP_SINK(video_sink_));
-  if (!sample)
-    return;
-
-  if (!GetGstVideoBufferMetaData(sample, &width_,
-                                 &height_, &video_format_)) {
-    gst_sample_unref(sample);
-    return;
-  }
-
-  gst_sample_unref(sample);
-
-  if (video_format_ == GST_VIDEO_SN12)
-    sn12_bufsize_ = GetSN12BufferSize(width_, height_);
-
-  manager()->OnMediaDataChange(GetPlayerId(), video_format_, height_,
-                               width_, media_type_);
-}
-
-GstSample* MediaSourcePlayerGstreamer::PullSample() {
-  return gst_app_sink_pull_sample(GST_APP_SINK(video_sink_));
-}
-
-void MediaSourcePlayerGstreamer::OnNewFrameAvailable(GstSample* sample) {
-  if (!sample)
-    return;
-
-  if (!pipeline_ || error_occured_) {
-    gst_sample_unref(sample);
-    return;
-  }
-
-  GstMapInfo map;
-  GstBuffer* buffer = gst_sample_get_buffer(sample);
-  if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
-    LOG(ERROR) << "Sample contains invalid or no info!";
-    return;
-  }
-
-  if (!width_ || !height_)
-    GetFrameDetails();
-  int width;
-  int height;
-  if (!GetGstVideoBufferMetaData(sample, &width,
-                                 &height, NULL)) {
-    gst_sample_unref(sample);
-    return;
-  }
-
-  if (width != width_ || height != height_) {
-    width_ = width;
-    height_ = height;
-    manager()->OnMediaDataChange(GetPlayerId(), video_format_, height_,
-                                 width_, media_type_);
-  }
-  base::SharedMemory shared_memory;
-  uint32 shared_memory_size = 0;
-  base::SharedMemoryHandle foreign_memory_handle;
-
-  base::TimeDelta timestamp =
-      base::TimeDelta::FromMicroseconds(
-          GST_BUFFER_TIMESTAMP(buffer) /
-          base::Time::kNanosecondsPerMicrosecond);
-
-  if (video_format_ == GST_VIDEO_SN12)
-    shared_memory_size = (sn12_bufsize_);
-  else
-    shared_memory_size = (map.size);
-
-  if (!shared_memory.CreateAndMapAnonymous(shared_memory_size)) {
-    LOG(ERROR) << "Shared Memory creation failed.";
-    gst_buffer_unmap(buffer, &map);
-    gst_sample_unref(sample);
-    return;
-  }
-
-  if (!shared_memory.ShareToProcess(base::Process::Current().Handle(),
-      &foreign_memory_handle)) {
-    LOG(ERROR) << "Shared Memory handle could not be obtained";
-    gst_buffer_unmap(buffer, &map);
-    gst_sample_unref(sample);
-    return;
-  }
-
-  memcpy(shared_memory.memory(), map.data, shared_memory_size);
-  manager()->OnNewFrameAvailable(
-      GetPlayerId(), foreign_memory_handle, shared_memory_size, timestamp);
-
-  gst_buffer_unmap(buffer, &map);
-  gst_sample_unref(sample);
-}
-
-void MediaSourcePlayerGstreamer::OnDemuxerDurationChanged(
-    base::TimeDelta duration) {
-  duration_ = duration.InSecondsF();
-}
-
-void MediaSourcePlayerGstreamer::OnDemuxerSeekDone(
-    const base::TimeDelta& actual_browser_seek_time) {
-  SeekInternal(ConvertToGstClockTime(actual_browser_seek_time.InSecondsF()));
-}
-
-bool MediaSourcePlayerGstreamer::HasVideo() {
-  return media_type_ & MEDIA_VIDEO_MASK;
-}
-
-bool MediaSourcePlayerGstreamer::HasAudio() {
-  return media_type_ & MEDIA_AUDIO_MASK;
-}
-
-void MediaSourcePlayerGstreamer::OnCurrentTimeUpdateTimerFired() {
-  manager()->OnTimeUpdate(GetPlayerId(), GetCurrentTime());
-}
-
-void MediaSourcePlayerGstreamer::StartCurrentTimeUpdateTimer() {
-  if (!current_time_update_timer_.IsRunning()) {
-    current_time_update_timer_.Start(
-        FROM_HERE,
-        base::TimeDelta::FromMilliseconds(kDurationUpdateInterval),
-        this,
-        &MediaSourcePlayerGstreamer::OnCurrentTimeUpdateTimerFired);
-  }
-}
-
-void MediaSourcePlayerGstreamer::StopCurrentTimeUpdateTimer() {
-  if (current_time_update_timer_.IsRunning())
-    current_time_update_timer_.Stop();
-}
-
-void MediaSourcePlayerGstreamer::HandleMessage(GstMessage* message) {
-  if (!pipeline_ || error_occured_)
-    return;
-
-  switch (GST_MESSAGE_TYPE(message)) {
-    case GST_MESSAGE_ERROR: {
-      GError* error = NULL;
-      blink::WebMediaPlayer::NetworkState network_state_error =
-          blink::WebMediaPlayer::NetworkStateEmpty;
-      gst_message_parse_error(message, &error, NULL);
-      if (error->code == GST_STREAM_ERROR_CODEC_NOT_FOUND ||
-          error->code == GST_STREAM_ERROR_WRONG_TYPE ||
-          error->code == GST_STREAM_ERROR_FAILED ||
-          error->code == GST_RESOURCE_ERROR_NOT_FOUND) {
-        network_state_error = blink::WebMediaPlayer::NetworkStateFormatError;
-      } else if (error->domain == GST_RESOURCE_ERROR) {
-        network_state_error = blink::WebMediaPlayer::NetworkStateNetworkError;
-      } else {
-        network_state_error = blink::WebMediaPlayer::NetworkStateDecodeError;
-      }
-
-      LOG(ERROR) << "Error Message : " << error->message << " Recieved From : "
-                 << GST_MESSAGE_SRC_NAME(message)
-                 << ", and Blink Error Code  = " << network_state_error;
-      g_error_free(error);
-      HandleError(network_state_error);
-      break;
-    }
-    case GST_MESSAGE_EOS:
-      task_runner_->PostTask(FROM_HERE, base::Bind(
-          &MediaSourcePlayerGstreamer::OnPlaybackComplete,
-          base::Unretained(this)));
-      break;
-    case GST_MESSAGE_ASYNC_DONE:
-      if (is_seeking_) {
-        is_seeking_iframe_ = false;
-        task_runner_->PostTask(
-            FROM_HERE,
-            base::Bind(&MediaSourcePlayerGstreamer::UpdateSeekState,
-                       base::Unretained(this),
-                       false));
-
-        // Initiate play for internal seeks.
-        if (playing_)
-          task_runner_->PostTask(FROM_HERE,
-                                 base::Bind(&MediaSourcePlayerGstreamer::Play,
-                                 base::Unretained(this)));
-
-        manager()->OnTimeUpdate(GetPlayerId(), GetCurrentTime());
-        task_runner_->PostTask(
-            FROM_HERE,
-            base::Bind(&MediaSourcePlayerGstreamer::OnTimeChanged,
-                       base::Unretained(this)));
-      }
-      break;
-    case GST_MESSAGE_STATE_CHANGED:
-      if (strcmp(kPipelineName, GST_MESSAGE_SRC_NAME(message)))
-        break;
-      task_runner_->PostTask(
-            FROM_HERE,
-            base::Bind(&MediaSourcePlayerGstreamer::OnUpdateStates,
-                       base::Unretained(this)));
-      break;
-    case GST_MESSAGE_BUFFERING: {
-      int buffered = 0;
-      gst_message_parse_buffering(message, &buffered);
-
-      if (audio_queue_ && GST_MESSAGE_SRC(message) ==
-              GST_OBJECT(audio_queue_))
-        audio_buffered_ = buffered;
-      else if (video_queue_ && GST_MESSAGE_SRC(message) ==
-                   GST_OBJECT(video_queue_))
-        video_buffered_ = buffered;
-
-      if (playing_) {
-        task_runner_->PostTask(
-            FROM_HERE,
-            base::Bind(&MediaSourcePlayerGstreamer::HandleBufferingMessage,
-                       base::Unretained(this)));
-      }
-      break;
-    }
-    default:
-      break;
-  }
-}
-
-void MediaSourcePlayerGstreamer::OnUpdateStates() {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-  if (!pipeline_ || IsPlayerDestructing() || error_occured_)
-    return;
-
-  GstState state = GST_STATE_VOID_PENDING;
-  GstState pending = GST_STATE_VOID_PENDING;
-  GstStateChangeReturn ret = gst_element_get_state(
-      pipeline_, &state, &pending, 250 * GST_NSECOND);
-
-  switch (ret) {
-  case GST_STATE_CHANGE_SUCCESS:
-    switch (state) {
-    case GST_STATE_PAUSED:
-      manager()->OnReadyStateChange(
-          GetPlayerId(),
-          blink::WebMediaPlayer::ReadyStateHaveEnoughData);
-      break;
-    default:
-      break;
-    }
-    break;
-  case GST_STATE_CHANGE_FAILURE:
-    LOG(ERROR) << "Failure: State: "
-               << gst_element_state_get_name(state)
-               << " pending: "
-               << gst_element_state_get_name(pending);
-    HandleError(blink::WebMediaPlayer::NetworkStateDecodeError);
-    break;
-  default:
-    break;
-  }
-}
-
-void MediaSourcePlayerGstreamer::HandleBufferingMessage() {
-  if (IsPlayerDestructing())
-    return;
-  if (!is_paused_due_underflow_ &&
-      ((HasAudio() && audio_buffered_ < kMaxBufPercent) ||
-        (HasVideo() && video_buffered_ < kMaxBufPercent))) {
-    is_paused_due_underflow_ = true;
-    Pause(true);
-    manager()->OnReadyStateChange(
-        GetPlayerId(),
-        blink::WebMediaPlayer::ReadyStateHaveCurrentData);
-    manager()->OnNetworkStateChange(
-        GetPlayerId(),
-        blink::WebMediaPlayer::NetworkStateLoading);
-  } else if (is_paused_due_underflow_ &&
-             (!HasAudio() || audio_buffered_ == kMaxBufPercent) &&
-             (!HasVideo() || video_buffered_ == kMaxBufPercent)) {
-    is_paused_due_underflow_ = false;
-    Play();
-    manager()->OnReadyStateChange(
-        GetPlayerId(),
-        blink::WebMediaPlayer::ReadyStateHaveEnoughData);
-    manager()->OnNetworkStateChange(
-        GetPlayerId(),
-        blink::WebMediaPlayer::NetworkStateLoaded);
-  }
-}
-
-void MediaSourcePlayerGstreamer::OnPlaybackComplete() {
-  // GStreamer pipeline EOS time and media duration don't always match.
-  double time = GetCurrentTime() != duration_ ? duration_ : GetCurrentTime();
-  is_end_reached_ = true;
-  is_download_finished_ = false;
-  StopCurrentTimeUpdateTimer();
-  manager()->OnTimeUpdate(GetPlayerId(), time);
-  manager()->OnTimeChanged(GetPlayerId());
-#if defined(OS_TIZEN_MOBILE)
-  ReleaseDisplayLock();
-#endif
-}
-
-void MediaSourcePlayerGstreamer::UpdateSeekState(bool state) {
-  manager()->OnSeekStateChange(GetPlayerId(), state);
-  is_seeking_ = state;
-}
-
-void MediaSourcePlayerGstreamer::OnTimeChanged() {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-  manager()->OnTimeChanged(GetPlayerId());
-}
-
-void MediaSourcePlayerGstreamer::HandleError(
-    blink::WebMediaPlayer::NetworkState state) {
-  error_occured_ = true;
-  manager()->OnNetworkStateChange(GetPlayerId(), state);
-#if defined(OS_TIZEN_MOBILE)
-  ReleaseDisplayLock();
-#endif
-}
-
-}  // namespace media
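
The demuxed-buffer feed that the deleted MediaSourcePlayerGstreamer built around audio_appsrc_ and video_appsrc_ boils down to the usual appsrc push model: honor need-data/enough-data to throttle, copy each access unit into a GstBuffer, stamp PTS and duration, and hand ownership to gst_app_src_push_buffer(). Below is a reduced single-stream sketch; the Feeder struct and PushAccessUnit() are illustrative helpers, not names from the removed file.

// appsrc_feed_sketch.cc -- reduced single-stream version of the feed pattern.
// Build (assumption): g++ -c appsrc_feed_sketch.cc \
//     $(pkg-config --cflags gstreamer-1.0 gstreamer-app-1.0)
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

struct Feeder {
  GstElement* appsrc;
  bool should_feed;
};

// need-data: downstream wants more input, resume pushing.
static void StartFeed(GstAppSrc* /*src*/, guint /*length*/,
                      gpointer user_data) {
  static_cast<Feeder*>(user_data)->should_feed = true;
}

// enough-data: downstream queues are full, stop until the next need-data.
static void StopFeed(GstAppSrc* /*src*/, gpointer user_data) {
  static_cast<Feeder*>(user_data)->should_feed = false;
}

static void InstallCallbacks(Feeder* feeder) {
  // Timestamps come from the demuxer, so run the source in TIME format.
  g_object_set(feeder->appsrc, "format", GST_FORMAT_TIME, NULL);
  GstAppSrcCallbacks callbacks = {};
  callbacks.need_data = StartFeed;
  callbacks.enough_data = StopFeed;
  gst_app_src_set_callbacks(GST_APP_SRC(feeder->appsrc), &callbacks, feeder,
                            NULL);
}

// Copy one demuxed access unit into a GstBuffer, stamp it, and push it.
// gst_app_src_push_buffer() takes ownership of the buffer, so no unref here.
static bool PushAccessUnit(Feeder* feeder, const guint8* data, gsize size,
                           GstClockTime pts, GstClockTime duration) {
  if (!feeder->should_feed)
    return false;  // Caller should queue the unit and retry on need-data.
  GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
  gst_buffer_fill(buffer, 0, data, size);
  GST_BUFFER_PTS(buffer) = pts;
  GST_BUFFER_DURATION(buffer) = duration;
  return gst_app_src_push_buffer(GST_APP_SRC(feeder->appsrc), buffer) ==
         GST_FLOW_OK;
}
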
diff --git a/tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.h b/tizen_src/chromium_impl/media/base/efl/media_source_player_gstreamer.h
deleted file mode 100644 (file)
index c39fd0c..0000000
+++ /dev/null
@@ -1,189 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_EFL_MEDIA_SOURCE_PLAYER_GSTREAMER_H_
-#define MEDIA_BASE_EFL_MEDIA_SOURCE_PLAYER_GSTREAMER_H_
-
-#include <Ecore.h>
-#include <gst/gst.h>
-#include "ecore_x_wayland_wrapper.h"
-
-#include "base/cancelable_callback.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/memory/shared_memory.h"
-#include "base/threading/thread.h"
-#include "base/time/default_tick_clock.h"
-#include "base/timer/timer.h"
-#include "media/base/efl/demuxer_efl.h"
-#include "media/base/efl/media_player_efl.h"
-#include "media/base/decoder_buffer.h"
-#include "third_party/WebKit/public/platform/WebMediaPlayer.h"
-
-namespace {
-struct AudioCodecGstElementsMapping {
-  media::AudioCodec codec;
-  const char** elements;
-};
-
-struct VideoCodecGstElementsMapping {
-  media::VideoCodec codec;
-  const char** elements;
-};
-}  // namespace
-
-namespace media {
-
-// This class handles media source extensions for Gstreamer port.
-class MEDIA_EXPORT MediaSourcePlayerGstreamer : public MediaPlayerEfl,
-    public DemuxerEflClient {
- public:
-  // Constructs a player with the given ID and demuxer. |manager| must outlive
-  // the lifetime of this object.
-  MediaSourcePlayerGstreamer(
-      int player_id,
-      scoped_ptr<DemuxerEfl> demuxer,
-      MediaPlayerManager* manager);
-
-  ~MediaSourcePlayerGstreamer() override {}
-
-  // MediaPlayerEfl implementation.
-  void Play() override;
-  void Pause(bool is_media_related_action) override;
-  void SetRate(double rate) override;
-  void Seek(const double time) override;
-  void SetVolume(double volume) override;
-  double GetCurrentTime() override;
-  void Initialize() override;
-  void Destroy() override;
-
-  // DemuxerEflClient implementation.
-  void OnDemuxerConfigsAvailable(
-      const DemuxerConfigs& params) override;
-  void OnDemuxerDataAvailable(
-      base::SharedMemoryHandle foreign_memory_handle,
-      const media::DemuxedBufferMetaData& meta_data) override;
-  void OnDemuxerSeekDone(
-      const base::TimeDelta& actual_browser_seek_time) override;
-  void OnDemuxerDurationChanged(base::TimeDelta duration) override;
-
-  // GStreamer Message handler
-  void HandleMessage(GstMessage* message);
-
-  // AppSink related
-  void GetFrameDetails();
-  GstSample* PullSample();
-  void OnNewFrameAvailable(GstSample* sample);
-
-  void OnReadDemuxedData(media::DemuxerStream::Type type);
-  void OnStopDemuxedData(media::DemuxerStream::Type type);
-
-  void UpdateVideoSeekOffset(guint64 video_seek_offset) {
-    video_seek_offset_ = video_seek_offset;
-  }
-  void UpdateAudioSeekOffset(guint64 audio_seek_offset) {
-    audio_seek_offset_ = audio_seek_offset;
-  }
-
- protected:
-  void Release() override;
-
- private:
-  void PrepareForVideoFrame();
-
-  void BufferMetaDataAvailable(const media::DemuxedBufferMetaData& meta_data);
-
-  void ReadDemuxedData(media::DemuxerStream::Type type);
-  void SaveDecoderBuffer(
-      base::SharedMemoryHandle foreign_memory_handle,
-      const media::DemuxedBufferMetaData& meta_data);
-  void ReadFromQueueIfAny(DemuxerStream::Type type);
-
-  bool HasAudio();
-  bool HasVideo();
-
-  // For internal seeks.
-  void RequestPlayerSeek(double seekTime);
-  void SeekInternal(const GstClockTime time);
-
-  // |current_time_update_timer_| related
-  void OnCurrentTimeUpdateTimerFired();
-  void StartCurrentTimeUpdateTimer();
-  void StopCurrentTimeUpdateTimer();
-  void OnGetFrameDetails();
-
-  void OnUpdateStates();
-  void HandleBufferingMessage();
-  void OnPlaybackComplete();
-  void UpdateSeekState(bool state);
-  void OnTimeChanged();
-
-  // Error handling API
-  void HandleError(blink::WebMediaPlayer::NetworkState state);
-
-  scoped_ptr<DemuxerEfl> demuxer_;
-
-  const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-
-  // Stats about the media.
-  bool playing_;
-
-  // Weak pointer passed to media decoder jobs for callbacks.
-  base::WeakPtrFactory<MediaSourcePlayerGstreamer> weak_this_;
-
-  // Allows listening for the |prepare-xid| message only once.
-  bool is_xwindow_handle_set_;
-
-  GstElement* pipeline_;
-  GstElement* video_appsrc_;
-  GstElement* video_queue_;
-  GstElement* video_sink_;
-  GstElement* audio_appsrc_;
-  GstElement* audio_queue_;
-  GstElement* audio_volume_;
-  bool should_feed_audio_;
-  bool should_feed_video_;
-
-  int width_;
-  int height_;
-  guint32 video_format_;
-  int media_type_;
-  int sn12_bufsize_;
-  double play_rate_;
-  double duration_;
-  bool is_paused_due_underflow_;
-
-  int buffered_;
-  bool is_paused_;
-  bool is_seeking_;
-  bool is_demuxer_seeking_;
-  int audio_buffered_;
-  int video_buffered_;
-
-  std::deque<scoped_refptr<DecoderBuffer> > audio_buffer_queue_;
-  std::deque<scoped_refptr<DecoderBuffer> > video_buffer_queue_;
-
-  bool is_download_finished_;
-  bool is_end_reached_;
-  bool error_occured_;
-
-  base::RepeatingTimer current_time_update_timer_;
-  int raw_video_frame_size_;
-  guint64 video_seek_offset_;
-  guint64 audio_seek_offset_;
-
-  // When seeking to |x| seconds, |ChunkDemuxer| sends data starting from
-  // |x - delta_x|, the location of the nearest I-frame, while the GStreamer
-  // decoder is expected to start playback from the seek time |x|. In the
-  // pipeline created here playback actually starts from the I-frame at
-  // |x - delta_x| while the pipeline clock runs from |x|, which causes
-  // jerky playback for a short time after a seek. To avoid this, the
-  // pipeline is also seeked to the nearest I-frame, i.e. |x - delta_x|.
-  bool is_seeking_iframe_;
-
-  DISALLOW_COPY_AND_ASSIGN(MediaSourcePlayerGstreamer);
-};
-
-}  // namespace media
-
-#endif  // MEDIA_BASE_EFL_MEDIA_SOURCE_PLAYER_GSTREAMER_H_
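
On the consumer side, the removed players pulled decoded frames through an appsink new_sample callback, mapped the buffer, and copied the bytes out (into shared memory that was then announced to the renderer). The sketch below shows that callback wiring in isolation; HandleFrame() is a placeholder for whatever the caller does with the raw frame.

// appsink_pull_sketch.cc -- the frame consumer pattern used by the removed
// players: a new_sample callback that maps the decoded buffer and hands the
// raw bytes to the caller.
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

static void HandleFrame(const guint8* /*data*/, gsize /*size*/,
                        GstClockTime /*pts*/) {
  // In the removed code this is roughly where the frame was copied into
  // shared memory and handed to the renderer process.
}

static GstFlowReturn OnNewSample(GstAppSink* sink, gpointer /*user_data*/) {
  GstSample* sample = gst_app_sink_pull_sample(sink);
  if (!sample)
    return GST_FLOW_ERROR;

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
    HandleFrame(map.data, map.size, GST_BUFFER_PTS(buffer));
    gst_buffer_unmap(buffer, &map);
  }
  gst_sample_unref(sample);
  return GST_FLOW_OK;
}

static void InstallSinkCallbacks(GstElement* appsink) {
  // Cap the queue at one buffer so a slow consumer applies backpressure
  // instead of accumulating decoded frames.
  g_object_set(appsink, "max-buffers", 1u, NULL);
  GstAppSinkCallbacks callbacks = {};
  callbacks.new_sample = OnNewSample;
  gst_app_sink_set_callbacks(GST_APP_SINK(appsink), &callbacks, NULL, NULL);
}
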
diff --git a/tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.cc b/tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.cc
deleted file mode 100644 (file)
index f0dd19a..0000000
+++ /dev/null
@@ -1,431 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/efl/webaudio_decoder_browser_gstreamer.h"
-
-#include <gst/app/gstappsink.h>
-#include <gst/app/gstappsrc.h>
-#include <gst/audio/audio.h>
-#include <gst/gst.h>
-
-#include "base/bind.h"
-#include "base/strings/string_util.h"
-#include "base/time/time.h"
-#include "media/base/audio_bus.h"
-#include "media/base/limits.h"
-#include "media/base/efl/webaudio_media_codec_info_efl.h"
-#include "third_party/WebKit/public/platform/WebAudioBus.h"
-
-namespace media {
-
-#define CHUNK_SIZE 204800  // (4096*50)
-#define GST_OBJECT_UNREF(obj) \
-    if (obj) { \
-      gst_object_unref(obj); \
-      obj = NULL; \
-    }
-
-static int gst_dec_count_ = 32;
-static int audio_width_ = 16;
-
-struct GstElementDeleter {
-  void operator()(GstElement* ptr) const {
-    GST_OBJECT_UNREF(ptr);
-  }
-};
-
-// GSTDecoder class - declaration
-class GSTDecoder {
- public:
-  GSTDecoder(uint8_t* data, int pcm_output, uint32_t data_size);
-  ~GSTDecoder();
-  void InitializeGstDestination(int pcm_output,
-                                uint16_t number_of_channels,
-                                uint32_t sample_rate,
-                                size_t number_of_frames);
-  void SendGstOutputUsinghandle(int pcm_output, uint8_t* buffer, int buf_size);
-  void MediaFileDecoder();
-
-  // callbacks
-  static void OnNewPadAdded(GstElement* bin, GstPad* pad,
-                            gpointer user_data);
-  static void OnNeedData(GstElement* source, guint size, gpointer user_data);
-  static void OnEOS(GstAppSink* sink, gpointer user_data);
-  static GstFlowReturn OnNewPreroll(GstAppSink* sink, gpointer user_data);
-  static GstFlowReturn OnNewBuffer(GstAppSink* sink, gpointer user_data);
-  static GstBusSyncReply OnBusMessage(GstBus* bus, GstMessage* message,
-                                      gpointer user_data);
- private:
-  GstElement* app_src_;
-  GstElement* audioconvert_;
-  guint8* encodeddata_;
-  gsize enc_length_;
-  guint64 enc_offset_;
-  int pcm_output_;
-  bool is_running_;
-  bool is_endofstream_;
-  bool is_new_request_;
-};  // GSTDecoder class
-
-GSTDecoder::GSTDecoder(uint8_t* data, int pcm_output, uint32_t data_size)
-  : app_src_(NULL),
-    audioconvert_(NULL),
-    encodeddata_(data),
-    enc_length_(data_size),
-    enc_offset_(0),
-    pcm_output_(pcm_output),
-    is_running_(true),
-    is_endofstream_(false),
-    is_new_request_(true) {
-  DCHECK(encodeddata_);
-}
-
-GSTDecoder::~GSTDecoder() {
-  delete[] encodeddata_;  // Allocated with new[] in DecodeUsingGST().
-  encodeddata_ = NULL;
-}
-
-void GSTDecoder::InitializeGstDestination(int pcm_output,
-                                          uint16_t number_of_channels,
-                                          uint32_t sample_rate,
-                                          size_t number_of_frames) {
-  media::WebAudioMediaCodecInfoEfl info(
-      static_cast<unsigned long>(number_of_channels),
-      static_cast<unsigned long>(sample_rate),
-      static_cast<unsigned long>(number_of_frames));
-
-  HANDLE_EINTR(write(pcm_output, &info, sizeof(info)));
-}
-
-void GSTDecoder::SendGstOutputUsinghandle(int pcm_output, uint8_t* buffer,
-                                          int buf_size) {
-  size_t count = buf_size;
-  while (count > 0) {
-    int bytes_to_write = (count >= PIPE_BUF) ? PIPE_BUF : count;
-    ssize_t bytes_written =
-        HANDLE_EINTR(write(pcm_output, buffer, bytes_to_write));
-    if (bytes_written == -1)
-      break;
-    count -= bytes_written;
-    buffer += bytes_written;
-  }
-  return;
-}
-
-void GSTDecoder::MediaFileDecoder() {
-  if (!gst_is_initialized()) {
-    GError* err = NULL;
-    if (!gst_init_check(NULL, NULL, &err)) {
-      LOG(ERROR) << "Gst could not be initialized";
-      close(pcm_output_);
-      return;
-    }
-  }
-
-  scoped_ptr<GstElement, GstElementDeleter> pipeline;
-  gchar pipeline_name[16] = {0,};
-  sprintf(pipeline_name, "pipeline_%d", gst_dec_count_);
-
-  // makes gst-pipeline
-  pipeline.reset(gst_pipeline_new((const gchar*)&pipeline_name));
-  GstBus* bus = NULL;
-  if (!(bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline.get())))) {
-    LOG(ERROR) << "GStreamer bus creation failed";
-    return;
-  }
-  gst_bus_set_sync_handler(
-      bus, (GstBusSyncHandler)OnBusMessage, this, NULL);
-
-  // App Src init
-  app_src_ = gst_element_factory_make("appsrc", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), app_src_)) {
-    GST_OBJECT_UNREF(app_src_);
-    return;
-  }
-  g_signal_connect(app_src_, "need-data", G_CALLBACK(OnNeedData), this);
-
-  // App Sink init
-  GstElement* app_sink = gst_element_factory_make("appsink", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), app_sink)) {
-    GST_OBJECT_UNREF(app_sink);
-    return;
-  }
-  GstAppSinkCallbacks callbacks = {OnEOS, OnNewPreroll, OnNewBuffer};
-  gst_app_sink_set_callbacks(GST_APP_SINK(app_sink), &callbacks, this, NULL);
-  g_object_set(G_OBJECT(app_sink), "sync", FALSE, NULL);
-
-  // Decoder init
-  GstElement* decoder = gst_element_factory_make("decodebin", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), decoder)) {
-    GST_OBJECT_UNREF(decoder);
-    return;
-  }
-  g_signal_connect(decoder, "pad-added", G_CALLBACK(OnNewPadAdded), this);
-
-  // audio converter init
-  audioconvert_ = gst_element_factory_make("audioconvert", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), audioconvert_)) {
-    GST_OBJECT_UNREF(audioconvert_);
-    return;
-  }
-
-  // Resample init
-  GstElement* sampler = gst_element_factory_make("audioresample", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), sampler)) {
-    GST_OBJECT_UNREF(sampler);
-    return;
-  }
-
-  // Capsfilter init
-  GstElement* capsfilter = gst_element_factory_make("capsfilter", NULL);
-  if (!gst_bin_add(GST_BIN(pipeline.get()), capsfilter)) {
-    GST_OBJECT_UNREF(capsfilter);
-    return;
-  }
-
-  GstCaps* caps = gst_caps_new_simple("audio/x-raw",
-                                      "format", G_TYPE_STRING, "S16LE",
-                                      "rate", G_TYPE_INT, 44100,
-                                      "channels", G_TYPE_INT, 2,
-                                      "layout", G_TYPE_STRING, "interleaved",
-                                      NULL);
-
-  g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
-  gst_caps_unref(caps);
-
-  gst_dec_count_++;
-  if (gst_dec_count_ > 126)
-    gst_dec_count_ = 32;
-
-  if (!gst_element_link(app_src_, decoder)) {
-    LOG(ERROR) << " Something wrong on gst initialization";
-    return;
-  }
-
-  if (!gst_element_link_many(audioconvert_, sampler,
-      capsfilter, app_sink, NULL)) {
-    LOG(ERROR) << "Some element could not be linked";
-    return;
-  }
-
-  // Start the pipeline; decoding runs until |is_running_| is cleared.
-  gst_element_set_state(pipeline.get(), GST_STATE_PLAYING);
-
-  // FIXME: Check whether usleep() can be replaced with an asynchronous
-  // GStreamer call so that GStreamer does not block the browser UI thread.
-  while (is_running_) {
-    usleep(10);
-  }
-
-  // Release bus and pipeline resources.
-  g_signal_handlers_disconnect_by_func(
-      bus, reinterpret_cast<gpointer>(OnBusMessage), this);
-  gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
-  GST_OBJECT_UNREF(bus);
-  gst_element_set_state(pipeline.get(), GST_STATE_NULL);
-}
-
-void GSTDecoder::OnNewPadAdded(GstElement* /*bin*/, GstPad* pad,
-                               gpointer data) {
-  GSTDecoder* decoder = static_cast<GSTDecoder*>(data);
-  GstPad* sink_pad = gst_element_get_static_pad(decoder->audioconvert_, "sink");
-  if (!GST_PAD_IS_LINKED(sink_pad)) {
-    gst_pad_link(pad, sink_pad);
-  }
-  g_object_unref(sink_pad);
-}
-
-void GSTDecoder::OnNeedData(GstElement* /*source*/, guint /*size*/,
-                            gpointer data) {
-  GSTDecoder* decoder = static_cast<GSTDecoder*>(data);
-  if (decoder->is_endofstream_)
-    return;
-
-  guint len = CHUNK_SIZE;
-  if ((decoder->enc_offset_ + len ) > decoder->enc_length_)
-    len = decoder->enc_length_ - decoder->enc_offset_;
-
-  GstBuffer* buffer =
-      gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
-                                  decoder->encodeddata_ + decoder->enc_offset_,
-                                  len, 0, len, NULL, NULL);
-  if (!buffer) {
-    LOG(ERROR) << "OnNeedData: buffer creation: FAILED";
-    return;
-  }
-  decoder->enc_offset_ += len;
-
-  GstFlowReturn ret = GST_FLOW_OK;
-  g_signal_emit_by_name(decoder->app_src_, "push-buffer", buffer, &ret);
-
-  if (ret != GST_FLOW_OK) {
-    LOG(ERROR) << "OnNeedData: push-buffer ret: FAILED";
-    decoder->is_running_ = false;
-  }
-
-  if (decoder->enc_offset_ >= decoder->enc_length_) {
-    decoder->is_endofstream_ = TRUE;
-    g_signal_emit_by_name(decoder->app_src_, "end-of-stream", &ret);
-  }
-  gst_buffer_unref(buffer);
-}
-
-void GSTDecoder::OnEOS(GstAppSink* sink, gpointer data) {
-  GSTDecoder* decoder = static_cast<GSTDecoder*>(data);
-  if (!decoder->is_endofstream_) {
-    LOG(ERROR) << "not end of stream yet appsrc-side";
-  }
-
-  close(decoder->pcm_output_);
-  decoder->is_running_ = false;
-}
-
-GstFlowReturn GSTDecoder::OnNewPreroll(GstAppSink*, gpointer) {
-  return GST_FLOW_OK;
-}
-
-GstFlowReturn GSTDecoder::OnNewBuffer(GstAppSink* sink, gpointer data) {
-  GSTDecoder* decoder = static_cast<GSTDecoder*>(data);
-  GstSample* sample = gst_app_sink_pull_sample(sink);
-  if (!sample)
-    return GST_FLOW_ERROR;
-
-  GstBuffer* buffer = gst_sample_get_buffer(sample);
-
-  if (!buffer) {
-    gst_sample_unref(sample);
-    return GST_FLOW_ERROR;
-  }
-
-  if (decoder->is_new_request_) {
-    GstCaps* caps = NULL;
-    GstStructure* str = NULL;
-    gboolean ret = true;
-    gint channel = 0;
-    gint rate = 0;
-    caps = gst_sample_get_caps(sample);
-    str = gst_caps_get_structure(caps, 0);
-    if (!caps || !str) {
-      gst_sample_unref(sample);
-      return GST_FLOW_ERROR;
-    }
-    ret &= gst_structure_get_int(str, "channels", &channel);
-    ret &= gst_structure_get_int(str, "rate", &rate);
-
-    if (!ret || !channel || !rate) {
-      gst_sample_unref(sample);
-      return GST_FLOW_ERROR;
-    }
-
-    GstClockTime duration =
-        (static_cast<guint64>(gst_buffer_get_size(buffer)) * 8 * GST_SECOND) /
-        (channel * rate * audio_width_);
-    int frames = GST_CLOCK_TIME_TO_FRAMES(duration, rate);
-
-    decoder->InitializeGstDestination(decoder->pcm_output_, channel,
-                                      rate, frames);
-    decoder->is_new_request_ = false;
-  }
-
-  GstMapInfo gst_map;
-  gst_buffer_map(buffer, &gst_map, static_cast<GstMapFlags>(GST_MAP_READ));
-  if (gst_map.size > 0) {
-    decoder->SendGstOutputUsinghandle(decoder->pcm_output_,
-                                      gst_map.data,
-                                      gst_map.size);
-    gst_buffer_unmap(buffer, &gst_map);
-  }
-
-  gst_sample_unref(sample);
-  return GST_FLOW_OK;
-}
-
-GstBusSyncReply GSTDecoder::OnBusMessage(GstBus* bus,
-                                         GstMessage* message,
-                                         gpointer data) {
-  GSTDecoder* decoder = static_cast<GSTDecoder*>(data);
-  switch (GST_MESSAGE_TYPE(message)) {
-    case GST_MESSAGE_ERROR:
-      GError* error;
-      gst_message_parse_error(message, &error, NULL);
-      LOG(ERROR) << "Error message : " << error->message
-          << " recieved from : "<< GST_MESSAGE_SRC_NAME(message)
-          << ", error code : " << error->code;
-      g_error_free(error);
-
-      if (decoder->is_running_) {
-        close(decoder->pcm_output_);
-        decoder->is_running_ = false;
-      }
-      break;
-    default:
-      DVLOG(1) << "Unhandled GStreamer message type : "
-               << GST_MESSAGE_TYPE_NAME(message);
-      break;
-  }
-  gst_message_unref(message);
-  return GST_BUS_DROP;
-}
-
-// WebAudioDecoderGStreamer class
-// static
-WebAudioDecoder* WebAudioDecoder::GetWebAudioDecoder() {
-  return WebAudioDecoderGStreamer::GetInstance();
-}
-
-// static
-WebAudioDecoderGStreamer* WebAudioDecoderGStreamer::GetInstance() {
-  return base::Singleton<WebAudioDecoderGStreamer>::get();
-}
-
-WebAudioDecoderGStreamer::WebAudioDecoderGStreamer()
-    : gst_thread_("GstThread") {
-}
-
-WebAudioDecoderGStreamer::~WebAudioDecoderGStreamer() {
-}
-
-void WebAudioDecoderGStreamer::DecodeUsingGST(
-    base::SharedMemoryHandle foreign_memory_handle,
-    base::FileDescriptor pcm_output,
-    uint32_t data_size) {
-
-  base::SharedMemory shared_memory(foreign_memory_handle, false);
-  if (!shared_memory.Map(data_size)) {
-    LOG(ERROR) << "Failed to map shared memory for size " << data_size;
-    return;
-  }
-
-  uint8_t* encoded_data = new uint8_t[data_size];
-  if (!encoded_data) {
-    LOG(ERROR) << "Memory allocation failed for size = " << data_size;
-    return;
-  }
-
-  memcpy(encoded_data,
-         static_cast<uint8_t*>(shared_memory.memory()),
-         data_size);
-
-  // This will execute until decoding is done
-  GSTDecoder decoder(encoded_data, pcm_output.fd, data_size);
-  decoder.MediaFileDecoder();
-}
-
-void WebAudioDecoderGStreamer::EncodedDataReceived(
-    base::SharedMemoryHandle foreign_memory_handle,
-    base::FileDescriptor pcm_output,
-    uint32_t data_size) {
-
-  if (!gst_thread_.IsRunning() && !gst_thread_.Start()) {
-    LOG(ERROR) << "Starting GStreamer thread failed";
-    return;
-  }
-
-  gst_thread_.message_loop()->PostTask(FROM_HERE,
-      base::Bind(&WebAudioDecoderGStreamer::DecodeUsingGST,
-      base::Unretained(this), foreign_memory_handle,
-      pcm_output, data_size));
-}
-
-}  // namespace media
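
For reference, the chain the removed decoder assembled element by element (appsrc → decodebin → audioconvert → audioresample → capsfilter with S16LE / 44100 Hz / 2-channel interleaved caps → appsink with sync=false) can also be written as a single launch line. The sketch below is a hedged equivalent using gst_parse_launch(); the element names "src" and "sink" are illustrative and do not come from the removed code.

// Sketch of the same decode topology as the removed MediaFileDecoder(),
// built from a launch string instead of element-by-element assembly.
#include <gst/gst.h>

static GstElement* BuildWebAudioDecodePipeline(GstElement** out_src,
                                               GstElement** out_sink) {
  GError* error = NULL;
  GstElement* pipeline = gst_parse_launch(
      "appsrc name=src ! decodebin ! audioconvert ! audioresample ! "
      "audio/x-raw,format=S16LE,rate=44100,channels=2,layout=interleaved ! "
      "appsink name=sink sync=false",
      &error);
  if (error) {
    // Treat parse warnings and failures alike in this sketch.
    g_clear_error(&error);
    if (pipeline)
      gst_object_unref(pipeline);
    return NULL;
  }
  *out_src = gst_bin_get_by_name(GST_BIN(pipeline), "src");
  *out_sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
  // Caller hooks need-data / appsink callbacks and sets GST_STATE_PLAYING.
  return pipeline;
}
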
diff --git a/tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.h b/tizen_src/chromium_impl/media/base/efl/webaudio_decoder_browser_gstreamer.h
deleted file mode 100644 (file)
index ea1a7fe..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2014 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_EFL_WEBAUDIO_DECODER_BROWSER_GSTREAMER_H_
-#define MEDIA_BASE_EFL_WEBAUDIO_DECODER_BROWSER_GSTREAMER_H_
-
-#include "base/basictypes.h"
-#include "base/memory/singleton.h"
-#include "base/memory/shared_memory.h"
-#include "base/threading/thread.h"
-#include "content/common/content_export.h"
-#include "media/base/efl/webaudio_decoder_efl.h"
-
-namespace media {
-
-// WebAudioDecoderGStreamer class
-class MEDIA_EXPORT WebAudioDecoderGStreamer : public WebAudioDecoder {
- public:
-  static WebAudioDecoderGStreamer* GetInstance();
-
-  // media::WebAudioDecoder implementation.
-  void EncodedDataReceived(base::SharedMemoryHandle foreign_memory_handle,
-                           base::FileDescriptor pcm_output,
-                           uint32_t data_size) override;
-
- private:
-  friend struct base::DefaultSingletonTraits<WebAudioDecoderGStreamer>;
-  WebAudioDecoderGStreamer();
-  virtual ~WebAudioDecoderGStreamer();
-  void DecodeUsingGST(base::SharedMemoryHandle foreign_memory_handle,
-                      base::FileDescriptor pcm_output,
-                      uint32_t data_size);
-
-  base::Thread gst_thread_;
-
-  DISALLOW_COPY_AND_ASSIGN(WebAudioDecoderGStreamer);
-};
-
-}  // namespace media
-
-#endif  // MEDIA_BASE_EFL_WEBAUDIO_DECODER_BROWSER_GSTREAMER_H_
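
With both GStreamer WebAudio files deleted, callers only ever touch the backend-neutral media::WebAudioDecoder interface declared in webaudio_decoder_efl.h (GetWebAudioDecoder() plus EncodedDataReceived()); the CAPI implementation added under base/tizen/ slots in behind it. A minimal caller sketch follows, assuming only that interface; the function name and argument values are placeholders.

// Illustrative caller only; the concrete CAPI decoder lives in
// base/tizen/webaudio_decoder_browser_capi.* and is not shown in this diff.
#include "media/base/efl/webaudio_decoder_efl.h"

void DecodeEncodedAudio(base::SharedMemoryHandle encoded_handle,
                        base::FileDescriptor pcm_output,
                        uint32_t encoded_size) {
  // Resolves to whichever singleton backend the build compiled in;
  // callers never name GStreamer or CAPI directly.
  media::WebAudioDecoder* decoder =
      media::WebAudioDecoder::GetWebAudioDecoder();
  decoder->EncodedDataReceived(encoded_handle, pcm_output, encoded_size);
}
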
index a8f24c6..3aabd65 100644 (file)
@@ -3,18 +3,6 @@
 # found in the LICENSE file.
 
 {
-  'variables': {
-    'variables': {
-      'tizen_multimedia_use_capi_for_me%' : 1,
-      'conditions': [
-        ['building_for_tizen==0', {
-          'tizen_multimedia_use_capi_for_me%' : 0,
-        }],
-      ],
-    },
-    'tizen_multimedia_use_capi_for_me%': '<(tizen_multimedia_use_capi_for_me)',
-  },
-
   'conditions': [
 
     ['use_ozone==1', {
     }],
 
     ['tizen_multimedia_support==1', {
-
       'variables': {
         'external_media_deps': [
-          '<(DEPTH)/tizen_src/build/system.gyp:ecore',
-          '<(DEPTH)/tizen_src/build/system.gyp:gstreamer',
+          '<(DEPTH)/tizen_src/build/system.gyp:capi-media-audio-io',
           '<(DEPTH)/tizen_src/build/system.gyp:capi-media-camera',
+          '<(DEPTH)/tizen_src/build/system.gyp:capi-media-player',
+          '<(DEPTH)/tizen_src/build/system.gyp:capi-media-tool',
+          '<(DEPTH)/tizen_src/build/system.gyp:ecore',
           '<(DEPTH)/tizen_src/build/system.gyp:mm-player',
         ],
         'conditions': [
               '<(DEPTH)/tizen_src/build/system.gyp:ecore-wayland',
             ],
           }],
-          ['building_for_tizen==1', {
-            'external_media_deps': [
-              '<(DEPTH)/tizen_src/build/system.gyp:capi-media-audio-io',
-            ],
-          }],
           ['building_for_tizen_mobile==1', {
             'external_media_deps': [
               '<(DEPTH)/tizen_src/build/system.gyp:capi-system-device',
             ],
           }],
-          ['tizen_multimedia_use_capi_for_me==1', {
-            'external_media_deps': [
-              '<(DEPTH)/tizen_src/build/system.gyp:capi-media-player',
-              '<(DEPTH)/tizen_src/build/system.gyp:capi-media-tool',
-            ],
-          }],
         ],
       },
-
       'target_defaults': {
         'target_conditions': [
-
           ['_target_name=="media"', {
-             'include_dirs': [
-               '<(DEPTH)/third_party/WebKit',
-               '<(DEPTH)/third_party/libyuv/include',
-               '<(DEPTH)/third_party/mojo/src',
-               '<(DEPTH)/v8/include',
-             ],
-             'sources': [
-               'base/efl/webaudio_media_codec_info_efl.h',
-               'base/efl/demuxer_stream_player_params_efl.cc',
-               'base/efl/demuxer_stream_player_params_efl.h',
-               'base/efl/demuxer_efl.h',
-               'base/efl/media_player_efl.cc',
-               'base/efl/media_player_efl.h',
-               'base/efl/media_player_bridge_gstreamer.cc',
-               'base/efl/media_player_bridge_gstreamer.h',
-               'base/efl/media_player_manager_efl.h',
-               'base/efl/media_player_util_efl.cc',
-               'base/efl/media_player_util_efl.h',
-               'base/efl/media_source_player_gstreamer.cc',
-               'base/efl/media_source_player_gstreamer.h',
-               'base/efl/webaudio_decoder_efl.h',
-               'base/efl/webaudio_decoder_browser_gstreamer.cc',
-               'base/efl/webaudio_decoder_browser_gstreamer.h',
-             ],
-
+            'defines': [
+              'TIZEN_VIDEO_CAPTURE_SUPPORT',
+            ],
+            'include_dirs': [
+              '<(DEPTH)/third_party/WebKit',
+              '<(DEPTH)/third_party/libyuv/include',
+              '<(DEPTH)/third_party/mojo/src',
+              '<(DEPTH)/v8/include',
+            ],
+            'sources': [
+              'audio/tizen/audio_manager_capi.cc',
+              'audio/tizen/audio_manager_capi.h',
+              'audio/tizen/audio_manager_tizen.cc',
+              'audio/tizen/capi_audio_input.cc',
+              'audio/tizen/capi_audio_input.h',
+              'audio/tizen/capi_audio_output.cc',
+              'audio/tizen/capi_audio_output.h',
+              'audio/tizen/capi_util.cc',
+              'audio/tizen/capi_util.h',
+              'base/efl/demuxer_efl.h',
+              'base/efl/demuxer_stream_player_params_efl.cc',
+              'base/efl/demuxer_stream_player_params_efl.h',
+              'base/efl/media_player_efl.cc',
+              'base/efl/media_player_efl.h',
+              'base/efl/media_player_manager_efl.h',
+              'base/efl/media_player_util_efl.cc',
+              'base/efl/media_player_util_efl.h',
+              'base/efl/webaudio_decoder_efl.h',
+              'base/efl/webaudio_media_codec_info_efl.h',
+              'base/tizen/media_player_bridge_capi.cc',
+              'base/tizen/media_player_bridge_capi.h',
+              'base/tizen/media_source_player_capi.cc',
+              'base/tizen/media_source_player_capi.h',
+              'base/tizen/webaudio_decoder_browser_capi.cc',
+              'base/tizen/webaudio_decoder_browser_capi.h',
+              'capture/video/tizen/video_capture_device_factory_tizen.cc',
+              'capture/video/tizen/video_capture_device_factory_tizen.h',
+              'capture/video/tizen/video_capture_device_tizen.cc',
+              'capture/video/tizen/video_capture_device_tizen.h',
+            ],
+            'sources/': [
+              [ 'exclude', 'audio/linux/audio_manager_linux.cc$'],
+              [ 'exclude', 'capture/video/linux/video_capture_device_factory_linux.cc$' ],
+              [ 'exclude', 'capture/video/linux/video_capture_device_factory_linux.h$' ],
+              [ 'exclude', 'capture/video/linux/video_capture_device_linux.cc$' ],
+              [ 'exclude', 'capture/video/linux/video_capture_device_linux.h$' ],
+            ],
             'conditions': [
-
-              ['building_for_tizen==1', {
-                'defines': [
-                 'TIZEN_VIDEO_CAPTURE_SUPPORT=1',
-                ],
-                'sources/': [
-                  [ 'exclude', 'audio/linux/audio_manager_linux.cc$'],
-                  [ 'exclude', 'capture/video/linux/video_capture_device_factory_linux.cc$' ],
-                  [ 'exclude', 'capture/video/linux/video_capture_device_factory_linux.h$' ],
-                  [ 'exclude', 'capture/video/linux/video_capture_device_linux.h$' ],
-                  [ 'exclude', 'capture/video/linux/video_capture_device_linux.cc$' ],
-                ],
-                'sources': [
-                  'audio/tizen/audio_manager_capi.cc',
-                  'audio/tizen/audio_manager_capi.h',
-                  'audio/tizen/audio_manager_tizen.cc',
-                  'audio/tizen/capi_audio_input.h',
-                  'audio/tizen/capi_audio_input.cc',
-                  'audio/tizen/capi_audio_output.cc',
-                  'audio/tizen/capi_audio_output.h',
-                  'audio/tizen/capi_util.cc',
-                  'audio/tizen/capi_util.h',
-                  'capture/video/tizen/video_capture_device_tizen.cc',
-                  'capture/video/tizen/video_capture_device_tizen.h',
-                  'capture/video/tizen/video_capture_device_factory_tizen.cc',
-                  'capture/video/tizen/video_capture_device_factory_tizen.h',
-                ],
-              }],
-
               ['tizen_multimedia_eme_support==1', {
                 'defines': [
                   'TIZEN_MULTIMEDIA_EME_SUPPORT=1',
                 ],
               }],
-              ['tizen_multimedia_use_capi_for_me==1', {
-                'sources': [
-                  'base/tizen/media_player_bridge_capi.cc',
-                  'base/tizen/media_player_bridge_capi.h', # ME
-                  'base/tizen/media_source_player_capi.cc',
-                  'base/tizen/media_source_player_capi.h',
-                  'base/tizen/webaudio_decoder_browser_capi.cc',
-                  'base/tizen/webaudio_decoder_browser_capi.h',
-                ],
-                # Exclude the sources that depend on CAPI-MEDIA-PLAYER
-                'sources!': [
-                  'base/efl/media_player_bridge_gstreamer.cc',
-                  'base/efl/media_player_bridge_gstreamer.h', # ME
-                  'base/efl/media_source_player_gstreamer.cc',
-                  'base/efl/media_source_player_gstreamer.h',
-                  'base/efl/webaudio_decoder_browser_gstreamer.cc',
-                  'base/efl/webaudio_decoder_browser_gstreamer.h',
-                ],
-              }],
             ], # conditions
           }], # _target_name=="media"
         ], # target_conditions
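
media_efl.gypi now hangs everything off the single tizen_multimedia_support condition: the CAPI player and WebAudio sources, the Tizen audio and capture sources, and the TIZEN_VIDEO_CAPTURE_SUPPORT define, while supplement.gypi (below) emits TIZEN_MULTIMEDIA_SUPPORT and TIZEN_MULTIMEDIA_USE_CAPI_AUDIO_IO only for Tizen builds with that flag set. A hedged sketch of how such gyp-emitted defines are typically consumed in C++ follows; the function and both branch bodies are illustrative, not part of this change.

// Illustration of consuming the defines emitted by these gyp conditions;
// InitPlatformAudio() and both branches are hypothetical placeholders.
void InitPlatformAudio() {
#if defined(TIZEN_MULTIMEDIA_SUPPORT) && \
    defined(TIZEN_MULTIMEDIA_USE_CAPI_AUDIO_IO)
  // Tizen 3.0 build: audio goes through capi-media-audio-io
  // (audio/tizen/audio_manager_capi.* is compiled in).
#else
  // Desktop build: Chromium's default Linux audio path is used.
#endif
}
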
index c94c2ec..46d732e 100644 (file)
@@ -27,7 +27,6 @@
 
     'xwalk_link_against_chromium_ewk%': 0,
 
-    'tizen_multimedia_support%': 1,
     'tizen_multimedia_eme_support%': 1,
 # [M44_2403] Temporary disabling the tizen_disable_gpu_thread for switching to new chromium
 #            FIXME: http://web.sec.samsung.net/bugzilla/show_bug.cgi?id=14046
         ], # conditions
       }],
 
-      ['tizen_multimedia_support==1', {
-        'defines': [
-          'TIZEN_MULTIMEDIA_SUPPORT=1',
-        ],
-      }],
-
       ['tizen_disable_gpu_thread==1', {
         'defines': [
           'TIZEN_DISABLE_GPU_THREAD=1',
         'defines': [
           'OS_TIZEN=1',
           'WTF_OS_TIZEN=1',
-          'TIZEN_MULTIMEDIA_USE_CAPI_AUDIO_IO=1',
         ],
         'conditions': [
           ['chromium_efl_tizen_version=="3.0"', {
               'USE_WAYLAND=1',
             ],
           }],
+          ['tizen_multimedia_support==1', {
+            'defines': [
+              'TIZEN_MULTIMEDIA_SUPPORT=1',
+              'TIZEN_MULTIMEDIA_USE_CAPI_AUDIO_IO=1',
+            ],
+          }],
           # TODO: There are X11 dependencies in following condition.
           #       The files need to be implemented based on Wayland.
           ['wayland_bringup==1', {