- Media Element with GST works with AppSink.
- Media Source Extension works with AppSink.
- PIXMAP support for video playback is not present
- WebAudio works
- WebRtc needs platform support
Merged commits:
Ib377730584f3f3ffd61776facb63d8d196f64d1f
I0cc3c9c1c43d97c2c3d518655401be40757c47c7
Bug: http://web.sec.samsung.net/bugzilla/show_bug.cgi?id=10745
Reviewed by: SeungSeop Park, Sharath Kamath, Venugopal S M
Change-Id: I36675af2bbfd63d7d8c7fdfb5bf34585ca57ffd1
Signed-off-by: Venugopal S M <sm.venugopal@samsung.com>
{
'target_name': 'gstreamer',
'type': 'none',
- 'variables': {
- 'dependent_pkgs': [
- 'glib-2.0',
- 'gstreamer-0.10',
- 'gstreamer-base-0.10',
- 'gstreamer-app-0.10',
- 'gstreamer-pbutils-0.10',
- 'gstreamer-interfaces-0.10',
- 'gstreamer-video-0.10',
- ],
- },
- 'direct_dependent_settings': {
- 'cflags': [
- '<!@(<(pkg-config) --cflags <(dependent_pkgs))',
- ],
- },
- 'link_settings': {
- 'ldflags': [
- '<!@(<(pkg-config) --libs-only-L --libs-only-other <(dependent_pkgs))',
- ],
- 'libraries': [
- '<!@(<(pkg-config) --libs-only-l <(dependent_pkgs))',
- ],
- },
+ 'conditions': [
+ ['building_for_tizen_mobile==1', {
+ 'variables': {
+ 'dependent_pkgs': [
+ 'glib-2.0',
+ 'gstreamer-1.0',
+ 'gstreamer-base-1.0',
+ 'gstreamer-app-1.0',
+ 'gstreamer-pbutils-1.0',
+ 'gstreamer-video-1.0',
+ ],
+ },
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags <(dependent_pkgs))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other <(dependent_pkgs))',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l <(dependent_pkgs))',
+ ],
+ },
+ }],
+ ['building_for_tizen_tv==1 or building_for_tizen==0', {
+ 'variables': {
+ 'dependent_pkgs': [
+ 'glib-2.0',
+ 'gstreamer-0.10',
+ 'gstreamer-base-0.10',
+ 'gstreamer-app-0.10',
+ 'gstreamer-pbutils-0.10',
+ 'gstreamer-interfaces-0.10',
+ 'gstreamer-video-0.10',
+ ],
+ },
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags <(dependent_pkgs))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other <(dependent_pkgs))',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l <(dependent_pkgs))',
+ ],
+ },
+ }],
+ ],
},
], # targets
static int gst_dec_count_ = 32;
+#if GST_VERSION_MAJOR == 1
+static int audio_width_ = 16;
+const char* kdecodebin = "decodebin";
+#endif
+
////////////////////////////////////////
// GSTDecoder class - declaration
class GSTDecoder {
// makes gst-element(s)
appData->app_src_ = gst_element_factory_make("appsrc", NULL);
appData->app_sink_ = gst_element_factory_make("appsink", NULL);
+#if GST_VERSION_MAJOR == 1
+ appData->decoder_ = gst_element_factory_make(kdecodebin, NULL);
+#else
appData->decoder_ = gst_element_factory_make("decodebin2", NULL);
+#endif
appData->convert_ = gst_element_factory_make("audioconvert", NULL);
appData->resample_ = gst_element_factory_make ("audioresample", NULL);
appData->capsfilter_ = gst_element_factory_make("capsfilter", NULL);
+#if GST_VERSION_MAJOR == 1
+ appData->caps_ = gst_caps_new_simple("audio/x-raw", "format", G_TYPE_STRING,
+ "S16LE", "rate", G_TYPE_INT, 44100,
+ "channels", G_TYPE_INT, 2, "layout",
+ G_TYPE_STRING, "interleaved", NULL);
+#else
appData->caps_ = gst_caps_new_simple("audio/x-raw-int", "width", G_TYPE_INT, 16, NULL);
+#endif
if (!appData->app_src_ || !appData->app_sink_ || !appData->decoder_ ||
!appData->convert_ || !appData->resample_ || !appData->capsfilter_ ||
g_signal_connect(appData->app_src_, "need-data", G_CALLBACK(cb_need_data), appData);
g_signal_connect(appData->decoder_, "pad-added", G_CALLBACK(cb_newpad), appData);
+#if GST_VERSION_MAJOR == 1
+ GstAppSinkCallbacks callbacks = { cb_eos, cb_new_preroll,
+ cb_new_buffer};
+#else
GstAppSinkCallbacks callbacks = { cb_eos, cb_new_preroll,
cb_new_buffer, NULL,
{ NULL, NULL, NULL } };
+#endif
gst_app_sink_set_callbacks(GST_APP_SINK(appData->app_sink_), &callbacks, appData, NULL);
//FIXME: gst-element(s) can each have a name, but this may not be enough
gst_object_unref(appData->pipeline_);
return;
}
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(bus, (GstBusSyncHandler)cb_pipeline_message, appData,
+ NULL);
+#else
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)cb_pipeline_message, appData);
+#endif
// actually works decoding
gst_element_set_state(appData->pipeline_, GST_STATE_PLAYING);
// returns resource(s)
g_signal_handlers_disconnect_by_func(
bus, reinterpret_cast<gpointer>(cb_pipeline_message), appData);
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
+#else
gst_bus_set_sync_handler(bus, NULL, NULL);
+#endif
gst_object_unref(bus);
gst_element_set_state(appData->pipeline_, GST_STATE_NULL);
gst_object_unref(appData->pipeline_);
}
void GSTDecoder::cb_newpad(GstElement* decodebin, GstPad* pad, GstAppData* data) {
+#if GST_VERSION_MAJOR == 1
+ GstPad* sink_pad = gst_element_get_static_pad(data->audioout_, "sink");
+#else
GstPad* sink_pad = gst_element_get_pad(data->audioout_, "sink");
+#endif
if (GST_PAD_IS_LINKED(sink_pad)) {
g_object_unref(sink_pad);
return;
GstCaps* caps = NULL;
GstStructure* str = NULL;
+#if GST_VERSION_MAJOR == 1
+ caps = gst_pad_query_caps(pad, NULL);
+#else
caps = gst_pad_get_caps(pad);
+#endif
if (caps) {
str = gst_caps_get_structure(caps, 0);
if (str) {
if (data->isEndOfStream_)
return;
- GstBuffer* buffer = gst_buffer_new();
- if (!buffer)
- return;
-
guint len = CHUNK_SIZE;
if ((data->enc_offset_ + len ) > data->enc_length_)
len = data->enc_length_ - data->enc_offset_;
+#if GST_VERSION_MAJOR == 1
+ GstBuffer* buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
+ data->encodeddata_ + data->enc_offset_,
+ len, 0, len, NULL, NULL);
+ if (!buffer)
+ {
+ LOG(ERROR) << __FUNCTION__ << "cb_need_data: buffer creation: FAILED";
+ return;
+ }
+#else
+ GstBuffer* buffer = gst_buffer_new();
+ if (!buffer)
+ return;
+
GST_BUFFER_DATA(buffer) = data->encodeddata_ + data->enc_offset_;
GST_BUFFER_SIZE(buffer) = len;
+#endif
data->enc_offset_ += len;
GstFlowReturn ret = GST_FLOW_OK;
}
GstFlowReturn GSTDecoder::cb_new_buffer (GstAppSink* sink, gpointer user_data) {
+#if GST_VERSION_MAJOR == 1
+ GstSample* sample = gst_app_sink_pull_sample(sink);
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+#else
GstBuffer* buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
+#endif
if (!buffer)
return GST_FLOW_ERROR;
if (data->isNewRequest_) {
GstCaps* caps = NULL;
GstStructure* str = NULL;
+#if GST_VERSION_MAJOR == 1
+ caps = gst_sample_get_caps(sample);
+#else
caps = gst_buffer_get_caps(buffer);
+#endif
if (caps)
str = gst_caps_get_structure(caps, 0);
if (caps && str) {
ret &= gst_structure_get_int(str, "channels", &channel);
ret &= gst_structure_get_int(str, "rate", &rate);
+#if !(GST_VERSION_MAJOR == 1)
ret &= gst_structure_get_int(str, "width", &width);
+#endif
}
+#if GST_VERSION_MAJOR == 1
+ if (!caps || !str || !ret || !channel || !rate) {
+#else
if (!caps || !str || !ret || !channel || !rate || !width) {
+#endif
gst_caps_unref(caps);
gst_buffer_unref(buffer);
return GST_FLOW_ERROR;
}
+#if GST_VERSION_MAJOR == 1
+ GstClockTime duration = (static_cast<guint64> (gst_buffer_get_size(buffer))
+ * 8 * GST_SECOND) / (channel * rate * audio_width_);
+#else
GstClockTime duration = (static_cast<guint64> (GST_BUFFER_SIZE(buffer))*8*GST_SECOND)
/ (channel*rate*width);
+#endif
int frames = GST_CLOCK_TIME_TO_FRAMES(duration, rate);
data->gst_decoder_->InitializeGstDestination(data->pcm_output_, channel, rate, frames);
data->isNewRequest_ = false;
}
+#if GST_VERSION_MAJOR == 1
+ GstMapInfo gst_map;
+ gst_buffer_map(buffer, &gst_map, (GstMapFlags)(GST_MAP_READ));
+ if (buffer && gst_map.size > 0) {
+ data->gst_decoder_->SendGstOutputUsinghandle(data->pcm_output_,
+ gst_map.data,
+ gst_map.size);
+ gst_buffer_unmap(buffer, &gst_map);
+#else
if (buffer && buffer->size > 0) {
data->gst_decoder_->SendGstOutputUsinghandle(data->pcm_output_,
buffer->data,
buffer->size);
+#endif
}
gst_buffer_unref(buffer);
#include "content/common/gpu/media/tizen/tizen_video_decode_accelerator.h"
-#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
-#include <gst/interfaces/xoverlay.h>
+#include <gst/gst.h>
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
#include "base/synchronization/waitable_event.h"
#include "base/time/time.h"
+#if GST_VERSION_MAJOR == 1
+#include <gst/video/videooverlay.h>
+#else
+#include <gst/interfaces/xoverlay.h>
+#endif
+
using media::VideoFrame;
namespace {
}
};
+// Gstreamer elements and names.
+const char* kDecoderName = "decoder";
+#if GST_VERSION_MAJOR == 1
+const char* kDecoderGstElement = "omxh264dec";
+#else
+const char* kDecoderGstElement = "omx_h264dec";
+#endif
+
} // namespace
namespace content {
g_free(debug);
break;
}
+ default: NOTREACHED();
}
return GST_BUS_PASS;
}
if (!(gst_bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline.get())))) {
return false;
}
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(gst_bus, Impl::OnBusMessage, impl_, NULL);
+#else
gst_bus_set_sync_handler(gst_bus, Impl::OnBusMessage, impl_);
+#endif
gst_object_unref(gst_bus);
// appsrc initialization.
DVLOG(1) << " USING omx_h264dec DECODER " << (unsigned int)this;
DVLOG(1) << "######################################";
// decoder initialization.
- if (!(gst_decoder = gst_element_factory_make("omx_h264dec", "decoder"))) {
- LOG(ERROR) << __FUNCTION__ << " cannot create omx_h264dec.";
+ if (!(gst_decoder = gst_element_factory_make(kDecoderGstElement, kDecoderName))) {
+ LOG(ERROR) << " cannot create " << kDecoderGstElement << ".";
return false;
}
if (!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_decoder)) {
+ LOG(ERROR) << " cannot add " << kDecoderGstElement << " to pipeline.";
gst_object_unref(gst_decoder);
return false;
}
if (!buffer_ref) {
return;
}
-
+#if GST_VERSION_MAJOR == 1
+ buffer_ref->gst_buffer_ =
+ gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
+ static_cast<guint8*>(buffer_ref->shm_->memory()),
+ buffer_ref->size_,
+ 0,
+ buffer_ref->size_,
+ reinterpret_cast<guint8*>(buffer_ref.get()),
+ BitstreamBufferRef::Destruct);
+ if (!buffer_ref->gst_buffer_) {
+ LOG(ERROR) << " gst_buffer_new_wrapped_full failed to allocate memory.!";
+ return;
+ }
+#else
if (!(buffer_ref->gst_buffer_ = gst_buffer_new())) {
return;
}
GST_BUFFER_SIZE(buffer_ref->gst_buffer_) = buffer_ref->size_;
GST_BUFFER_DATA(buffer_ref->gst_buffer_) =
static_cast<guint8*>(buffer_ref->shm_->memory());
-
+#endif
if (GST_FLOW_OK !=
gst_app_src_push_buffer(GST_APP_SRC(impl_->appsrc_),
buffer_ref->gst_buffer_)) {
}
} // namespace content
+
// Max bitrate in bps
MAX_BITRATE = 2000000
};
+// Gstreamer elements and names.
+const char* kEncoderName = "encoder";
+const char* kConvertorName = "cvt";
+#if GST_VERSION_MAJOR == 1
+const char* kEncoderGstElement = "omxh264enc";
+const char* kConvertorGstElement = "fimcconvert";
+#else
+const char* kEncoderGstElement = "omx_h264enc";
+const char* kConvertorGstElement = "c2dconvert";
+#endif
+
media::VideoEncodeAccelerator* CreateTizenVideoEncodeAccelerator() {
return new TizenVideoEncodeAccelerator();
media::BitstreamBuffer* bs_buffer = NULL;
scoped_ptr<base::SharedMemory> shm;
scoped_ptr<GstBuffer, GstBufferDeleter> gst_buffer(buffer);
+ gsize gstBufferSize = 0;
+#if GST_VERSION_MAJOR == 1
+ GstMapInfo map;
+#endif
if (gst_buffer == NULL) {
LOG(ERROR) << __FUNCTION__ << " Invalid buffer for delivering";
LOG(ERROR) << "Failed to map SHM";
return;
}
- if (gst_buffer->size > shm->mapped_size()) {
- LOG(ERROR) << "Encoded buff too large: "
- << gst_buffer->size << ">" << shm->mapped_size();
- return;
+#if GST_VERSION_MAJOR == 1
+ if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
+ LOG (ERROR) << "Encoded Buffer contains invalid or no info.!";
+ return;
+ }
+ gstBufferSize = map.size;
+#else
+ gstBufferSize = gst_buffer->size;
+#endif
+
+ if (gstBufferSize > shm->mapped_size()) {
+ LOG(ERROR) << "Encoded buff too large: "
+ << gstBufferSize << ">" << shm->mapped_size();
+ return;
}
//copying data to shared memory.
- memcpy(static_cast<uint8*>(shm->memory()), gst_buffer->data, gst_buffer->size);
+#if GST_VERSION_MAJOR == 1
+ memcpy(static_cast<uint8*>(shm->memory()), map.data, gstBufferSize);
+#else
+ memcpy(static_cast<uint8*>(shm->memory()), gst_buffer->data, gstBufferSize);
+#endif
child_message_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(&media::VideoEncodeAccelerator::Client::BitstreamBufferReady,
io_client_weak_factory_.GetWeakPtr(),
bs_buffer->id(),
- gst_buffer->size,
+ gstBufferSize,
key_frame));
}
GstBuffer* gst_output_buf = NULL;
TizenVideoEncodeAccelerator::Impl* impl =
static_cast<TizenVideoEncodeAccelerator::Impl*>(data);
-
+#if GST_VERSION_MAJOR == 1
+ gst_output_buf = gst_sample_get_buffer(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
+ GstMapInfo map;
+ if (!gst_buffer_map(gst_output_buf, &map, GST_MAP_READ))
+ LOG (ERROR) << "Encoded Buffer contains invalid or no info!";
+#else
gst_output_buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
+#endif
if (gst_output_buf) {
if (!GST_BUFFER_FLAG_IS_SET(gst_output_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
key_frame = true;
}
+#if GST_VERSION_MAJOR == 1
+ if (map.data) {
+#else
if (gst_output_buf->data) {
+#endif
impl->gst_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(&TizenVideoEncodeAccelerator::Impl::DeliverVideoFrame,
scoped_ptr<BitstreamBufferRef> buffer_ref, bool force_keyframe) {
BitstreamBufferRef* bufref = buffer_ref.release();
-
if (bufref == NULL) {
return;
}
NULL);
#endif
+#if GST_VERSION_MAJOR == 1
+ bufref->gst_buffer_ =
+ gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
+ static_cast<guint8*>(
+ bufref->frame_->data(VideoFrame::kYPlane)),
+ bufref->size_,
+ 0,
+ bufref->size_,
+ reinterpret_cast<uint8*>(bufref),
+ BitstreamBufferRef::Destruct);
+ if (!bufref->gst_buffer_) {
+ LOG(ERROR) << " gst_buffer_new_wrapped_full failed to allocate memory.";
+ return;
+ }
+#else
if (!(bufref->gst_buffer_ = gst_buffer_new())) {
- LOG(ERROR) << __FUNCTION__ << " malloc failed";
+ LOG(ERROR) << __FUNCTION__ << " gst_buffer_new failed to allocate memory.!";
return;
}
GST_BUFFER_SIZE(bufref->gst_buffer_) = bufref->size_;
GST_BUFFER_DATA(bufref->gst_buffer_) =
static_cast<guint8*>(bufref->frame_->data(VideoFrame::kYPlane));
-
+#endif
if (GST_FLOW_OK !=
gst_app_src_push_buffer(GST_APP_SRC(impl_->appsrc_),
bufref->gst_buffer_)) {
}
appsrc_caps = gst_caps_new_simple(
"video/x-raw-yuv",
+#if GST_VERSION_MAJOR == 1
+ "format", G_TYPE_STRING, gst_video_format_to_string(GST_VIDEO_FORMAT_I420),
+#else
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
+#endif
"width", G_TYPE_INT, impl_->view_size_.width(),
"height", G_TYPE_INT, impl_->view_size_.height(),
"framerate", GST_TYPE_FRACTION, 30, 1,
DVLOG(1) << "######################################";
// encoder initialization
- if (!(impl_->encoder_ = gst_element_factory_make("omx_h264enc", "encoder"))) {
- LOG(ERROR) << __FUNCTION__ << " cannot create omx_h264enc encoder.";
+ if (!(impl_->encoder_ = gst_element_factory_make(kEncoderGstElement, kEncoderName))) {
+ LOG(ERROR) << __FUNCTION__ << " cannot create " << kEncoderGstElement << ".";
return false;
}
if(!gst_bin_add(GST_BIN(gst_pipeline.get()), impl_->encoder_)) {
NULL);
// converter initialization
- if (!(gst_converter = gst_element_factory_make("c2dconvert", "cvt"))) {
- LOG(ERROR) << __FUNCTION__ << " cannot create c2dconvert.";
+ if (!(gst_converter = gst_element_factory_make(kConvertorGstElement, kConvertorName))) {
+ LOG(ERROR) << __FUNCTION__ << " cannot create " << kConvertorGstElement << ".";
return false;
}
if(!gst_bin_add(GST_BIN(gst_pipeline.get()), gst_converter)) {
- LOG(ERROR) << __FUNCTION__ << " cannot add c2dconvert into encoder pipeline.";
+ LOG(ERROR) << __FUNCTION__ << " cannot add " << kConvertorGstElement << " into encoder pipeline.";
gst_object_unref(gst_converter);
return false;
}
converter_caps = gst_caps_new_simple(
"video/x-raw-yuv",
+#if GST_VERSION_MAJOR == 1
+ "format", G_TYPE_STRING, gst_video_format_to_string(GST_VIDEO_FORMAT_I420),
+#else
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
+#endif
"width", G_TYPE_INT, impl_->view_size_.width(),
"height", G_TYPE_INT, impl_->view_size_.height(),
"framerate", GST_TYPE_FRACTION, 30, 1,
#include <gst/app/gstappsink.h>
#include <gst/pbutils/install-plugins.h>
#include <gst/pbutils/missing-plugins.h>
-
-#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
-#include <gst/interfaces/xoverlay.h>
-#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
-#endif
#include "base/basictypes.h"
#include "base/message_loop/message_loop_proxy.h"
#include <device/power.h>
#endif
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+#if GST_VERSION_MAJOR == 1
+#include <gst/video/videooverlay.h>
+#else
+#include <gst/interfaces/xoverlay.h>
+#endif
+#endif
+
namespace {
// fourcc for gst-video-format
const int kDurationUpdateInterval = 100;
const int kMaxBuffer = 1;
-// buffer size for audio/video buffering
+// preload buffer size (in bytes) for audio/video buffering
const int kPreloadBufferSize = (3 * 1024 * 1024); // 3MB
const GstClockTime ConvertToGstClockTime(double time) {
return GST_FLOW_OK;
}
-static GstFlowReturn on_gst_appsink_buffer(
+static GstFlowReturn on_gst_appsink_sample(
GstAppSink* sink,
gpointer user_data) {
MediaPlayerBridgeGstreamer* player =
if (!player)
return GST_FLOW_ERROR;
- player->BufferReady(player->PullBuffer());
+ player->SampleReady(player->PullSample());
return GST_FLOW_OK;
}
}
if (gst_is_initialized()) {
+#if GST_VERSION_MAJOR == 1
+ gst_playbin_ = gst_element_factory_make("playbin", kPlaybinName);
+#else
gst_playbin_ = gst_element_factory_make("playbin2", kPlaybinName);
+#endif
gst_appsink_ = GetVideoSink();
if (gst_playbin_ && gst_appsink_) {
g_object_set(gst_playbin_, "video-sink", gst_appsink_, NULL);
// QoS property will enable the quality-of-service features of the
// basesink which gather statistics about the real-time performance
- // of the clock synchronisation. For each buffer received in the sink,
+ // of the clock synchronisation. For each sample received in the sink,
// statistics are gathered and a QOS event is sent upstream with
// these numbers. This information can then be used by upstream
// elements to reduce their processing rate, for example.
HandleError(MediaPlayerTizen::NetworkStateDecodeError);
return;
}
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(
+ bus, (GstBusSyncHandler)gst_pipeline_message_cb, this, NULL);
+#else
gst_bus_set_sync_handler(
bus, (GstBusSyncHandler)gst_pipeline_message_cb, this);
+#endif
gst_object_unref (bus);
#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
G_OBJECT(gst_playbin_), kPropertyUri, url_.spec().c_str(), NULL);
if (gst_element_set_state(
- gst_playbin_, GST_STATE_READY) == GST_STATE_CHANGE_FAILURE)
+ gst_playbin_, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
LOG(ERROR) << "GStreamer state change failed";
manager()->OnReadyStateChange(
if (bus) {
g_signal_handlers_disconnect_by_func(
bus, reinterpret_cast<gpointer>(gst_pipeline_message_cb), this);
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
+#else
gst_bus_set_sync_handler(bus, NULL, NULL);
+#endif
gst_object_unref(bus);
}
gint64 duration = 0;
GstFormat format = GST_FORMAT_TIME;
+#if GST_VERSION_MAJOR == 1
+ gst_element_query_duration(gst_playbin_, format, &duration);
+#else
gst_element_query_duration(gst_playbin_, &format, &duration);
+#endif
duration_ = ConvertNanoSecondsToSeconds(duration);
manager()->OnDurationChange(GetPlayerId(), duration_);
- // No need to buffer 'local file'. Update buffered percentage.
+  // A local file needs no network buffering. Update buffered percentage.
if(is_file_url_) {
std::vector<media::MediaPlayerTizen::TimeRanges> buffer_range;
media::MediaPlayerTizen::TimeRanges range;
// FIXME: Should null be sent here?
}
+#if GST_VERSION_MAJOR == 1
+  gst_element_query_position(gst_playbin_, format, &current_time);
+#else
gst_element_query_position(gst_playbin_, &format, &current_time);
+#endif
return ConvertNanoSecondsToSeconds(current_time);
}
buffering_update_timer_.Stop();
}
-GstBuffer* MediaPlayerBridgeGstreamer::PullBuffer() {
+#if GST_VERSION_MAJOR == 1
+GstSample* MediaPlayerBridgeGstreamer::PullSample() {
+ return gst_app_sink_pull_sample(GST_APP_SINK(gst_appsink_));
+}
+
+void MediaPlayerBridgeGstreamer::SampleReady(GstSample* sample) {
+ GstMapInfo map;
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+
+ if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
+ LOG (ERROR) << "Sample contains invalid or no info!";
+ return;
+ }
+
+ if (!gst_width_ || !gst_height_)
+ GetFrameDetails();
+
+ base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(
+ GST_BUFFER_TIMESTAMP(buffer) / base::Time::kNanosecondsPerMicrosecond);
+
+ if (video_format_ == GST_VIDEO_SN12)
+ shared_memory_size = (bufsize_sn12_);
+ else
+ shared_memory_size = (map.size);
+
+ if (!shared_memory.CreateAndMapAnonymous(shared_memory_size)) {
+ LOG (ERROR) << "Shared Memory creation failed.";
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+ return;
+ }
+ if (!shared_memory.ShareToProcess(
+ base::Process::Current().Handle(), &foreign_memory_handle)) {
+ LOG (ERROR) << "Shared Memory handle could not be obtained";
+ shared_memory.Close();
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+ return;
+ }
+ memcpy(shared_memory.memory(), map.data, shared_memory_size);
+ manager()->OnNewFrameAvailable(
+ GetPlayerId(), foreign_memory_handle, shared_memory_size, timestamp);
+
+ shared_memory.Close();
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+}
+#else
+GstBuffer* MediaPlayerBridgeGstreamer::PullSample() {
return gst_app_sink_pull_buffer(GST_APP_SINK(gst_appsink_));
}
-void MediaPlayerBridgeGstreamer::BufferReady(
+void MediaPlayerBridgeGstreamer::SampleReady(
const GstBuffer* buffer) {
if (!GST_BUFFER_DATA(buffer) || !GST_BUFFER_SIZE(buffer))
return;
shared_memory.Close();
gst_buffer_unref(GST_BUFFER(buffer));
}
+#endif
// Updates networkState and ReadyState based on buffering percentage.
void MediaPlayerBridgeGstreamer::ProcessBufferingStats(GstMessage* message) {
return;
}
}
-#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
- if (!IsXWindowHadleSet()) {
- if (message->structure &&
- gst_structure_has_name(message->structure, "prepare-xid")) {
- VLOG(1) << "Received message : prepare-xid";
- XWindowIdPrepared(message);
- gst_message_unref(message);
- return;
- }
- }
-#endif
+
//FIXME: Add and handle all conditions
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_ERROR:
missing_plugins_ = result == GST_INSTALL_PLUGINS_STARTED_OK;
g_free(detail);
}
+
+ //gst_message_has_name (msg, "prepare-window-handle")
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+ if (!IsXWindowHadleSet() &&
+#if GST_VERSION_MAJOR == 1
+ gst_is_video_overlay_prepare_window_handle_message(message)) {
+#else
+ gst_structure_has_name(message->structure, "prepare-xid")) {
+#endif
+ LOG(INFO) << "Received message : Video overlay prepared";
+ XWindowIdPrepared(message);
+ gst_message_unref(message);
+ return;
+ }
+#endif
break;
default:
LOG(ERROR) << "Unhandled GStreamer message type: "
}
void MediaPlayerBridgeGstreamer::GetFrameDetails() {
- GstBuffer* buffer = gst_app_sink_pull_preroll(GST_APP_SINK(gst_appsink_));
- if (!buffer)
+#if GST_VERSION_MAJOR == 1
+ GstSample* sample = gst_app_sink_pull_preroll(GST_APP_SINK(gst_appsink_));
+ if (!sample)
return;
- GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(buffer));
+ GstCaps* caps = gst_sample_get_caps(sample);
+ if (!caps)
+ return;
+
+ GstVideoInfo vi;
+ gst_video_info_from_caps(&vi, caps);
+
+ gst_width_ = GST_VIDEO_INFO_WIDTH(&vi);
+ gst_height_ = GST_VIDEO_INFO_HEIGHT(&vi);
+ switch(GST_VIDEO_INFO_FORMAT(&vi)) {
+ case GST_VIDEO_FORMAT_I420:{
+ video_format_ = GST_MAKE_FOURCC('I','4','2','0');
+ break;
+ }
+ case GST_VIDEO_FORMAT_NV12:{
+ video_format_ = GST_MAKE_FOURCC('N','V','1','2');
+ break;
+ }
+ default:
+ LOG(ERROR) << "Unknown format : " << GST_VIDEO_INFO_FORMAT(&vi);
+ break;
+ }
+#else
+ GstBuffer* sample = gst_app_sink_pull_preroll(GST_APP_SINK(gst_appsink_));
+ if (!sample)
+ return;
+
+ GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(sample));
if (!caps)
return;
!gst_structure_get_int(str, "height", &gst_height_) ||
!gst_structure_get_fourcc(str, "format", &video_format_))
LOG(ERROR) << "Pre-roll buffer info could not be obtained";
+#endif
- // Need to update frame details before sending buffer.
+ // Need to update frame details before sending sample.
manager()->OnMediaDataChange(
GetPlayerId(), static_cast<int>(video_format_),
gst_height_, gst_width_, media_type_);
SN12_TILE_WIDTH;
}
- BufferReady(buffer);
+ SampleReady(sample);
}
// FIXME: Works well for video playback. Do the same for /consider audio
~GST_PLAY_FLAG_DEINTERLACE & ~GST_PLAY_FLAG_SOFT_COLORBALANCE;
#endif
g_object_set(gst_playbin_, "flags", flags, NULL);
+#if GST_VERSION_MAJOR == 1
+ GstAppSinkCallbacks callbacks = {on_gst_appsink_eos, on_gst_appsink_preroll,
+ on_gst_appsink_sample};
+#else
GstAppSinkCallbacks callbacks = {on_gst_appsink_eos,
- on_gst_appsink_preroll,
- on_gst_appsink_buffer,
- NULL, {NULL, NULL, NULL}};
+ on_gst_appsink_preroll,
+ on_gst_appsink_sample,
+ NULL,
+ {NULL, NULL, NULL}};
+#endif
gst_app_sink_set_callbacks(GST_APP_SINK(gst_appsink_), &callbacks,
this, NULL);
+
g_object_set(G_OBJECT(gst_playbin_), kPropertyBufferSize,
kPreloadBufferSize, NULL);
g_object_set(G_OBJECT(gst_appsink_), kPropertyMaxBuffers,
void MediaPlayerBridgeGstreamer::XWindowIdPrepared(GstMessage* message) {
// It is called just once after video src is set.
- // FIXME: video resolution change should be handled Separatly
+#if GST_VERSION_MAJOR == 1
+ const GstStructure* structure = gst_message_get_structure(message);
+ gst_structure_get_int(structure, "video-width", &gst_width_);
+ gst_structure_get_int(structure, "video-height", &gst_height_);
+#else
gst_structure_get_int(message->structure, "video-width", &gst_width_);
gst_structure_get_int(message->structure, "video-height", &gst_height_);
+#endif
SetPixmap();
}
void MediaPlayerBridgeGstreamer::PlatformSurfaceUpdated() {
gint64 current_time = 0;
GstFormat format = GST_FORMAT_TIME;
+#if GST_VERSION_MAJOR == 1
+  gst_element_query_position(gst_playbin_, format, &current_time);
+#else
gst_element_query_position(gst_playbin_, &format, &current_time);
+#endif
base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(
current_time / base::Time::kNanosecondsPerMicrosecond);
manager()->OnPlatformSurfaceUpdated(GetPlayerId(), pixmap_id_, timestamp);
void HandleMessage(GstMessage* message);
void HandlePluginInstallerResult(GstInstallPluginsReturn result);
- GstBuffer* PullBuffer();
- void BufferReady(const GstBuffer* buffer);
+#if GST_VERSION_MAJOR == 1
+ GstSample* PullSample();
+ void SampleReady(GstSample* buffer);
+#else
+ GstBuffer* PullSample();
+ void SampleReady(const GstBuffer* buffer);
+#endif
void PrerollComplete();
#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
-#include <gst/interfaces/xoverlay.h>
#include <gst/video/video.h>
#include "base/process/process.h"
#include <device/power.h>
#endif
+#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
+#if GST_VERSION_MAJOR == 1
+#include <gst/video/videooverlay.h>
+#else
+#include <gst/interfaces/xoverlay.h>
+#endif
+#endif
+
namespace {
const uint32 GST_VIDEO_SN12 = GST_MAKE_FOURCC('S','N','1','2');
const char *h264elements[] = {
"h264parse"
-#if defined(OS_TIZEN)
+#if defined(OS_TIZEN_MOBILE)
+ , "avdec_h264"
+#elif defined(OS_TIZEN_TV)
, "omx_h264dec"
#else
, "ffdec_h264"
const char *aacelements[] = {
"aacparse"
#if defined(OS_TIZEN_MOBILE)
- , "ffdec_aac", "autoaudiosink"
+ , "avdec_aac", "autoaudiosink"
#elif defined(OS_TIZEN_TV)
, "omx_aacdec", "alsasink"
#else
// Also the connection might be different when converting it to code.
const char *mp3elements[] = {
"mpegaudioparse"
+#if defined(OS_TIZEN_MOBILE)
+ , "avdec_mp3"
+#else
, "ffdec_mp3"
+#endif
,"autoaudiosink"
};
return GST_FLOW_OK;
}
-static GstFlowReturn on_gst_appsink_buffer(
+static GstFlowReturn on_gst_appsink_sample(
GstAppSink* sink,
gpointer user_data) {
MediaSourcePlayerGstreamer* player =
static_cast<MediaSourcePlayerGstreamer*>(user_data);
if (!player || player->IsPlayerDestructing())
return GST_FLOW_ERROR;
- player->OnNewFrameAvailable(player->PullBuffer());
+ player->OnNewFrameAvailable(player->PullSample());
return GST_FLOW_OK;
}
gint64 current_time = 0;
GstFormat format = GST_FORMAT_TIME;
+#if GST_VERSION_MAJOR == 1
+  gst_element_query_position(pipeline_, format, &current_time);
+#else
gst_element_query_position(pipeline_, &format, &current_time);
+#endif
return ConvertNanoSecondsToSeconds(current_time);
}
if (bus) {
g_signal_handlers_disconnect_by_func(
bus, reinterpret_cast<gpointer>(gst_pipeline_message_cb), this);
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
+#else
gst_bus_set_sync_handler(bus, NULL, NULL);
+#endif
gst_object_unref(bus);
}
// Will trigger need-data callback if buffer goes below 30%,
// default is 10%. Data type of property "low-percent" differs
// on desktop and TIZEN platform.
-#if defined(OS_TIZEN)
+#if GST_VERSION_MAJOR < 1
g_object_set(G_OBJECT(video_queue_), "low-percent", (double)30.0, NULL);
#else
g_object_set(G_OBJECT(video_queue_), "low-percent", 30, NULL);
g_object_set(G_OBJECT(audio_queue_), "use-buffering", true, NULL);
-#if defined(OS_TIZEN)
+#if GST_VERSION_MAJOR < 1
g_object_set(
G_OBJECT(audio_queue_), "low-percent", (double)30.0, NULL);
#else
HandleError(MediaPlayerTizen::NetworkStateDecodeError);
return;
}
+#if GST_VERSION_MAJOR == 1
+ gst_bus_set_sync_handler(
+ bus, (GstBusSyncHandler)gst_pipeline_message_cb, this, NULL);
+#else
gst_bus_set_sync_handler(
bus, (GstBusSyncHandler)gst_pipeline_message_cb, this);
+#endif
gst_object_unref(bus);
manager()->OnMediaDataChange(GetPlayerId(), video_format_,
is_xwindow_handle_set_ = true;
VLOG(1) << "MediaSource using |appsink| for Video Playback";
video_sink_ = gst_element_factory_make("appsink", "sink");
+#if GST_VERSION_MAJOR == 1
+ GstAppSinkCallbacks callbacks = {NULL, on_gst_appsink_preroll,
+ on_gst_appsink_sample};
+#else
GstAppSinkCallbacks callbacks = {NULL,
- on_gst_appsink_preroll,
- on_gst_appsink_buffer,
- NULL, {NULL, NULL, NULL}};
- gst_app_sink_set_callbacks(GST_APP_SINK(video_sink_), &callbacks,
- this, NULL);
+ on_gst_appsink_preroll,
+ on_gst_appsink_sample,
+ NULL,
+ {NULL, NULL, NULL}};
+#endif
+ gst_app_sink_set_callbacks(GST_APP_SINK(video_sink_), &callbacks, this,
+ NULL);
g_object_set(G_OBJECT(video_sink_), "max-buffers", (guint)1, NULL);
}
ptr = g_malloc(size);
memcpy(ptr, shared_memory.memory(), size);
+#if GST_VERSION_MAJOR == 1
+ GstBuffer* buffer = gst_buffer_new_allocate(NULL, meta_data.size, NULL);
+ gst_buffer_fill (buffer, 0, ptr, meta_data.size);
+#else
GstBuffer* buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = (uint8*)ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
+#endif
GST_BUFFER_TIMESTAMP (buffer) =
(guint64)(meta_data.timestamp.InMicroseconds() * 1000);
GST_BUFFER_DURATION (buffer) =
gint size = decoder_buffer.get()->data_size();
ptr = g_malloc(size);
memcpy(ptr, (void*)decoder_buffer.get()->writable_data(), size);
+#if GST_VERSION_MAJOR == 1
+ GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
+ gst_buffer_fill (buffer, 0, ptr, size);
+#else
GstBuffer* buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = (uint8*)ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
+#endif
GST_BUFFER_TIMESTAMP (buffer) =
(guint64)(decoder_buffer.get()->timestamp().InMicroseconds() * 1000);
GST_BUFFER_DURATION (buffer) =
void MediaSourcePlayerGstreamer::OnGetFrameDetails() {
if (!pipeline_ || IsPlayerDestructing() || error_occured_)
return;
+#if GST_VERSION_MAJOR == 1
+ GstSample* sample = gst_app_sink_pull_preroll(GST_APP_SINK(video_sink_));
+ if (!sample)
+ return;
+
+  GstCaps* caps = gst_sample_get_caps(sample);
+  if (!caps) {
+    gst_sample_unref(sample);
+    return;
+  }
-  GstBuffer* buffer = gst_app_sink_pull_preroll(GST_APP_SINK(video_sink_));
+  GstVideoInfo vi;
+  gst_video_info_from_caps(&vi, caps);
-  if (!buffer)
+  gst_width_ = GST_VIDEO_INFO_WIDTH(&vi);
+  gst_height_ = GST_VIDEO_INFO_HEIGHT(&vi);
+ switch(GST_VIDEO_INFO_FORMAT(&vi)) {
+ case GST_VIDEO_FORMAT_I420:{
+ video_format_ = GST_MAKE_FOURCC('I','4','2','0');
+ break;
+ }
+ case GST_VIDEO_FORMAT_NV12:{
+ video_format_ = GST_MAKE_FOURCC('N','V','1','2');
+ break;
+ }
+    default:
+      LOG(ERROR) << "Unknown format : " << GST_VIDEO_INFO_FORMAT(&vi);
+      break;
+  }
+  // |sample| from gst_app_sink_pull_preroll() is owned by us in GST 1.0.
+  gst_sample_unref(sample);
+#else
+
+ GstBuffer* sample = gst_app_sink_pull_preroll(GST_APP_SINK(video_sink_));
+
+ if (!sample)
return;
- GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(buffer));
+ GstCaps* caps = gst_buffer_get_caps(GST_BUFFER(sample));
if (!caps) {
- gst_buffer_unref(buffer);
+ gst_buffer_unref(sample);
return;
}
// No need to unref |GstStructure|
const GstStructure* str = gst_caps_get_structure(caps, 0);
gst_caps_unref(caps);
- gst_buffer_unref(buffer);
+ gst_buffer_unref(sample);
if (!str)
return;
if (!gst_structure_get_int(str, "width", &gst_width_) ||
!gst_structure_get_int(str, "height", &gst_height_) ||
!gst_structure_get_fourcc(str, "format", &video_format_)) {
- LOG(ERROR) << "Pre-rolled buffer information could not be obtained";
+ LOG(ERROR) << "Pre-rolled sample information could not be obtained";
}
+#endif
if(video_format_ == GST_VIDEO_SN12) {
uint tile_w_align = ((gst_width_ - 1) / SN12_TILE_WIDTH + 2) & ~1;
gst_height_, gst_width_, media_type);
}
-GstBuffer* MediaSourcePlayerGstreamer::PullBuffer() {
+#if GST_VERSION_MAJOR == 1
+GstSample* MediaSourcePlayerGstreamer::PullSample() {
+ return gst_app_sink_pull_sample(GST_APP_SINK(video_sink_));
+}
+
+void MediaSourcePlayerGstreamer::OnNewFrameAvailable(GstSample* sample) {
+ if (!pipeline_ || error_occured_)
+ return;
+
+ GstMapInfo map;
+ GstBuffer* buffer = gst_sample_get_buffer(sample);
+  if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
+    LOG (ERROR) << "Sample contains invalid or no info!";
+    gst_sample_unref(sample);
+    return;
+  }
+
+ if (!gst_width_ || !gst_height_)
+ GetFrameDetails();
+
+ base::TimeDelta timestamp =
+ base::TimeDelta::FromMicroseconds(
+ GST_BUFFER_TIMESTAMP(buffer) /
+ base::Time::kNanosecondsPerMicrosecond);
+
+ if(video_format_ == GST_VIDEO_SN12)
+ shared_memory_size = (bufsize_sn12_);
+ else
+ shared_memory_size = (map.size);
+
+ if (!shared_memory.CreateAndMapAnonymous(shared_memory_size)) {
+ LOG (ERROR) << "Shared Memory creation failed.";
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+ return;
+ }
+
+ if (!shared_memory.ShareToProcess(base::Process::Current().Handle(),
+ &foreign_memory_handle)) {
+ LOG (ERROR) << "Shared Memory handle could not be obtained";
+ shared_memory.Close();
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+ return;
+ }
+
+ memcpy(shared_memory.memory(), map.data, shared_memory_size);
+ manager()->OnNewFrameAvailable(
+ GetPlayerId(), foreign_memory_handle, shared_memory_size, timestamp);
+
+ shared_memory.Close();
+ gst_buffer_unmap(buffer, &map);
+ gst_sample_unref(sample);
+}
+#else
+GstBuffer* MediaSourcePlayerGstreamer::PullSample() {
return gst_app_sink_pull_buffer(GST_APP_SINK(video_sink_));
}
shared_memory.Close();
gst_buffer_unref(GST_BUFFER(buffer));
}
+#endif
#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
void MediaSourcePlayerGstreamer::XWindowIdPrepared(GstMessage* message) {
gint width, height;
+#if GST_VERSION_MAJOR == 1
+ const GstStructure* structure = gst_message_get_structure(message);
+
+ gst_structure_get_int(structure, "video-width", &width);
+ gst_structure_get_int(structure, "video-height", &height);
+#else
gst_structure_get_int(message->structure, "video-width", &width);
gst_structure_get_int(message->structure, "video-height", &height);
+#endif
if ((gst_width_ != width) || (gst_height_ != height)) {
LOG(ERROR) << "Demuxer Video Configs and Gstreamer Video Configs doesn't"
<<" match.From Demuxer : width : "<<gst_width_
void MediaSourcePlayerGstreamer::PlatformSurfaceUpdated() {
gint64 current_time = 0;
GstFormat format = GST_FORMAT_TIME;
+#if GST_VERSION_MAJOR == 1
+  gst_element_query_position(pipeline_, format, &current_time);
+#else
  gst_element_query_position(pipeline_, &format, &current_time);
+#endif
base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(
current_time / base::Time::kNanosecondsPerMicrosecond);
manager()->OnPlatformSurfaceUpdated(GetPlayerId(), pixmap_id_, timestamp);
}
void MediaSourcePlayerGstreamer::SetPixmap() {
+#if GST_VERSION_MAJOR == 1
+ gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(video_sink_), pixmap_id_);
+#else
#if defined(OS_TIZEN_TV)
// Using below statements on mobile to set pixmap was causing two issue.
// 1. Video size was different than the required one whenever configaration
#else
gst_x_overlay_set_window_handle(GST_X_OVERLAY(video_sink_), pixmap_id_);
#endif
+#endif
m_damage = ecore_x_damage_new(pixmap_id_,
ECORE_X_DAMAGE_REPORT_RAW_RECTANGLES);
m_damageHandler = ecore_event_handler_add(ECORE_X_EVENT_DAMAGE_NOTIFY,
VLOG(1) << "Video Configs Changed, so changing the pixmap";
gint width, height;
- if (gst_video_get_size(video_sink_pad_, &width, &height)) {
- if ((gst_width_ != width) || (gst_height_ != height)) {
- LOG(ERROR) << "Demuxer Video Configs and Gstreamer Video Configs doesn't"
- <<" match.From Demuxer : width : "<<gst_width_
- << " and height :" <<gst_height_
- << " | From Gstreamer width : " <<width
- << " and Height : " <<height;
- gst_width_ = width;
- gst_height_ = height;
- UnregisterDamageHandler();
- CreatePixmap();
- SetPixmap();
- manager()->OnMediaDataChange(
- GetPlayerId(), video_format_, gst_height_, gst_width_, media_type);
+#if GST_VERSION_MAJOR == 1
+  GstCaps* caps = gst_pad_get_current_caps(GST_PAD(video_sink_pad_));
+  if (!caps)
+    return;
+
+  GstVideoInfo info;
+  gst_video_info_init(&info);
+  if (!gst_video_info_from_caps(&info, caps)) {
+    gst_caps_unref(caps);
+    return;
+  }
+  // gst_pad_get_current_caps() returns a reference we must release.
+  gst_caps_unref(caps);
+  width = info.width;
+  height = info.height;
+#else
+  // gst_video_get_size() returns TRUE on success, so bail out on failure.
+  if (!gst_video_get_size(video_sink_pad_, &width, &height)) {
+    LOG(ERROR) << "Cannot get width / height!";
+    return;
+  }
+#endif
+ if ((gst_width_ != width) || (gst_height_ != height)) {
+ LOG(ERROR) << "Demuxer Video Configs and Gstreamer Video Configs doesn't"
+ <<" match.From Demuxer : width : "<<gst_width_
+ << " and height :" <<gst_height_
+ << " | From Gstreamer width : " <<width
+ << " and Height : " << height;
+ gst_width_ = width;
+ gst_height_ = height;
+ UnregisterDamageHandler();
+ CreatePixmap();
+ SetPixmap();
+ manager()->OnMediaDataChange(
+ GetPlayerId(), video_format_, gst_height_, gst_width_, media_type);
+ }
}
void MediaSourcePlayerGstreamer::CreatePixmap() {
<< " received from element " << GST_MESSAGE_SRC_NAME(message)
<< " ID " << GetPlayerId();
+ switch (GST_MESSAGE_TYPE(message)) {
+ case GST_MESSAGE_ELEMENT:
#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
- if (!IsXWindowHadleSet()) {
- if (message->structure && gst_structure_has_name(
- message->structure, "prepare-xid")) {
- VLOG(1) << "Received message-prepare-xid";
+ if (!IsXWindowHadleSet() &&
+#if GST_VERSION_MAJOR == 1
+ gst_is_video_overlay_prepare_window_handle_message(message)) {
+#else
+ message->structure &&
+ gst_structure_has_name(message->structure, "prepare-xid")) {
+#endif
+ LOG(INFO) << "Received message : Video overlay prepared";
XWindowIdPrepared(message);
return;
}
- }
 #endif
+      // Prevent GST_MESSAGE_ELEMENT from falling through into the
+      // GST_MESSAGE_ERROR handler.
+      break;
-
-  switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_ERROR:
GError* error;
gst_message_parse_error(message, &error, NULL);
void HandleMessage(GstMessage* message);
// AppSink related
- GstBuffer* PullBuffer();
void GetFrameDetails();
+#if GST_VERSION_MAJOR == 1
+ GstSample* PullSample();
+ void OnNewFrameAvailable(GstSample* sample);
+#else
+ GstBuffer* PullSample();
void OnNewFrameAvailable(const GstBuffer* buffer);
+#endif
#if defined(TIZEN_MULTIMEDIA_PIXMAP_SUPPORT)
void PlatformSurfaceUpdated();
BuildRequires: pkgconfig(ttrace)
BuildRequires: pkgconfig(capi-network-connection)
BuildRequires: pkgconfig(capi-system-sensor)
+BuildRequires: pkgconfig(gstreamer-1.0)
+BuildRequires: pkgconfig(gstreamer-plugins-base-1.0)
+BuildRequires: pkgconfig(gstreamer-app-1.0)
%else # TIZEN_PROFILE_TV
BuildRequires: pkgconfig(audio-session-mgr)
BuildRequires: pkgconfig(mm-session)
-%endif
BuildRequires: pkgconfig(gstreamer-0.10)
BuildRequires: pkgconfig(gstreamer-plugins-base-0.10)
BuildRequires: pkgconfig(gstreamer-app-0.10)
+%endif
BuildRequires: pkgconfig(capi-media-camera)
BuildRequires: pkgconfig(capi-media-audio-io)
BuildRequires: pkgconfig(capi-media-player)