return type;
}
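+// Maps an appsrc element to its track type using the element name assigned
+// in CreateAppSrc_() ("audio_appsrc", "video_appsrc", "subtitle_appsrc").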
+TrackType TrackRenderer::GetTrackType_(GstElement* element) {
+ TrackType type = kTrackTypeMax;
+ if (strstr(GST_ELEMENT_NAME(element), "audio"))
+ type = kTrackTypeAudio;
+ else if (strstr(GST_ELEMENT_NAME(element), "video"))
+ type = kTrackTypeVideo;
+ else if (strstr(GST_ELEMENT_NAME(element), "subtitle"))
+ type = kTrackTypeSubtitle;
+ else
+ TRACKRENDERER_ERROR("invalid appsrc %s", GST_ELEMENT_NAME(element));
+ return type;
+}
+
bool TrackRenderer::SetMatroskaColorInfo(const std::string& color_info) {
TRACKRENDERER_ENTER;
std::unique_lock<std::mutex> lock(resource_m_);
return true;
}
-void TrackRenderer::CreateAppSrc_(TrackType type, const std::string& mimetype) {
- TRACKRENDERER_ENTER;
- if (internal::IsTzAppSrcElementNecessary(drm_property_, mimetype)) {
- if (type == kTrackTypeVideo) {
- pipeline_->FactoryMake(Elements::kAppSrcVideo, "tzappsrc",
- "tz_video_appsrc");
- } else {
- pipeline_->FactoryMake(Elements::kAppSrcAudio, "tzappsrc",
- "tz_audio_appsrc");
- }
- } else {
- if (type == kTrackTypeVideo) {
- pipeline_->FactoryMake(Elements::kAppSrcVideo, "appsrc", "video_appsrc");
- pipeline_->PadAddProbe(Elements::kAppSrcVideo, kPadProbeAppsrcEvent, "src",
- (GstPadProbeType)(GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
- GstPadProbeAppsrcEventCb_, this, nullptr);
- } else {
- pipeline_->FactoryMake(Elements::kAppSrcAudio, "appsrc", "audio_appsrc");
- pipeline_->PadAddProbe(Elements::kAppSrcAudio, kPadProbeAppsrcEvent, "src",
- (GstPadProbeType)(GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
- GstPadProbeAppsrcEventCb_, this, nullptr);
- }
+void TrackRenderer::CreateAppSrc_(TrackType type, const GstCaps* caps) {
+ TRACKRENDERER_ENTER;
+
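+  // Default to the subtitle appsrc; audio and video tracks override it below.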
+ Elements element = Elements::kAppSrcSubtitle;
+ const char* appsrc_name = "subtitle_appsrc";
+
+ if (type == kTrackTypeAudio) {
+ element = Elements::kAppSrcAudio;
+ appsrc_name = "audio_appsrc";
+ } else if (type == kTrackTypeVideo) {
+ element = Elements::kAppSrcVideo;
+ appsrc_name = "video_appsrc";
}
+
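+  // Shared appsrc setup: event probe, data-flow signals and time-based format.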
+ pipeline_->FactoryMake(element, "appsrc", appsrc_name);
+ pipeline_->PadAddProbe(element, kPadProbeAppsrcEvent, "src",
+ (GstPadProbeType)(GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
+ GstPadProbeAppsrcEventCb_, this, nullptr);
+ pipeline_->SignalConnect(element, "need-data",
+ G_CALLBACK(GstNeedDataCb_), this);
+ pipeline_->SignalConnect(element, "enough-data",
+ G_CALLBACK(GstEnoughDataCb_), this);
+ pipeline_->SignalConnect(element, "seek-data",
+ G_CALLBACK(GstSeekDataCb_), this);
+
+ pipeline_->SetProperty(element, "format", GST_FORMAT_TIME);
+
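+  // The subtitle appsrc keeps the default stream type and is given no caps.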
+ if (type == kTrackTypeSubtitle)
+ return;
+
+ pipeline_->SetProperty(element, "stream-type", GST_APP_STREAM_TYPE_SEEKABLE);
+ if (caps)
+ pipeline_->SetAppSrcCaps(element, caps);
+
TRACKRENDERER_LEAVE;
}
bool TrackRenderer::CreateVideoPipeline_(const Track* track) {
TRACKRENDERER_ENTER;
- CreateAppSrc_(kTrackTypeVideo, track->mimetype);
auto caps = caps_builder_.Build(*track, internal::IsDrmEmeElementNecessary(
drm_property_, track->mimetype));
+ CreateAppSrc_(kTrackTypeVideo, caps.GetCaps_());
+
if (internal::IsDecryptorElementNecessary(drm_property_)) {
auto caps_for_parser = caps_builder_.BuildOrgMediaType(*track, internal::IsDrmEmeElementNecessary(
drm_property_, track->mimetype));
pipeline_->SignalConnect(Elements::kCCExtractor, "pad-added",
G_CALLBACK(GstClosedCaptionPadAddedCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcVideo, "need-data",
- G_CALLBACK(GstVideoNeedDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcVideo, "enough-data",
- G_CALLBACK(GstVideoEnoughDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcVideo, "seek-data",
- G_CALLBACK(GstVideoSeekDataCb_), this);
- pipeline_->SetProperty(Elements::kAppSrcVideo, "format", GST_FORMAT_TIME);
// TODO: need to implement drmdecryptor plugin
// FIXME: drm plugin should be added before parser
pipeline_->BinAddSimple(Elements::kPipeline, Elements::kBinVideo);
- pipeline_->SetAppSrcCaps(Elements::kAppSrcVideo, caps);
-
- pipeline_->SetProperty(Elements::kAppSrcVideo, "stream-type",
- GST_APP_STREAM_TYPE_SEEKABLE);
-
TRACKRENDERER_LEAVE;
return true;
}
bool TrackRenderer::CreateAudioPipeline_(const Track* track) {
TRACKRENDERER_ENTER;
- CreateAppSrc_(kTrackTypeAudio, track->mimetype);
auto caps = caps_builder_.Build(*track, false);
  if (track->mimetype.find("audio/x-opus") != std::string::npos)
    SetOpusChannelMappingFamily(caps);
+  // Apply the Opus channel-mapping tweak before the caps are handed to the appsrc.
+  CreateAppSrc_(kTrackTypeAudio, caps.GetCaps_());
GstElementLowLatency_(kTrackTypeAudio);
pipeline_->CreateBin(Elements::kBinAudio, "audiobin");
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "need-data",
- G_CALLBACK(GstAudioNeedDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "enough-data",
- G_CALLBACK(GstAudioEnoughDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "seek-data",
- G_CALLBACK(GstAudioSeekDataCb_), this);
- pipeline_->SetProperty(Elements::kAppSrcAudio, "format", GST_FORMAT_TIME);
-
pipeline_->FactoryMake(Elements::kAudioConvert, "audioconvert", nullptr);
pipeline_->FactoryMake(Elements::kAudioResample, "audioresample", nullptr);
pipeline_->BinAddSimple(Elements::kPipeline, Elements::kBinAudio);
- pipeline_->SetAppSrcCaps(Elements::kAppSrcAudio, caps);
-
- pipeline_->SetProperty(Elements::kAppSrcAudio, "stream-type",
- GST_APP_STREAM_TYPE_SEEKABLE);
-
- if (volume_ != kVolumeNone) {
+ if (volume_ != kVolumeNone)
SetVolume_();
- }
+
TRACKRENDERER_LEAVE;
return true;
}
bool TrackRenderer::CreateRawAudioPipeline_(const Track* track) {
- TRACKRENDERER_ENTER;
- CreateAppSrc_(kTrackTypeAudio, track->mimetype);
+ TRACKRENDERER_ENTER;
+
+ auto caps = caps_builder_.Build(*track, false);
+ CreateAppSrc_(kTrackTypeAudio, caps.GetCaps_());
pipeline_->FactoryMake(Elements::kSinkAudio, kAudioSinkName, NULL);
pipeline_->SetProperty(Elements::kSinkAudio, "drift-tolerance",
GstElementLowLatency_(kTrackTypeAudio);
pipeline_->CreateBin(Elements::kBinAudio, "audiobin");
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "need-data",
- G_CALLBACK(GstAudioNeedDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "enough-data",
- G_CALLBACK(GstAudioEnoughDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcAudio, "seek-data",
- G_CALLBACK(GstAudioSeekDataCb_), this);
- pipeline_->SetProperty(Elements::kAppSrcAudio, "format", GST_FORMAT_TIME);
-
pipeline_->FactoryMake(Elements::kAudioConvert, "audioconvert", nullptr);
pipeline_->FactoryMake(Elements::kCapsFillterDefault, "capsfilter", nullptr);
auto caps1 = gstguard::make_guard(
pipeline_->BinAddSimple(Elements::kPipeline, Elements::kBinAudio);
- auto caps = caps_builder_.Build(*track, false);
- pipeline_->SetAppSrcCaps(Elements::kAppSrcAudio, caps);
-
- pipeline_->SetProperty(Elements::kAppSrcAudio, "stream-type",
- GST_APP_STREAM_TYPE_SEEKABLE);
-
- if (volume_ != kVolumeNone) {
+ if (volume_ != kVolumeNone)
SetVolume_();
- }
+
TRACKRENDERER_LEAVE;
return true;
}
bool TrackRenderer::CreateSubtitlePipeline_(const Track* track) {
- TRACKRENDERER_ENTER;
- pipeline_->FactoryMake(Elements::kAppSrcSubtitle, "appsrc",
- "subtitle_appsrc");
- pipeline_->PadAddProbe(Elements::kAppSrcSubtitle, kPadProbeAppsrcEvent, "src",
- (GstPadProbeType)(GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
- GstPadProbeAppsrcEventCb_, this, nullptr);
+ TRACKRENDERER_ENTER;
+
+ CreateAppSrc_(kTrackTypeSubtitle, nullptr);
pipeline_->FactoryMake(Elements::kSinkSubtitle, "fakesink", "subtitle_sink");
+
pipeline_->CreateBin(Elements::kBinSubtitle, "subtitlebin");
- pipeline_->SignalConnect(Elements::kAppSrcSubtitle, "need-data",
- G_CALLBACK(GstSubtitleNeedDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcSubtitle, "enough-data",
- G_CALLBACK(GstSubtitleEnoughDataCb_), this);
- pipeline_->SignalConnect(Elements::kAppSrcSubtitle, "seek-data",
- G_CALLBACK(GstSubtitleSeekDataCb_), this);
- pipeline_->SetProperty(Elements::kAppSrcSubtitle, "format", GST_FORMAT_TIME);
pipeline_->BinAdd(Elements::kBinSubtitle, Elements::kAppSrcSubtitle,
Elements::kSinkSubtitle);
pipeline_->BinAddSimple(Elements::kPipeline, Elements::kBinSubtitle);
return GST_PAD_PROBE_REMOVE;
}
-void TrackRenderer::GstAudioNeedDataCb_(GstElement* element, guint size,
- gpointer userdata) {
- auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- trackrenderer->trackctx_[kTrackTypeAudio].is_enough_data = false;
- trackrenderer->eventlistener_->OnBufferStatus(kTrackTypeAudio,
- BufferStatus::kUnderrun);
-}
-
-void TrackRenderer::GstVideoNeedDataCb_(GstElement* element, guint size,
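+// Unified need-data handler; the track type is resolved from the appsrc element name.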
+void TrackRenderer::GstNeedDataCb_(GstElement* element, guint size,
gpointer userdata) {
auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- trackrenderer->trackctx_[kTrackTypeVideo].is_enough_data = false;
-
- trackrenderer->eventlistener_->OnBufferStatus(kTrackTypeVideo,
- BufferStatus::kUnderrun);
-}
-
-void TrackRenderer::GstSubtitleNeedDataCb_(GstElement* element, guint size,
- gpointer userdata) {
- auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- if (trackrenderer->trackctx_[kTrackTypeSubtitle].index == kInvalidTrackIndex)
+ TrackType type = trackrenderer->GetTrackType_(element);
+ if (type == kTrackTypeMax) {
+ TRACKRENDERER_ERROR("invalid track type");
return;
- trackrenderer->trackctx_[kTrackTypeSubtitle].is_enough_data = false;
+ }
+ trackrenderer->trackctx_[type].is_enough_data = false;
+ trackrenderer->eventlistener_->OnBufferStatus(type,
+ BufferStatus::kUnderrun);
}
-void TrackRenderer::GstAudioEnoughDataCb_(GstElement* element,
+void TrackRenderer::GstEnoughDataCb_(GstElement* element,
gpointer userdata) {
auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- trackrenderer->trackctx_[kTrackTypeAudio].is_enough_data = true;
- trackrenderer->eventlistener_->OnBufferStatus(kTrackTypeAudio,
+ TrackType type = trackrenderer->GetTrackType_(element);
+ if (type == kTrackTypeMax) {
+ TRACKRENDERER_ERROR("invalid track type");
+ return;
+ }
+
+ trackrenderer->trackctx_[type].is_enough_data = true;
+ trackrenderer->eventlistener_->OnBufferStatus(type,
BufferStatus::kOverrun);
}
-void TrackRenderer::GstVideoEnoughDataCb_(GstElement* element,
- gpointer userdata) {
+gboolean TrackRenderer::GstSeekDataCb_(GstElement* element, guint64 offset,
+ gpointer userdata) {
TRACKRENDERER_ENTER;
- auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- trackrenderer->trackctx_[kTrackTypeVideo].is_enough_data = true;
- trackrenderer->eventlistener_->OnBufferStatus(kTrackTypeVideo,
- BufferStatus::kOverrun);
- TRACKRENDERER_LEAVE;
-}
+ TRACKRENDERER_INFO("offset : %" PRIu64 "", offset);
-void TrackRenderer::GstSubtitleEnoughDataCb_(GstElement* element,
- gpointer userdata) {
auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- if (trackrenderer->trackctx_[kTrackTypeSubtitle].index == kInvalidTrackIndex)
- return;
- trackrenderer->trackctx_[kTrackTypeSubtitle].is_enough_data = true;
-}
+ TrackType type = trackrenderer->GetTrackType_(element);
+ if (type == kTrackTypeMax) {
+ TRACKRENDERER_ERROR("invalid track type");
+ return false;
+ }
-gboolean TrackRenderer::GstAudioSeekDataCb_(GstElement* element, guint64 offset,
- gpointer user_data) {
- TRACKRENDERER_ENTER;
- TRACKRENDERER_INFO("offset : %" PRIu64 "", offset);
- auto trackrenderer = static_cast<TrackRenderer*>(user_data);
- trackrenderer->eventlistener_->OnSeekData(kTrackTypeAudio,
- offset / GST_MSECOND);
- TRACKRENDERER_LEAVE;
- return true;
-}
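+  // For subtitle, acknowledge seek-data without forwarding it to the listener.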
+ if (type == kTrackTypeSubtitle)
+ return true;
-gboolean TrackRenderer::GstVideoSeekDataCb_(GstElement* element, guint64 offset,
- gpointer user_data) {
- TRACKRENDERER_ENTER;
- TRACKRENDERER_INFO("offset : %" PRIu64 "", offset);
- auto trackrenderer = static_cast<TrackRenderer*>(user_data);
- trackrenderer->eventlistener_->OnSeekData(kTrackTypeVideo,
- offset / GST_MSECOND);
+ trackrenderer->eventlistener_->OnSeekData(type, offset / GST_MSECOND);
TRACKRENDERER_LEAVE;
return true;
}
-gboolean TrackRenderer::GstSubtitleSeekDataCb_(GstElement* element,
- guint64 offset,
- gpointer user_data) {
- TRACKRENDERER_ENTER;
- TRACKRENDERER_INFO("offset : %" PRIu64 "", offset);
- return true;
-}
-
gboolean TrackRenderer::GstAudioDrmInitDataCb_(int* drmhandle, unsigned int len,
unsigned char* psshdata,
void* userdata) {