extern "C" {
#endif
+/**
+ * @brief Enumeration of result states for getting a decoded video packet.
+ * Must stay in sync with plusplayer::trackrenderer::GetDecodedVideoFrameState
+ * (see capi_utils::ConverToGetDecodedVideoFrameState()).
+ */
+enum TrackRendererGetDecodedVideoFrameState {
+ TrackRendererGetDecodedVideoFrameStateErrorNone,
+ TrackRendererGetDecodedVideoFrameStateNoRemainingBufferError,
+ TrackRendererGetDecodedVideoFrameStateNoFilledBufferError,
+ TrackRendererGetDecodedVideoFrameStateUnknownError,
+};
+
typedef void* TrackRendererHandle;
/**
TrackRendererDisplayType type,
void* ecore_wl2_window, int x,
int y, int w, int h);
+/**
+ * @brief Gets a decoded video frame. Before calling this, the user must
+ * enable manual copy mode via trackrenderer_set_video_frame_buffer_type(
+ * handle, kTrackRendererDecodedVideoFrameBufferManualCopy).
+ *
+ * @param [in] handle : trackrenderer handle ptr.
+ * @param [out] packet : decoded video packet. Must be handed back with
+ * trackrenderer_return_decoded_video_frame() when no longer needed.
+ * @param [out] state : detailed result state. May be NULL if the caller
+ * does not need it.
+ * @return 0 if trackrenderer gets the decoded video frame correctly,
+ * otherwise -1.
+ * @pre None
+ * @post None
+ * @see trackrenderer_return_decoded_video_frame()
+ */
+int trackrenderer_get_decoded_video_frame(
+ TrackRendererHandle handle, TrackRendererDecodedVideoPacket* packet,
+ TrackRendererGetDecodedVideoFrameState* state);
+
+/**
+ * @brief Returns a decoded video packet previously acquired with
+ * trackrenderer_get_decoded_video_frame(), releasing the resources the
+ * packet holds.
+ *
+ * @param [in] handle : trackrenderer handle ptr.
+ * @param [in] packet : decoded video packet user wants to return.
+ * @return 0 if trackrenderer returns the packet correctly, otherwise -1.
+ * @pre None
+ * @post None
+ * @see trackrenderer_get_decoded_video_frame()
+ */
+int trackrenderer_return_decoded_video_frame(
+ TrackRendererHandle handle, const TrackRendererDecodedVideoPacket* packet);
+
#ifdef __cplusplus
}
#endif
Name: libtrackrenderer
Summary: new multimedia streaming player trackrenderer
-Version: 0.0.28
+Version: 0.0.29
Release: 0
Group: Multimedia/Libraries
License: Apache-2.0
kOverrun // buffer everrun
};
+/**
+ * @brief Result states for fetching a decoded video packet
+ * (see TrackRenderer::GetDecodedPacket()). Mirrored in the C API as
+ * TrackRendererGetDecodedVideoFrameState.
+ */
+enum class GetDecodedVideoFrameState {
+ kErrorNone,
+ kNoRemainingBufferError,
+ kNoFilledBufferError,
+ kUnknownError,
+};
+
enum class DecodedVideoFrameBufferType { kNone, kCopy, kReference };
struct DecodedVideoPacket {
uint64_t pts = 0;
uint64_t duration = 0;
- tbm_surface_h surface_data = nullptr; // tbm_surface
+ void *surface_data = nullptr; // tbm_surface (tbm_surface_h; stored as void*, presumably to drop the tbm header dependency — confirm)
void *buffer_addr = nullptr;
};
void SetVideoFrameBufferType(DecodedVideoFrameBufferType type) {
decoded_buffer_type_ = type;
}
+ /**
+  * @brief Fills |packet| with the latest decoded video frame; the packet
+  * must later be handed back via ReturnDecodedPacket().
+  */
+ GetDecodedVideoFrameState GetDecodedPacket(DecodedVideoPacket& packet);
+ /**
+  * @brief Releases a packet obtained from GetDecodedPacket(). Returns false
+  * if the packet holds no surface.
+  */
+ bool ReturnDecodedPacket(const DecodedVideoPacket& packet);
bool EnableVideoHole(bool value);
private:
#include "trackrenderer_capi/iniproperty.h"
#include "trackrenderer_capi/track.h"
#include "trackrenderer_capi/trackrenderer_capi.h"
+#include "trackrenderer_capi/trackrenderer_internal.h"
namespace plusplayer {
const BufferStatus& status);
TrackRendererDecodedVideoPacket ConvertToDecodedVideoPacket(
const DecodedVideoPacket& packet);
+DecodedVideoPacket ConvertToDecodedVideoPacket(
+ const TrackRendererDecodedVideoPacket& packet);
DecodedVideoFrameBufferType ConvertToVideoFrameBufferType(
const TrackRendererDecodedVideoFrameBufferType& type);
-
+TrackRendererGetDecodedVideoFrameState ConverToGetDecodedVideoFrameState(
+ const GetDecodedVideoFrameState state);
} // namespace capi_utils
} // namespace trackrenderer
}
constexpr int kMaxPlane = 4;
-tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstPad* pad) {
+tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstCaps* caps) {
int width = 0;
int height = 0;
GstMapInfo mapinfo = GST_MAP_INFO_INIT;
TRACKRENDERER_ENTER;
+ if (!caps) {
+ TRACKRENDERER_ERROR("invalid caps");
+ return nullptr;
+ }
bool is_mapped = gst_memory_map(mem, &mapinfo, GST_MAP_READWRITE);
if (!is_mapped) {
return nullptr;
}
- auto caps = gstguard::make_guard(gst_pad_get_current_caps(pad));
- if (!caps.get()) {
- TRACKRENDERER_ERROR("fail to get caps");
- return nullptr;
- }
-
- structure = gst_caps_get_structure(caps.get(), 0);
+ structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(structure, "width", &width);
gst_structure_get_int(structure, "height", &height);
string_format = gst_structure_get_string(structure, "format");
DecodedVideoPacket packet;
auto trackrenderer = static_cast<TrackRenderer*>(userdata);
- GstMemory* mem;
+ GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
tbm_surface_h tbm_surf;
- tbm_surface_info_s info;
-
- mem = gst_buffer_peek_memory(buffer, 0);
- memset(&info, 0, sizeof(info));
if (gst_is_tizen_memory(mem)) {
tbm_surf = (tbm_surface_h)gst_tizen_memory_get_surface(mem);
packet.buffer_addr = gst_buffer_ref(buffer);
} else {
- tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, pad);
+ auto caps = gstguard::make_guard(gst_pad_get_current_caps(pad));
+ tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, caps.get());
}
if (!tbm_surf) {
return true;
}
+/**
+ * @brief Pulls the most recently rendered video frame ("last-sample") from
+ * the video sink and fills |packet| with its tbm surface and pts (ms).
+ *
+ * Zero-copy (tizen memory) path: the owning GstBuffer is ref'ed and stored
+ * in packet.buffer_addr. Copy path: a standalone tbm surface is created and
+ * buffer_addr stays null. Either way the caller must release the packet via
+ * ReturnDecodedPacket().
+ *
+ * @param [out] packet : filled on success.
+ * @return kErrorNone on success, kUnknownError otherwise.
+ */
+GetDecodedVideoFrameState TrackRenderer::GetDecodedPacket(
+    DecodedVideoPacket& packet) {
+  std::lock_guard<std::mutex> lock(resource_m_);
+
+  // No frame can be fetched once stopped, after a resource conflict, or
+  // without a pipeline.
+  if (state_ == State::kStopped || state_ == State::kResourceConflicted ||
+      !pipeline_)
+    return GetDecodedVideoFrameState::kUnknownError;
+
+  GstState pipeline_state = GST_STATE_VOID_PENDING;
+  bool ret = pipeline_->GetState(Elements::kPipeline, &pipeline_state, NULL,
+                                 10 * GST_MSECOND);
+  if (!ret) {
+    TRACKRENDERER_ERROR("Failed to get pipeline state");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  // The sink only holds a last-sample while paused or playing.
+  if (pipeline_state != GST_STATE_PAUSED &&
+      pipeline_state != GST_STATE_PLAYING) {
+    TRACKRENDERER_ERROR("Invalid pipeline state, state: %s",
+                        gst_element_state_get_name(pipeline_state));
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstSample* sample = NULL;
+  pipeline_->GetProperty(Elements::kSinkVideo, "last-sample", &sample);
+  if (!sample) {
+    TRACKRENDERER_ERROR("failed to get last sample from sink");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstBuffer* buffer = gst_sample_get_buffer(sample);
+  if (!buffer) {
+    gst_sample_unref(sample);
+    TRACKRENDERER_ERROR("failed to get gst buffer from sample");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
+  tbm_surface_h tbm_surf;
+
+  if (gst_is_tizen_memory(mem)) {
+    // Zero-copy path: keep a reference on the buffer that owns the surface.
+    tbm_surf = static_cast<tbm_surface_h>(gst_tizen_memory_get_surface(mem));
+    packet.buffer_addr = gst_buffer_ref(buffer);
+  } else {
+    // Copy path: build a standalone tbm surface from the raw memory.
+    auto caps =
+        gstguard::make_guard(pipeline_->GetSinkPadCaps(Elements::kSinkVideo));
+    tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, caps.get());
+    packet.buffer_addr = nullptr;
+  }
+
+  if (!tbm_surf) {
+    gst_sample_unref(sample);
+    TRACKRENDERER_ERROR("failed to get tbm surface");
+    if (packet.buffer_addr) {
+      gst_buffer_unref(buffer);
+      // Fix: clear the stale pointer so a caller passing this packet to
+      // ReturnDecodedPacket() cannot unref the buffer a second time.
+      packet.buffer_addr = nullptr;
+    }
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+#ifdef __DEBUG__
+  internal::DumpVideoFrame(tbm_surf);
+#endif
+
+  packet.pts = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));  // ns -> ms
+  // NOTE(review): packet.duration is left at its default (0) — confirm
+  // whether callers need GST_BUFFER_DURATION here.
+  packet.surface_data = tbm_surf;
+  gst_sample_unref(sample);
+
+  return GetDecodedVideoFrameState::kErrorNone;
+}
+
+/**
+ * @brief Releases a decoded packet obtained from GetDecodedPacket().
+ *
+ * A packet that carries buffer_addr (zero-copy/hw path) drops its GstBuffer
+ * reference; otherwise the standalone tbm surface is destroyed.
+ *
+ * @param [in] packet : packet to release.
+ * @return true on release, false when the packet holds no surface.
+ */
+bool TrackRenderer::ReturnDecodedPacket(const DecodedVideoPacket& packet) {
+  if (!packet.surface_data) return false;
+  TRACKRENDERER_DEBUG("packet.surface_data [%p] will be deleted", packet.surface_data);
+
+  if (packet.buffer_addr) {
+    // hw dec
+    gst_buffer_unref(GST_BUFFER_CAST(packet.buffer_addr));
+  } else {
+    tbm_surface_destroy(static_cast<tbm_surface_h>(packet.surface_data));
+  }
+  return true;
+}
+
void TrackRenderer::EnableVideoHole_() {
const gchar* sink_name = nullptr;
pipeline_->GetProperty(Elements::kSinkVideo, "name", &sink_name);
priv->video_decoded_cb = callback;
priv->video_decoded_cb_userdata = userdata;
}
+
+// C API wrapper: fetches a decoded frame from the renderer, converts it to
+// the C packet type, and optionally reports the detailed result state.
+int trackrenderer_get_decoded_video_frame(
+    TrackRendererHandle handle, TrackRendererDecodedVideoPacket* packet,
+    TrackRendererGetDecodedVideoFrameState* state) {
+  auto priv = static_cast<TrackRendererPrivPtr>(handle);
+  if (!priv || !packet) return kFailed;
+
+  plusplayer::trackrenderer::DecodedVideoPacket decoded;
+  const auto result = priv->renderer->GetDecodedPacket(decoded);
+
+  // |state| is optional; only report the detailed state when requested.
+  if (state) {
+    *state = plusplayer::trackrenderer::capi_utils::
+        ConverToGetDecodedVideoFrameState(result);
+  }
+
+  if (result !=
+      plusplayer::trackrenderer::GetDecodedVideoFrameState::kErrorNone)
+    return kFailed;
+
+  *packet = plusplayer::trackrenderer::capi_utils::ConvertToDecodedVideoPacket(
+      decoded);
+  return kSuccess;
+}
+
+// C API wrapper: converts the C packet back to the internal type and hands
+// it to the renderer for release.
+int trackrenderer_return_decoded_video_frame(
+    TrackRendererHandle handle, const TrackRendererDecodedVideoPacket* packet) {
+  auto priv = static_cast<TrackRendererPrivPtr>(handle);
+  if (!priv || !packet) return kFailed;
+
+  const auto decoded =
+      plusplayer::trackrenderer::capi_utils::ConvertToDecodedVideoPacket(
+          *packet);
+  return priv->renderer->ReturnDecodedPacket(decoded) ? kSuccess : kFailed;
+}
TRACKRENDERER_ERROR("Unknown buffern status");
return kTrackRendererBufferStatusUnderrun;
}
+
TrackRendererDecodedVideoPacket ConvertToDecodedVideoPacket(
const DecodedVideoPacket& packet) {
TrackRendererDecodedVideoPacket _packet;
_packet.buffer_addr = packet.buffer_addr;
return _packet;
}
+
+// Converts a C API packet back into the internal DecodedVideoPacket
+// (field-for-field copy; surface_data is carried as an opaque pointer).
+DecodedVideoPacket ConvertToDecodedVideoPacket(
+    const TrackRendererDecodedVideoPacket& packet) {
+  DecodedVideoPacket converted;
+  converted.pts = packet.pts;
+  converted.duration = packet.duration;
+  converted.surface_data = static_cast<void*>(packet.surface_data);
+  converted.buffer_addr = packet.buffer_addr;
+  return converted;
+}
+
DecodedVideoFrameBufferType ConvertToVideoFrameBufferType(
const TrackRendererDecodedVideoFrameBufferType& type) {
switch (type) {
}
}
+/**
+ * @brief Maps the internal GetDecodedVideoFrameState to its C API
+ * counterpart.
+ * NOTE(review): name keeps the existing "ConverTo" spelling (missing 't')
+ * because it is declared and used across files — renaming would break them.
+ */
+TrackRendererGetDecodedVideoFrameState ConverToGetDecodedVideoFrameState(
+    const GetDecodedVideoFrameState state) {
+  switch (state) {
+    case GetDecodedVideoFrameState::kErrorNone:
+      return TrackRendererGetDecodedVideoFrameStateErrorNone;
+    case GetDecodedVideoFrameState::kNoRemainingBufferError:
+      return TrackRendererGetDecodedVideoFrameStateNoRemainingBufferError;
+    case GetDecodedVideoFrameState::kNoFilledBufferError:
+      return TrackRendererGetDecodedVideoFrameStateNoFilledBufferError;
+    case GetDecodedVideoFrameState::kUnknownError:
+      return TrackRendererGetDecodedVideoFrameStateUnknownError;
+    default:
+      TRACKRENDERER_ERROR("wrong state");
+      // Fix: an unrecognized state must not be reported as success
+      // (previously returned ...ErrorNone).
+      return TrackRendererGetDecodedVideoFrameStateUnknownError;
+  }
+}
+
} // namespace capi_utils
} // namespace trackrenderer