Support video capture during playback 70/297370/3
authorEunhye Choi <eunhae1.choi@samsung.com>
Thu, 17 Aug 2023 10:46:01 +0000 (19:46 +0900)
committerEunhye Choi <eunhae1.choi@samsung.com>
Fri, 18 Aug 2023 06:06:10 +0000 (15:06 +0900)
[Version] 0.0.29
[Issue Type] Add features

- Add GetDecodedPacket
- Add ReturnDecodedPacket
- The interfaces are synchronized with VD

Change-Id: I1e482c3899727d2212dfaa8aaa6694a260d42f1c

include/trackrenderer_capi/trackrenderer_internal.h
packaging/libtrackrenderer.spec
src/include_internal/trackrenderer/core/buffer.h
src/include_internal/trackrenderer/trackrenderer.h
src/include_internal/trackrenderer/trackrenderer_capi_utils.h
src/trackrenderer.cpp
src/trackrenderer_capi.cpp
src/trackrenderer_capi_utils.cpp

index 54d70ce0de01efc3aca12fca00ee812d4c427aaf..e986a84d568ef2d477d160d5a16be326faf297fb 100644 (file)
 extern "C" {
 #endif
 
+/**
+ * @brief Enumeration of the result states of getting a decoded packet
+ */
+enum TrackRendererGetDecodedVideoFrameState {
+  TrackRendererGetDecodedVideoFrameStateErrorNone,
+  TrackRendererGetDecodedVideoFrameStateNoRemainingBufferError,
+  TrackRendererGetDecodedVideoFrameStateNoFilledBufferError,
+  TrackRendererGetDecodedVideoFrameStateUnknownError,
+};
+
 typedef void* TrackRendererHandle;
 
 /**
@@ -65,6 +75,39 @@ int trackrenderer_set_display_ecore_wl2_window(TrackRendererHandle handle,
                                                TrackRendererDisplayType type,
                                                void* ecore_wl2_window, int x,
                                                int y, int w, int h);
+/**
+ * @brief        Get a decoded video frame. Before calling this, the user must
+ *               enable manual-copy mode via trackrenderer_set_video_frame_buffer_type(
+ *               handle, kTrackRendererDecodedVideoFrameBufferManualCopy).
+ *
+ * @param        [in] handle : trackrenderer handle ptr.
+ * @param        [out] packet : decoded video packet.
+ * @param        [out] state : result state of the operation. May be NULL.
+ * @return       Return 0 if trackrenderer gets the decoded video frame
+ *               correctly. Otherwise -1.
+ * @pre          None
+ * @post         None
+ * @see          None
+ */
+int trackrenderer_get_decoded_video_frame(
+    TrackRendererHandle handle, TrackRendererDecodedVideoPacket* packet,
+    TrackRendererGetDecodedVideoFrameState* state);
+
+/**
+ * @brief        Return a decoded video packet that was acquired via
+ *               trackrenderer_get_decoded_video_frame().
+ *
+ * @param        [in] handle : trackrenderer handle ptr.
+ * @param        [in] packet : decoded video packet the user wants to return.
+ * @return       Return 0 if trackrenderer returns the packet correctly.
+ *               Otherwise -1.
+ * @pre          None
+ * @post         None
+ * @see          None
+ */
+int trackrenderer_return_decoded_video_frame(
+    TrackRendererHandle handle, const TrackRendererDecodedVideoPacket* packet);
+
 #ifdef __cplusplus
 }
 #endif
index 224d4ce66448d3faf2ad382e590f0493db1fe8a6..d346aae98017bf88435a76f1e381514cd0e6e908 100644 (file)
@@ -1,6 +1,6 @@
 Name:       libtrackrenderer
 Summary:    new multimedia streaming player trackrenderer
-Version:    0.0.28
+Version:    0.0.29
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
index 12531894b070041b940d78ba6e15c70feae5ad19..1cde0600752fa232f0bcfdbf5256efd50cd6dbfb 100644 (file)
@@ -22,12 +22,22 @@ enum class BufferStatus {
   kOverrun    // buffer everrun
 };
 
+/**
+ * @brief Enumeration of the result states of getting a decoded packet
+ */
+enum class GetDecodedVideoFrameState {
+  kErrorNone,
+  kNoRemainingBufferError,
+  kNoFilledBufferError,
+  kUnknownError,
+};
+
 enum class DecodedVideoFrameBufferType { kNone, kCopy, kReference };
 
 struct DecodedVideoPacket {
   uint64_t pts = 0;
   uint64_t duration = 0;
-  tbm_surface_h surface_data = nullptr;  // tbm_surface
+  void *surface_data = nullptr;  // tbm_surface
   void *buffer_addr = nullptr;
 };
 
index de899adef030228663aea3a35ed9513c62fa3d7f..82cdb60d09397f803b3ed30a4910671148f8b22c 100644 (file)
@@ -138,6 +138,8 @@ class TrackRenderer : public ResourceConflictListener,
   void SetVideoFrameBufferType(DecodedVideoFrameBufferType type) {
     decoded_buffer_type_ = type;
   }
+  GetDecodedVideoFrameState GetDecodedPacket(DecodedVideoPacket& packet);
+  bool ReturnDecodedPacket(const DecodedVideoPacket& packet);
   bool EnableVideoHole(bool value);
 
  private:
index c40b1661201116009950414de285ee87ee3a9600..b000571effc595d0c3ac5268e1624271446d73c5 100644 (file)
@@ -24,6 +24,7 @@
 #include "trackrenderer_capi/iniproperty.h"
 #include "trackrenderer_capi/track.h"
 #include "trackrenderer_capi/trackrenderer_capi.h"
+#include "trackrenderer_capi/trackrenderer_internal.h"
 
 namespace plusplayer {
 
@@ -77,9 +78,12 @@ TrackRendererBufferStatus ConvertToTrackRendererBufferStatus(
     const BufferStatus& status);
 TrackRendererDecodedVideoPacket ConvertToDecodedVideoPacket(
     const DecodedVideoPacket& packet);
+DecodedVideoPacket ConvertToDecodedVideoPacket(
+    const TrackRendererDecodedVideoPacket& packet);
 DecodedVideoFrameBufferType ConvertToVideoFrameBufferType(
     const TrackRendererDecodedVideoFrameBufferType& type);
-
+TrackRendererGetDecodedVideoFrameState ConverToGetDecodedVideoFrameState(
+    const GetDecodedVideoFrameState state);
 }  // namespace capi_utils
 
 }  // namespace trackrenderer
index 0c85e5e7314cecd916e0fb5cd9b8eef3bfc27150..f0823a111c03968948fe8aae7565fbdd350fa367 100644 (file)
@@ -232,7 +232,7 @@ uint32_t ConvertToTbmFormat(const gchar *data) {
 }
 
 constexpr int kMaxPlane = 4;
-tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstPad* pad) {
+tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstCaps* caps) {
 
   int width = 0;
   int height = 0;
@@ -242,6 +242,10 @@ tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstPad* pad) {
   GstMapInfo mapinfo = GST_MAP_INFO_INIT;
 
   TRACKRENDERER_ENTER;
+  if (!caps) {
+    TRACKRENDERER_ERROR("invalid caps");
+    return nullptr;
+  }
 
   bool is_mapped = gst_memory_map(mem, &mapinfo, GST_MAP_READWRITE);
   if (!is_mapped) {
@@ -257,13 +261,7 @@ tbm_surface_h CreateTbmSurfaceWithBuffer(GstMemory* mem, GstPad* pad) {
     return nullptr;
   }
 
-  auto caps = gstguard::make_guard(gst_pad_get_current_caps(pad));
-  if (!caps.get()) {
-    TRACKRENDERER_ERROR("fail to get caps");
-    return nullptr;
-  }
-
-  structure = gst_caps_get_structure(caps.get(), 0);
+  structure = gst_caps_get_structure(caps, 0);
   gst_structure_get_int(structure, "width", &width);
   gst_structure_get_int(structure, "height", &height);
   string_format = gst_structure_get_string(structure, "format");
@@ -1823,18 +1821,15 @@ void TrackRenderer::GstDecodedVideoBufferCb_(GstElement* element,
 
   DecodedVideoPacket packet;
   auto trackrenderer = static_cast<TrackRenderer*>(userdata);
-  GstMemory* mem;
+  GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
   tbm_surface_h tbm_surf;
-  tbm_surface_info_s info;
-
-  mem = gst_buffer_peek_memory(buffer, 0);
-  memset(&info, 0, sizeof(info));
 
   if (gst_is_tizen_memory(mem)) {
     tbm_surf = (tbm_surface_h)gst_tizen_memory_get_surface(mem);
     packet.buffer_addr = gst_buffer_ref(buffer);
   } else {
-    tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, pad);
+    auto caps = gstguard::make_guard(gst_pad_get_current_caps(pad));
+    tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, caps.get());
   }
 
   if (!tbm_surf) {
@@ -2888,6 +2883,90 @@ bool TrackRenderer::EnableVideoHole(bool value) {
   return true;
 }
 
+GetDecodedVideoFrameState TrackRenderer::GetDecodedPacket(
+    DecodedVideoPacket& packet) {
+  std::lock_guard<std::mutex> lock(resource_m_);
+
+  if (state_ == State::kStopped)
+    return GetDecodedVideoFrameState::kUnknownError;
+
+  if (state_ == State::kResourceConflicted)
+    return GetDecodedVideoFrameState::kUnknownError;
+
+  if (!pipeline_)
+    return GetDecodedVideoFrameState::kUnknownError;
+
+  GstState pipeline_state = GST_STATE_VOID_PENDING;
+  bool ret = pipeline_->GetState(Elements::kPipeline, &pipeline_state, NULL,
+                            10 * GST_MSECOND);
+  if (!ret) {
+    TRACKRENDERER_ERROR("Failed to get pipeline state");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  if (!(pipeline_state == GST_STATE_PAUSED) && !(pipeline_state == GST_STATE_PLAYING)) {
+    TRACKRENDERER_ERROR("Invalid pipeline state, state: %s",
+            gst_element_state_get_name(pipeline_state));
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstSample* sample = NULL;
+  pipeline_->GetProperty(Elements::kSinkVideo, "last-sample", &sample);
+  if (!sample) {
+    TRACKRENDERER_ERROR("failed to get last sample from sink");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstBuffer* buffer = gst_sample_get_buffer(sample);
+  if (!buffer) {
+    gst_sample_unref(sample);
+    TRACKRENDERER_ERROR("failed to get gst buffer from sample");
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+  GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
+  tbm_surface_h tbm_surf;
+
+  if (gst_is_tizen_memory(mem)) {
+    tbm_surf = static_cast<tbm_surface_h>(gst_tizen_memory_get_surface(mem));
+    packet.buffer_addr = gst_buffer_ref(buffer);
+  } else {
+    auto caps = gstguard::make_guard(pipeline_->GetSinkPadCaps(Elements::kSinkVideo));
+    tbm_surf = internal::CreateTbmSurfaceWithBuffer(mem, caps.get());
+    packet.buffer_addr = nullptr;
+  }
+
+  if (!tbm_surf) {
+    gst_sample_unref(sample);
+    TRACKRENDERER_ERROR("failed to get tbm surface");
+    if (packet.buffer_addr)
+      gst_buffer_unref(buffer);
+    return GetDecodedVideoFrameState::kUnknownError;
+  }
+
+#ifdef __DEBUG__
+  internal::DumpVideoFrame(tbm_surf);
+#endif
+
+  packet.pts = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));  // ns -> ms
+  packet.surface_data = tbm_surf;
+  gst_sample_unref(sample);
+
+  return GetDecodedVideoFrameState::kErrorNone;
+}
+
+bool TrackRenderer::ReturnDecodedPacket(const DecodedVideoPacket& packet) {
+  if (packet.surface_data == nullptr) return false;
+  TRACKRENDERER_DEBUG("packet.surface_data [%p] will be deleted", packet.surface_data);
+
+  if (packet.buffer_addr) // hw dec
+    gst_buffer_unref(GST_BUFFER_CAST(packet.buffer_addr));
+  else
+    tbm_surface_destroy(static_cast<tbm_surface_h>(packet.surface_data));
+
+  return true;
+}
+
 void TrackRenderer::EnableVideoHole_() {
   const gchar* sink_name = nullptr;
   pipeline_->GetProperty(Elements::kSinkVideo, "name", &sink_name);
index f0e9d67779879a66c54dd7e7ddf58984cc03f3f3..914d7b440256d239661771f89d8661abb042a77a 100644 (file)
@@ -1192,3 +1192,42 @@ void trackrenderer_set_media_packet_video_decoded_cb(
   priv->video_decoded_cb = callback;
   priv->video_decoded_cb_userdata = userdata;
 }
+
+int trackrenderer_get_decoded_video_frame(
+    TrackRendererHandle handle, TrackRendererDecodedVideoPacket* packet,
+    TrackRendererGetDecodedVideoFrameState* state) {
+  auto priv = static_cast<TrackRendererPrivPtr>(handle);
+  if (!priv || !packet) return kFailed;
+  plusplayer::trackrenderer::DecodedVideoPacket pkt;
+  auto ret_state = priv->renderer->GetDecodedPacket(pkt);
+  auto ret = kFailed;
+
+  if (ret_state ==
+      plusplayer::trackrenderer::GetDecodedVideoFrameState::kErrorNone) {
+    *packet =
+        plusplayer::trackrenderer::capi_utils::ConvertToDecodedVideoPacket(pkt);
+    ret = kSuccess;
+  }
+
+  if (state) {
+    *state = plusplayer::trackrenderer::capi_utils::
+        ConverToGetDecodedVideoFrameState(ret_state);
+  }
+
+  return ret;
+}
+
+int trackrenderer_return_decoded_video_frame(
+    TrackRendererHandle handle, const TrackRendererDecodedVideoPacket* packet) {
+  auto priv = static_cast<TrackRendererPrivPtr>(handle);
+  if (!priv || !packet) return kFailed;
+
+  auto pkt = plusplayer::trackrenderer::capi_utils::ConvertToDecodedVideoPacket(
+      *packet);
+
+  if (priv->renderer->ReturnDecodedPacket(pkt) == false) {
+    return kFailed;
+  }
+
+  return kSuccess;
+}
index ac341d4c22f2b9db91ada019311b3170a2b18632..3975c5ac0a159e093ef4a49be172b04ece348622 100644 (file)
@@ -720,6 +720,7 @@ TrackRendererBufferStatus ConvertToTrackRendererBufferStatus(
   TRACKRENDERER_ERROR("Unknown buffern status");
   return kTrackRendererBufferStatusUnderrun;
 }
+
 TrackRendererDecodedVideoPacket ConvertToDecodedVideoPacket(
     const DecodedVideoPacket& packet) {
   TrackRendererDecodedVideoPacket _packet;
@@ -729,6 +730,17 @@ TrackRendererDecodedVideoPacket ConvertToDecodedVideoPacket(
   _packet.buffer_addr = packet.buffer_addr;
   return _packet;
 }
+
+DecodedVideoPacket ConvertToDecodedVideoPacket(
+    const TrackRendererDecodedVideoPacket& packet) {
+  DecodedVideoPacket _packet;
+  _packet.pts = packet.pts;
+  _packet.duration = packet.duration;
+  _packet.surface_data = static_cast<void*>(packet.surface_data);
+  _packet.buffer_addr = packet.buffer_addr;
+  return _packet;
+}
+
 DecodedVideoFrameBufferType ConvertToVideoFrameBufferType(
     const TrackRendererDecodedVideoFrameBufferType& type) {
   switch (type) {
@@ -747,6 +759,28 @@ DecodedVideoFrameBufferType ConvertToVideoFrameBufferType(
   }
 }
 
+TrackRendererGetDecodedVideoFrameState ConverToGetDecodedVideoFrameState(
+    const GetDecodedVideoFrameState state) {
+  switch (state) {
+    case GetDecodedVideoFrameState::kErrorNone: {
+      return TrackRendererGetDecodedVideoFrameStateErrorNone;
+    }
+    case GetDecodedVideoFrameState::kNoRemainingBufferError: {
+      return TrackRendererGetDecodedVideoFrameStateNoRemainingBufferError;
+    }
+    case GetDecodedVideoFrameState::kNoFilledBufferError: {
+      return TrackRendererGetDecodedVideoFrameStateNoFilledBufferError;
+    }
+    case GetDecodedVideoFrameState::kUnknownError: {
+      return TrackRendererGetDecodedVideoFrameStateUnknownError;
+    }
+    default: {
+      TRACKRENDERER_ERROR("wrong state");
+      return TrackRendererGetDecodedVideoFrameStateErrorNone;
+    }
+  }
+}
+
 }  // namespace capi_utils
 
 }  // namespace trackrenderer