[M120 Migration][WebRTC] TBM mapping buffer support in WebRTC video sink 18/309218/2
author    peng.yin <peng8.yin@samsung.com>
Tue, 6 Jun 2023 06:47:24 +0000 (14:47 +0800)
committer peng yin <peng8.yin@samsung.com>
Tue, 9 Apr 2024 01:12:28 +0000 (01:12 +0000)
A video frame with the PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER format is not
handled correctly in the WebRTC video sink pipeline. This format is
effectively the same as NV12, so apply the same logic used for NV12 to
handle it.

Change-Id: Id54be9723f86910c397aea8ecc9bfece25070fe0
Signed-off-by: peng.yin <peng8.yin@samsung.com>
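
The patch applies the same treatment at every NV12 format check on the sink
path. Conceptually, the repeated condition is a single predicate; the sketch
below is illustrative only (the helper name IsNV12Compatible is not part of
the patch) and assumes the Chromium media headers plus the Tizen-specific
pixel format enumerator used by this change:

    #include "media/base/video_frame.h"

    // A frame is NV12-compatible if it is plain NV12 or, on Tizen, a TBM
    // inter-process buffer, which carries NV12 data even though it does not
    // report itself as mappable.
    bool IsNV12Compatible(const media::VideoFrame& frame) {
      if (frame.format() == media::PIXEL_FORMAT_NV12)
        return true;
    #if defined(TIZEN_TBM_SUPPORT)
      if (frame.format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER)
        return true;
    #endif
      return false;
    }
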
media/base/video_util.cc
third_party/blink/renderer/platform/webrtc/convert_to_webrtc_video_frame_buffer.cc

media/base/video_util.cc
index aa61c52..d21b1d2 100644 (file)
@@ -811,7 +811,12 @@ EncoderStatus ConvertAndScaleFrame(const VideoFrame& src_frame,
                VideoPixelFormatToString(src_frame.format()), "dst_format",
                VideoPixelFormatToString(dst_frame.format()));
   constexpr auto kDefaultFiltering = libyuv::kFilterBox;
-  if (!src_frame.IsMappable() || !dst_frame.IsMappable())
+  if ((!src_frame.IsMappable()
+#if defined(TIZEN_TBM_SUPPORT)
+       && src_frame.format() != PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER
+#endif
+       ) ||
+      !dst_frame.IsMappable())
     return EncoderStatus::Codes::kUnsupportedFrameFormat;
 
   if ((dst_frame.format() == PIXEL_FORMAT_I420A ||
@@ -952,6 +957,9 @@ EncoderStatus ConvertAndScaleFrame(const VideoFrame& src_frame,
   if ((dst_frame.format() == PIXEL_FORMAT_NV12 ||
        dst_frame.format() == PIXEL_FORMAT_NV12A) &&
       (src_frame.format() == PIXEL_FORMAT_NV12 ||
+#if defined(TIZEN_TBM_SUPPORT)
+       src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
        src_frame.format() == PIXEL_FORMAT_NV12A)) {
     if (dst_frame.format() == PIXEL_FORMAT_NV12A) {
       libyuv::ScalePlane(
@@ -981,7 +989,11 @@ EncoderStatus ConvertAndScaleFrame(const VideoFrame& src_frame,
   }
 
   if (dst_frame.format() == PIXEL_FORMAT_I420 &&
-      src_frame.format() == PIXEL_FORMAT_NV12) {
+      (
+#if defined(TIZEN_TBM_SUPPORT)
+          src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+          src_frame.format() == PIXEL_FORMAT_NV12)) {
     if (src_frame.visible_rect().size() == dst_frame.visible_rect().size()) {
       // Both frames have the same size, only NV12-to-I420 conversion is
       // required.
third_party/blink/renderer/platform/webrtc/convert_to_webrtc_video_frame_buffer.cc
index 770f54b..7fc4d98 100644 (file)
@@ -181,6 +181,11 @@ rtc::scoped_refptr<webrtc::VideoFrameBuffer> MakeFrameAdapter(
     case media::PIXEL_FORMAT_NV12:
       return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
           new rtc::RefCountedObject<NV12FrameAdapter>(std::move(video_frame)));
+#if defined(TIZEN_TBM_SUPPORT)
+    case media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER:
+      return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
+          new rtc::RefCountedObject<NV12FrameAdapter>(std::move(video_frame)));
+#endif
     default:
       NOTREACHED();
       return nullptr;
@@ -195,7 +200,9 @@ scoped_refptr<media::VideoFrame> MakeScaledVideoFrame(
   media::VideoPixelFormat dst_format = media::PIXEL_FORMAT_UNKNOWN;
   bool tmp_buffer_needed = false;
   if (source_is_nv12) {
+#if !defined(TIZEN_TBM_SUPPORT)
     DCHECK_EQ(source_frame->format(), media::PIXEL_FORMAT_NV12);
+#endif
     dst_format = media::PIXEL_FORMAT_NV12;
   } else {
     // ARGB pixel format may be produced by readback of texture backed frames.
@@ -253,6 +260,10 @@ scoped_refptr<media::VideoFrame> MaybeConvertAndScaleFrame(
              source_frame->format() == media::PIXEL_FORMAT_ARGB ||
              source_frame->format() == media::PIXEL_FORMAT_XRGB ||
              source_frame->format() == media::PIXEL_FORMAT_ABGR ||
+#if defined(TIZEN_TBM_SUPPORT)
+             source_frame->format() ==
+                 media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
              source_frame->format() == media::PIXEL_FORMAT_XBGR);
   RTC_DCHECK(shared_resources);
 
@@ -260,6 +271,9 @@ scoped_refptr<media::VideoFrame> MaybeConvertAndScaleFrame(
       source_frame->format() == media::PIXEL_FORMAT_I420 ||
       source_frame->format() == media::PIXEL_FORMAT_I420A;
   const bool source_is_nv12 =
+#if defined(TIZEN_TBM_SUPPORT)
+      source_frame->format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
       source_frame->format() == media::PIXEL_FORMAT_NV12;
   const bool no_scaling_needed =
       source_frame->natural_size() == source_frame->visible_rect().size();
@@ -288,7 +302,14 @@ bool CanConvertToWebRtcVideoFrameBuffer(const media::VideoFrame* frame) {
 #if defined(TIZEN_VIDEO_HOLE)
          frame->storage_type() == media::VideoFrame::STORAGE_HOLE ||
 #endif
-         frame->format() == media::PIXEL_FORMAT_ENCODED || frame->HasTextures();
+#if BUILDFLAG(IS_TIZEN)
+         frame->format() == media::PIXEL_FORMAT_ENCODED ||
+#if defined(TIZEN_TBM_SUPPORT)
+         frame->format() ==
+             media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+#endif
+         frame->HasTextures();
 }
 
 // static
@@ -324,6 +345,18 @@ rtc::scoped_refptr<webrtc::VideoFrameBuffer> ConvertToWebRtcVideoFrameBuffer(
           video_frame->timestamp()));
     }
     return MakeFrameAdapter(std::move(converted_frame));
+#if defined(TIZEN_TBM_SUPPORT)
+  } else if (video_frame->format() ==
+             media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER) {
+    // The video frame with a TBM buffer type is native; handle it here.
+    scoped_refptr<media::VideoFrame> scaled_frame =
+        MaybeConvertAndScaleFrame(video_frame, shared_resources);
+    if (!scaled_frame) {
+      DLOG(ERROR) << "Make scaled frame failed.";
+      return MakeFrameAdapter(std::move(video_frame));
+    }
+    return MakeFrameAdapter(std::move(scaled_frame));
+#endif
   } else if (video_frame->HasTextures()) {
     auto converted_frame =
         shared_resources