Support insertable streams with TBM 09/294909/5
author    zhishun.zhou <zhishun.zhou@samsung.com>
Wed, 28 Jun 2023 12:47:42 +0000 (20:47 +0800)
committer    zhishun.zhou <zhishun.zhou@samsung.com>
Mon, 24 Jul 2023 05:46:36 +0000 (13:46 +0800)
1. When the insertable-streams pipeline starts, enable the TBM callback
2. Support VideoFrame.copyTo() in the TBM case
3. Support CanvasRenderingContext2D.drawImage() with TBM

Refer to:
https://review.tizen.org/gerrit/#/c/293079/

Change-Id: Ia8d9faf8d5e90603175f7d1edc2a3a582909d87c
Signed-off-by: zhishun.zhou <zhishun.zhou@samsung.com>
media/base/video_frame.cc
media/base/video_frame.h
media/base/video_util.cc
media/renderers/paint_canvas_video_renderer.cc
third_party/blink/renderer/modules/breakout_box/media_stream_video_track_underlying_source.cc
third_party/blink/renderer/modules/webcodecs/video_frame.cc
third_party/blink/renderer/platform/webrtc/convert_to_webrtc_video_frame_buffer.cc
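
Taken together, the changes below let a TBM-backed capture frame flow through the insertable-streams path: the breakout-box source re-wraps the incoming hole frame, and VideoFrame then exposes the TBM planes to copyTo() and the canvas renderer. A minimal sketch of that wrapping step, assuming the media/base/video_frame.h additions from this change (the surrounding capture pipeline is not shown):

    #include <utility>

    #include "media/base/video_frame.h"

    // Sketch only: |frame| is assumed to be a
    // PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER frame with STORAGE_HOLE storage,
    // as delivered by the Tizen capture pipeline.
    scoped_refptr<media::VideoFrame> WrapForInsertableStreams(
        scoped_refptr<media::VideoFrame> frame) {
      if (frame->format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER &&
          frame->storage_type() == media::VideoFrame::STORAGE_HOLE) {
        // The wrapper uses STORAGE_TBM_SURFACE and resolves GetTbmBuffer()
        // through the wrapped frame, so it stays usable for copyTo()/drawImage().
        return media::VideoFrame::WrapTBMInterProcessFrame(std::move(frame));
      }
      return frame;
    }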

diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index e21b8bcfac5ebee29ba2ccdde6c6f1552878d2bd..7f92c2224b94cae82c779931862dd88075db5f8a 100644
@@ -139,6 +139,9 @@ bool VideoFrame::IsStorageTypeMappable(VideoFrame::StorageType storage_type) {
       (storage_type == VideoFrame::STORAGE_UNOWNED_MEMORY ||
        storage_type == VideoFrame::STORAGE_OWNED_MEMORY ||
        storage_type == VideoFrame::STORAGE_SHMEM ||
+#if defined(TIZEN_TBM_SUPPORT)
+       storage_type == VideoFrame::STORAGE_TBM_SURFACE ||
+#endif
        storage_type == VideoFrame::STORAGE_MOJO_SHARED_BUFFER);
 }
 
@@ -452,6 +455,50 @@ scoped_refptr<VideoFrame> VideoFrame::WrapTBMInterProcessBuffer(
   frame->buffer_handle_ = handle;
   return frame;
 }
+
+scoped_refptr<VideoFrame> VideoFrame::WrapTBMInterProcessBuffer(
+    const gfx::Size& size,
+    base::TimeDelta timestamp,
+    gfx::TbmBufferHandle handle,
+    const StorageType storage) {
+  const VideoPixelFormat format = PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER;
+
+  const gfx::Rect visible_rect = gfx::Rect(size);
+  if (!IsValidConfig(format, storage, size, visible_rect, size)) {
+    DLOG(ERROR) << __FUNCTION__ << " WrapTBMInterProcessBuffer Invalid config."
+                << ConfigToString(format, storage, size, visible_rect, size);
+    return nullptr;
+  }
+
+  auto layout = VideoFrameLayout::CreateWithStrides(
+      format, size, {handle.strides[0], handle.strides[1]});
+  if (!layout) {
+    DLOG(ERROR) << "Invalid layout.";
+    return nullptr;
+  }
+
+  scoped_refptr<VideoFrame> frame(
+      new VideoFrame(*layout, storage, gfx::Rect(size), size, timestamp));
+  frame->buffer_handle_ = handle;
+  return frame;
+}
+
+scoped_refptr<VideoFrame> VideoFrame::WrapTBMInterProcessFrame(
+    scoped_refptr<VideoFrame> frame) {
+  if (frame->format() != PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER) {
+    DLOG(ERROR) << "Unsupported pixel format: "
+                << VideoPixelFormatToString(frame->format());
+    return nullptr;
+  }
+
+  scoped_refptr<VideoFrame> wrapping_frame(new VideoFrame(
+      frame->layout(), StorageType::STORAGE_TBM_SURFACE, frame->visible_rect(),
+      frame->natural_size(), frame->timestamp()));
+
+  wrapping_frame->metadata().MergeMetadataFrom(frame->metadata());
+  wrapping_frame->wrapped_frame_ = std::move(frame);
+  return wrapping_frame;
+}
 #endif
 #endif
 
@@ -1045,12 +1092,6 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
     wrapping_frame->BackWithSharedMemory(frame->shm_region_);
   }
 
-#if defined(TIZEN_TBM_SUPPORT)
-  if (frame->format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
-      frame->format() == PIXEL_FORMAT_TBM_SURFACE) {
-    wrapping_frame->buffer_handle_ = frame->buffer_handle_;
-  }
-#endif
 
   wrapping_frame->wrapped_frame_ = std::move(frame);
   return wrapping_frame;
@@ -1389,6 +1430,66 @@ int VideoFrame::columns(size_t plane) const {
   return Columns(plane, format(), coded_size().width());
 }
 
+#if defined(TIZEN_TBM_SUPPORT)
+void VideoFrame::MapTbmMemory(size_t plane) const {
+  const VideoPixelFormat fmt = format();
+  if (PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER != fmt)
+    return;
+
+  // Calculate an offset that is properly aligned for all planes.
+  const gfx::Size alignment = CommonAlignment(format());
+  const gfx::Point offset(
+      base::bits::AlignDown(visible_rect_.x(), alignment.width()),
+      base::bits::AlignDown(visible_rect_.y(), alignment.height()));
+
+  const gfx::Size subsample = SampleSize(format(), plane);
+  DCHECK(offset.x() % subsample.width() == 0);
+  DCHECK(offset.y() % subsample.height() == 0);
+
+  if (!bufmgr_) {
+    bufmgr_ = tbm_bufmgr_init(-1);
+  }
+  {
+    base::AutoLock autolock(tbm_map_lock_);
+    if (!vp_[plane]) {
+      const auto& buffer_handle = GetTbmBuffer();
+      bo_[plane] = tbm_bo_import(bufmgr_, buffer_handle.key[plane]);
+      bo_handle_[plane] =
+          tbm_bo_map(bo_[plane], TBM_DEVICE_CPU, TBM_OPTION_READ);
+
+      vp_[plane] =
+          (const uint8_t*)bo_handle_[plane].ptr +
+          stride(plane) * (offset.y() / subsample.height()) +  // Row offset.
+          BytesPerElement(format(), plane) *                   // Column offset.
+              (offset.x() / subsample.width());
+    }
+  }
+}
+#endif
+
+const uint8_t* VideoFrame::data(size_t plane) const {
+  DCHECK(IsValidPlane(format(), plane));
+  DCHECK(IsMappable());
+#if defined(TIZEN_TBM_SUPPORT)
+  const VideoPixelFormat fmt = format();
+  if (PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER == fmt) {
+    if (!vp_[plane]) {
+      MapTbmMemory(plane);
+    }
+    return static_cast<uint8_t*>(bo_handle_[plane].ptr);
+  } else {
+#endif
+    return data_[plane];
+#if defined(TIZEN_TBM_SUPPORT)
+  }
+#endif
+}
+
+uint8_t* VideoFrame::data(size_t plane) {
+  return const_cast<uint8_t*>(
+      static_cast<const VideoFrame*>(this)->data(plane));
+}
+
 const uint8_t* VideoFrame::visible_data(size_t plane) const {
   DCHECK(IsValidPlane(format(), plane));
   DCHECK(IsMappable());
@@ -1405,22 +1506,8 @@ const uint8_t* VideoFrame::visible_data(size_t plane) const {
 #if defined(TIZEN_TBM_SUPPORT)
   const VideoPixelFormat fmt = format();
   if (PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER == fmt) {
-    if (!bufmgr_) {
-      bufmgr_ = tbm_bufmgr_init(-1);
-    }
-    {
-      base::AutoLock autolock(tbm_map_lock_);
-      if (!vp_[plane]) {
-        bo_[plane] = tbm_bo_import(bufmgr_, buffer_handle_.key[plane]);
-        bo_handle_[plane] =
-            tbm_bo_map(bo_[plane], TBM_DEVICE_CPU, TBM_OPTION_READ);
-
-        vp_[plane] =
-            (const uint8_t*)bo_handle_[plane].ptr +
-            stride(plane) * (offset.y() / subsample.height()) +  // Row offset.
-            BytesPerElement(format(), plane) *  // Column offset.
-                (offset.x() / subsample.width());
-      }
+    if (!vp_[plane]) {
+      MapTbmMemory(plane);
     }
     return vp_[plane];
   }
@@ -1448,6 +1535,20 @@ const gpu::MailboxHolder& VideoFrame::mailbox_holder(
                         : mailbox_holders_[texture_index];
 }
 
+void VideoFrame::write_mailbox_holder(size_t texture_index,
+                                      gpu::MailboxHolder holder) {
+#if defined(TIZEN_TBM_SUPPORT)
+  DCHECK(IsTBMBackend() || HasTextures());
+#else
+  DCHECK(HasTextures());
+#endif
+  DCHECK(IsValidPlane(format(), texture_index));
+  if (wrapped_frame_)
+    wrapped_frame_->write_mailbox_holder(texture_index, holder);
+  else
+    mailbox_holders_[texture_index] = holder;
+}
+
 #if defined(OS_LINUX) || defined(OS_CHROMEOS)
 const std::vector<base::ScopedFD>& VideoFrame::DmabufFds() const {
   DCHECK_EQ(storage_type_, STORAGE_DMABUFS);
@@ -1871,15 +1972,25 @@ unsigned VideoFrame::CreateTbmTextureIfNeeded(gpu::gles2::GLES2Interface* gl) {
 #if defined(USE_TTRACE)
   TTRACE(TTRACE_TAG_WEB, "VideoFrameCompositor::CreateTbmTextureIfNeeded");
 #endif
+
   base::AutoLock autolock(tbm_lock_);
-  if (!gl || texture_id_ || !buffer_handle_.tbm_surface)
-    return (gl_ == gl) ? texture_id_ : 0;
+  auto& tex = GetTbmTexture();
+  auto& gl_ctx = GetGl();
+  if (!gl || tex)
+    return (gl_ctx == gl) ? tex : 0;
 
-  gl_ = gl;
   unsigned image = gl->CreateTizenImageCHROMIUM(
-      buffer_handle_, visible_rect().width(), visible_rect().height(), GL_RGBA);
-  gl->GenTextures(1, &texture_id_);
-  gl->BindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id_);
+      GetTbmBuffer(), visible_rect().width(), visible_rect().height(), GL_RGBA);
+  if (!image) {
+    LOG(ERROR) << "CreateTizenImageCHROMIUM failed, tex = " << tex;
+    return false;
+  }
+  auto& img_id = GetImageID();
+  img_id = image;
+  gl_ctx = gl;
+
+  gl->GenTextures(1, &tex);
+  gl->BindTexture(GL_TEXTURE_EXTERNAL_OES, tex);
   gl->TexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
   gl->TexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
   gl->TexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S,
@@ -1890,22 +2001,23 @@ unsigned VideoFrame::CreateTbmTextureIfNeeded(gpu::gles2::GLES2Interface* gl) {
 
 #if defined(_DEBUG_TBM_VIDEO_RENDERING) && _DEBUG_TBM_VIDEO_RENDERING
   LOG(INFO) << "VideoFrame > ++CreateTbmTextureIfNeeded >"
-            << ", tbm:" << buffer_handle_.tbm_surface << ", img:" << image
-            << ", txt:" << texture_id_;
+            << ", tbm:" << GetTbmBuffer().tbm_surface << ", img:" << image
+            << ", txt:" << tex;
 #endif
 
   gpu::Mailbox mailbox;
-  gl->ProduceTextureDirectCHROMIUM(texture_id_, mailbox.name);
+  gl->ProduceTextureDirectCHROMIUM(tex, mailbox.name);
   gl->ShallowFlushCHROMIUM();
   gpu::SyncToken sync_token;
   gl->GenSyncTokenCHROMIUM(sync_token.GetData());
-  mailbox_holders_[kARGBPlane] =
-      gpu::MailboxHolder(mailbox, sync_token, GL_TEXTURE_EXTERNAL_OES);
+
+  write_mailbox_holder(kARGBPlane, gpu::MailboxHolder(mailbox, sync_token,
+                                                      GL_TEXTURE_EXTERNAL_OES));
 
   SetReleaseMailboxCB(base::BindOnce(ReleaseTbmTexture,
                                      base::ThreadTaskRunnerHandle::Get(), gl,
-                                     texture_id_, image, context_provider_));
-  return texture_id_;
+                                     tex, image, context_provider_));
+  return tex;
 }
 
 #if defined(SAMSUNG_ELEMENTARY_MEDIA_STREAM_SOURCE)
@@ -1921,21 +2033,28 @@ bool VideoFrame::CreateTbmTextureIfNeeded(gpu::gles2::GLES2Interface* gl,
     return false;
   }
 
-  if (!buffer_handle_.tbm_surface) {
+  const auto& tbm_buffer_handle = GetTbmBuffer();
+  if (!tbm_buffer_handle.tbm_surface) {
     LOG(WARNING) << "No TBM surface registered in video_frame.";
     return false;
   }
 
   texture_id_ = textureId;
 
-  uint32_t image = gl->CreateTizenImageCHROMIUM(
-      buffer_handle_, visible_rect().width(), visible_rect().height(), GL_RGBA);
+  uint32_t image =
+      gl->CreateTizenImageCHROMIUM(tbm_buffer_handle, visible_rect().width(),
+                                   visible_rect().height(), GL_RGBA);
 
   if (!image) {
     LOG(ERROR) << "CreateTizenImageCHROMIUM failed, textureId = " << textureId;
     return false;
   }
 
+  auto& img_id = GetImageID();
+  auto& tex_id = GetTbmTexture();
+  img_id = image;
+  tex_id = textureId;
+
   gl->BindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
   gl->TexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
   gl->TexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
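
On the CPU side, the data()/visible_data() path added above lazily imports and maps a plane's TBM buffer object on first access via MapTbmMemory(), so readers only deal with the usual plane accessors. A sketch of such a reader, assuming a PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER frame whose planes follow the NV12 layout:

    #include <cstdint>

    #include "media/base/video_frame.h"

    // Sketch only: the first visible_data() call per plane triggers
    // MapTbmMemory(), which imports the TBM bo and maps it with
    // TBM_DEVICE_CPU / TBM_OPTION_READ.
    void ReadTbmPlanes(const media::VideoFrame& frame) {
      const uint8_t* y = frame.visible_data(media::VideoFrame::kYPlane);
      const uint8_t* uv = frame.visible_data(media::VideoFrame::kUVPlane);
      const int y_stride = frame.stride(media::VideoFrame::kYPlane);
      const int uv_stride = frame.stride(media::VideoFrame::kUVPlane);
      // The pointers can then be handed to libyuv (NV12Scale, NV12ToI420, ...)
      // exactly as video_util.cc and the canvas renderer do below.
      (void)y; (void)uv; (void)y_stride; (void)uv_stride;
    }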
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index 976f466471567394ebcb18265ef7b01ebaa3f526..5254674800039640b774b3e589554014547198f0 100644
@@ -363,22 +363,42 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
       const gfx::Size& size,
       base::TimeDelta timestamp,
       gfx::TbmBufferHandle handle);
+  static scoped_refptr<VideoFrame> WrapTBMInterProcessBuffer(
+      const gfx::Size& size,
+      base::TimeDelta timestamp,
+      gfx::TbmBufferHandle handle,
+      const StorageType storage);
+  static scoped_refptr<VideoFrame> WrapTBMInterProcessFrame(
+      scoped_refptr<VideoFrame> frame);
 #if defined(SAMSUNG_ELEMENTARY_MEDIA_STREAM_SOURCE)
   bool CreateTbmTextureIfNeeded(gpu::gles2::GLES2Interface* gl,
                                 unsigned textureId);
 #endif  // defined(SAMSUNG_ELEMENTARY_MEDIA_STREAM_SOURCE)
 #endif
+  void MapTbmMemory(size_t plane) const;
   // Needed when we have video-frame content in tbm surface.
   static scoped_refptr<VideoFrame> WrapTBMSurface(const gfx::Size& size,
                                                   base::TimeDelta timestamp,
                                                   gfx::TbmBufferHandle handle);
+
+  scoped_refptr<VideoFrame> GetWrapFrame() { return wrapped_frame_; }
+  const VideoFrame* GetWrapFrame() const { return wrapped_frame_.get(); }
   bool IsTBMBackend() const { return storage_type_ == STORAGE_TBM_SURFACE; }
-  unsigned GetTbmTexture() { return texture_id_; }
+  unsigned& GetTbmTexture() {
+    return wrapped_frame_ ? wrapped_frame_->GetTbmTexture() : texture_id_;
+  }
+  gpu::gles2::GLES2Interface*& GetGl() {
+    return wrapped_frame_ ? wrapped_frame_->GetGl() : gl_;
+  }
   unsigned CreateTbmTextureIfNeeded(gpu::gles2::GLES2Interface* gl);
   void ReleaseTbm();
   void SetTbmTexture(unsigned texture) { texture_id_ = texture; }
-  gfx::TbmBufferHandle GetTbmBuffer() { return buffer_handle_; }
-  unsigned GetImageID() { return image_id_; }
+  const gfx::TbmBufferHandle& GetTbmBuffer() const {
+    return wrapped_frame_ ? wrapped_frame_->GetTbmBuffer() : buffer_handle_;
+  }
+  unsigned& GetImageID() {
+    return wrapped_frame_ ? wrapped_frame_->GetImageID() : image_id_;
+  }
   void SetImageID(unsigned image_id) { image_id_ = image_id; }
   void SetContextProvider(
       scoped_refptr<viz::ContextProvider> context_provider) {
@@ -572,16 +592,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
   // Returns pointer to the buffer for a given plane, if this is an
   // IsMappable() frame type. The memory is owned by VideoFrame object and must
   // not be freed by the caller.
-  const uint8_t* data(size_t plane) const {
-    DCHECK(IsValidPlane(format(), plane));
-    DCHECK(IsMappable());
-    return data_[plane];
-  }
-  uint8_t* data(size_t plane) {
-    DCHECK(IsValidPlane(format(), plane));
-    DCHECK(IsMappable());
-    return data_[plane];
-  }
+  const uint8_t* data(size_t plane) const;
+  uint8_t* data(size_t plane);
 
   const absl::optional<gpu::VulkanYCbCrInfo>& ycbcr_info() const {
     return wrapped_frame_ ? wrapped_frame_->ycbcr_info() : ycbcr_info_;
@@ -598,6 +610,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
   // Only valid to call if this is a NATIVE_TEXTURE frame. Before using the
   // mailbox, the caller must wait for the included sync point.
   const gpu::MailboxHolder& mailbox_holder(size_t texture_index) const;
+  void write_mailbox_holder(size_t texture_index, gpu::MailboxHolder holder);
 
 #if defined(OS_LINUX) || defined(OS_CHROMEOS)
   // Returns a vector containing the backing DmaBufs for this frame. The number
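
The header changes above make the TBM accessors (GetTbmTexture(), GetGl(), GetTbmBuffer(), GetImageID()) wrapped-frame-aware and reference-returning, so a texture or image ID created while compositing a wrapping frame is recorded on the frame that actually owns the TBM buffer. The same delegation pattern in isolation, with hypothetical names purely for illustration:

    // Illustrative only; not part of the patch.
    struct TbmState {
      TbmState* wrapped = nullptr;  // plays the role of wrapped_frame_
      unsigned texture_id = 0;      // plays the role of texture_id_

      // Returns a reference into the innermost (owning) object, so a write
      // made through a wrapper lands on the owner.
      unsigned& GetTexture() {
        return wrapped ? wrapped->GetTexture() : texture_id;
      }
    };
    // Usage: TbmState owner; TbmState wrapper{&owner};
    // wrapper.GetTexture() = 7;  // writes owner.texture_id.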
diff --git a/media/base/video_util.cc b/media/base/video_util.cc
index 8c8d5f4a431717f1c35515860d91b1c129de8b24..5c9ff67f0a558138f8ff9736a4919a7f4aac3235 100644
@@ -687,7 +687,12 @@ Status ConvertAndScaleFrame(const VideoFrame& src_frame,
                             VideoFrame& dst_frame,
                             std::vector<uint8_t>& tmp_buf) {
   constexpr auto kDefaultFiltering = libyuv::kFilterBox;
-  if (!src_frame.IsMappable() || !dst_frame.IsMappable())
+  if ((!src_frame.IsMappable()
+#if defined(TIZEN_TBM_SUPPORT)
+       && src_frame.format() != PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER
+#endif
+       ) ||
+      !dst_frame.IsMappable())
     return Status(StatusCode::kUnsupportedFrameFormatError);
 
   if ((dst_frame.format() == PIXEL_FORMAT_I420 ||
@@ -776,7 +781,11 @@ Status ConvertAndScaleFrame(const VideoFrame& src_frame,
 
   // Both frames are NV12, only scaling is required.
   if (dst_frame.format() == PIXEL_FORMAT_NV12 &&
-      src_frame.format() == PIXEL_FORMAT_NV12) {
+      (
+#if defined(TIZEN_TBM_SUPPORT)
+          src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+          src_frame.format() == PIXEL_FORMAT_NV12)) {
     int error = libyuv::NV12Scale(
         src_frame.visible_data(VideoFrame::kYPlane),
         src_frame.stride(VideoFrame::kYPlane),
@@ -793,7 +802,12 @@ Status ConvertAndScaleFrame(const VideoFrame& src_frame,
   }
 
   if (dst_frame.format() == PIXEL_FORMAT_I420 &&
-      src_frame.format() == PIXEL_FORMAT_NV12) {
+      (
+#if defined(TIZEN_TBM_SUPPORT)
+          src_frame.format() == PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
+          src_frame.format() == PIXEL_FORMAT_NV12)) {
+
     if (src_frame.visible_rect() == dst_frame.visible_rect()) {
       // Both frames have the same size, only NV12-to-I420 conversion is
       // required.
diff --git a/media/renderers/paint_canvas_video_renderer.cc b/media/renderers/paint_canvas_video_renderer.cc
index 3d7b2ac9e8eed9e6857d0bd35162ac3f717c069d..92858005a6ee57b23406a34a6f3d71e06de9fedc 100644
@@ -300,6 +300,13 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
   const int width = video_frame->visible_rect().width();
   const int height = video_frame->visible_rect().height();
 
+#if defined(TIZEN_TBM_SUPPORT)
+  if (video_frame->format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER) {
+    if (video_frame->GetWrapFrame())
+      video_frame = video_frame->GetWrapFrame();
+  }
+#endif
+
   size_t rows_per_chunk = 1;
   for (size_t plane = 0; plane < VideoFrame::kMaxPlanes; ++plane) {
     if (VideoFrame::IsValidPlane(format, plane)) {
@@ -450,6 +457,9 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
       break;
 
     case PIXEL_FORMAT_NV12:
+#if defined(TIZEN_TBM_SUPPORT)
+    case PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER:
+#endif
       LIBYUV_NV12_TO_ARGB_MATRIX(plane_meta[VideoFrame::kYPlane].data,
                                  plane_meta[VideoFrame::kYPlane].stride,
                                  plane_meta[VideoFrame::kUVPlane].data,
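
In the canvas drawImage() path above, a TBM inter-process frame is treated like NV12 and converted to 32-bit ARGB through the same libyuv helper as regular NV12 frames. The equivalent plain libyuv call, shown as a sketch for one frame whose planes were mapped as in the data() path (the LIBYUV_NV12_TO_ARGB_MATRIX macro additionally picks the YUV matrix from the frame's color space):

    #include <cstdint>

    #include "third_party/libyuv/include/libyuv.h"

    // Sketch only: converts one mapped NV12/TBM frame to 32-bit ARGB.
    // libyuv's "ARGB" is word-order ARGB, i.e. B,G,R,A bytes in memory on
    // little-endian platforms; NV12ToARGB uses the BT.601 limited-range matrix.
    void TbmNv12ToArgb(const uint8_t* y, int y_stride,
                       const uint8_t* uv, int uv_stride,
                       uint8_t* argb, int argb_stride,
                       int width, int height) {
      libyuv::NV12ToARGB(y, y_stride, uv, uv_stride, argb, argb_stride,
                         width, height);
    }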
diff --git a/third_party/blink/renderer/modules/breakout_box/media_stream_video_track_underlying_source.cc b/third_party/blink/renderer/modules/breakout_box/media_stream_video_track_underlying_source.cc
index ca8fa89ce5bcffd4e7c04edaf11dac797eca7dbb..4611bcddaf25f746b7a2d13e28575c0381ea5308 100644
@@ -87,6 +87,19 @@ void MediaStreamVideoTrackUnderlyingSource::OnFrameFromTrack(
     std::vector<scoped_refptr<media::VideoFrame>> /*scaled_media_frames*/,
     base::TimeTicks estimated_capture_time) {
   DCHECK(GetIOTaskRunner()->RunsTasksInCurrentSequence());
+
+#if defined(TIZEN_TBM_SUPPORT) && defined(OS_TIZEN_TV_PRODUCT) && \
+    defined(TIZEN_VIDEO_HOLE)
+  // Wrap the TBM hole frame into a normal TBM frame, as this sink does not
+  // need a hole frame.
+  if (media_frame->format() ==
+          media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER &&
+      media_frame->storage_type() ==
+          media::VideoFrame::StorageType::STORAGE_HOLE)
+    media_frame =
+        media::VideoFrame::WrapTBMInterProcessFrame(std::move(media_frame));
+#endif
+
   // The scaled video frames are currently ignored.
   QueueFrame(std::move(media_frame));
 }
@@ -100,6 +113,14 @@ bool MediaStreamVideoTrackUnderlyingSource::StartFrameDelivery() {
   if (!video_track)
     return false;
 
+#if defined(OS_TIZEN_TV_PRODUCT) && defined(TIZEN_TBM_SUPPORT)
+  // Enable the TBM callback.
+  if (video_track) {
+    LOG(INFO) << "MediaStreamVideoTrackUnderlyingSource enable tbm callback";
+    video_track->OnWebRtcSendStreamUserDataEncoder(true);
+  }
+#endif
+
   ConnectToTrack(WebMediaStreamTrack(track_),
                  ConvertToBaseRepeatingCallback(CrossThreadBindRepeating(
                      &MediaStreamVideoTrackUnderlyingSource::OnFrameFromTrack,
diff --git a/third_party/blink/renderer/modules/webcodecs/video_frame.cc b/third_party/blink/renderer/modules/webcodecs/video_frame.cc
index ae8f18feca91e67cf32b49fcafb231c460048b9a..37c3999286f4b24955981f043ee76d013088b181 100644
@@ -183,6 +183,9 @@ bool IsSupportedPlanarFormat(const media::VideoFrame& frame) {
     case media::PIXEL_FORMAT_I420A:
       return num_planes == 4;
     case media::PIXEL_FORMAT_NV12:
+#if defined(TIZEN_TBM_SUPPORT)
+    case media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER:
+#endif
       return num_planes == 2;
     case media::PIXEL_FORMAT_XBGR:
     case media::PIXEL_FORMAT_XRGB:
@@ -575,6 +578,9 @@ absl::optional<V8VideoPixelFormat> VideoFrame::format() const {
     case media::PIXEL_FORMAT_I444:
       return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kI444);
     case media::PIXEL_FORMAT_NV12:
+#if defined(TIZEN_TBM_SUPPORT)
+    case media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER:
+#endif
       return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kNV12);
     case media::PIXEL_FORMAT_ABGR:
       return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kRGBA);
@@ -755,16 +761,25 @@ ScriptPromise VideoFrame::copyTo(ScriptState* script_state,
     return ScriptPromise();
   }
 
-  // Map buffers if necessary.
-  if (!local_frame->IsMappable()) {
-    DCHECK(local_frame->HasGpuMemoryBuffer());
-    local_frame = media::ConvertToMemoryMappedFrame(local_frame);
-    if (!local_frame) {
-      exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
-                                        "Failed to read VideoFrame data.");
-      return ScriptPromise();
+#if defined(TIZEN_TBM_SUPPORT)
+  if (local_frame->format() == media::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER) {
+    if (local_frame->GetWrapFrame())
+      local_frame = local_frame->GetWrapFrame();
+  } else {
+#endif
+    // Map buffers if necessary.
+    if (!local_frame->IsMappable()) {
+      DCHECK(local_frame->HasGpuMemoryBuffer());
+      local_frame = media::ConvertToMemoryMappedFrame(local_frame);
+      if (!local_frame) {
+        exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
+                                          "Failed to read VideoFrame data.");
+        return ScriptPromise();
+      }
     }
+#if defined(TIZEN_TBM_SUPPORT)
   }
+#endif
 
   // Copy data.
   for (wtf_size_t i = 0; i < layout.num_planes; i++) {
diff --git a/third_party/blink/renderer/platform/webrtc/convert_to_webrtc_video_frame_buffer.cc b/third_party/blink/renderer/platform/webrtc/convert_to_webrtc_video_frame_buffer.cc
index be13f768c74d984a27892093901b4c9eb0cbbaa1..a69a458d20cd22be8f287f55f5a1d820edfdce13 100644
@@ -398,6 +398,10 @@ bool CanConvertToWebRtcVideoFrameBuffer(const media::VideoFrame* frame) {
 #if defined(TIZEN_VIDEO_HOLE)
          frame->storage_type() == media::VideoFrame::STORAGE_HOLE ||
          frame->format() == media::PIXEL_FORMAT_ENCODED ||
+#if defined(TIZEN_TBM_SUPPORT)
+         frame->format() ==
+             media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+#endif
 #endif
          frame->HasTextures();
 }
@@ -441,6 +445,10 @@ rtc::scoped_refptr<webrtc::VideoFrameBuffer> ConvertToWebRtcVideoFrameBuffer(
     // the video frame with tbm buffer type is native, handle it here
     scoped_refptr<media::VideoFrame> scaled_frame =
         MaybeConvertAndScaleFrame(video_frame, shared_resources);
+    if (!scaled_frame) {
+      DLOG(ERROR) << "Make scaled frame failed.";
+      return MakeFrameAdapter(std::move(video_frame));
+    }
     return MakeFrameAdapter(std::move(scaled_frame));
 #endif
   } else if (video_frame->HasTextures()) {