[WebAI] Create tizen media packet with different types 12/317512/4
author peng.yin <peng8.yin@samsung.com>
Thu, 29 Aug 2024 09:48:44 +0000 (17:48 +0800)
committer peng yin <peng8.yin@samsung.com>
Thu, 12 Sep 2024 03:07:57 +0000 (03:07 +0000)
In order to achieve high efficiency for AI video processing, we
should avoid repeatedly creating buffers for each video frame,
and pursue a method by which the AI-processed video frames can be
accessed by different program processes at the lowest cost,
so a buffer pool that supports various types is necessary:

1. kSelfOwnedRaw: raw cpu memory buffer, created and owned by us.

2. kSelfOwnedTbm: tizen tbm buffer, created and owned by us.

3. kSelfOwnedTbmFromGpuHandle: same as above, but wrapped by
a generic Chromium GPU buffer handle wrapper which is more easily
accepted by video sinks (renderer & encoder) in the official media
pipeline.

4. kMappedFromVideoFrame: the buffer is not owned by us, but comes
from an external video frame.

Change-Id: I6e1d64030f568853853b5acccbd113a83cfbf0c1
Signed-off-by: peng.yin <peng8.yin@samsung.com>
13 files changed:
tizen_src/chromium_impl/webai/blink/ai_processor_video.cc
tizen_src/chromium_impl/webai/blink/ai_processor_video.h
tizen_src/chromium_impl/webai/blink/mojom/ai_processor.mojom
tizen_src/chromium_impl/webai/common/ai_logger.h
tizen_src/chromium_impl/webai/content/ai_processor_video_impl.cc
tizen_src/chromium_impl/webai/content/ai_processor_video_impl.h
tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.cc
tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.h
tizen_src/chromium_impl/webai/content/tbm_helpers.cc [deleted file]
tizen_src/chromium_impl/webai/content/tbm_helpers.h [deleted file]
tizen_src/chromium_impl/webai/content/video_processor_helpers.cc [new file with mode: 0644]
tizen_src/chromium_impl/webai/content/video_processor_helpers.h [new file with mode: 0644]
tizen_src/chromium_impl/webai/tizen_ai.gni

index c123d2b042fd4b7638f78bc6d2adc1b3c55a2f08..d0fe938e49f86d746168f58dc9b98f4547483b6f 100644 (file)
@@ -5,10 +5,13 @@
 #include "tizen_src/chromium_impl/webai/blink/ai_processor_video.h"
 
 #include "base/trace_event/trace_event.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/ipc/common/gpu_memory_buffer_support.h"
 #include "media/base/video_frame.h"
 #include "media/mojo/common/media_type_converters.h"
 #include "media/mojo/mojom/media_types.mojom.h"
 #include "mojo/public/cpp/bindings/pending_associated_remote.h"
+#include "third_party/blink/public/platform/platform.h"
 #include "third_party/blink/renderer/bindings/core/v8/v8_binding_for_core.h"
 #include "third_party/blink/renderer/bindings/modules/v8/v8_ai_error_callback.h"
 #include "third_party/blink/renderer/bindings/modules/v8/v8_video_frame_output_callback.h"
@@ -67,6 +70,12 @@ AiProcessorVideo::AiProcessorVideo(ScriptState* script_state, LocalFrame* frame)
     : AiBase(script_state, frame) {
   TRACE_EVENT0("webai", "AiProcessorVideo::AiProcessorVideo");
   receiver_.Bind(std::move(pending_client_receiver_));
+
+  gpu_factories_ = Platform::Current()->GetGpuFactories();
+  sii_ = gpu_factories_->SharedImageInterface();
+  if (!sii_) {
+    AI_LOG(INFO) << "Get shared Image Interface failed.";
+  }
 }
 
 AiProcessorVideo::~AiProcessorVideo() = default;
@@ -78,11 +87,76 @@ void AiProcessorVideo::OnMessage() {
   return;
 }
 
+void AiProcessorVideo::OnProcessOutputGpuMemoryBuffer(
+    gfx::GpuMemoryBufferHandle gmb_handle,
+    const gfx::Size& size,
+    gfx::BufferFormat format,
+    uint64_t output_packet) {
+  TRACE_EVENT0("webai", "AiProcessorVideo::OnProcessOutputGpuMemoryBuffer");
+  CHECK_SCRIPT_STATE();
+
+  gpu::GpuMemoryBufferSupport support;
+  auto gmb = support.CreateGpuMemoryBufferImplFromHandle(
+      std::move(gmb_handle), size, format,
+      gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+      base::NullCallback());
+  if (!gmb || !sii_ || format != gfx::BufferFormat::YUV_420_BIPLANAR) {
+    AI_LOG(ERROR) << "Failed to create gpu memory buffer video frame.";
+    ReleaseBuffer(output_packet);
+    return;
+  }
+
+  // TODO: Reuse shared image.
+  std::vector<gfx::BufferPlane> planes;
+  planes.push_back(gfx::BufferPlane::Y);
+  planes.push_back(gfx::BufferPlane::UV);
+  constexpr uint32_t usage =
+      gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER |
+      gpu::SHARED_IMAGE_USAGE_DISPLAY_READ | gpu::SHARED_IMAGE_USAGE_SCANOUT;
+  gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes] = {};
+  for (size_t plane = 0; plane < planes.size(); ++plane) {
+    auto mail_box = sii_->CreateSharedImage(
+        gmb.get(), gpu_factories_->GpuMemoryBufferManager(), planes[plane],
+        gfx::ColorSpace{}, kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, usage,
+        "VideoProcessorFrameBuffer");
+    const gpu::SyncToken sync_token = sii_->GenVerifiedSyncToken();
+    mailbox_holders[plane] =
+        gpu::MailboxHolder(mail_box, sync_token, GL_TEXTURE_EXTERNAL_OES);
+  }
+
+  auto frame = media::VideoFrame::WrapExternalGpuMemoryBuffer(
+      gfx::Rect{0, 0, size.width(), size.height()}, size, std::move(gmb),
+      mailbox_holders, base::NullCallback(),
+      base::TimeTicks::Now() - base::TimeTicks());
+  if (!frame) {
+    AI_LOG(INFO) << "Wrap gpu memory video frame failed.";
+    ReleaseBuffer(output_packet);
+    return;
+  }
+
+  frame->AddDestructionObserver(base::BindPostTaskToCurrentDefault(
+      WTF::BindOnce(&AiProcessorVideo::ReleaseBuffer, WrapWeakPersistent(this),
+                    output_packet)));
+  for (size_t plane = 0; plane < planes.size(); ++plane) {
+    frame->AddDestructionObserver(base::BindPostTaskToCurrentDefault(
+        WTF::BindOnce(&gpu::SharedImageInterface::DestroySharedImage,
+                      base::Unretained(sii_), mailbox_holders[plane].sync_token,
+                      mailbox_holders[plane].mailbox)));
+  }
+
+  frame->set_shared_image_format_type(
+      media::SharedImageFormatType::kSharedImageFormatExternalSampler);
+
+  AI_LOG(INFO) << frame->AsHumanReadableString();
+  auto* blink_frame = MakeGarbageCollected<VideoFrame>(
+      std::move(frame), ExecutionContext::From(script_state_));
+  std::ignore = output_callback_->Invoke(nullptr, blink_frame);
+}
+
 void AiProcessorVideo::OnProcessOutputTbm(
     gfx::TbmBufferHandleInterProcess tbm_buffer_handle_process,
     const gfx::Size& natural_size) {
   TRACE_EVENT0("webai", "AiProcessorVideo::OnProcessOutputTbm");
-  AI_LOG(INFO) << __func__;
   CHECK_SCRIPT_STATE();
 
   if (!output_callback_) {
@@ -111,7 +185,6 @@ void AiProcessorVideo::OnProcessOutputTbm(
 void AiProcessorVideo::processVideo(VideoFrame* data,
                                     ExceptionState& exception_state) {
   TRACE_EVENT0("webai", "AiProcessorVideo::processVideo");
-  AI_LOG(INFO) << __func__;
   auto media_frame = data->frame();
   if (!media_frame) {
     AI_LOG(INFO) << "There is no media frame.";
@@ -124,6 +197,7 @@ void AiProcessorVideo::processVideo(VideoFrame* data,
     return;
   }
 
+  AI_LOG(INFO) << media_frame->AsHumanReadableString();
   remote_->ProcessVideo(media_frame);
 }
 
index 1f4c6722535367cd35d4ed94fd49bf581fe1172e..b8317028680ee656cf7b8428f432325ac4fe2be3 100644 (file)
@@ -7,7 +7,9 @@
 
 #include <string.h>
 
+#include "gpu/command_buffer/client/shared_image_interface.h"
 #include "media/mojo/mojom/media_types.mojom-forward.h"
+#include "media/video/gpu_video_accelerator_factories.h"
 #include "mojo/public/cpp/bindings/associated_receiver.h"
 #include "third_party/blink/renderer/bindings/core/v8/script_promise.h"
 #include "third_party/blink/renderer/bindings/core/v8/script_promise_property.h"
@@ -51,8 +53,10 @@ class MODULES_EXPORT AiProcessorVideo : public ScriptWrappable,
 
   // blink::mojom::AiProcessorClient implementation
   void OnMessage() override;
-  void OnProcessOutputVideoFrame(
-      const scoped_refptr<media::VideoFrame>& output) override{};
+  void OnProcessOutputGpuMemoryBuffer(gfx::GpuMemoryBufferHandle gmb_handle,
+                                      const gfx::Size& size,
+                                      gfx::BufferFormat format,
+                                      uint64_t output_packet) override;
   void OnProcessOutputTbm(
       gfx::TbmBufferHandleInterProcess tbm_buffer_handle_process,
       const gfx::Size& natural_size) override;
@@ -77,6 +81,9 @@ class MODULES_EXPORT AiProcessorVideo : public ScriptWrappable,
 
   mojo::AssociatedReceiver<mojom::AiProcessorClient> receiver_{this};
   AiConfiguration cfg_{};
+  raw_ptr<media::GpuVideoAcceleratorFactories, ExperimentalRenderer>
+      gpu_factories_{};
+  raw_ptr<gpu::SharedImageInterface, ExperimentalRenderer> sii_{};
 };
 
 }  // namespace blink
index 77b4652e69e8a803e0ee19cd87fbd4183f719e3a..58be98c4648bf856a4b0ddde9fd7c791a4313326 100644 (file)
@@ -48,7 +48,7 @@ interface AiProcessorVideo {
 // Interface for processed video result
 interface AiProcessorClient {
   OnMessage();
-  OnProcessOutputVideoFrame(media.mojom.VideoFrame video_frame);
+  OnProcessOutputGpuMemoryBuffer(gfx.mojom.GpuMemoryBufferHandle gmb_handle, gfx.mojom.Size size, gfx.mojom.BufferFormat format, uint64 output_packet);
   OnProcessOutputLandmark(array<array<double>> result);
   OnProcessOutputTbm(gfx.mojom.TbmBufferHandleInterProcess tbm_buffer_handle_process, gfx.mojom.Size natural_size);
 };
index ec2656667487911ba45bc0a57f69e083aaf210bb..5f36db64963840b171d138b292f839cdde1a8c39 100644 (file)
@@ -9,16 +9,23 @@
 
 #define AI_LOG(severity) LOG(severity) << "[WebAI] "
 
-#define CheckReturn(ret_val, expected_val, ignore_error) \
-  do {                                                   \
-    if (ret_val != expected_val) {                       \
-      AI_LOG(ERROR) << "error value: " << ret_val;       \
-      if (!ignore_error) {                               \
-        return;                                          \
-      }                                                  \
-    }                                                    \
+#define CheckResult(result, expected_result, ignore_error) \
+  do {                                                     \
+    if (result != expected_result) {                       \
+      AI_LOG(ERROR) << "error value: " << result;          \
+      if (!ignore_error) {                                 \
+        return;                                            \
+      }                                                    \
+    }                                                      \
   } while (0)
 
-#define DROP_FRAME_DEBUG
+#define CheckResultAndReturn(result, expected_result, true_ret, false_ret) \
+  do {                                                                     \
+    if (result != expected_result) {                                       \
+      AI_LOG(ERROR) << "error value: " << result;                          \
+      return false_ret;                                                    \
+    }                                                                      \
+    return true_ret;                                                       \
+  } while (0)
 
 #endif  // WEBAI_CONTENT_AI_LOGGER_H_
\ No newline at end of file
index e253496c814f556ea1a52ddcac8ef5e786e49d70..6af1579673a57acf1e0e808388774e93380b4948 100644 (file)
@@ -5,6 +5,7 @@
 #include "webai/content/ai_processor_video_impl.h"
 
 #include "base/json/json_writer.h"
+#include "media/base/format_utils.h"
 #include "tizen_src/chromium_impl/webai/common/ai_logger.h"
 
 namespace {
@@ -34,7 +35,7 @@ std::string AiVideoModelToJsonString(const blink::AiVideoModel& model,
 namespace content {
 
 void AiProcessorVideoImpl::AdapterDeleter::operator()(xr_adapter_h adapter) {
-  CheckReturn(xr_adapter_destroy(adapter), AIFW_RESULT_SUCCESS, false);
+  CheckResult(xr_adapter_destroy(adapter), AIFW_RESULT_SUCCESS, false);
 }
 
 AiProcessorVideoImpl::AiProcessorVideoImpl(
@@ -42,9 +43,8 @@ AiProcessorVideoImpl::AiProcessorVideoImpl(
     : AiImplBase<AiProcessorVideoImplTraits>(std::move(receiver)) {
   TRACE_EVENT0("webai", "AiProcessorVideoImpl::AiProcessorVideoImpl");
   task_runner_ = base::SequencedTaskRunner::GetCurrentDefault();
-  buffer_pool_ = base::MakeRefCounted<AiVideoFrameBufferPool>();
   xr_adapter_h handle{};
-  CheckReturn(xr_adapter_create(&handle), AIFW_RESULT_SUCCESS, false);
+  CheckResult(xr_adapter_create(&handle), AIFW_RESULT_SUCCESS, false);
   adapter_.reset(handle);
 }
 
@@ -52,15 +52,10 @@ AiProcessorVideoImpl::~AiProcessorVideoImpl() {
   AI_LOG(INFO) << __func__;
 }
 
-void ReleaseInputBuffer(std::shared_ptr<AiVideoFrameBuffer> buffer) {
-  AI_LOG(INFO) << __func__;
-  // TODO (peng8.yin): Set external memory release here.
-}
-
 void AiProcessorVideoImpl::ProcessVideo(
     const scoped_refptr<media::VideoFrame>& video_frame) {
-  AI_LOG(INFO) << video_frame->AsHumanReadableString();
   TRACE_EVENT0("webai", "AiProcessorVideoImpl::ProcessVideo");
+
   if (!is_ready_) {
     AI_LOG(ERROR) << "Configurate it first.";
     return;
@@ -73,39 +68,39 @@ void AiProcessorVideoImpl::ProcessVideo(
     return;
   }
 
-  auto input_buffer = buffer_pool_->AcquireBuffer(video_frame, required_format);
+  auto input_buffer =
+      buffer_pool_->AcquireBufferAndCopyData(video_frame, required_format);
   if (!input_buffer) {
     AI_LOG(ERROR) << "Acquire buffer for input failed.";
     return;
   }
 
   auto output_buffer = buffer_pool_->AcquireReusableBuffer(
-      video_frame->visible_rect().size(), required_format);
+      video_frame->visible_rect().size(), required_format,
+      true /* require gpu memory */);
   if (!output_buffer) {
     AI_LOG(ERROR) << "Acquire buffer for out failed.";
-    buffer_pool_->ReturnBuffer(input_buffer);
+    if (!input_buffer->is_external()) {
+      buffer_pool_->ReturnBuffer(input_buffer);
+    }
     return;
   }
-  output_buffer->set_natural_size(video_frame->natural_size());
 
-  if (input_buffer->is_external_memory()) {
-    output_buffer->SetInputBufferReleaseCb(
-        base::BindOnce(&ReleaseInputBuffer, input_buffer));
-  } else {
+  output_buffer->set_natural_size(video_frame->natural_size());
+  if (!input_buffer->is_external()) {
     output_buffer->SetInputBufferReleaseCb(
         base::BindOnce(&AiVideoFrameBufferPool::ReturnBuffer,
                        buffer_pool_->GetWeakPtr(), input_buffer));
   }
 
   TRACE_EVENT_BEGIN0("webai", "xr_adapter_submit_packet");
-  CheckReturn(xr_adapter_submit_packet(adapter_.get(), input_buffer->packet(),
+  CheckResult(xr_adapter_submit_packet(adapter_.get(), input_buffer->packet(),
                                        output_buffer->packet()),
               AIFW_RESULT_SUCCESS, false);
   TRACE_EVENT_END0("webai", "xr_adapter_submit_packet");
 }
 
 void AiProcessorVideoImpl::ReleaseBuffer(uint64_t packet) {
-  AI_LOG(INFO) << __func__ << " packet:" << packet;
   auto buffer = FindReusableBuffer(reinterpret_cast<media_packet_h>(packet));
   if (!buffer) {
     AI_LOG(ERROR) << "Unexpedted packet:" << packet;
@@ -131,30 +126,35 @@ void AiProcessorVideoImpl::Start(const blink::AiConfiguration& configure) {
 
   {
     TRACE_EVENT0("webai", "xr_adapter_set_frame_ready_cb");
-    CheckReturn(
+    CheckResult(
         xr_adapter_set_frame_ready_cb(adapter_.get(), XrFrameReadyCb, this),
         AIFW_RESULT_SUCCESS, false);
   }
 
-  CheckReturn(CreateJsonConfiguration(configure), true, false);
+  CheckResult(CreateJsonConfiguration(configure), true, false);
 
   {
     TRACE_EVENT0("webai", "xr_adapter_init");
-    CheckReturn(xr_adapter_init(adapter_.get(), config_json_.value().c_str(),
+    CheckResult(xr_adapter_init(adapter_.get(), config_json_.value().c_str(),
                                 config_json_.value().length()),
                 AIFW_RESULT_SUCCESS, false);
   }
   cfg_ = configure;
+  buffer_pool_ = base::MakeRefCounted<AiVideoFrameBufferPool>();
   is_ready_ = true;
 }
 
 void AiProcessorVideoImpl::Stop() {
   TRACE_EVENT0("webai", "AiProcessorVideoImpl::Start");
   AI_LOG(INFO) << __func__;
-  CheckReturn(xr_adapter_deinit(adapter_.get()), AIFW_RESULT_SUCCESS, false);
+  CheckResult(xr_adapter_deinit(adapter_.get()), AIFW_RESULT_SUCCESS, true);
+
+  // Release all buffers.
+  constexpr gfx::Size max_size{4096, 4096};
+  buffer_pool_->FlushPool(max_size);
 }
 
-std::shared_ptr<AiVideoFrameBuffer> AiProcessorVideoImpl::FindReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiProcessorVideoImpl::FindReusableBuffer(
     media_packet_h packet) {
   return buffer_pool_->FindReusableBuffer(packet);
 }
@@ -162,6 +162,7 @@ std::shared_ptr<AiVideoFrameBuffer> AiProcessorVideoImpl::FindReusableBuffer(
 void AiProcessorVideoImpl::OnProcessOutput(media_packet_h output_packet,
                                            xr_adapter_error_type_e error) {
   TRACE_EVENT0("webai", "AiProcessorVideoImpl::OnProcessOutput");
+
   auto output_buffer = FindReusableBuffer(output_packet);
   if (!output_buffer) {
     AI_LOG(ERROR) << "Unexpedted packet:" << output_buffer;
@@ -177,12 +178,19 @@ void AiProcessorVideoImpl::OnProcessOutput(media_packet_h output_packet,
     return;
   }
 
-  // Generate Tbm handle from output buffer
-  auto tbm_buffer_handle = output_buffer->GenerateTbmHandleFromBuffer();
-  tbm_buffer_handle.media_packet = reinterpret_cast<size_t>(output_packet);
-  client_->OnProcessOutputTbm(
-      gfx::ToTbmBufferHandleInterProcess(tbm_buffer_handle),
-      output_buffer->natural_size());
+  if (output_buffer->has_gpu_memory_buffer()) {
+    client_->OnProcessOutputGpuMemoryBuffer(
+        output_buffer->clone_gpu_memory_buffer(), output_buffer->size(),
+        media::VideoPixelFormatToGfxBufferFormat(output_buffer->format())
+            .value(),
+        reinterpret_cast<size_t>(output_packet));
+  } else {
+    auto tbm_buffer_handle = output_buffer->GenerateTbmHandleFromBuffer();
+    tbm_buffer_handle.media_packet = reinterpret_cast<size_t>(output_packet);
+    client_->OnProcessOutputTbm(
+        gfx::ToTbmBufferHandleInterProcess(tbm_buffer_handle),
+        output_buffer->natural_size());
+  }
 }
 
 bool AiProcessorVideoImpl::CreateJsonConfiguration(
index bb65e9016a315768f57efff2864bacde006e040c..c99e81913af7f046082cff76a55f897cc986edd0 100644 (file)
@@ -44,8 +44,7 @@ class CONTENT_EXPORT AiProcessorVideoImpl final
   void ReleaseBuffer(uint64_t packet) override;
   void Start(const blink::AiConfiguration& configure) override;
   void Stop() override;
-
-  std::shared_ptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
+  scoped_refptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
   void OnProcessOutput(media_packet_h output_packet,
                        xr_adapter_error_type_e error);
   auto get_runner() const { return task_runner_; }
index 25bc3d1242d94506bd39782c24ddbffa3d2dffe9..04c1d78bb368048b355865d1f0b748d0258dffed 100644 (file)
@@ -6,43 +6,19 @@
 
 #include "base/task/bind_post_task.h"
 #include "base/trace_event/trace_event.h"
+#include "gpu/ipc/common/gpu_memory_buffer_support.h"
+#include "media/base/format_utils.h"
 #include "media/base/video_frame.h"
 #include "third_party/libyuv/include/libyuv.h"
 #include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tizen_gpu_buffer.h"
 #include "webai/common/ai_logger.h"
-#include "webai/content/tbm_helpers.h"
 
 namespace content {
 
 namespace {
 
-constexpr uint32_t kMaxPoolSize = 8;
-
-libyuv::FourCC ToFourcc(media::VideoPixelFormat format) {
-  switch (format) {
-    case media::PIXEL_FORMAT_I420:
-      return libyuv::FOURCC_I420;
-    case media::PIXEL_FORMAT_NV12:
-      return libyuv::FOURCC_NV12;
-    case media::PIXEL_FORMAT_RGB24:
-      return libyuv::FOURCC_24BG;
-    default:
-      return libyuv::FOURCC_ANY;
-  }
-}
-
-media_format_mimetype_e ToMime(media::VideoPixelFormat format) {
-  switch (format) {
-    case media::PIXEL_FORMAT_I420:
-      return MEDIA_FORMAT_I420;
-    case media::PIXEL_FORMAT_NV12:
-      return MEDIA_FORMAT_NV12;
-    case media::PIXEL_FORMAT_RGB24:
-      return MEDIA_FORMAT_RGB888;
-    default:
-      return MEDIA_FORMAT_MAX;
-  }
-}
+constexpr uint32_t kMaxPoolSize = 6;
 
 }  // namespace
 
@@ -50,8 +26,9 @@ AiVideoFrameBufferPool::AiVideoFrameBufferPool() {
   DETACH_FROM_SEQUENCE(sequence_checker_);
 }
 
-std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireBuffer(
-    scoped_refptr<media::VideoFrame> video_frame,
+scoped_refptr<AiVideoFrameBuffer>
+AiVideoFrameBufferPool::AcquireBufferAndCopyData(
+    scoped_refptr<media::VideoFrame> source_video_frame,
     media::VideoPixelFormat required_format) {
   // TODO (peng8.yin): XRFW only supports RGB24 & RGB32 for non TBM surface
   // input format, see `xrfw_effect_blur_set_input_buffer` and
@@ -60,18 +37,19 @@ std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireBuffer(
   // as video frame format now.
   //
   // if (required_format == video_frame->format()) {
-  //   return std::make_shared<AiVideoFrameBuffer>(
-  //       video_frame->visible_rect().size(), required_format, video_frame);
+  //   return base::MakeRefCounted<AiVideoFrameBuffer>(
+  //       video_frame->visible_rect().size(), required_format, true,
+  //       video_frame);
   // }
 
-  auto buffer = AcquireReusableBuffer(video_frame->visible_rect().size(),
-                                      required_format);
+  auto buffer = AcquireReusableBuffer(source_video_frame->visible_rect().size(),
+                                      required_format, false);
   if (!buffer) {
     AI_LOG(ERROR) << "Acquire reusable buffer failed.";
     return nullptr;
   }
 
-  if (!buffer->CopyFrom(video_frame)) {
+  if (!buffer->CopyFrom(source_video_frame)) {
     AI_LOG(ERROR) << "Copy video frame into buffer failed.";
     ReturnBuffer(std::move(buffer));
     return nullptr;
@@ -80,71 +58,67 @@ std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireBuffer(
   return buffer;
 }
 
-std::shared_ptr<AiVideoFrameBuffer>
-AiVideoFrameBufferPool::AcquireReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireReusableBuffer(
     gfx::Size required_size,
-    media::VideoPixelFormat required_format) {
+    media::VideoPixelFormat required_format,
+    bool need_gpu_memory_buffer) {
   DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
   if (last_size_.value_or(required_size) != required_size) {
-    AI_LOG(INFO) << "required size is changed, flush buffer pool.";
+    AI_LOG(INFO) << "Required size is changed, flush buffer pool.";
     FlushPool(required_size);
   }
 
-  auto it =
-      std::find_if(reusable_buffers_.begin(), reusable_buffers_.end(),
-                   [required_size, required_format](
-                       const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
-                     return !buffer->taken() && !buffer->is_mark_for_delete() &&
-                            buffer->size().width() >= required_size.width() &&
-                            buffer->size().height() >= required_size.height() &&
-                            buffer->format() == required_format;
-                   });
-
-#if defined(DROP_FRAME_DEBUG)
-  static int total = 0;
-  static int drop = 0;
-  total++;
-#endif
+  auto it = std::find_if(
+      reusable_buffers_.begin(), reusable_buffers_.end(),
+      [required_size, required_format, need_gpu_memory_buffer](
+          const scoped_refptr<AiVideoFrameBuffer>& buffer) {
+        return !buffer->taken() && !buffer->is_mark_for_delete() &&
+               buffer->size().width() >= required_size.width() &&
+               buffer->size().height() >= required_size.height() &&
+               buffer->format() == required_format &&
+               buffer->has_gpu_memory_buffer() == need_gpu_memory_buffer;
+      });
 
   if (it == reusable_buffers_.end()) {
     if (reusable_buffers_.size() >= kMaxPoolSize) {
-      AI_LOG(ERROR) << "Reach max pool size, drop frame.";
-#if defined(DROP_FRAME_DEBUG)
-      drop++;
-      AI_LOG(ERROR) << " drop:" << drop << " / total:" << total;
-#endif
+      AI_LOG(WARNING) << "Reach max pool size, drop frame.";
       return nullptr;
     }
 
-    it = AppendReusableBuffer(required_size, required_format);
+    it = AppendReusableBuffer(required_size, required_format,
+                              need_gpu_memory_buffer);
     AI_LOG(INFO) << "New buffer added, size:" << required_size.ToString()
                  << " format:" << required_format;
   }
 
+  if (it == reusable_buffers_.end()) {
+    AI_LOG(WARNING) << "Acquire buffer failed, drop frame.";
+    return nullptr;
+  }
+
   (*it)->set_taken(true);
   last_size_ = required_size;
-  AI_LOG(INFO) << " media_packet:" << (*it)->packet();
   return *it;
 }
 
 void AiVideoFrameBufferPool::ReturnBuffer(
-    std::shared_ptr<AiVideoFrameBuffer> return_buffer) {
+    scoped_refptr<AiVideoFrameBuffer> return_buffer) {
   DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
   return_buffer->set_taken(false);
   if (return_buffer->is_mark_for_delete()) {
     AI_LOG(INFO) << "Delete buffer.";
     reusable_buffers_.remove_if(
-        [return_buffer](const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
+        [return_buffer](const scoped_refptr<AiVideoFrameBuffer>& buffer) {
           return return_buffer == buffer;
         });
   }
 }
 
-std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::FindReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::FindReusableBuffer(
     media_packet_h packet) {
   auto it =
       std::find_if(reusable_buffers_.begin(), reusable_buffers_.end(),
-                   [packet](const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
+                   [packet](const scoped_refptr<AiVideoFrameBuffer>& buffer) {
                      return buffer->packet() == packet;
                    });
   if (it == reusable_buffers_.end()) {
@@ -153,13 +127,41 @@ std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::FindReusableBuffer(
   return *it;
 }
 
-std::list<std::shared_ptr<AiVideoFrameBuffer>>::iterator
+std::list<scoped_refptr<AiVideoFrameBuffer>>::iterator
 AiVideoFrameBufferPool::AppendReusableBuffer(gfx::Size size,
-                                             media::VideoPixelFormat format) {
+                                             media::VideoPixelFormat format,
+                                             bool need_gpu_memory_buffer) {
   DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
-  return reusable_buffers_.emplace(
-      reusable_buffers_.end(),
-      std::make_shared<AiVideoFrameBuffer>(size, format));
+  if (format != media::PIXEL_FORMAT_NV12) {
+    AI_LOG(ERROR) << "Unsupported format:" << format;
+    return reusable_buffers_.end();
+  }
+
+  if (!need_gpu_memory_buffer) {
+    return reusable_buffers_.emplace(reusable_buffers_.end(),
+                                     base::MakeRefCounted<AiVideoFrameBuffer>(
+                                         size, format, true /* need tbm */));
+  } else {
+    gfx::NativePixmapHandle handle{};
+    for (size_t i = 0; i < media::VideoFrameLayout::NumPlanes(format); ++i) {
+      auto plane_size = media::VideoFrame::PlaneSize(format, i, size);
+      auto buffer = gfx::TizenGpuBuffer::Allocate(plane_size.GetArea(),
+                                                  true /* scanout */);
+      handle.planes.emplace_back(plane_size.width(), 0, plane_size.GetArea(),
+                                 buffer->ExportFd());
+    }
+
+    constexpr uint64_t kFormatModifierLinear = 0;
+    handle.modifier = kFormatModifierLinear;
+    gfx::GpuMemoryBufferHandle gmb_handle;
+    static int32_t buffer_id = 0;
+    gmb_handle.type = gfx::GpuMemoryBufferType::NATIVE_PIXMAP;
+    gmb_handle.native_pixmap_handle = std::move(handle);
+    gmb_handle.id = gfx::GpuMemoryBufferId(buffer_id++);
+    return reusable_buffers_.emplace(reusable_buffers_.end(),
+                                     base::MakeRefCounted<AiVideoFrameBuffer>(
+                                         size, format, std::move(gmb_handle)));
+  }
 }
 
 void AiVideoFrameBufferPool::FlushPool(gfx::Size new_size) {
@@ -180,64 +182,70 @@ void AiVideoFrameBufferPool::FlushPool(gfx::Size new_size) {
   }
 }
 
-void AiVideoFrameBuffer::PacketDeleter::operator()(media_packet_h packet) {
-  media_packet_destroy(packet);
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+                                       media::VideoPixelFormat format)
+    : size_(size), format_(format) {}
+
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+                                       media::VideoPixelFormat format,
+                                       gfx::GpuMemoryBufferHandle gmb_handle)
+    : AiVideoFrameBuffer(size, format) {
+  if (gmb_handle.is_null()) {
+    AI_LOG(ERROR) << "Gmb buffer is null.";
+    return;
+  }
+
+  auto surface =
+      gfx::TbmSurface::ImportTbmSurface(gmb_handle.native_pixmap_handle, size);
+  if (!surface) {
+    AI_LOG(ERROR) << "Get tbm surface failed.";
+    return;
+  }
+
+  media_packet_h packet{};
+  CheckResult(CreateTbmMediaPacket(surface, size, format, packet), true, false);
+  CheckResult(tbm_surface_get_info(**surface, &tbm_surface_info_),
+              TBM_ERROR_NONE, false);
+  packet_.reset(packet);
+  tbm_surface_ = **surface;
+  gmb_handle_ = std::move(gmb_handle);
+  packet_type_ = PacketType::kSelfOwnedTbmFromGpuHandle;
 }
 
 AiVideoFrameBuffer::AiVideoFrameBuffer(
     gfx::Size size,
     media::VideoPixelFormat format,
     scoped_refptr<media::VideoFrame> video_frame)
-    : size_(size), format_(format), is_external_memory_(video_frame) {
-  media_format_h media_format{};
-  media_format_create(&media_format);
-  media_format_set_video_mime(media_format, ToMime(format));
-  media_format_set_video_width(media_format, size.width());
-  media_format_set_video_height(media_format, size.height());
-  media_packet_h packet{};
-
-  if (!is_external_memory_) {
-    CheckReturn(media_packet_new_alloc(media_format, NULL, NULL, &packet),
-                MEDIA_PACKET_ERROR_NONE, false);
-  } else if (video_frame->HasGpuMemoryBuffer()) {
-    auto* gmb = video_frame->GetGpuMemoryBuffer();
-    if (!gmb->Map()) {
-      AI_LOG(ERROR) << "The video frame is backed by GPU buffer, but failed "
-                       "to map it.";
-      return;
-    }
-
-    gmb_ = gmb;
-    // TODO: Calculate data size by format.
-    uint64_t nv12_size = size.width() * size.height() * 1.5;
-    AI_LOG(INFO) << "size:" << gmb->GetSize().ToString()
-                 << " format:" << static_cast<int>(gmb->GetFormat())
-                 << " stride[0]:" << gmb->stride(0);
-
-    CheckReturn(
-        media_packet_new_from_external_memory(
-            media_format, gmb->memory(media::VideoFrame::kYPlane), nv12_size,
-            AiVideoFrameBuffer::OnExternalMemoryDisposed, this, &packet),
-        MEDIA_PACKET_ERROR_NONE, false);
+    : AiVideoFrameBuffer(size, format) {
+  if (!video_frame) {
+    AI_LOG(ERROR) << "Video frame is null.";
+    return;
   }
 
+  media_packet_h packet{};
+  CheckResult(CreateMemoryMappedMediaPacket(video_frame, size, format, packet),
+              true, false);
   packet_.reset(packet);
-  media_format_unref(media_format);
+  mapped_video_frame_holder_ = std::move(video_frame);
+  packet_type_ = PacketType::kMappedFromVideoFrame;
+}
 
-  if (!is_external_memory_) {
-    CheckReturn(media_packet_get_tbm_surface(packet_.get(), &tbm_surface_),
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+                                       media::VideoPixelFormat format,
+                                       bool need_tbm)
+    : AiVideoFrameBuffer(size, format) {
+  media_packet_h packet{};
+  CheckResult(CreateNewMediaPacket(size, format, packet), true, false);
+  if (need_tbm) {
+    CheckResult(media_packet_get_tbm_surface(packet, &tbm_surface_),
                 MEDIA_PACKET_ERROR_NONE, false);
-    CheckReturn(tbm_surface_get_info(tbm_surface_, &tbm_surface_info_),
+    CheckResult(tbm_surface_get_info(tbm_surface_, &tbm_surface_info_),
                 TBM_ERROR_NONE, false);
-    AI_LOG(INFO) << "Get surface info, size:" << tbm_surface_info_.width << "x"
-                 << tbm_surface_info_.height;
-  }
-}
-
-AiVideoFrameBuffer::~AiVideoFrameBuffer() {
-  if (gmb_) {
-    gmb_->Unmap();
+    packet_type_ = PacketType::kSelfOwnedTbm;
+  } else {
+    packet_type_ = PacketType::kSelfOwnedRaw;
   }
+  packet_.reset(packet);
 }
 
 void AiVideoFrameBuffer::OnExternalMemoryDisposed(media_packet_h packet,
@@ -248,8 +256,7 @@ void AiVideoFrameBuffer::OnExternalMemoryDisposed(media_packet_h packet,
 bool AiVideoFrameBuffer::CopyFrom(
     scoped_refptr<media::VideoFrame> video_frame) {
   TRACE_EVENT0("webai", "AiVideoFrameBuffer::CopyFrom");
-  if (is_external_memory_ || !tbm_surface_ || tbm_surface_info_.width == 0 ||
-      tbm_surface_info_.height == 0) {
+  if (packet_type_ != PacketType::kSelfOwnedTbm) {
     AI_LOG(ERROR) << "Invalided option.";
     return false;
   }
@@ -289,10 +296,10 @@ bool AiVideoFrameBuffer::CopyFrom(
         uv_stride = gmb->stride(media::VideoFrame::kUVPlane);
         const gfx::Point left_top{video_frame->visible_rect().x(),
                                   video_frame->visible_rect().y()};
-        y_addr =
-            GetGpuMemoryVisibleData(gmb, left_top, media::VideoFrame::kYPlane);
-        uv_addr =
-            GetGpuMemoryVisibleData(gmb, left_top, media::VideoFrame::kUVPlane);
+        y_addr = GetGpuMemoryVisibleData(gmb, left_top, video_frame->format(),
+                                         media::VideoFrame::kYPlane);
+        uv_addr = GetGpuMemoryVisibleData(gmb, left_top, video_frame->format(),
+                                          media::VideoFrame::kUVPlane);
       } else {
         y_addr = video_frame->visible_data(media::VideoFrame::kYPlane);
         uv_addr = video_frame->visible_data(media::VideoFrame::kUVPlane);
@@ -341,16 +348,4 @@ gfx::TbmBufferHandle AiVideoFrameBuffer::GenerateTbmHandleFromBuffer() {
   return CreateTbmBufferHandle(tbm_surface_, tbm_surface_info_);
 }
 
-const uint8_t* AiVideoFrameBuffer::GetGpuMemoryVisibleData(
-    gfx::GpuMemoryBuffer* gmb,
-    gfx::Point left_top,
-    size_t plane) const {
-  const gfx::Size subsample = media::VideoFrame::SampleSize(format(), plane);
-  auto stride = gmb->stride(plane);
-  return static_cast<const uint8_t*>(gmb->memory(plane)) +
-         stride * (left_top.y() / subsample.height()) +
-         media::VideoFrame::BytesPerElement(format(), plane) *
-             (left_top.x() / subsample.width());
-}
-
 }  // namespace content
index dd681a723a5ef2cead0c59e311763a3b4429f402..e7585c15d778f21fa87405af4a3cc7b914009c84 100644 (file)
 #include "third_party/abseil-cpp/absl/types/optional.h"
 #include "ui/gfx/geometry/point.h"
 #include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
 #include "ui/gfx/tbm_buffer_handle.h"
+#include "webai/content/video_processor_helpers.h"
 
 #include <aifw_api/vision/xr_adapter.h>
+#include <media_packet.h>
 
 namespace media {
 class VideoFrame;
@@ -29,14 +32,26 @@ class GpuMemoryBuffer;
 
 namespace content {
 
-class AiVideoFrameBuffer {
+// Origin/ownership of the memory backing a buffer's media packet.
+enum class PacketType {
+  kNone,
+  // Raw CPU memory buffer, created and owned by us.
+  kSelfOwnedRaw,
+  // Tizen TBM buffer, created and owned by us.
+  kSelfOwnedTbm,
+  // Self-owned TBM buffer wrapped in a generic Chromium GPU buffer
+  // handle, more easily accepted by media-pipeline video sinks.
+  kSelfOwnedTbmFromGpuHandle,
+  // Memory not owned by us; it comes from an external video frame.
+  kMappedFromVideoFrame
+};
+
+class AiVideoFrameBuffer : public base::RefCounted<AiVideoFrameBuffer> {
  public:
   AiVideoFrameBuffer(gfx::Size size,
                      media::VideoPixelFormat format,
-                     scoped_refptr<media::VideoFrame> video_frame = nullptr);
-  ~AiVideoFrameBuffer();
-  AiVideoFrameBuffer(const AiVideoFrameBuffer&) = delete;
-  AiVideoFrameBuffer& operator=(const AiVideoFrameBuffer&) = delete;
+                     gfx::GpuMemoryBufferHandle gmb_handle);
+  AiVideoFrameBuffer(gfx::Size size,
+                     media::VideoPixelFormat format,
+                     scoped_refptr<media::VideoFrame> video_frame);
+  AiVideoFrameBuffer(gfx::Size size,
+                     media::VideoPixelFormat format,
+                     bool need_tbm);
+  virtual ~AiVideoFrameBuffer() = default;
 
   static void OnExternalMemoryDisposed(media_packet_h packet, void* userdata);
   bool CopyFrom(scoped_refptr<media::VideoFrame> video_frame);
@@ -56,30 +71,41 @@ class AiVideoFrameBuffer {
   size_t stride(size_t plane) const {
     return tbm_surface_info_.planes[plane].stride;
   }
-  bool is_external_memory() const { return is_external_memory_; }
+  // True when the packet memory is not owned by this buffer (not one of
+  // the self-owned packet types).
+  // NOTE(review): PacketType::kNone also reports "external" here --
+  // confirm that is intended for not-yet-initialized buffers.
+  bool is_external() const {
+    return packet_type_ != PacketType::kSelfOwnedRaw &&
+           packet_type_ != PacketType::kSelfOwnedTbm &&
+           packet_type_ != PacketType::kSelfOwnedTbmFromGpuHandle;
+  }
   void mark_for_delete() { mark_for_delete_ = true; }
   bool is_mark_for_delete() const { return mark_for_delete_; }
+  bool is_mapped_memory_buffer() const {
+    return mapped_video_frame_holder_ != nullptr;
+  }
+  scoped_refptr<media::VideoFrame> get_mapped_video_frame() const {
+    return mapped_video_frame_holder_;
+  }
+  bool has_gpu_memory_buffer() const { return !gmb_handle_.is_null(); }
+  gfx::GpuMemoryBufferHandle clone_gpu_memory_buffer() const {
+    return gmb_handle_.Clone();
+  }
 
  private:
-  struct PacketDeleter {
-    void operator()(media_packet_h packet);
-  };
-  const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
-                                         gfx::Point left_top,
-                                         size_t plane) const;
+  AiVideoFrameBuffer(gfx::Size size, media::VideoPixelFormat format);
+  AiVideoFrameBuffer(const AiVideoFrameBuffer&) = delete;
+  AiVideoFrameBuffer& operator=(const AiVideoFrameBuffer&) = delete;
 
   const gfx::Size size_;
   const media::VideoPixelFormat format_;
   gfx::Size natural_size_;
   bool is_taken_{};
-  std::unique_ptr<std::remove_pointer<media_packet_h>::type, PacketDeleter>
-      packet_;
+  AiPacket packet_;
   tbm_surface_h tbm_surface_{};
   tbm_surface_info_s tbm_surface_info_{};
-  bool is_external_memory_;
-  gfx::GpuMemoryBuffer* gmb_{};
   base::OnceClosure input_buffer_release_cb_;
   bool mark_for_delete_{};
+  scoped_refptr<media::VideoFrame> mapped_video_frame_holder_;
+  PacketType packet_type_{};
+  gfx::GpuMemoryBufferHandle gmb_handle_{};
 };
 
 class AiVideoFrameBufferPool : public base::RefCounted<AiVideoFrameBufferPool> {
@@ -88,24 +114,19 @@ class AiVideoFrameBufferPool : public base::RefCounted<AiVideoFrameBufferPool> {
   AiVideoFrameBufferPool(const AiVideoFrameBufferPool&) = delete;
   AiVideoFrameBufferPool& operator=(const AiVideoFrameBufferPool&) = delete;
 
-  std::shared_ptr<AiVideoFrameBuffer> AcquireBuffer(
-      scoped_refptr<media::VideoFrame> video_frame,
+  scoped_refptr<AiVideoFrameBuffer> AcquireBufferAndCopyData(
+      scoped_refptr<media::VideoFrame> source_video_frame,
       media::VideoPixelFormat required_format);
-  std::shared_ptr<AiVideoFrameBuffer> AcquireReusableBuffer(
+  scoped_refptr<AiVideoFrameBuffer> AcquireReusableBuffer(
       gfx::Size required_size,
-      media::VideoPixelFormat required_format);
-  void ReturnBuffer(std::shared_ptr<AiVideoFrameBuffer> return_buffer);
-  std::shared_ptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
-  void ReleaseOnceBuffer(media_packet_h packet);
+      media::VideoPixelFormat required_format,
+      bool need_gpu_memory_buffer);
+  void ReturnBuffer(scoped_refptr<AiVideoFrameBuffer> return_buffer);
+  scoped_refptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
   base::WeakPtr<AiVideoFrameBufferPool> GetWeakPtr() {
     return weak_factory_.GetWeakPtr();
   }
 
- private:
-  std::list<std::shared_ptr<AiVideoFrameBuffer>>::iterator AppendReusableBuffer(
-      gfx::Size size,
-      media::VideoPixelFormat format);
-
   // Delete all reusable buffers smaller than new size.
   // the size of the video frame usually remains same for a period of time,
   // unless the constraints of the track are explicitly applied,
@@ -113,7 +134,13 @@ class AiVideoFrameBufferPool : public base::RefCounted<AiVideoFrameBufferPool> {
   // size buffers.
   void FlushPool(gfx::Size new_size);
 
-  std::list<std::shared_ptr<AiVideoFrameBuffer>> reusable_buffers_;
+ private:
+  std::list<scoped_refptr<AiVideoFrameBuffer>>::iterator AppendReusableBuffer(
+      gfx::Size size,
+      media::VideoPixelFormat format,
+      bool need_gpu_memory_buffer);
+
+  std::list<scoped_refptr<AiVideoFrameBuffer>> reusable_buffers_;
   absl::optional<gfx::Size> last_size_;
   SEQUENCE_CHECKER(sequence_checker_);
   base::WeakPtrFactory<AiVideoFrameBufferPool> weak_factory_{this};
diff --git a/tizen_src/chromium_impl/webai/content/tbm_helpers.cc b/tizen_src/chromium_impl/webai/content/tbm_helpers.cc
deleted file mode 100644 (file)
index 4698360..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2024 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "tizen_src/chromium_impl/webai/content/tbm_helpers.h"
-
-#include "base/time/time.h"
-#include "tizen_src/chromium_impl/ui/gfx/tbm_buffer_handle.h"
-#include "tizen_src/chromium_impl/webai/common/ai_logger.h"
-
-#include <tbm_surface.h>
-#include <tbm_surface_internal.h>
-
-namespace content {
-
-gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
-                                           tbm_surface_info_s surface_info) {
-  AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface;
-  tbm_bo bo[4];
-  int32_t key_num = 0;
-  gfx::TbmBufferHandle tbm_buffer_handle;
-
-  if (!tbm_surface ||  // !::IsTBMSurfaceQueueReady(ctx_id, 1) ||
-      TBM_SURFACE_ERROR_NONE !=
-          tbm_surface_get_info(tbm_surface, &surface_info)) {
-    LOG(ERROR) << "tbm_surface_get_info failed";
-    return tbm_buffer_handle;
-  }
-
-  key_num = tbm_surface_internal_get_num_bos(tbm_surface);
-
-  for (int i = 0; i < key_num; i++) {
-    tbm_buffer_handle.strides[i] = surface_info.planes[i].stride;
-    bo[i] = tbm_surface_internal_get_bo(tbm_surface, i);
-    tbm_buffer_handle.key[i] = tbm_bo_export(bo[i]);
-  }
-
-  tbm_buffer_handle.key_num = key_num;
-  tbm_buffer_handle.tbm_surface = reinterpret_cast<size_t>(tbm_surface);
-  tbm_buffer_handle.width = surface_info.width;
-  tbm_buffer_handle.height = surface_info.height;
-  tbm_buffer_handle.pts = base::TimeTicks::Now().ToInternalValue();
-  tbm_buffer_handle.duration = 66;
-  AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface
-               << " ts:" << tbm_buffer_handle.pts << " key_num:" << key_num;
-
-  return tbm_buffer_handle;
-}
-
-}  // namespace content
diff --git a/tizen_src/chromium_impl/webai/content/tbm_helpers.h b/tizen_src/chromium_impl/webai/content/tbm_helpers.h
deleted file mode 100644 (file)
index ecf70ed..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2024 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef WEBAI_CONTENT_TBM_HELPERS_H_
-#define WEBAI_CONTENT_TBM_HELPERS_H_
-
-#include <stdint.h>
-
-#include <tbm_surface.h>
-
-namespace gfx {
-struct TbmBufferHandle;
-}
-
-namespace content {
-
-inline bool TbmSurfaceHandleAvailable(int tbm_surface_id) {
-  return tbm_surface_id <= 0 ? false : true;
-}
-
-gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
-                                           tbm_surface_info_s surface_info);
-
-}  // namespace content
-
-#endif  // WEBAI_CONTENT_TBM_HELPERS_H_
\ No newline at end of file
diff --git a/tizen_src/chromium_impl/webai/content/video_processor_helpers.cc b/tizen_src/chromium_impl/webai/content/video_processor_helpers.cc
new file mode 100644 (file)
index 0000000..13f9458
--- /dev/null
@@ -0,0 +1,155 @@
+// Copyright 2024 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tizen_src/chromium_impl/webai/content/video_processor_helpers.h"
+
+#include "base/time/time.h"
+#include "media/base/video_frame.h"
+#include "third_party/libyuv/include/libyuv.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tbm_buffer_handle.h"
+#include "webai/common/ai_logger.h"
+
+#include <tbm_surface.h>
+#include <tbm_surface_internal.h>
+
+namespace {
+
+// Translates a Chromium video pixel format into the libyuv FourCC used
+// by the conversion routines; unsupported formats map to FOURCC_ANY.
+libyuv::FourCC ToFourcc(media::VideoPixelFormat format) {
+  if (format == media::PIXEL_FORMAT_I420)
+    return libyuv::FOURCC_I420;
+  if (format == media::PIXEL_FORMAT_NV12)
+    return libyuv::FOURCC_NV12;
+  if (format == media::PIXEL_FORMAT_RGB24)
+    return libyuv::FOURCC_24BG;
+  return libyuv::FOURCC_ANY;
+}
+
+// Translates a Chromium video pixel format into the Tizen media-format
+// mime type; unsupported formats map to MEDIA_FORMAT_MAX.
+media_format_mimetype_e ToMime(media::VideoPixelFormat format) {
+  if (format == media::PIXEL_FORMAT_I420)
+    return MEDIA_FORMAT_I420;
+  if (format == media::PIXEL_FORMAT_NV12)
+    return MEDIA_FORMAT_NV12;
+  if (format == media::PIXEL_FORMAT_RGB24)
+    return MEDIA_FORMAT_RGB888;
+  return MEDIA_FORMAT_MAX;
+}
+
+}  // namespace
+
+namespace content {
+
+// AiPacket deleter: destroys the wrapped Tizen media packet handle.
+// (std::unique_ptr never invokes the deleter with a null handle.)
+void AiPacketDeleter::operator()(media_packet_h packet) {
+  media_packet_destroy(packet);
+}
+
+gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
+                                           tbm_surface_info_s surface_info) {
+  AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface;
+  tbm_bo bo[4];
+  int32_t key_num = 0;
+  gfx::TbmBufferHandle tbm_buffer_handle;
+
+  if (!tbm_surface || TBM_SURFACE_ERROR_NONE !=
+                          tbm_surface_get_info(tbm_surface, &surface_info)) {
+    LOG(ERROR) << "tbm_surface_get_info failed";
+    return tbm_buffer_handle;
+  }
+
+  key_num = tbm_surface_internal_get_num_bos(tbm_surface);
+
+  for (int i = 0; i < key_num; i++) {
+    tbm_buffer_handle.strides[i] = surface_info.planes[i].stride;
+    bo[i] = tbm_surface_internal_get_bo(tbm_surface, i);
+    tbm_buffer_handle.key[i] = tbm_bo_export(bo[i]);
+  }
+
+  tbm_buffer_handle.key_num = key_num;
+  tbm_buffer_handle.tbm_surface = reinterpret_cast<size_t>(tbm_surface);
+  tbm_buffer_handle.width = surface_info.width;
+  tbm_buffer_handle.height = surface_info.height;
+  tbm_buffer_handle.pts = base::TimeTicks::Now().ToInternalValue();
+  tbm_buffer_handle.duration = 66;
+  AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface
+               << " ts:" << tbm_buffer_handle.pts << " key_num:" << key_num;
+
+  return tbm_buffer_handle;
+}
+
+const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
+                                       gfx::Point left_top,
+                                       media::VideoPixelFormat format,
+                                       size_t plane) {
+  const gfx::Size subsample = media::VideoFrame::SampleSize(format, plane);
+  auto stride = gmb->stride(plane);
+  return static_cast<const uint8_t*>(gmb->memory(plane)) +
+         stride * (left_top.y() / subsample.height()) +
+         media::VideoFrame::BytesPerElement(format, plane) *
+             (left_top.x() / subsample.width());
+}
+
+bool CreateTbmMediaPacket(const std::unique_ptr<gfx::TbmSurface>& surface,
+                          gfx::Size size,
+                          media::VideoPixelFormat format,
+                          media_packet_h& packet) {
+  media_format_h media_format{};
+  media_format_create(&media_format);
+  media_format_set_video_mime(media_format, ToMime(format));
+  media_format_set_video_width(media_format, size.width());
+  media_format_set_video_height(media_format, size.height());
+
+  auto result = media_packet_new_from_tbm_surface(media_format, **surface,
+                                                  nullptr, nullptr, &packet);
+  media_format_unref(media_format);
+  CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
+bool CreateNewMediaPacket(gfx::Size size,
+                          media::VideoPixelFormat format,
+                          media_packet_h& packet) {
+  media_format_h media_format{};
+  media_format_create(&media_format);
+  media_format_set_video_mime(media_format, ToMime(format));
+  media_format_set_video_width(media_format, size.width());
+  media_format_set_video_height(media_format, size.height());
+
+  auto result = media_packet_new_alloc(media_format, NULL, NULL, &packet);
+  media_format_unref(media_format);
+  CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
+bool CreateMemoryMappedMediaPacket(scoped_refptr<media::VideoFrame> frame,
+                                   gfx::Size size,
+                                   media::VideoPixelFormat format,
+                                   media_packet_h& packet) {
+  media_format_h media_format{};
+  media_format_create(&media_format);
+  media_format_set_video_mime(media_format, ToMime(format));
+  media_format_set_video_width(media_format, size.width());
+  media_format_set_video_height(media_format, size.height());
+
+  void* data = nullptr;
+  if (!frame->HasGpuMemoryBuffer()) {
+    auto* gpu_buffer = frame->GetGpuMemoryBuffer();
+    if (!gpu_buffer || !gpu_buffer->Map()) {
+      AI_LOG(ERROR) << "failed to get or map gpu buffer.";
+      return false;
+    }
+    data = gpu_buffer->memory(media::VideoFrame::kYPlane);
+  }
+
+  auto result = media_packet_new_from_external_memory(
+      media_format, data, media::VideoFrame::AllocationSize(format, size),
+      nullptr, nullptr, &packet);
+  media_format_unref(media_format);
+  CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
+}  // namespace content
diff --git a/tizen_src/chromium_impl/webai/content/video_processor_helpers.h b/tizen_src/chromium_impl/webai/content/video_processor_helpers.h
new file mode 100644 (file)
index 0000000..82a9390
--- /dev/null
@@ -0,0 +1,63 @@
+// Copyright 2024 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
+#define WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <type_traits>
+
+#include "base/files/scoped_file.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "ui/gfx/geometry/point.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tbm_buffer_handle.h"
+#include "ui/gfx/tbm_surface.h"
+
+#include <media_packet.h>
+#include <tbm_surface.h>
+
+namespace gfx {
+class GpuMemoryBuffer;
+}
+
+namespace content {
+
+// Deleter used by AiPacket to destroy a Tizen media packet handle.
+struct AiPacketDeleter {
+  void operator()(media_packet_h packet);
+};
+
+// Owning smart pointer for Tizen media packets (RAII over media_packet_h).
+using AiPacket =
+    std::unique_ptr<std::remove_pointer<media_packet_h>::type, AiPacketDeleter>;
+
+// True when an exported TBM bo key is valid (keys are strictly positive).
+inline bool TbmSurfaceHandleAvailable(int tbm_surface_id) {
+  return tbm_surface_id > 0;
+}
+
+// Exports |tbm_surface|'s buffer objects into a cross-process handle;
+// |surface_info| is refreshed from the surface before use.
+gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
+                                           tbm_surface_info_s surface_info);
+
+// Returns a pointer to the first visible pixel of |plane| inside the
+// mapped GPU memory buffer, given the visible rect's top-left corner.
+const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
+                                       gfx::Point left_top,
+                                       media::VideoPixelFormat format,
+                                       size_t plane);
+
+// Wraps an existing TBM surface into a media packet without copying.
+bool CreateTbmMediaPacket(const std::unique_ptr<gfx::TbmSurface>& surface,
+                          gfx::Size size,
+                          media::VideoPixelFormat format,
+                          media_packet_h& packet);
+
+// Allocates a brand-new, self-owned media packet for |size|/|format|.
+bool CreateNewMediaPacket(gfx::Size size,
+                          media::VideoPixelFormat format,
+                          media_packet_h& packet);
+
+// Wraps the memory backing |frame| into a media packet without copying.
+bool CreateMemoryMappedMediaPacket(scoped_refptr<media::VideoFrame> frame,
+                                   gfx::Size size,
+                                   media::VideoPixelFormat format,
+                                   media_packet_h& packet);
+
+}  // namespace content
+
+#endif  // WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
index 56f0e3089f236e94150456dc753c9b208d5a44d6..ed280700490b9a4b677571687a0b1d108ca9f3eb 100644 (file)
@@ -70,10 +70,10 @@ if (tizen_ai) {
     "//tizen_src/chromium_impl/webai/content/ai_processor_video_impl.h",
     "//tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.cc",
     "//tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.h",
-    "//tizen_src/chromium_impl/webai/content/tbm_helpers.cc",
-    "//tizen_src/chromium_impl/webai/content/tbm_helpers.h",
     "//tizen_src/chromium_impl/webai/content/mojo_broker_impl.cc",
     "//tizen_src/chromium_impl/webai/content/mojo_broker_impl.h",
+    "//tizen_src/chromium_impl/webai/content/video_processor_helpers.cc",
+    "//tizen_src/chromium_impl/webai/content/video_processor_helpers.h",
   ]
 
   tizen_ai_content_browser_configs += [