#include "tizen_src/chromium_impl/webai/blink/ai_processor_video.h"
#include "base/trace_event/trace_event.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/base/video_frame.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/bindings/pending_associated_remote.h"
+#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/bindings/core/v8/v8_binding_for_core.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_ai_error_callback.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_video_frame_output_callback.h"
: AiBase(script_state, frame) {
TRACE_EVENT0("webai", "AiProcessorVideo::AiProcessorVideo");
receiver_.Bind(std::move(pending_client_receiver_));
+
+ gpu_factories_ = Platform::Current()->GetGpuFactories();
+ sii_ = gpu_factories_->SharedImageInterface();
+ if (!sii_) {
+ AI_LOG(INFO) << "Get shared Image Interface failed.";
+ }
}
AiProcessorVideo::~AiProcessorVideo() = default;
return;
}
+void AiProcessorVideo::OnProcessOutputGpuMemoryBuffer(
+ gfx::GpuMemoryBufferHandle gmb_handle,
+ const gfx::Size& size,
+ gfx::BufferFormat format,
+ uint64_t output_packet) {
+ TRACE_EVENT0("webai", "AiProcessorVideo::OnProcessOutputGpuMemoryBuffer");
+ CHECK_SCRIPT_STATE();
+
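+ // Reconstruct a GpuMemoryBuffer from the handle received over mojo. Only
+ // NV12 (YUV_420_BIPLANAR) output is accepted; anything else releases the
+ // packet and bails out.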
+ gpu::GpuMemoryBufferSupport support;
+ auto gmb = support.CreateGpuMemoryBufferImplFromHandle(
+ std::move(gmb_handle), size, format,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ base::NullCallback());
+ if (!gmb || !sii_ || format != gfx::BufferFormat::YUV_420_BIPLANAR) {
+ AI_LOG(ERROR) << "Failed to create gpu memory buffer video frame.";
+ ReleaseBuffer(output_packet);
+ return;
+ }
+
+ // TODO: Reuse shared images across frames instead of recreating them.
+ std::vector<gfx::BufferPlane> planes;
+ planes.push_back(gfx::BufferPlane::Y);
+ planes.push_back(gfx::BufferPlane::UV);
+ constexpr uint32_t usage =
+ gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_DISPLAY_READ | gpu::SHARED_IMAGE_USAGE_SCANOUT;
+ gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes] = {};
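+ // Create one shared image per plane (Y and UV) and record its mailbox so
+ // the frame can be sampled as external textures by raster and compositing.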
+ for (size_t plane = 0; plane < planes.size(); ++plane) {
+ auto mail_box = sii_->CreateSharedImage(
+ gmb.get(), gpu_factories_->GpuMemoryBufferManager(), planes[plane],
+ gfx::ColorSpace{}, kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, usage,
+ "VideoProcessorFrameBuffer");
+ const gpu::SyncToken sync_token = sii_->GenVerifiedSyncToken();
+ mailbox_holders[plane] =
+ gpu::MailboxHolder(mail_box, sync_token, GL_TEXTURE_EXTERNAL_OES);
+ }
+
+ auto frame = media::VideoFrame::WrapExternalGpuMemoryBuffer(
+ gfx::Rect{0, 0, size.width(), size.height()}, size, std::move(gmb),
+ mailbox_holders, base::NullCallback(),
+ base::TimeTicks::Now() - base::TimeTicks());
+ if (!frame) {
+ AI_LOG(INFO) << "Wrap gpu memory video frame failed.";
+ ReleaseBuffer(output_packet);
+ return;
+ }
+
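+ // Once the wrapped frame is destroyed, return the packet to the processor
+ // and destroy the per-plane shared images created above.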
+ frame->AddDestructionObserver(base::BindPostTaskToCurrentDefault(
+ WTF::BindOnce(&AiProcessorVideo::ReleaseBuffer, WrapWeakPersistent(this),
+ output_packet)));
+ for (size_t plane = 0; plane < planes.size(); ++plane) {
+ frame->AddDestructionObserver(base::BindPostTaskToCurrentDefault(
+ WTF::BindOnce(&gpu::SharedImageInterface::DestroySharedImage,
+ base::Unretained(sii_), mailbox_holders[plane].sync_token,
+ mailbox_holders[plane].mailbox)));
+ }
+
+ frame->set_shared_image_format_type(
+ media::SharedImageFormatType::kSharedImageFormatExternalSampler);
+
+ AI_LOG(INFO) << frame->AsHumanReadableString();
+ auto* blink_frame = MakeGarbageCollected<VideoFrame>(
+ std::move(frame), ExecutionContext::From(script_state_));
+ std::ignore = output_callback_->Invoke(nullptr, blink_frame);
+}
+
void AiProcessorVideo::OnProcessOutputTbm(
gfx::TbmBufferHandleInterProcess tbm_buffer_handle_process,
const gfx::Size& natural_size) {
TRACE_EVENT0("webai", "AiProcessorVideo::OnProcessOutputTbm");
- AI_LOG(INFO) << __func__;
CHECK_SCRIPT_STATE();
if (!output_callback_) {
void AiProcessorVideo::processVideo(VideoFrame* data,
ExceptionState& exception_state) {
TRACE_EVENT0("webai", "AiProcessorVideo::processVideo");
- AI_LOG(INFO) << __func__;
auto media_frame = data->frame();
if (!media_frame) {
AI_LOG(INFO) << "There is no media frame.";
return;
}
+ AI_LOG(INFO) << media_frame->AsHumanReadableString();
remote_->ProcessVideo(media_frame);
}
#include <string.h>
+#include "gpu/command_buffer/client/shared_image_interface.h"
#include "media/mojo/mojom/media_types.mojom-forward.h"
+#include "media/video/gpu_video_accelerator_factories.h"
#include "mojo/public/cpp/bindings/associated_receiver.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_property.h"
// blink::mojom::AiProcessorClient implementation
void OnMessage() override;
- void OnProcessOutputVideoFrame(
- const scoped_refptr<media::VideoFrame>& output) override{};
+ void OnProcessOutputGpuMemoryBuffer(gfx::GpuMemoryBufferHandle gmb_handle,
+ const gfx::Size& size,
+ gfx::BufferFormat format,
+ uint64_t output_packet) override;
void OnProcessOutputTbm(
gfx::TbmBufferHandleInterProcess tbm_buffer_handle_process,
const gfx::Size& natural_size) override;
mojo::AssociatedReceiver<mojom::AiProcessorClient> receiver_{this};
AiConfiguration cfg_{};
+ raw_ptr<media::GpuVideoAcceleratorFactories, ExperimentalRenderer>
+ gpu_factories_{};
+ raw_ptr<gpu::SharedImageInterface, ExperimentalRenderer> sii_{};
};
} // namespace blink
// Interface for processed video result
interface AiProcessorClient {
OnMessage();
- OnProcessOutputVideoFrame(media.mojom.VideoFrame video_frame);
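+ // Delivers a processed frame backed by a GpuMemoryBuffer. |output_packet|
+ // identifies the underlying media packet and must be released back to the
+ // processor once the frame is no longer in use.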
+ OnProcessOutputGpuMemoryBuffer(gfx.mojom.GpuMemoryBufferHandle gmb_handle, gfx.mojom.Size size, gfx.mojom.BufferFormat format, uint64 output_packet);
OnProcessOutputLandmark(array<array<double>> result);
OnProcessOutputTbm(gfx.mojom.TbmBufferHandleInterProcess tbm_buffer_handle_process, gfx.mojom.Size natural_size);
};
#define AI_LOG(severity) LOG(severity) << "[WebAI] "
-#define CheckReturn(ret_val, expected_val, ignore_error) \
- do { \
- if (ret_val != expected_val) { \
- AI_LOG(ERROR) << "error value: " << ret_val; \
- if (!ignore_error) { \
- return; \
- } \
- } \
+#define CheckResult(result, expected_result, ignore_error) \
+ do { \
+ if (result != expected_result) { \
+ AI_LOG(ERROR) << "error value: " << result; \
+ if (!ignore_error) { \
+ return; \
+ } \
+ } \
} while (0)
-#define DROP_FRAME_DEBUG
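+// Like CheckResult, but for functions that return a value: returns
+// |false_ret| when |result| differs from |expected_result|, |true_ret|
+// otherwise.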
+#define CheckResultAndReturn(result, expected_result, true_ret, false_ret) \
+ do { \
+ if (result != expected_result) { \
+ AI_LOG(ERROR) << "error value: " << result; \
+ return false_ret; \
+ } \
+ return true_ret; \
+ } while (0)
#endif // WEBAI_CONTENT_AI_LOGGER_H_
\ No newline at end of file
#include "webai/content/ai_processor_video_impl.h"
#include "base/json/json_writer.h"
+#include "media/base/format_utils.h"
#include "tizen_src/chromium_impl/webai/common/ai_logger.h"
namespace {
namespace content {
void AiProcessorVideoImpl::AdapterDeleter::operator()(xr_adapter_h adapter) {
- CheckReturn(xr_adapter_destroy(adapter), AIFW_RESULT_SUCCESS, false);
+ CheckResult(xr_adapter_destroy(adapter), AIFW_RESULT_SUCCESS, false);
}
AiProcessorVideoImpl::AiProcessorVideoImpl(
: AiImplBase<AiProcessorVideoImplTraits>(std::move(receiver)) {
TRACE_EVENT0("webai", "AiProcessorVideoImpl::AiProcessorVideoImpl");
task_runner_ = base::SequencedTaskRunner::GetCurrentDefault();
- buffer_pool_ = base::MakeRefCounted<AiVideoFrameBufferPool>();
xr_adapter_h handle{};
- CheckReturn(xr_adapter_create(&handle), AIFW_RESULT_SUCCESS, false);
+ CheckResult(xr_adapter_create(&handle), AIFW_RESULT_SUCCESS, false);
adapter_.reset(handle);
}
AI_LOG(INFO) << __func__;
}
-void ReleaseInputBuffer(std::shared_ptr<AiVideoFrameBuffer> buffer) {
- AI_LOG(INFO) << __func__;
- // TODO (peng8.yin): Set external memory release here.
-}
-
void AiProcessorVideoImpl::ProcessVideo(
const scoped_refptr<media::VideoFrame>& video_frame) {
- AI_LOG(INFO) << video_frame->AsHumanReadableString();
TRACE_EVENT0("webai", "AiProcessorVideoImpl::ProcessVideo");
+
if (!is_ready_) {
AI_LOG(ERROR) << "Configurate it first.";
return;
return;
}
- auto input_buffer = buffer_pool_->AcquireBuffer(video_frame, required_format);
+ auto input_buffer =
+ buffer_pool_->AcquireBufferAndCopyData(video_frame, required_format);
if (!input_buffer) {
AI_LOG(ERROR) << "Acquire buffer for input failed.";
return;
}
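+ // The output buffer is GPU-memory backed so the processed frame can be
+ // returned to the renderer as a GpuMemoryBufferHandle without an extra copy.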
auto output_buffer = buffer_pool_->AcquireReusableBuffer(
- video_frame->visible_rect().size(), required_format);
+ video_frame->visible_rect().size(), required_format,
+ true /* require gpu memory */);
if (!output_buffer) {
AI_LOG(ERROR) << "Acquire buffer for out failed.";
- buffer_pool_->ReturnBuffer(input_buffer);
+ if (!input_buffer->is_external()) {
+ buffer_pool_->ReturnBuffer(input_buffer);
+ }
return;
}
- output_buffer->set_natural_size(video_frame->natural_size());
- if (input_buffer->is_external_memory()) {
- output_buffer->SetInputBufferReleaseCb(
- base::BindOnce(&ReleaseInputBuffer, input_buffer));
- } else {
+ output_buffer->set_natural_size(video_frame->natural_size());
+ if (!input_buffer->is_external()) {
output_buffer->SetInputBufferReleaseCb(
base::BindOnce(&AiVideoFrameBufferPool::ReturnBuffer,
buffer_pool_->GetWeakPtr(), input_buffer));
}
TRACE_EVENT_BEGIN0("webai", "xr_adapter_submit_packet");
- CheckReturn(xr_adapter_submit_packet(adapter_.get(), input_buffer->packet(),
+ CheckResult(xr_adapter_submit_packet(adapter_.get(), input_buffer->packet(),
output_buffer->packet()),
AIFW_RESULT_SUCCESS, false);
TRACE_EVENT_END0("webai", "xr_adapter_submit_packet");
}
void AiProcessorVideoImpl::ReleaseBuffer(uint64_t packet) {
- AI_LOG(INFO) << __func__ << " packet:" << packet;
auto buffer = FindReusableBuffer(reinterpret_cast<media_packet_h>(packet));
if (!buffer) {
AI_LOG(ERROR) << "Unexpedted packet:" << packet;
{
TRACE_EVENT0("webai", "xr_adapter_set_frame_ready_cb");
- CheckReturn(
+ CheckResult(
xr_adapter_set_frame_ready_cb(adapter_.get(), XrFrameReadyCb, this),
AIFW_RESULT_SUCCESS, false);
}
- CheckReturn(CreateJsonConfiguration(configure), true, false);
+ CheckResult(CreateJsonConfiguration(configure), true, false);
{
TRACE_EVENT0("webai", "xr_adapter_init");
- CheckReturn(xr_adapter_init(adapter_.get(), config_json_.value().c_str(),
+ CheckResult(xr_adapter_init(adapter_.get(), config_json_.value().c_str(),
config_json_.value().length()),
AIFW_RESULT_SUCCESS, false);
}
cfg_ = configure;
+ buffer_pool_ = base::MakeRefCounted<AiVideoFrameBufferPool>();
is_ready_ = true;
}
void AiProcessorVideoImpl::Stop() {
TRACE_EVENT0("webai", "AiProcessorVideoImpl::Start");
AI_LOG(INFO) << __func__;
- CheckReturn(xr_adapter_deinit(adapter_.get()), AIFW_RESULT_SUCCESS, false);
+ CheckResult(xr_adapter_deinit(adapter_.get()), AIFW_RESULT_SUCCESS, true);
+
+ // Release all buffers.
+ constexpr gfx::Size max_size{4096, 4096};
+ buffer_pool_->FlushPool(max_size);
}
-std::shared_ptr<AiVideoFrameBuffer> AiProcessorVideoImpl::FindReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiProcessorVideoImpl::FindReusableBuffer(
media_packet_h packet) {
return buffer_pool_->FindReusableBuffer(packet);
}
void AiProcessorVideoImpl::OnProcessOutput(media_packet_h output_packet,
xr_adapter_error_type_e error) {
TRACE_EVENT0("webai", "AiProcessorVideoImpl::OnProcessOutput");
+
auto output_buffer = FindReusableBuffer(output_packet);
if (!output_buffer) {
AI_LOG(ERROR) << "Unexpedted packet:" << output_buffer;
return;
}
- // Generate Tbm handle from output buffer
- auto tbm_buffer_handle = output_buffer->GenerateTbmHandleFromBuffer();
- tbm_buffer_handle.media_packet = reinterpret_cast<size_t>(output_packet);
- client_->OnProcessOutputTbm(
- gfx::ToTbmBufferHandleInterProcess(tbm_buffer_handle),
- output_buffer->natural_size());
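+ // Prefer the GpuMemoryBuffer path when the output buffer owns GPU memory;
+ // fall back to the legacy TBM handle path otherwise.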
+ if (output_buffer->has_gpu_memory_buffer()) {
+ client_->OnProcessOutputGpuMemoryBuffer(
+ output_buffer->clone_gpu_memory_buffer(), output_buffer->size(),
+ media::VideoPixelFormatToGfxBufferFormat(output_buffer->format())
+ .value(),
+ reinterpret_cast<size_t>(output_packet));
+ } else {
+ auto tbm_buffer_handle = output_buffer->GenerateTbmHandleFromBuffer();
+ tbm_buffer_handle.media_packet = reinterpret_cast<size_t>(output_packet);
+ client_->OnProcessOutputTbm(
+ gfx::ToTbmBufferHandleInterProcess(tbm_buffer_handle),
+ output_buffer->natural_size());
+ }
}
bool AiProcessorVideoImpl::CreateJsonConfiguration(
void ReleaseBuffer(uint64_t packet) override;
void Start(const blink::AiConfiguration& configure) override;
void Stop() override;
-
- std::shared_ptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
+ scoped_refptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
void OnProcessOutput(media_packet_h output_packet,
xr_adapter_error_type_e error);
auto get_runner() const { return task_runner_; }
#include "base/task/bind_post_task.h"
#include "base/trace_event/trace_event.h"
+#include "gpu/ipc/common/gpu_memory_buffer_support.h"
+#include "media/base/format_utils.h"
#include "media/base/video_frame.h"
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tizen_gpu_buffer.h"
#include "webai/common/ai_logger.h"
-#include "webai/content/tbm_helpers.h"
namespace content {
namespace {
-constexpr uint32_t kMaxPoolSize = 8;
-
-libyuv::FourCC ToFourcc(media::VideoPixelFormat format) {
- switch (format) {
- case media::PIXEL_FORMAT_I420:
- return libyuv::FOURCC_I420;
- case media::PIXEL_FORMAT_NV12:
- return libyuv::FOURCC_NV12;
- case media::PIXEL_FORMAT_RGB24:
- return libyuv::FOURCC_24BG;
- default:
- return libyuv::FOURCC_ANY;
- }
-}
-
-media_format_mimetype_e ToMime(media::VideoPixelFormat format) {
- switch (format) {
- case media::PIXEL_FORMAT_I420:
- return MEDIA_FORMAT_I420;
- case media::PIXEL_FORMAT_NV12:
- return MEDIA_FORMAT_NV12;
- case media::PIXEL_FORMAT_RGB24:
- return MEDIA_FORMAT_RGB888;
- default:
- return MEDIA_FORMAT_MAX;
- }
-}
+constexpr uint32_t kMaxPoolSize = 6;
} // namespace
DETACH_FROM_SEQUENCE(sequence_checker_);
}
-std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireBuffer(
- scoped_refptr<media::VideoFrame> video_frame,
+scoped_refptr<AiVideoFrameBuffer>
+AiVideoFrameBufferPool::AcquireBufferAndCopyData(
+ scoped_refptr<media::VideoFrame> source_video_frame,
media::VideoPixelFormat required_format) {
// TODO (peng8.yin): XRFW only supports RGB24 & RGB32 for non TBM surface
// input format, see `xrfw_effect_blur_set_input_buffer` and
// as video frame format now.
//
// if (required_format == video_frame->format()) {
- // return std::make_shared<AiVideoFrameBuffer>(
- // video_frame->visible_rect().size(), required_format, video_frame);
+ // return base::MakeRefCounted<AiVideoFrameBuffer>(
+ // video_frame->visible_rect().size(), required_format, true,
+ // video_frame);
// }
- auto buffer = AcquireReusableBuffer(video_frame->visible_rect().size(),
- required_format);
+ auto buffer = AcquireReusableBuffer(source_video_frame->visible_rect().size(),
+ required_format, false);
if (!buffer) {
AI_LOG(ERROR) << "Acquire reusable buffer failed.";
return nullptr;
}
- if (!buffer->CopyFrom(video_frame)) {
+ if (!buffer->CopyFrom(source_video_frame)) {
AI_LOG(ERROR) << "Copy video frame into buffer failed.";
ReturnBuffer(std::move(buffer));
return nullptr;
return buffer;
}
-std::shared_ptr<AiVideoFrameBuffer>
-AiVideoFrameBufferPool::AcquireReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::AcquireReusableBuffer(
gfx::Size required_size,
- media::VideoPixelFormat required_format) {
+ media::VideoPixelFormat required_format,
+ bool need_gpu_memory_buffer) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (last_size_.value_or(required_size) != required_size) {
- AI_LOG(INFO) << "required size is changed, flush buffer pool.";
+ AI_LOG(INFO) << "Required size is changed, flush buffer pool.";
FlushPool(required_size);
}
- auto it =
- std::find_if(reusable_buffers_.begin(), reusable_buffers_.end(),
- [required_size, required_format](
- const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
- return !buffer->taken() && !buffer->is_mark_for_delete() &&
- buffer->size().width() >= required_size.width() &&
- buffer->size().height() >= required_size.height() &&
- buffer->format() == required_format;
- });
-
-#if defined(DROP_FRAME_DEBUG)
- static int total = 0;
- static int drop = 0;
- total++;
-#endif
+ auto it = std::find_if(
+ reusable_buffers_.begin(), reusable_buffers_.end(),
+ [required_size, required_format, need_gpu_memory_buffer](
+ const scoped_refptr<AiVideoFrameBuffer>& buffer) {
+ return !buffer->taken() && !buffer->is_mark_for_delete() &&
+ buffer->size().width() >= required_size.width() &&
+ buffer->size().height() >= required_size.height() &&
+ buffer->format() == required_format &&
+ buffer->has_gpu_memory_buffer() == need_gpu_memory_buffer;
+ });
if (it == reusable_buffers_.end()) {
if (reusable_buffers_.size() >= kMaxPoolSize) {
- AI_LOG(ERROR) << "Reach max pool size, drop frame.";
-#if defined(DROP_FRAME_DEBUG)
- drop++;
- AI_LOG(ERROR) << " drop:" << drop << " / total:" << total;
-#endif
+ AI_LOG(WARNING) << "Reach max pool size, drop frame.";
return nullptr;
}
- it = AppendReusableBuffer(required_size, required_format);
+ it = AppendReusableBuffer(required_size, required_format,
+ need_gpu_memory_buffer);
AI_LOG(INFO) << "New buffer added, size:" << required_size.ToString()
<< " format:" << required_format;
}
+ if (it == reusable_buffers_.end()) {
+ AI_LOG(WARNING) << "Acquire buffer failed, drop frame.";
+ return nullptr;
+ }
+
(*it)->set_taken(true);
last_size_ = required_size;
- AI_LOG(INFO) << " media_packet:" << (*it)->packet();
return *it;
}
void AiVideoFrameBufferPool::ReturnBuffer(
- std::shared_ptr<AiVideoFrameBuffer> return_buffer) {
+ scoped_refptr<AiVideoFrameBuffer> return_buffer) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
return_buffer->set_taken(false);
if (return_buffer->is_mark_for_delete()) {
AI_LOG(INFO) << "Delete buffer.";
reusable_buffers_.remove_if(
- [return_buffer](const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
+ [return_buffer](const scoped_refptr<AiVideoFrameBuffer>& buffer) {
return return_buffer == buffer;
});
}
}
-std::shared_ptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::FindReusableBuffer(
+scoped_refptr<AiVideoFrameBuffer> AiVideoFrameBufferPool::FindReusableBuffer(
media_packet_h packet) {
auto it =
std::find_if(reusable_buffers_.begin(), reusable_buffers_.end(),
- [packet](const std::shared_ptr<AiVideoFrameBuffer>& buffer) {
+ [packet](const scoped_refptr<AiVideoFrameBuffer>& buffer) {
return buffer->packet() == packet;
});
if (it == reusable_buffers_.end()) {
return *it;
}
-std::list<std::shared_ptr<AiVideoFrameBuffer>>::iterator
+std::list<scoped_refptr<AiVideoFrameBuffer>>::iterator
AiVideoFrameBufferPool::AppendReusableBuffer(gfx::Size size,
- media::VideoPixelFormat format) {
+ media::VideoPixelFormat format,
+ bool need_gpu_memory_buffer) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- return reusable_buffers_.emplace(
- reusable_buffers_.end(),
- std::make_shared<AiVideoFrameBuffer>(size, format));
+ if (format != media::PIXEL_FORMAT_NV12) {
+ AI_LOG(ERROR) << "Unsupported format:" << format;
+ return reusable_buffers_.end();
+ }
+
+ if (!need_gpu_memory_buffer) {
+ return reusable_buffers_.emplace(reusable_buffers_.end(),
+ base::MakeRefCounted<AiVideoFrameBuffer>(
+ size, format, true /* need tbm */));
+ } else {
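+ // Allocate one Tizen GPU buffer per plane and describe the layout through
+ // a NativePixmapHandle with a linear modifier; the handle is later imported
+ // as a TBM surface for the XR adapter and as a GpuMemoryBuffer in blink.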
+ gfx::NativePixmapHandle handle{};
+ for (size_t i = 0; i < media::VideoFrameLayout::NumPlanes(format); ++i) {
+ auto plane_size = media::VideoFrame::PlaneSize(format, i, size);
+ auto buffer = gfx::TizenGpuBuffer::Allocate(plane_size.GetArea(),
+ true /* scanout */);
+ handle.planes.emplace_back(plane_size.width(), 0, plane_size.GetArea(),
+ buffer->ExportFd());
+ }
+
+ constexpr uint64_t kFormatModifierLinear = 0;
+ handle.modifier = kFormatModifierLinear;
+ gfx::GpuMemoryBufferHandle gmb_handle;
+ static int32_t buffer_id = 0;
+ gmb_handle.type = gfx::GpuMemoryBufferType::NATIVE_PIXMAP;
+ gmb_handle.native_pixmap_handle = std::move(handle);
+ gmb_handle.id = gfx::GpuMemoryBufferId(buffer_id++);
+ return reusable_buffers_.emplace(reusable_buffers_.end(),
+ base::MakeRefCounted<AiVideoFrameBuffer>(
+ size, format, std::move(gmb_handle)));
+ }
}
void AiVideoFrameBufferPool::FlushPool(gfx::Size new_size) {
}
}
-void AiVideoFrameBuffer::PacketDeleter::operator()(media_packet_h packet) {
- media_packet_destroy(packet);
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+ media::VideoPixelFormat format)
+ : size_(size), format_(format) {}
+
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+ media::VideoPixelFormat format,
+ gfx::GpuMemoryBufferHandle gmb_handle)
+ : AiVideoFrameBuffer(size, format) {
+ if (gmb_handle.is_null()) {
+ AI_LOG(ERROR) << "Gmb buffer is null.";
+ return;
+ }
+
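+ // Import the dmabuf-backed pixmap as a TBM surface and wrap it in a media
+ // packet so the XR adapter can write its output directly into this memory.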
+ auto surface =
+ gfx::TbmSurface::ImportTbmSurface(gmb_handle.native_pixmap_handle, size);
+ if (!surface) {
+ AI_LOG(ERROR) << "Get tbm surface failed.";
+ return;
+ }
+
+ media_packet_h packet{};
+ CheckResult(CreateTbmMediaPacket(surface, size, format, packet), true, false);
+ CheckResult(tbm_surface_get_info(**surface, &tbm_surface_info_),
+ TBM_ERROR_NONE, false);
+ packet_.reset(packet);
+ tbm_surface_ = **surface;
+ gmb_handle_ = std::move(gmb_handle);
+ packet_type_ = PacketType::kSelfOwnedTbmFromGpuHandle;
}
AiVideoFrameBuffer::AiVideoFrameBuffer(
gfx::Size size,
media::VideoPixelFormat format,
scoped_refptr<media::VideoFrame> video_frame)
- : size_(size), format_(format), is_external_memory_(video_frame) {
- media_format_h media_format{};
- media_format_create(&media_format);
- media_format_set_video_mime(media_format, ToMime(format));
- media_format_set_video_width(media_format, size.width());
- media_format_set_video_height(media_format, size.height());
- media_packet_h packet{};
-
- if (!is_external_memory_) {
- CheckReturn(media_packet_new_alloc(media_format, NULL, NULL, &packet),
- MEDIA_PACKET_ERROR_NONE, false);
- } else if (video_frame->HasGpuMemoryBuffer()) {
- auto* gmb = video_frame->GetGpuMemoryBuffer();
- if (!gmb->Map()) {
- AI_LOG(ERROR) << "The video frame is backed by GPU buffer, but failed "
- "to map it.";
- return;
- }
-
- gmb_ = gmb;
- // TODO: Calculate data size by format.
- uint64_t nv12_size = size.width() * size.height() * 1.5;
- AI_LOG(INFO) << "size:" << gmb->GetSize().ToString()
- << " format:" << static_cast<int>(gmb->GetFormat())
- << " stride[0]:" << gmb->stride(0);
-
- CheckReturn(
- media_packet_new_from_external_memory(
- media_format, gmb->memory(media::VideoFrame::kYPlane), nv12_size,
- AiVideoFrameBuffer::OnExternalMemoryDisposed, this, &packet),
- MEDIA_PACKET_ERROR_NONE, false);
+ : AiVideoFrameBuffer(size, format) {
+ if (!video_frame) {
+ AI_LOG(ERROR) << "Video frame is null.";
+ return;
}
+ media_packet_h packet{};
+ CheckResult(CreateMemoryMappedMediaPacket(video_frame, size, format, packet),
+ true, false);
packet_.reset(packet);
- media_format_unref(media_format);
+ mapped_video_frame_holder_ = std::move(video_frame);
+ packet_type_ = PacketType::kMappedFromVideoFrame;
+}
- if (!is_external_memory_) {
- CheckReturn(media_packet_get_tbm_surface(packet_.get(), &tbm_surface_),
+AiVideoFrameBuffer::AiVideoFrameBuffer(gfx::Size size,
+ media::VideoPixelFormat format,
+ bool need_tbm)
+ : AiVideoFrameBuffer(size, format) {
+ media_packet_h packet{};
+ CheckResult(CreateNewMediaPacket(size, format, packet), true, false);
+ if (need_tbm) {
+ CheckResult(media_packet_get_tbm_surface(packet, &tbm_surface_),
MEDIA_PACKET_ERROR_NONE, false);
- CheckReturn(tbm_surface_get_info(tbm_surface_, &tbm_surface_info_),
+ CheckResult(tbm_surface_get_info(tbm_surface_, &tbm_surface_info_),
TBM_ERROR_NONE, false);
- AI_LOG(INFO) << "Get surface info, size:" << tbm_surface_info_.width << "x"
- << tbm_surface_info_.height;
- }
-}
-
-AiVideoFrameBuffer::~AiVideoFrameBuffer() {
- if (gmb_) {
- gmb_->Unmap();
+ packet_type_ = PacketType::kSelfOwnedTbm;
+ } else {
+ packet_type_ = PacketType::kSelfOwnedRaw;
}
+ packet_.reset(packet);
}
void AiVideoFrameBuffer::OnExternalMemoryDisposed(media_packet_h packet,
bool AiVideoFrameBuffer::CopyFrom(
scoped_refptr<media::VideoFrame> video_frame) {
TRACE_EVENT0("webai", "AiVideoFrameBuffer::CopyFrom");
- if (is_external_memory_ || !tbm_surface_ || tbm_surface_info_.width == 0 ||
- tbm_surface_info_.height == 0) {
+ if (packet_type_ != PacketType::kSelfOwnedTbm) {
AI_LOG(ERROR) << "Invalided option.";
return false;
}
uv_stride = gmb->stride(media::VideoFrame::kUVPlane);
const gfx::Point left_top{video_frame->visible_rect().x(),
video_frame->visible_rect().y()};
- y_addr =
- GetGpuMemoryVisibleData(gmb, left_top, media::VideoFrame::kYPlane);
- uv_addr =
- GetGpuMemoryVisibleData(gmb, left_top, media::VideoFrame::kUVPlane);
+ y_addr = GetGpuMemoryVisibleData(gmb, left_top, video_frame->format(),
+ media::VideoFrame::kYPlane);
+ uv_addr = GetGpuMemoryVisibleData(gmb, left_top, video_frame->format(),
+ media::VideoFrame::kUVPlane);
} else {
y_addr = video_frame->visible_data(media::VideoFrame::kYPlane);
uv_addr = video_frame->visible_data(media::VideoFrame::kUVPlane);
return CreateTbmBufferHandle(tbm_surface_, tbm_surface_info_);
}
-const uint8_t* AiVideoFrameBuffer::GetGpuMemoryVisibleData(
- gfx::GpuMemoryBuffer* gmb,
- gfx::Point left_top,
- size_t plane) const {
- const gfx::Size subsample = media::VideoFrame::SampleSize(format(), plane);
- auto stride = gmb->stride(plane);
- return static_cast<const uint8_t*>(gmb->memory(plane)) +
- stride * (left_top.y() / subsample.height()) +
- media::VideoFrame::BytesPerElement(format(), plane) *
- (left_top.x() / subsample.width());
-}
-
} // namespace content
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/gfx/geometry/point.h"
#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
#include "ui/gfx/tbm_buffer_handle.h"
+#include "webai/content/video_processor_helpers.h"
#include <aifw_api/vision/xr_adapter.h>
+#include <media_packet.h>
namespace media {
class VideoFrame;
namespace content {
-class AiVideoFrameBuffer {
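+// Describes how the underlying media packet was created and who owns the
+// backing memory:
+//  - kSelfOwnedRaw / kSelfOwnedTbm: packet allocated by the buffer itself.
+//  - kSelfOwnedTbmFromGpuHandle: packet wraps a TBM surface imported from a
+//    GpuMemoryBufferHandle allocated by the pool.
+//  - kMappedFromVideoFrame: packet wraps memory mapped from an incoming
+//    media::VideoFrame, which the buffer keeps alive.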
+enum class PacketType {
+ kNone,
+ kSelfOwnedRaw,
+ kSelfOwnedTbm,
+ kSelfOwnedTbmFromGpuHandle,
+ kMappedFromVideoFrame
+};
+
+class AiVideoFrameBuffer : public base::RefCounted<AiVideoFrameBuffer> {
public:
AiVideoFrameBuffer(gfx::Size size,
media::VideoPixelFormat format,
- scoped_refptr<media::VideoFrame> video_frame = nullptr);
- ~AiVideoFrameBuffer();
- AiVideoFrameBuffer(const AiVideoFrameBuffer&) = delete;
- AiVideoFrameBuffer& operator=(const AiVideoFrameBuffer&) = delete;
+ gfx::GpuMemoryBufferHandle gmb_handle);
+ AiVideoFrameBuffer(gfx::Size size,
+ media::VideoPixelFormat format,
+ scoped_refptr<media::VideoFrame> video_frame);
+ AiVideoFrameBuffer(gfx::Size size,
+ media::VideoPixelFormat format,
+ bool need_tbm);
+ virtual ~AiVideoFrameBuffer() = default;
static void OnExternalMemoryDisposed(media_packet_h packet, void* userdata);
bool CopyFrom(scoped_refptr<media::VideoFrame> video_frame);
size_t stride(size_t plane) const {
return tbm_surface_info_.planes[plane].stride;
}
- bool is_external_memory() const { return is_external_memory_; }
+ bool is_external() const {
+ return packet_type_ != PacketType::kSelfOwnedRaw &&
+ packet_type_ != PacketType::kSelfOwnedTbm &&
+ packet_type_ != PacketType::kSelfOwnedTbmFromGpuHandle;
+ }
void mark_for_delete() { mark_for_delete_ = true; }
bool is_mark_for_delete() const { return mark_for_delete_; }
+ bool is_mapped_memory_buffer() const {
+ return mapped_video_frame_holder_ != nullptr;
+ }
+ scoped_refptr<media::VideoFrame> get_mapped_video_frame() const {
+ return mapped_video_frame_holder_;
+ }
+ bool has_gpu_memory_buffer() const { return !gmb_handle_.is_null(); }
+ gfx::GpuMemoryBufferHandle clone_gpu_memory_buffer() const {
+ return gmb_handle_.Clone();
+ }
private:
- struct PacketDeleter {
- void operator()(media_packet_h packet);
- };
- const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
- gfx::Point left_top,
- size_t plane) const;
+ AiVideoFrameBuffer(gfx::Size size, media::VideoPixelFormat format);
+ AiVideoFrameBuffer(const AiVideoFrameBuffer&) = delete;
+ AiVideoFrameBuffer& operator=(const AiVideoFrameBuffer&) = delete;
const gfx::Size size_;
const media::VideoPixelFormat format_;
gfx::Size natural_size_;
bool is_taken_{};
- std::unique_ptr<std::remove_pointer<media_packet_h>::type, PacketDeleter>
- packet_;
+ AiPacket packet_;
tbm_surface_h tbm_surface_{};
tbm_surface_info_s tbm_surface_info_{};
- bool is_external_memory_;
- gfx::GpuMemoryBuffer* gmb_{};
base::OnceClosure input_buffer_release_cb_;
bool mark_for_delete_{};
+ scoped_refptr<media::VideoFrame> mapped_video_frame_holder_;
+ PacketType packet_type_{};
+ gfx::GpuMemoryBufferHandle gmb_handle_{};
};
class AiVideoFrameBufferPool : public base::RefCounted<AiVideoFrameBufferPool> {
AiVideoFrameBufferPool(const AiVideoFrameBufferPool&) = delete;
AiVideoFrameBufferPool& operator=(const AiVideoFrameBufferPool&) = delete;
- std::shared_ptr<AiVideoFrameBuffer> AcquireBuffer(
- scoped_refptr<media::VideoFrame> video_frame,
+ scoped_refptr<AiVideoFrameBuffer> AcquireBufferAndCopyData(
+ scoped_refptr<media::VideoFrame> source_video_frame,
media::VideoPixelFormat required_format);
- std::shared_ptr<AiVideoFrameBuffer> AcquireReusableBuffer(
+ scoped_refptr<AiVideoFrameBuffer> AcquireReusableBuffer(
gfx::Size required_size,
- media::VideoPixelFormat required_format);
- void ReturnBuffer(std::shared_ptr<AiVideoFrameBuffer> return_buffer);
- std::shared_ptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
- void ReleaseOnceBuffer(media_packet_h packet);
+ media::VideoPixelFormat required_format,
+ bool need_gpu_memory_buffer);
+ void ReturnBuffer(scoped_refptr<AiVideoFrameBuffer> return_buffer);
+ scoped_refptr<AiVideoFrameBuffer> FindReusableBuffer(media_packet_h packet);
base::WeakPtr<AiVideoFrameBufferPool> GetWeakPtr() {
return weak_factory_.GetWeakPtr();
}
- private:
- std::list<std::shared_ptr<AiVideoFrameBuffer>>::iterator AppendReusableBuffer(
- gfx::Size size,
- media::VideoPixelFormat format);
-
// Delete all reusable buffers smaller than new size.
// the size of the video frame usually remains same for a period of time,
// unless the constraints of the track are explicitly applied,
// size buffers.
void FlushPool(gfx::Size new_size);
- std::list<std::shared_ptr<AiVideoFrameBuffer>> reusable_buffers_;
+ private:
+ std::list<scoped_refptr<AiVideoFrameBuffer>>::iterator AppendReusableBuffer(
+ gfx::Size size,
+ media::VideoPixelFormat format,
+ bool need_gpu_memory_buffer);
+
+ std::list<scoped_refptr<AiVideoFrameBuffer>> reusable_buffers_;
absl::optional<gfx::Size> last_size_;
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<AiVideoFrameBufferPool> weak_factory_{this};
+++ /dev/null
-// Copyright 2024 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "tizen_src/chromium_impl/webai/content/tbm_helpers.h"
-
-#include "base/time/time.h"
-#include "tizen_src/chromium_impl/ui/gfx/tbm_buffer_handle.h"
-#include "tizen_src/chromium_impl/webai/common/ai_logger.h"
-
-#include <tbm_surface.h>
-#include <tbm_surface_internal.h>
-
-namespace content {
-
-gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
- tbm_surface_info_s surface_info) {
- AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface;
- tbm_bo bo[4];
- int32_t key_num = 0;
- gfx::TbmBufferHandle tbm_buffer_handle;
-
- if (!tbm_surface || // !::IsTBMSurfaceQueueReady(ctx_id, 1) ||
- TBM_SURFACE_ERROR_NONE !=
- tbm_surface_get_info(tbm_surface, &surface_info)) {
- LOG(ERROR) << "tbm_surface_get_info failed";
- return tbm_buffer_handle;
- }
-
- key_num = tbm_surface_internal_get_num_bos(tbm_surface);
-
- for (int i = 0; i < key_num; i++) {
- tbm_buffer_handle.strides[i] = surface_info.planes[i].stride;
- bo[i] = tbm_surface_internal_get_bo(tbm_surface, i);
- tbm_buffer_handle.key[i] = tbm_bo_export(bo[i]);
- }
-
- tbm_buffer_handle.key_num = key_num;
- tbm_buffer_handle.tbm_surface = reinterpret_cast<size_t>(tbm_surface);
- tbm_buffer_handle.width = surface_info.width;
- tbm_buffer_handle.height = surface_info.height;
- tbm_buffer_handle.pts = base::TimeTicks::Now().ToInternalValue();
- tbm_buffer_handle.duration = 66;
- AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface
- << " ts:" << tbm_buffer_handle.pts << " key_num:" << key_num;
-
- return tbm_buffer_handle;
-}
-
-} // namespace content
+++ /dev/null
-// Copyright 2024 Samsung Electronics Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef WEBAI_CONTENT_TBM_HELPERS_H_
-#define WEBAI_CONTENT_TBM_HELPERS_H_
-
-#include <stdint.h>
-
-#include <tbm_surface.h>
-
-namespace gfx {
-struct TbmBufferHandle;
-}
-
-namespace content {
-
-inline bool TbmSurfaceHandleAvailable(int tbm_surface_id) {
- return tbm_surface_id <= 0 ? false : true;
-}
-
-gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
- tbm_surface_info_s surface_info);
-
-} // namespace content
-
-#endif // WEBAI_CONTENT_TBM_HELPERS_H_
\ No newline at end of file
--- /dev/null
+// Copyright 2024 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tizen_src/chromium_impl/webai/content/video_processor_helpers.h"
+
+#include "base/time/time.h"
+#include "media/base/video_frame.h"
+#include "third_party/libyuv/include/libyuv.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tbm_buffer_handle.h"
+#include "webai/common/ai_logger.h"
+
+#include <tbm_surface.h>
+#include <tbm_surface_internal.h>
+
+namespace {
+
+libyuv::FourCC ToFourcc(media::VideoPixelFormat format) {
+ switch (format) {
+ case media::PIXEL_FORMAT_I420:
+ return libyuv::FOURCC_I420;
+ case media::PIXEL_FORMAT_NV12:
+ return libyuv::FOURCC_NV12;
+ case media::PIXEL_FORMAT_RGB24:
+ return libyuv::FOURCC_24BG;
+ default:
+ return libyuv::FOURCC_ANY;
+ }
+}
+
+media_format_mimetype_e ToMime(media::VideoPixelFormat format) {
+ switch (format) {
+ case media::PIXEL_FORMAT_I420:
+ return MEDIA_FORMAT_I420;
+ case media::PIXEL_FORMAT_NV12:
+ return MEDIA_FORMAT_NV12;
+ case media::PIXEL_FORMAT_RGB24:
+ return MEDIA_FORMAT_RGB888;
+ default:
+ return MEDIA_FORMAT_MAX;
+ }
+}
+
+} // namespace
+
+namespace content {
+
+void AiPacketDeleter::operator()(media_packet_h packet) {
+ media_packet_destroy(packet);
+}
+
+gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
+ tbm_surface_info_s surface_info) {
+ AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface;
+ tbm_bo bo[4];
+ int32_t key_num = 0;
+ gfx::TbmBufferHandle tbm_buffer_handle;
+
+ if (!tbm_surface || TBM_SURFACE_ERROR_NONE !=
+ tbm_surface_get_info(tbm_surface, &surface_info)) {
+ LOG(ERROR) << "tbm_surface_get_info failed";
+ return tbm_buffer_handle;
+ }
+
+ key_num = tbm_surface_internal_get_num_bos(tbm_surface);
+
+ for (int i = 0; i < key_num; i++) {
+ tbm_buffer_handle.strides[i] = surface_info.planes[i].stride;
+ bo[i] = tbm_surface_internal_get_bo(tbm_surface, i);
+ tbm_buffer_handle.key[i] = tbm_bo_export(bo[i]);
+ }
+
+ tbm_buffer_handle.key_num = key_num;
+ tbm_buffer_handle.tbm_surface = reinterpret_cast<size_t>(tbm_surface);
+ tbm_buffer_handle.width = surface_info.width;
+ tbm_buffer_handle.height = surface_info.height;
+ tbm_buffer_handle.pts = base::TimeTicks::Now().ToInternalValue();
+ tbm_buffer_handle.duration = 66;
+ AI_LOG(INFO) << __func__ << " tbm_surface:" << tbm_surface
+ << " ts:" << tbm_buffer_handle.pts << " key_num:" << key_num;
+
+ return tbm_buffer_handle;
+}
+
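+// Returns a pointer to the first byte of the visible rect of |plane| inside a
+// mapped GpuMemoryBuffer, taking plane subsampling into account.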
+const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
+ gfx::Point left_top,
+ media::VideoPixelFormat format,
+ size_t plane) {
+ const gfx::Size subsample = media::VideoFrame::SampleSize(format, plane);
+ auto stride = gmb->stride(plane);
+ return static_cast<const uint8_t*>(gmb->memory(plane)) +
+ stride * (left_top.y() / subsample.height()) +
+ media::VideoFrame::BytesPerElement(format, plane) *
+ (left_top.x() / subsample.width());
+}
+
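+// Wraps an imported TBM surface in a media packet described by |size| and
+// |format|. Returns false if packet creation fails.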
+bool CreateTbmMediaPacket(const std::unique_ptr<gfx::TbmSurface>& surface,
+ gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet) {
+ media_format_h media_format{};
+ media_format_create(&media_format);
+ media_format_set_video_mime(media_format, ToMime(format));
+ media_format_set_video_width(media_format, size.width());
+ media_format_set_video_height(media_format, size.height());
+
+ auto result = media_packet_new_from_tbm_surface(media_format, **surface,
+ nullptr, nullptr, &packet);
+ media_format_unref(media_format);
+ CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
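+// Allocates a new media packet that owns its backing memory for |size| and
+// |format|. Returns false if allocation fails.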
+bool CreateNewMediaPacket(gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet) {
+ media_format_h media_format{};
+ media_format_create(&media_format);
+ media_format_set_video_mime(media_format, ToMime(format));
+ media_format_set_video_width(media_format, size.width());
+ media_format_set_video_height(media_format, size.height());
+
+ auto result = media_packet_new_alloc(media_format, nullptr, nullptr, &packet);
+ media_format_unref(media_format);
+ CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
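+// Wraps the mapped GPU memory of |frame| as external memory in a media
+// packet; the caller must keep |frame| alive for the packet's lifetime.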
+bool CreateMemoryMappedMediaPacket(scoped_refptr<media::VideoFrame> frame,
+ gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet) {
+ media_format_h media_format{};
+ media_format_create(&media_format);
+ media_format_set_video_mime(media_format, ToMime(format));
+ media_format_set_video_width(media_format, size.width());
+ media_format_set_video_height(media_format, size.height());
+
+ void* data = nullptr;
+ if (frame->HasGpuMemoryBuffer()) {
+ auto* gpu_buffer = frame->GetGpuMemoryBuffer();
+ if (!gpu_buffer || !gpu_buffer->Map()) {
+ AI_LOG(ERROR) << "Failed to get or map GPU memory buffer.";
+ return false;
+ }
+ data = gpu_buffer->memory(media::VideoFrame::kYPlane);
+ }
+
+ auto result = media_packet_new_from_external_memory(
+ media_format, data, media::VideoFrame::AllocationSize(format, size),
+ nullptr, nullptr, &packet);
+ media_format_unref(media_format);
+ CheckResultAndReturn(result, MEDIA_PACKET_ERROR_NONE, true, false);
+}
+
+} // namespace content
--- /dev/null
+// Copyright 2024 Samsung Electronics Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
+#define WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
+
+#include <stdint.h>
+
+#include "base/files/scoped_file.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "ui/gfx/geometry/point.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/tbm_buffer_handle.h"
+#include "ui/gfx/tbm_surface.h"
+
+#include <media_packet.h>
+#include <tbm_surface.h>
+
+namespace gfx {
+class GpuMemoryBuffer;
+}
+
+namespace content {
+
+struct AiPacketDeleter {
+ void operator()(media_packet_h packet);
+};
+
+using AiPacket =
+ std::unique_ptr<std::remove_pointer<media_packet_h>::type, AiPacketDeleter>;
+
+inline bool TbmSurfaceHandleAvailable(int tbm_surface_id) {
+ return tbm_surface_id > 0;
+}
+
+gfx::TbmBufferHandle CreateTbmBufferHandle(tbm_surface_h tbm_surface,
+ tbm_surface_info_s surface_info);
+
+const uint8_t* GetGpuMemoryVisibleData(gfx::GpuMemoryBuffer* gmb,
+ gfx::Point left_top,
+ media::VideoPixelFormat format,
+ size_t plane);
+
+bool CreateTbmMediaPacket(const std::unique_ptr<gfx::TbmSurface>& surface,
+ gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet);
+
+bool CreateNewMediaPacket(gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet);
+
+bool CreateMemoryMappedMediaPacket(scoped_refptr<media::VideoFrame> frame,
+ gfx::Size size,
+ media::VideoPixelFormat format,
+ media_packet_h& packet);
+
+} // namespace content
+
+#endif // WEBAI_CONTENT_VIDEO_PROCESSOR_HELPERS_H_
"//tizen_src/chromium_impl/webai/content/ai_processor_video_impl.h",
"//tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.cc",
"//tizen_src/chromium_impl/webai/content/ai_video_frame_buffer.h",
- "//tizen_src/chromium_impl/webai/content/tbm_helpers.cc",
- "//tizen_src/chromium_impl/webai/content/tbm_helpers.h",
"//tizen_src/chromium_impl/webai/content/mojo_broker_impl.cc",
"//tizen_src/chromium_impl/webai/content/mojo_broker_impl.h",
+ "//tizen_src/chromium_impl/webai/content/video_processor_helpers.cc",
+ "//tizen_src/chromium_impl/webai/content/video_processor_helpers.h",
]
tizen_ai_content_browser_configs += [