1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 // Notes about usage of this object by VideoCaptureImplManager.
7 // VideoCaptureImplManager accesses this object by using a Unretained()
8 // binding and tasks on the IO thread. It is therefore important that
9 // VideoCaptureImpl never posts tasks to itself. All operations must be
12 #include "third_party/blink/renderer/platform/video_capture/video_capture_impl.h"
19 #include <GLES2/gl2extchromium.h>
20 #include "base/feature_list.h"
21 #include "base/functional/bind.h"
22 #include "base/functional/callback.h"
23 #include "base/functional/callback_helpers.h"
24 #include "base/logging.h"
25 #include "base/memory/raw_ptr.h"
26 #include "base/metrics/histogram_functions.h"
27 #include "base/task/bind_post_task.h"
28 #include "base/task/sequenced_task_runner.h"
29 #include "base/token.h"
30 #include "base/trace_event/trace_event.h"
31 #include "build/build_config.h"
32 #include "gpu/command_buffer/client/client_shared_image.h"
33 #include "gpu/command_buffer/client/shared_image_interface.h"
34 #include "gpu/command_buffer/common/shared_image_usage.h"
35 #include "gpu/ipc/common/gpu_memory_buffer_support.h"
36 #include "media/base/limits.h"
37 #include "media/base/media_switches.h"
38 #include "media/base/video_frame.h"
39 #include "media/capture/mojom/video_capture_buffer.mojom-blink.h"
40 #include "media/capture/mojom/video_capture_types.mojom-blink.h"
41 #include "media/capture/video_capture_types.h"
42 #include "media/video/gpu_video_accelerator_factories.h"
43 #include "third_party/blink/public/common/browser_interface_broker_proxy.h"
44 #include "third_party/blink/public/common/thread_safe_browser_interface_broker_proxy.h"
45 #include "third_party/blink/public/platform/platform.h"
46 #include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
47 #include "third_party/blink/renderer/platform/wtf/vector.h"
50 #include "media/base/mac/video_frame_mac.h"
51 #endif // BUILDFLAG(IS_MAC)
54 #include "gpu/command_buffer/common/shared_image_capabilities.h"
55 #endif // BUILDFLAG(IS_WIN)
59 constexpr int kMaxFirstFrameLogs = 5;
61 BASE_FEATURE(kTimeoutHangingVideoCaptureStarts,
62 "TimeoutHangingVideoCaptureStarts",
63 base::FEATURE_ENABLED_BY_DEFAULT);
65 using VideoFrameBufferHandleType = media::mojom::blink::VideoBufferHandle::Tag;
67 // A collection of all types of handles that we use to reference a camera buffer
68 // backed with GpuMemoryBuffer.
69 struct GpuMemoryBufferResources {
70 explicit GpuMemoryBufferResources(gfx::GpuMemoryBufferHandle handle)
71 : gpu_memory_buffer_handle(std::move(handle)) {}
72 // Stores the GpuMemoryBufferHandle when a new buffer is first registered.
73 // |gpu_memory_buffer_handle| is converted to |gpu_memory_buffer| below when
74 // the camera frame is ready for the first time.
75 gfx::GpuMemoryBufferHandle gpu_memory_buffer_handle;
76 // The GpuMemoryBuffer backing the camera frame.
77 std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer;
78 // The SharedImage created from |gpu_memory_buffer|.
79 gpu::Mailbox mailboxes[media::VideoFrame::kMaxPlanes];
80 // The release sync token for |mailboxes|.
81 gpu::SyncToken release_sync_token;
84 struct VideoCaptureImpl::BufferContext
85 : public base::RefCountedThreadSafe<BufferContext> {
87 BufferContext(media::mojom::blink::VideoBufferHandlePtr buffer_handle,
88 scoped_refptr<base::SequencedTaskRunner> media_task_runner)
89 : buffer_type_(buffer_handle->which()),
90 media_task_runner_(media_task_runner) {
91 switch (buffer_type_) {
92 case VideoFrameBufferHandleType::kUnsafeShmemRegion:
93 InitializeFromUnsafeShmemRegion(
94 std::move(buffer_handle->get_unsafe_shmem_region()));
96 case VideoFrameBufferHandleType::kReadOnlyShmemRegion:
97 InitializeFromReadOnlyShmemRegion(
98 std::move(buffer_handle->get_read_only_shmem_region()));
100 case VideoFrameBufferHandleType::kMailboxHandles:
101 InitializeFromMailbox(std::move(buffer_handle->get_mailbox_handles()));
103 case VideoFrameBufferHandleType::kGpuMemoryBufferHandle:
104 #if !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_WIN)
105 // On macOS, an IOSurfaces passed as a GpuMemoryBufferHandle can be
106 // used by both hardware and software paths.
107 // https://crbug.com/1125879
108 // On Windows, GMBs might be passed by the capture process even if
109 // the acceleration disabled during the capture.
110 CHECK(media_task_runner_);
112 InitializeFromGpuMemoryBufferHandle(
113 std::move(buffer_handle->get_gpu_memory_buffer_handle()));
117 BufferContext(const BufferContext&) = delete;
118 BufferContext& operator=(const BufferContext&) = delete;
120 VideoFrameBufferHandleType buffer_type() const { return buffer_type_; }
121 const uint8_t* data() const { return data_; }
122 size_t data_size() const { return data_size_; }
123 const base::ReadOnlySharedMemoryRegion* read_only_shmem_region() const {
124 return &read_only_shmem_region_;
126 const Vector<gpu::MailboxHolder>& mailbox_holders() const {
127 return mailbox_holders_;
129 media::GpuVideoAcceleratorFactories* gpu_factories() const {
130 return gpu_factories_;
132 void SetGpuFactories(media::GpuVideoAcceleratorFactories* gpu_factories) {
133 gpu_factories_ = gpu_factories;
135 GpuMemoryBufferResources* gmb_resources() const {
136 return gmb_resources_.get();
139 gfx::GpuMemoryBufferHandle TakeGpuMemoryBufferHandle() {
140 #if BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_WIN)
141 // The same GpuMemoryBuffersHandles will be reused repeatedly by the
142 // unaccelerated macOS path. Each of these uses will call this function.
143 // Ensure that this function doesn't invalidate the GpuMemoryBufferHandle
144 // on macOS for this reason.
145 // https://crbug.com/1159722
146 // It will also be reused repeatedly if GPU process is unavailable in
147 // Windows zero-copy path (e.g. due to repeated GPU process crashes).
148 return gmb_resources_->gpu_memory_buffer_handle.Clone();
150 return std::move(gmb_resources_->gpu_memory_buffer_handle);
154 void SetGpuMemoryBuffer(
155 std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer) {
156 gmb_resources_->gpu_memory_buffer = std::move(gpu_memory_buffer);
158 gfx::GpuMemoryBuffer* GetGpuMemoryBuffer() {
159 return gmb_resources_->gpu_memory_buffer.get();
162 static void MailboxHolderReleased(
163 scoped_refptr<BufferContext> buffer_context,
164 const gpu::SyncToken& release_sync_token,
165 std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer) {
166 if (!buffer_context->media_task_runner_->RunsTasksInCurrentSequence()) {
167 buffer_context->media_task_runner_->PostTask(
169 base::BindOnce(&BufferContext::MailboxHolderReleased, buffer_context,
170 release_sync_token, std::move(gpu_memory_buffer)));
173 buffer_context->gmb_resources_->release_sync_token = release_sync_token;
174 // Free |gpu_memory_buffer|.
177 static void DestroyTextureOnMediaThread(
178 media::GpuVideoAcceleratorFactories* gpu_factories,
179 gpu::Mailbox mailbox,
180 gpu::SyncToken release_sync_token) {
181 if (!mailbox.IsZero()) {
182 auto* sii = gpu_factories->SharedImageInterface();
185 sii->DestroySharedImage(release_sync_token, mailbox);
189 // Public because it may be called after initialization when GPU process
190 // dies on Windows to wrap premapped GMBs.
191 void InitializeFromUnsafeShmemRegion(base::UnsafeSharedMemoryRegion region) {
192 DCHECK(region.IsValid());
193 backup_mapping_ = region.Map();
194 DCHECK(backup_mapping_.IsValid());
195 data_ = backup_mapping_.GetMemoryAsSpan<uint8_t>().data();
196 data_size_ = backup_mapping_.size();
200 void InitializeFromReadOnlyShmemRegion(
201 base::ReadOnlySharedMemoryRegion region) {
202 DCHECK(region.IsValid());
203 read_only_mapping_ = region.Map();
204 DCHECK(read_only_mapping_.IsValid());
205 data_ = read_only_mapping_.GetMemoryAsSpan<uint8_t>().data();
206 data_size_ = read_only_mapping_.size();
207 read_only_shmem_region_ = std::move(region);
210 void InitializeFromMailbox(
211 media::mojom::blink::MailboxBufferHandleSetPtr mailbox_handles) {
212 DCHECK_EQ(media::VideoFrame::kMaxPlanes,
213 mailbox_handles->mailbox_holder.size());
214 mailbox_holders_ = std::move(mailbox_handles->mailbox_holder);
217 void InitializeFromGpuMemoryBufferHandle(
218 gfx::GpuMemoryBufferHandle gpu_memory_buffer_handle) {
219 gmb_resources_ = std::make_unique<GpuMemoryBufferResources>(
220 std::move(gpu_memory_buffer_handle));
223 friend class base::RefCountedThreadSafe<BufferContext>;
224 virtual ~BufferContext() {
227 for (size_t plane = 0; plane < media::VideoFrame::kMaxPlanes; ++plane) {
228 if (!gmb_resources_->mailboxes[plane].IsSharedImage())
230 media_task_runner_->PostTask(
232 base::BindOnce(&BufferContext::DestroyTextureOnMediaThread,
233 gpu_factories_, gmb_resources_->mailboxes[plane],
234 gmb_resources_->release_sync_token));
238 VideoFrameBufferHandleType buffer_type_;
240 // Only valid for |buffer_type_ == SHARED_BUFFER_HANDLE|.
241 base::WritableSharedMemoryMapping writable_mapping_;
243 // Only valid for |buffer_type_ == READ_ONLY_SHMEM_REGION|.
244 base::ReadOnlySharedMemoryRegion read_only_shmem_region_;
245 base::ReadOnlySharedMemoryMapping read_only_mapping_;
247 // Only valid for |buffer_type == GPU_MEMORY_BUFFER_HANDLE|
248 // if on windows, gpu_factories are unavailable, and
249 // GMB comes premapped from the capturer.
250 base::WritableSharedMemoryMapping backup_mapping_;
252 // These point into one of the above mappings, which hold the mapping open for
253 // the lifetime of this object.
254 const uint8_t* data_ = nullptr;
255 size_t data_size_ = 0;
257 // Only valid for |buffer_type_ == MAILBOX_HANDLES|.
258 Vector<gpu::MailboxHolder> mailbox_holders_;
260 // The following is for |buffer_type == GPU_MEMORY_BUFFER_HANDLE|.
262 // Uses to create SharedImage from |gpu_memory_buffer_|.
263 raw_ptr<media::GpuVideoAcceleratorFactories, ExperimentalRenderer>
264 gpu_factories_ = nullptr;
265 // The task runner that |gpu_factories_| runs on.
266 const scoped_refptr<base::SequencedTaskRunner> media_task_runner_;
268 std::unique_ptr<GpuMemoryBufferResources> gmb_resources_;
271 VideoCaptureImpl::VideoFrameBufferPreparer::VideoFrameBufferPreparer(
272 VideoCaptureImpl& video_capture_impl,
273 media::mojom::blink::ReadyBufferPtr ready_buffer)
274 : video_capture_impl_(video_capture_impl),
275 buffer_id_(ready_buffer->buffer_id),
276 frame_info_(std::move(ready_buffer->info)) {}
278 int32_t VideoCaptureImpl::VideoFrameBufferPreparer::buffer_id() const {
282 const media::mojom::blink::VideoFrameInfoPtr&
283 VideoCaptureImpl::VideoFrameBufferPreparer::frame_info() const {
287 scoped_refptr<media::VideoFrame>
288 VideoCaptureImpl::VideoFrameBufferPreparer::frame() const {
292 scoped_refptr<VideoCaptureImpl::BufferContext>
293 VideoCaptureImpl::VideoFrameBufferPreparer::buffer_context() const {
294 return buffer_context_;
297 bool VideoCaptureImpl::VideoFrameBufferPreparer::Initialize() {
298 // Prior to initializing, |frame_| and |gpu_memory_buffer_| are null.
299 DCHECK(!frame_ && !gpu_memory_buffer_);
300 const auto& iter = video_capture_impl_->client_buffers_.find(buffer_id_);
301 DCHECK(iter != video_capture_impl_->client_buffers_.end());
302 buffer_context_ = iter->second;
303 switch (buffer_context_->buffer_type()) {
304 case VideoFrameBufferHandleType::kUnsafeShmemRegion:
305 // The frame is backed by a writable (unsafe) shared memory handle, but as
306 // it is not sent cross-process the region does not need to be attached to
307 // the frame. See also the case for kReadOnlyShmemRegion.
308 if (frame_info_->strides) {
309 CHECK(IsYuvPlanar(frame_info_->pixel_format) &&
310 (media::VideoFrame::NumPlanes(frame_info_->pixel_format) == 3))
311 << "Currently, only YUV formats support custom strides.";
312 uint8_t* y_data = const_cast<uint8_t*>(buffer_context_->data());
314 y_data + (media::VideoFrame::Rows(
315 media::VideoFrame::kYPlane, frame_info_->pixel_format,
316 frame_info_->coded_size.height()) *
317 frame_info_->strides->stride_by_plane[0]);
319 u_data + (media::VideoFrame::Rows(
320 media::VideoFrame::kUPlane, frame_info_->pixel_format,
321 frame_info_->coded_size.height()) *
322 frame_info_->strides->stride_by_plane[1]);
323 frame_ = media::VideoFrame::WrapExternalYuvData(
324 frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
325 gfx::Rect(frame_info_->visible_rect),
326 frame_info_->visible_rect.size(),
327 frame_info_->strides->stride_by_plane[0],
328 frame_info_->strides->stride_by_plane[1],
329 frame_info_->strides->stride_by_plane[2], y_data, u_data, v_data,
330 frame_info_->timestamp);
332 frame_ = media::VideoFrame::WrapExternalData(
333 frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
334 gfx::Rect(frame_info_->visible_rect),
335 frame_info_->visible_rect.size(),
336 const_cast<uint8_t*>(buffer_context_->data()),
337 buffer_context_->data_size(), frame_info_->timestamp);
340 case VideoFrameBufferHandleType::kReadOnlyShmemRegion:
341 // As with the kSharedBufferHandle type, it is sufficient to just wrap
342 // the data without attaching the shared region to the frame.
343 frame_ = media::VideoFrame::WrapExternalData(
344 frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
345 gfx::Rect(frame_info_->visible_rect),
346 frame_info_->visible_rect.size(),
347 const_cast<uint8_t*>(buffer_context_->data()),
348 buffer_context_->data_size(), frame_info_->timestamp);
349 frame_->BackWithSharedMemory(buffer_context_->read_only_shmem_region());
351 case VideoFrameBufferHandleType::kMailboxHandles: {
352 gpu::MailboxHolder mailbox_holder_array[media::VideoFrame::kMaxPlanes];
353 CHECK_EQ(media::VideoFrame::kMaxPlanes,
354 buffer_context_->mailbox_holders().size());
355 for (int i = 0; i < media::VideoFrame::kMaxPlanes; i++) {
356 mailbox_holder_array[i] = buffer_context_->mailbox_holders()[i];
358 frame_ = media::VideoFrame::WrapNativeTextures(
359 frame_info_->pixel_format, mailbox_holder_array,
360 media::VideoFrame::ReleaseMailboxCB(),
361 gfx::Size(frame_info_->coded_size),
362 gfx::Rect(frame_info_->visible_rect),
363 frame_info_->visible_rect.size(), frame_info_->timestamp);
366 case VideoFrameBufferHandleType::kGpuMemoryBufferHandle: {
367 #if BUILDFLAG(IS_APPLE)
368 // On macOS, an IOSurfaces passed as a GpuMemoryBufferHandle can be
369 // used by both hardware and software paths.
370 // https://crbug.com/1125879
371 if (!video_capture_impl_->gpu_factories_ ||
372 !video_capture_impl_->media_task_runner_) {
373 frame_ = media::VideoFrame::WrapUnacceleratedIOSurface(
374 buffer_context_->TakeGpuMemoryBufferHandle(),
375 gfx::Rect(frame_info_->visible_rect), frame_info_->timestamp);
379 #if BUILDFLAG(IS_WIN)
380 // The associated shared memory region is mapped only once
381 if (frame_info_->is_premapped && !buffer_context_->data()) {
382 auto gmb_handle = buffer_context_->TakeGpuMemoryBufferHandle();
383 buffer_context_->InitializeFromUnsafeShmemRegion(
384 std::move(gmb_handle.region));
385 DCHECK(buffer_context_->data());
387 // On Windows it might happen that the Renderer process loses GPU
388 // connection, while the capturer process will continue to produce
389 // GPU backed frames.
390 if (!video_capture_impl_->gpu_factories_ ||
391 !video_capture_impl_->media_task_runner_ ||
392 video_capture_impl_->gmb_not_supported_) {
393 video_capture_impl_->RequirePremappedFrames();
394 if (!frame_info_->is_premapped || !buffer_context_->data()) {
395 // If the frame isn't premapped, can't do anything here.
399 frame_ = media::VideoFrame::WrapExternalData(
400 frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
401 gfx::Rect(frame_info_->visible_rect),
402 frame_info_->visible_rect.size(),
403 const_cast<uint8_t*>(buffer_context_->data()),
404 buffer_context_->data_size(), frame_info_->timestamp);
412 CHECK(video_capture_impl_->gpu_factories_);
413 CHECK(video_capture_impl_->media_task_runner_);
414 // Create GpuMemoryBuffer from handle.
415 if (!buffer_context_->GetGpuMemoryBuffer()) {
416 gfx::BufferFormat gfx_format;
417 switch (frame_info_->pixel_format) {
418 case media::VideoPixelFormat::PIXEL_FORMAT_NV12:
419 gfx_format = gfx::BufferFormat::YUV_420_BIPLANAR;
422 LOG(FATAL) << "Unsupported pixel format";
425 // The GpuMemoryBuffer is allocated and owned by the video capture
426 // buffer pool from the video capture service process, so we don't need
427 // to destroy the GpuMemoryBuffer here.
429 video_capture_impl_->gpu_memory_buffer_support_
430 ->CreateGpuMemoryBufferImplFromHandle(
431 buffer_context_->TakeGpuMemoryBufferHandle(),
432 gfx::Size(frame_info_->coded_size), gfx_format,
433 gfx::BufferUsage::SCANOUT_VEA_CPU_READ, base::DoNothing(),
434 video_capture_impl_->gpu_factories_
435 ->GpuMemoryBufferManager(),
436 video_capture_impl_->pool_);
438 // Keep one GpuMemoryBuffer for current GpuMemoryHandle alive,
439 // so that any associated structures are kept alive while this buffer id
440 // is still used (e.g. DMA buf handles for linux/CrOS).
441 buffer_context_->SetGpuMemoryBuffer(std::move(gmb));
443 CHECK(buffer_context_->GetGpuMemoryBuffer());
445 auto buffer_handle = buffer_context_->GetGpuMemoryBuffer()->CloneHandle();
446 #if BUILDFLAG(IS_CHROMEOS)
447 is_webgpu_compatible_ =
448 buffer_handle.native_pixmap_handle.supports_zero_copy_webgpu_import;
451 #if BUILDFLAG(IS_MAC)
452 is_webgpu_compatible_ =
453 media::IOSurfaceIsWebGPUCompatible(buffer_handle.io_surface.get());
455 // No need to propagate shared memory region further as it's already
456 // exposed by |buffer_context_->data()|.
457 buffer_handle.region = base::UnsafeSharedMemoryRegion();
458 // The buffer_context_ might still have a mapped shared memory region.
459 // However, it contains valid data only if |is_premapped| is set.
460 uint8_t* premapped_data =
461 frame_info_->is_premapped
462 ? const_cast<uint8_t*>(buffer_context_->data())
465 // Clone the GpuMemoryBuffer and wrap it in a VideoFrame.
467 video_capture_impl_->gpu_memory_buffer_support_
468 ->CreateGpuMemoryBufferImplFromHandle(
469 std::move(buffer_handle),
470 buffer_context_->GetGpuMemoryBuffer()->GetSize(),
471 buffer_context_->GetGpuMemoryBuffer()->GetFormat(),
472 gfx::BufferUsage::SCANOUT_VEA_CPU_READ, base::DoNothing(),
473 video_capture_impl_->gpu_factories_->GpuMemoryBufferManager(),
474 video_capture_impl_->pool_,
475 base::span<uint8_t>(premapped_data,
476 buffer_context_->data_size()));
477 if (!gpu_memory_buffer_) {
478 LOG(ERROR) << "Failed to open GpuMemoryBuffer handle";
483 // After initializing, either |frame_| or |gpu_memory_buffer_| has been set.
484 DCHECK(frame_ || gpu_memory_buffer_);
488 bool VideoCaptureImpl::VideoFrameBufferPreparer::IsVideoFrameBound() const {
492 // Creates SharedImage mailboxes for |gpu_memory_buffer_handle_| and wraps the
493 // mailboxes with the buffer handles in a DMA-buf VideoFrame. The consumer of
494 // the VideoFrame can access the data either through mailboxes (e.g. display)
495 // or through the DMA-buf FDs (e.g. video encoder).
496 bool VideoCaptureImpl::VideoFrameBufferPreparer::BindVideoFrameOnMediaThread(
497 media::GpuVideoAcceleratorFactories* gpu_factories) {
498 DCHECK(gpu_factories);
499 DCHECK(!IsVideoFrameBound());
500 DCHECK_EQ(frame_info_->pixel_format, media::PIXEL_FORMAT_NV12);
502 bool should_recreate_shared_image = false;
503 if (gpu_factories != buffer_context_->gpu_factories()) {
504 DVLOG(1) << "GPU context changed; re-creating SharedImage objects";
505 buffer_context_->SetGpuFactories(gpu_factories);
506 should_recreate_shared_image = true;
508 #if BUILDFLAG(IS_WIN)
509 // If the renderer is running in d3d9 mode due to e.g. driver bugs
510 // workarounds, DXGI D3D11 textures won't be supported.
511 // Can't check this from the ::Initialize() since media context provider can
512 // be accessed only on the Media thread.
513 gpu::SharedImageInterface* shared_image_interface =
514 gpu_factories->SharedImageInterface();
515 if (!shared_image_interface ||
516 !shared_image_interface->GetCapabilities().shared_image_d3d) {
517 video_capture_impl_->RequirePremappedFrames();
518 video_capture_impl_->gmb_not_supported_ = true;
523 // Create GPU texture and bind GpuMemoryBuffer to the texture.
524 auto* sii = buffer_context_->gpu_factories()->SharedImageInterface();
526 DVLOG(1) << "GPU context lost";
529 // Don't check VideoFrameOutputFormat until we ensure the context has not
530 // been lost (if it is lost, then the format will be UNKNOWN).
531 const auto output_format =
532 buffer_context_->gpu_factories()->VideoFrameOutputFormat(
533 frame_info_->pixel_format);
536 media::GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB ||
538 media::GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB);
540 std::vector<gfx::BufferPlane> planes;
543 gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER |
544 gpu::SHARED_IMAGE_USAGE_DISPLAY_READ | gpu::SHARED_IMAGE_USAGE_SCANOUT;
545 #if BUILDFLAG(IS_APPLE)
546 usage |= gpu::SHARED_IMAGE_USAGE_MACOS_VIDEO_TOOLBOX;
548 #if BUILDFLAG(IS_CHROMEOS)
549 usage |= gpu::SHARED_IMAGE_USAGE_WEBGPU;
552 // The feature flags here are a little subtle:
553 // * IsMultiPlaneFormatForHardwareVideoEnabled() controls whether Multiplanar
554 // SI is used (i.e., whether a single SharedImage is created via passing a
555 // viz::MultiPlaneFormat rather than the legacy codepath of passing a
557 // * kMultiPlaneVideoCaptureSharedImages controls whether planes are sampled
558 // individually rather than using external sampling.
560 // These two flags are orthogonal:
561 // * If both flags are true, one SharedImage with format MultiPlaneFormat::
562 // kNV12 will be created.
563 // * If using multiplane SI without per-plane sampling, one SharedImage with
564 // format MultiPlaneFormat::kNV12 configured to use external sampling
565 // will be created (this is supported only on Ozone-based platforms and
566 // not expected to be requested on other platforms).
567 // * If using per-plane sampling without multiplane SI, one SharedImage will
568 // be created for each plane via the legacy "pass GMB" entrypoint.
569 // * If both flags are false, one SharedImage will be created via the legacy
570 // "pass GMB" entrypoint (this uses external sampling on the other side
571 // based on the format of the GMB).
572 bool create_multiplanar_image =
573 media::IsMultiPlaneFormatForHardwareVideoEnabled();
574 bool use_per_plane_sampling =
575 base::FeatureList::IsEnabled(media::kMultiPlaneVideoCaptureSharedImages);
576 #if BUILDFLAG(IS_WIN) || BUILDFLAG(IS_MAC)
577 // External sampling isn't supported on Windows/Mac with Multiplane SI (it's
578 // not supported with legacy SI either for that matter, but we restricted
579 // the CHECK here to Multiplane SI as in the case of legacy SI the flow is
580 // more nebulous and we wanted to restrict any impact here to the Multiplane
582 // NOTE: This CHECK would ideally be done if !BUILDFLAG(IS_OZONE), but this
583 // codepath is entered in tests for Android, which does not have
584 // kMultiPlaneVideoCaptureSharedImages set. This codepath is not entered in
585 // production for Android (see
586 // https://chromium-review.googlesource.com/c/chromium/src/+/4640009/comment/29c99ef9_587e49dc/
587 // for a detailed discussion).
588 CHECK(!create_multiplanar_image || use_per_plane_sampling);
591 if (create_multiplanar_image || !use_per_plane_sampling) {
592 planes.push_back(gfx::BufferPlane::DEFAULT);
594 // Using per-plane sampling without multiplane SI.
595 planes.push_back(gfx::BufferPlane::Y);
596 planes.push_back(gfx::BufferPlane::UV);
598 CHECK(planes.size() == 1 || !create_multiplanar_image);
600 for (size_t plane = 0; plane < planes.size(); ++plane) {
601 if (should_recreate_shared_image ||
602 buffer_context_->gmb_resources()->mailboxes[plane].IsZero()) {
603 auto multiplanar_si_format = viz::MultiPlaneFormat::kNV12;
604 #if BUILDFLAG(IS_OZONE)
605 if (!use_per_plane_sampling) {
606 multiplanar_si_format.SetPrefersExternalSampler();
609 CHECK_EQ(gpu_memory_buffer_->GetFormat(),
610 gfx::BufferFormat::YUV_420_BIPLANAR);
611 if (create_multiplanar_image) {
612 auto client_shared_image = sii->CreateSharedImage(
613 multiplanar_si_format, gpu_memory_buffer_->GetSize(),
614 frame_info_->color_space, kTopLeft_GrSurfaceOrigin,
615 kPremul_SkAlphaType, usage, "VideoCaptureFrameBuffer",
616 gpu_memory_buffer_->CloneHandle());
617 CHECK(client_shared_image);
618 buffer_context_->gmb_resources()->mailboxes[plane] =
619 client_shared_image->mailbox();
621 buffer_context_->gmb_resources()->mailboxes[plane] =
622 sii->CreateSharedImage(
623 gpu_memory_buffer_.get(),
624 buffer_context_->gpu_factories()->GpuMemoryBufferManager(),
625 planes[plane], frame_info_->color_space,
626 kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, usage,
627 "VideoCaptureFrameBuffer");
630 sii->UpdateSharedImage(
631 buffer_context_->gmb_resources()->release_sync_token,
632 buffer_context_->gmb_resources()->mailboxes[plane]);
636 const unsigned texture_target =
637 #if BUILDFLAG(IS_LINUX)
638 // Explicitly set GL_TEXTURE_EXTERNAL_OES as the
639 // `media::VideoFrame::RequiresExternalSampler()` requires it for NV12
640 // format, while the `ImageTextureTarget()` will return GL_TEXTURE_2D.
641 (frame_info_->pixel_format == media::PIXEL_FORMAT_NV12)
642 ? GL_TEXTURE_EXTERNAL_OES
645 buffer_context_->gpu_factories()->ImageTextureTarget(
646 gpu_memory_buffer_->GetFormat());
648 const gpu::SyncToken sync_token = sii->GenVerifiedSyncToken();
650 gpu::MailboxHolder mailbox_holder_array[media::VideoFrame::kMaxPlanes];
651 for (size_t plane = 0; plane < planes.size(); ++plane) {
652 DCHECK(!buffer_context_->gmb_resources()->mailboxes[plane].IsZero());
653 DCHECK(buffer_context_->gmb_resources()->mailboxes[plane].IsSharedImage());
654 mailbox_holder_array[plane] =
655 gpu::MailboxHolder(buffer_context_->gmb_resources()->mailboxes[plane],
656 sync_token, texture_target);
659 const auto gmb_size = gpu_memory_buffer_->GetSize();
660 frame_ = media::VideoFrame::WrapExternalGpuMemoryBuffer(
661 gfx::Rect(frame_info_->visible_rect), gmb_size,
662 std::move(gpu_memory_buffer_), mailbox_holder_array,
663 base::BindOnce(&BufferContext::MailboxHolderReleased, buffer_context_),
664 frame_info_->timestamp);
666 LOG(ERROR) << "Can't wrap GpuMemoryBuffer as VideoFrame";
670 // If we created a single multiplanar image, inform the VideoFrame that it
671 // should go down the normal SharedImageFormat codepath rather than the
672 // codepath used for legacy multiplanar formats.
673 if (create_multiplanar_image) {
674 frame_->set_shared_image_format_type(
675 use_per_plane_sampling
676 ? media::SharedImageFormatType::kSharedImageFormat
677 : media::SharedImageFormatType::kSharedImageFormatExternalSampler);
680 frame_->metadata().allow_overlay = true;
681 frame_->metadata().read_lock_fences_enabled = true;
682 #if BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_MAC)
683 frame_->metadata().is_webgpu_compatible = is_webgpu_compatible_;
688 void VideoCaptureImpl::VideoFrameBufferPreparer::Finalize() {
690 frame_->AddDestructionObserver(
691 base::BindOnce(&VideoCaptureImpl::DidFinishConsumingFrame,
692 base::BindPostTaskToCurrentDefault(base::BindOnce(
693 &VideoCaptureImpl::OnAllClientsFinishedConsumingFrame,
694 video_capture_impl_->weak_factory_.GetWeakPtr(),
695 buffer_id_, buffer_context_))));
696 if (frame_info_->color_space.IsValid()) {
697 frame_->set_color_space(frame_info_->color_space);
699 frame_->metadata().MergeMetadataFrom(frame_info_->metadata);
702 // Information about a video capture client of ours.
703 struct VideoCaptureImpl::ClientInfo {
704 ClientInfo() = default;
705 ClientInfo(const ClientInfo& other) = default;
706 ~ClientInfo() = default;
708 media::VideoCaptureParams params;
709 VideoCaptureStateUpdateCB state_update_cb;
710 VideoCaptureDeliverFrameCB deliver_frame_cb;
711 VideoCaptureSubCaptureTargetVersionCB sub_capture_target_version_cb;
712 VideoCaptureNotifyFrameDroppedCB frame_dropped_cb;
715 VideoCaptureImpl::VideoCaptureImpl(
716 media::VideoCaptureSessionId session_id,
717 scoped_refptr<base::SequencedTaskRunner> main_task_runner,
718 BrowserInterfaceBrokerProxy* browser_interface_broker)
719 : device_id_(session_id),
720 session_id_(session_id),
721 video_capture_host_for_testing_(nullptr),
722 state_(blink::VIDEO_CAPTURE_STATE_STOPPED),
723 main_task_runner_(std::move(main_task_runner)),
724 gpu_memory_buffer_support_(new gpu::GpuMemoryBufferSupport()),
725 pool_(base::MakeRefCounted<base::UnsafeSharedMemoryPool>()) {
726 CHECK(!session_id.is_empty());
727 DCHECK(main_task_runner_->RunsTasksInCurrentSequence());
728 DETACH_FROM_THREAD(io_thread_checker_);
730 browser_interface_broker->GetInterface(
731 pending_video_capture_host_.InitWithNewPipeAndPassReceiver());
733 gpu_factories_ = Platform::Current()->GetGpuFactories();
734 if (gpu_factories_) {
735 media_task_runner_ = gpu_factories_->GetTaskRunner();
737 weak_this_ = weak_factory_.GetWeakPtr();
740 void VideoCaptureImpl::OnGpuContextLost(
741 base::WeakPtr<VideoCaptureImpl> video_capture_impl) {
742 // Called on the main task runner.
743 auto* gpu_factories = Platform::Current()->GetGpuFactories();
744 Platform::Current()->GetIOTaskRunner()->PostTask(
746 base::BindOnce(&VideoCaptureImpl::SetGpuFactoriesHandleOnIOTaskRunner,
747 video_capture_impl, gpu_factories));
750 void VideoCaptureImpl::SetGpuFactoriesHandleOnIOTaskRunner(
751 media::GpuVideoAcceleratorFactories* gpu_factories) {
752 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
753 if (gpu_factories != gpu_factories_) {
754 LOG(ERROR) << "GPU factories handle changed; assuming GPU context lost";
755 gpu_factories_ = gpu_factories;
759 VideoCaptureImpl::~VideoCaptureImpl() {
760 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
761 if ((state_ == VIDEO_CAPTURE_STATE_STARTING ||
762 state_ == VIDEO_CAPTURE_STATE_STARTED) &&
763 GetVideoCaptureHost())
764 GetVideoCaptureHost()->Stop(device_id_);
767 void VideoCaptureImpl::SuspendCapture(bool suspend) {
768 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
770 GetVideoCaptureHost()->Pause(device_id_);
772 GetVideoCaptureHost()->Resume(device_id_, session_id_, params_);
// Registers a new client for this capture session. Depending on the current
// |state_|, the client is either attached to an already-running session,
// parked until an in-flight stop completes, used to start the device, or
// immediately told about an error state via |state_update_cb|.
void VideoCaptureImpl::StartCapture(
    const media::VideoCaptureParams& params,
    const VideoCaptureStateUpdateCB& state_update_cb,
    const VideoCaptureDeliverFrameCB& deliver_frame_cb,
    const VideoCaptureSubCaptureTargetVersionCB& sub_capture_target_version_cb,
    const VideoCaptureNotifyFrameDroppedCB& frame_dropped_cb) {
  DVLOG(1) << __func__ << " |device_id_| = " << device_id_;
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  OnLog("VideoCaptureImpl got request to start capture.");

  // Bundle the client's parameters and callbacks so they can be stored in
  // one of the client maps below.
  ClientInfo client_info;
  client_info.params = params;
  client_info.state_update_cb = state_update_cb;
  client_info.deliver_frame_cb = deliver_frame_cb;
  client_info.sub_capture_target_version_cb = sub_capture_target_version_cb;
  client_info.frame_dropped_cb = frame_dropped_cb;

    // Session is running (or about to): just add the client.
    case VIDEO_CAPTURE_STATE_STARTING:
    case VIDEO_CAPTURE_STATE_STARTED:
      clients_[client_id] = client_info;
      OnLog("VideoCaptureImpl capture is already started or starting.");
      // TODO(sheu): Allowing resolution change will require that all
      // outstanding clients of a capture session support resolution change.
      DCHECK_EQ(params_.resolution_change_policy,
                params.resolution_change_policy);
    // A stop is in flight: park the client so the session restarts for it
    // once the device reports STOPPED.
    case VIDEO_CAPTURE_STATE_STOPPING:
      clients_pending_on_restart_[client_id] = client_info;
      DVLOG(1) << __func__ << " Got new resolution while stopping: "
               << params.requested_format.frame_size.ToString();
    // No session running: adopt this client's parameters and start the device.
    case VIDEO_CAPTURE_STATE_STOPPED:
    case VIDEO_CAPTURE_STATE_ENDED:
      clients_[client_id] = client_info;
      // Clamp the requested frame rate to the media-wide maximum.
      params_.requested_format.frame_rate =
          std::min(params_.requested_format.frame_rate,
                   static_cast<float>(media::limits::kMaxFramesPerSecond));
      DVLOG(1) << "StartCapture: starting with first resolution "
               << params_.requested_format.frame_size.ToString();
      OnLog("VideoCaptureImpl starting capture.");
      StartCaptureInternal();
    // Error states: only the newly arriving client is notified here.
    case VIDEO_CAPTURE_STATE_ERROR:
      OnLog("VideoCaptureImpl is in error state.");
      state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_ERROR);
    case VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED:
      OnLog("VideoCaptureImpl is in system permissions error state.");
          blink::VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED);
    case VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY:
      OnLog("VideoCaptureImpl is in camera busy error state.");
      state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY);
    case VIDEO_CAPTURE_STATE_PAUSED:
    case VIDEO_CAPTURE_STATE_RESUMED:
      // The internal |state_| is never set to PAUSED/RESUMED since
      // VideoCaptureImpl is not modified by those.
// Detaches |client_id| from this capture session. When the last client is
// gone, the session is torn down and all outstanding buffers are dropped.
void VideoCaptureImpl::StopCapture(int client_id) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  // A client ID can be in only one client list.
  // If this ID is in any client list, we can just remove it from
  // that client list and don't have to run the other following RemoveClient().
  if (!RemoveClient(client_id, &clients_pending_on_restart_)) {
    RemoveClient(client_id, &clients_);
  // Other clients remain: keep the session alive for them.
  if (!clients_.empty())
  DVLOG(1) << "StopCapture: No more client, stopping ...";
  // Forget all shared buffers and cancel in-flight weak callbacks so nothing
  // is delivered after the session ends.
  client_buffers_.clear();
  weak_factory_.InvalidateWeakPtrs();
860 void VideoCaptureImpl::RequestRefreshFrame() {
861 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
862 GetVideoCaptureHost()->RequestRefreshFrame(device_id_);
865 void VideoCaptureImpl::GetDeviceSupportedFormats(
866 VideoCaptureDeviceFormatsCallback callback) {
867 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
868 GetVideoCaptureHost()->GetDeviceSupportedFormats(
869 device_id_, session_id_,
870 base::BindOnce(&VideoCaptureImpl::OnDeviceSupportedFormats,
871 weak_factory_.GetWeakPtr(), std::move(callback)));
874 void VideoCaptureImpl::GetDeviceFormatsInUse(
875 VideoCaptureDeviceFormatsCallback callback) {
876 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
877 GetVideoCaptureHost()->GetDeviceFormatsInUse(
878 device_id_, session_id_,
879 base::BindOnce(&VideoCaptureImpl::OnDeviceFormatsInUse,
880 weak_factory_.GetWeakPtr(), std::move(callback)));
883 void VideoCaptureImpl::OnLog(const String& message) {
884 GetVideoCaptureHost()->OnLog(device_id_, message);
887 void VideoCaptureImpl::SetGpuMemoryBufferSupportForTesting(
888 std::unique_ptr<gpu::GpuMemoryBufferSupport> gpu_memory_buffer_support) {
889 gpu_memory_buffer_support_ = std::move(gpu_memory_buffer_support);
// mojo VideoCaptureObserver entry point: reacts to state changes reported by
// the browser-side host, updates |state_|, and fans the new state out to all
// registered clients.
void VideoCaptureImpl::OnStateChanged(
    media::mojom::blink::VideoCaptureResultPtr result) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  // Stop the startup deadline timer as something has happened.
  startup_timeout_.Stop();

  // Error path: specific Windows Media Foundation errors map to dedicated
  // error states; everything else becomes the generic error state.
  if (result->which() ==
      media::mojom::blink::VideoCaptureResult::Tag::kErrorCode) {
    DVLOG(1) << __func__ << " Failed with an error.";
    if (result->get_error_code() ==
        media::VideoCaptureError::kWinMediaFoundationSystemPermissionDenied) {
      state_ = VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED;
          "VideoCaptureImpl changing state to "
          "VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED");
    } else if (result->get_error_code() ==
               media::VideoCaptureError::kWinMediaFoundationCameraBusy) {
      state_ = VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY;
          "VideoCaptureImpl changing state to "
          "VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY");
      state_ = VIDEO_CAPTURE_STATE_ERROR;
      OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_ERROR");
    // Let every client observe the error state.
    for (const auto& client : clients_)
      client.second.state_update_cb.Run(state_);
    // Record how this start attempt ended (reported once per attempt).
    RecordStartOutcomeUMA(result->get_error_code());

  // Success path: a plain state transition.
  media::mojom::VideoCaptureState state = result->get_state();
  DVLOG(1) << __func__ << " state: " << state;
    case media::mojom::VideoCaptureState::STARTED:
      OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STARTED");
      state_ = VIDEO_CAPTURE_STATE_STARTED;
      for (const auto& client : clients_)
        client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STARTED);
      // In case there is any frame dropped before STARTED, always request for
      // a frame refresh to start the video call with.
      // Capture device will make a decision if it should refresh a frame.
      RequestRefreshFrame();
      RecordStartOutcomeUMA(media::VideoCaptureError::kNone);
    case media::mojom::VideoCaptureState::STOPPED:
      OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STOPPED");
      state_ = VIDEO_CAPTURE_STATE_STOPPED;
      // Drop shared buffers and cancel pending weak callbacks; nothing may be
      // delivered once stopped.
      client_buffers_.clear();
      weak_factory_.InvalidateWeakPtrs();
      // Clients still registered or parked means this stop was part of a
      // restart cycle.
      if (!clients_.empty() || !clients_pending_on_restart_.empty()) {
        OnLog("VideoCaptureImpl restarting capture");
    case media::mojom::VideoCaptureState::PAUSED:
      for (const auto& client : clients_)
        client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_PAUSED);
    case media::mojom::VideoCaptureState::RESUMED:
      for (const auto& client : clients_)
        client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_RESUMED);
    case media::mojom::VideoCaptureState::ENDED:
      OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_ENDED");
      // We'll only notify the client that the stream has stopped.
      for (const auto& client : clients_)
        client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STOPPED);
      state_ = VIDEO_CAPTURE_STATE_ENDED;
// mojo entry point: the host created a new shared buffer for this session.
// Wraps the handle in a BufferContext and registers it under |buffer_id|.
void VideoCaptureImpl::OnNewBuffer(
    media::mojom::blink::VideoBufferHandlePtr buffer_handle) {
  DVLOG(1) << __func__ << " buffer_id: " << buffer_id;
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  // The same buffer id must not be registered twice.
  const bool inserted =
          .emplace(buffer_id, new BufferContext(std::move(buffer_handle),
// mojo entry point: a filled capture buffer is ready for consumption.
// Validates state, fixes up timestamps/metadata, then wraps the buffer with
// a VideoFrameBufferPreparer and routes it to OnVideoFrameReady() — possibly
// via the media task runner if GPU-side binding is still required.
void VideoCaptureImpl::OnBufferReady(
    media::mojom::blink::ReadyBufferPtr buffer) {
  DVLOG(1) << __func__ << " buffer_id: " << buffer->buffer_id;
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  // Frames arriving while not STARTED are dropped and the buffer is returned
  // to the host immediately.
  if (state_ != VIDEO_CAPTURE_STATE_STARTED) {
    media::VideoCaptureFrameDropReason::kVideoCaptureImplNotInStartedState);
    GetVideoCaptureHost()->ReleaseBuffer(device_id_, buffer->buffer_id,

  base::TimeTicks reference_time = *buffer->info->metadata.reference_time;

  // Remember the arrival of the first frame (used to derive timestamps
  // below) and log it a bounded number of times.
  if (first_frame_ref_time_.is_null()) {
    first_frame_ref_time_ = reference_time;
    if (num_first_frame_logs_ < kMaxFirstFrameLogs) {
      OnLog("First frame received for this VideoCaptureImpl instance");
      num_first_frame_logs_++;
    } else if (num_first_frame_logs_ == kMaxFirstFrameLogs) {
          "First frame received for this VideoCaptureImpl instance. This will "
          "not be logged anymore for this VideoCaptureImpl instance.");
      num_first_frame_logs_++;

  // If the timestamp is not prepared, we use reference time to make a rough
  // estimate. e.g. ThreadSafeCaptureOracle::DidCaptureFrame().
  if (buffer->info->timestamp.is_zero())
    buffer->info->timestamp = reference_time - first_frame_ref_time_;

  // If the capture_begin_time was not set use the reference time. This ensures
  // there is a captureTime available for local sources for
  // requestVideoFrameCallback.
  if (!buffer->info->metadata.capture_begin_time)
    buffer->info->metadata.capture_begin_time = reference_time;

  // TODO(qiangchen): Change the metric name to "reference_time" and
  // "timestamp", so that we have consistent naming everywhere.
  // Used by chrome/browser/media/cast_mirroring_performance_browsertest.cc
  TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
                       TRACE_EVENT_SCOPE_THREAD, "timestamp",
                       (reference_time - base::TimeTicks()).InMicroseconds(),
                       "time_delta", buffer->info->timestamp.InMicroseconds());

  // Create and initialize frame preparers for the non-scaled and the scaled
  auto frame_preparer =
      std::make_unique<VideoFrameBufferPreparer>(*this, std::move(buffer));
  // Initialization failure: report the drop and return the buffer.
  if (!frame_preparer->Initialize()) {
    OnFrameDropped(media::VideoCaptureFrameDropReason::
                       kVideoCaptureImplFailedToWrapDataAsMediaVideoFrame);
    GetVideoCaptureHost()->ReleaseBuffer(
        device_id_, frame_preparer->buffer_id(), DefaultFeedback());

  // If the video frame needs to be bound we do a round-trip time to the media
  // thread, otherwise we'll go directly to OnVideoFrameReady().
  if (!frame_preparer->IsVideoFrameBound()) {
    media_task_runner_->PostTask(
        base::BindOnce(&VideoCaptureImpl::BindVideoFrameOnMediaThread,
                       gpu_factories_, std::move(frame_preparer),
                       // Route the "ready" callback back to this (IO) sequence.
                       base::BindPostTaskToCurrentDefault(base::BindOnce(
                           &VideoCaptureImpl::OnVideoFrameReady,
                           weak_factory_.GetWeakPtr(), reference_time)),
        base::BindOnce(&VideoCaptureImpl::OnGpuContextLost,
                       weak_factory_.GetWeakPtr()))));
  OnVideoFrameReady(reference_time, std::move(frame_preparer));
// Runs on the media task runner: binds the prepared frame's GPU-side
// resources via |gpu_factories| and then always hands the preparer back
// through |on_frame_ready_callback| (bound back to the IO thread by the
// caller), whether binding succeeded or not.
void VideoCaptureImpl::BindVideoFrameOnMediaThread(
    media::GpuVideoAcceleratorFactories* gpu_factories,
    std::unique_ptr<VideoFrameBufferPreparer> frame_preparer,
    base::OnceCallback<void(std::unique_ptr<VideoFrameBufferPreparer>)>
        on_frame_ready_callback,
    base::OnceCallback<void()> on_gpu_context_lost) {
  // This method should only be called when binding is needed, i.e. the frame is
  CHECK(!frame_preparer->IsVideoFrameBound());
  // Binding failure: notify about the (presumably lost) GPU context.
  if (!frame_preparer->BindVideoFrameOnMediaThread(gpu_factories)) {
    std::move(on_gpu_context_lost).Run();
    // Proceed to invoke |on_frame_ready_callback| even though we failed - it
    // takes care of dropping the frame.
  std::move(on_frame_ready_callback).Run(std::move(frame_preparer));
// Final IO-thread stage for a captured frame: drops frames that never got
// bound, finalizes the prepared frame, and delivers it to every client.
void VideoCaptureImpl::OnVideoFrameReady(
    base::TimeTicks reference_time,
    std::unique_ptr<VideoFrameBufferPreparer> frame_preparer) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  // If the frame is not bound and ready we drop it.
  if (!frame_preparer->IsVideoFrameBound()) {
    OnFrameDropped(media::VideoCaptureFrameDropReason::
                       kVideoCaptureImplFailedToWrapDataAsMediaVideoFrame);
    // Release all buffers.
    GetVideoCaptureHost()->ReleaseBuffer(
        device_id_, frame_preparer->buffer_id(), DefaultFeedback());

  // The buffer will be used. Finalize it.
  frame_preparer->Finalize();

  // TODO(qiangchen): Dive into the full code path to let frame metadata hold
  // reference time rather than using an extra parameter.
  for (const auto& client : clients_) {
    client.second.deliver_frame_cb.Run(frame_preparer->frame(), reference_time);
// mojo entry point: the host is retiring |buffer_id|; drop our registered
// BufferContext for it, asserting that nobody else still holds it.
void VideoCaptureImpl::OnBufferDestroyed(int32_t buffer_id) {
  DVLOG(1) << __func__ << " buffer_id: " << buffer_id;
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  const auto& cb_iter = client_buffers_.find(buffer_id);
  if (cb_iter != client_buffers_.end()) {
    // If the BufferContext is non-null, the GpuMemoryBuffer-backed frames can
    // have more than one reference (held by MailboxHolderReleased). Otherwise,
    // only one reference should be held.
    DCHECK(!cb_iter->second.get() ||
           cb_iter->second->buffer_type() ==
               VideoFrameBufferHandleType::kGpuMemoryBufferHandle ||
           cb_iter->second->HasOneRef())
        << "Instructed to delete buffer we are still using.";
    client_buffers_.erase(cb_iter);
// Notifies every registered client that a frame was dropped, with |reason|.
void VideoCaptureImpl::OnFrameDropped(
    media::VideoCaptureFrameDropReason reason) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  for (const auto& client : clients_) {
    client.second.frame_dropped_cb.Run(reason);
// Fans the new |sub_capture_target_version| out to every registered client.
void VideoCaptureImpl::OnNewSubCaptureTargetVersion(
    uint32_t sub_capture_target_version) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  for (const auto& client : clients_) {
    client.second.sub_capture_target_version_cb.Run(sub_capture_target_version);
// Out-of-line definition of the static constexpr member, required for
// odr-use under pre-C++17 linkage rules.
constexpr base::TimeDelta VideoCaptureImpl::kCaptureStartTimeout;
// Invoked on the IO thread once the last consumer releases a delivered
// frame; returns the underlying buffer to the host together with any
// accumulated consumer feedback.
void VideoCaptureImpl::OnAllClientsFinishedConsumingFrame(
    scoped_refptr<BufferContext> buffer_context) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  // Subtle race note: It's important that the |buffer_context| argument be
  // std::move()'ed to this method and never copied. This is so that the caller,
  // DidFinishConsumingFrame(), does not implicitly retain a reference while it
  // is running the trampoline callback on another thread. This is necessary to
  // ensure the reference count on the BufferContext will be correct at the time
  // OnBufferDestroyed() is called. http://crbug.com/797851

  // The BufferContext should have exactly two references to it at this point,
  // one is this method's second argument and the other is from
  // |client_buffers_|.
  DCHECK(!buffer_context->HasOneRef());
  BufferContext* const buffer_raw_ptr = buffer_context.get();
  buffer_context = nullptr;
  // For non-GMB case, there should be only one reference, from
  // |client_buffers_|. This DCHECK is invalid for GpuMemoryBuffer backed
  // frames, because MailboxHolderReleased may hold on to a reference to
  // |buffer_context|.
  if (buffer_raw_ptr->buffer_type() !=
      VideoFrameBufferHandleType::kGpuMemoryBufferHandle) {
    DCHECK(buffer_raw_ptr->HasOneRef());
  buffer_context = nullptr;

  // Propagate the premapped-frames requirement via the buffer feedback.
  if (require_premapped_frames_) {
    feedback_.require_mapped_frame = true;
  GetVideoCaptureHost()->ReleaseBuffer(device_id_, buffer_id, feedback_);
  // Feedback is one-shot: reset after it has been reported.
  feedback_ = media::VideoCaptureFeedback();
// Asks the host to stop the device if a session is starting or started, and
// clears the negotiated frame size so it is recomputed on the next start.
void VideoCaptureImpl::StopDevice() {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  // Only a starting or started session can be stopped.
  if (state_ != VIDEO_CAPTURE_STATE_STARTING &&
      state_ != VIDEO_CAPTURE_STATE_STARTED)
  state_ = VIDEO_CAPTURE_STATE_STOPPING;
  OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STOPPING");
  GetVideoCaptureHost()->Stop(device_id_);
  params_.requested_format.frame_size.SetSize(0, 0);
// Restarts a stopped session: promotes clients that were parked while the
// stop was in flight, picks a frame size large enough for every client's
// request, and starts the device again.
void VideoCaptureImpl::RestartCapture() {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  DCHECK_EQ(state_, VIDEO_CAPTURE_STATE_STOPPED);

  // Merge the parked clients back into the active set.
  clients_.insert(clients_pending_on_restart_.begin(),
                  clients_pending_on_restart_.end());
  clients_pending_on_restart_.clear();
  // Use the maximum requested width and height across all clients.
  for (const auto& client : clients_) {
    width = std::max(width,
                     client.second.params.requested_format.frame_size.width());
        height, client.second.params.requested_format.frame_size.height());
  params_.requested_format.frame_size.SetSize(width, height);
  DVLOG(1) << __func__ << " " << params_.requested_format.frame_size.ToString();
  StartCaptureInternal();
// Sends the actual Start request to the browser-side host and, when the
// feature is enabled, arms a deadline timer so a host that never answers is
// turned into a timeout error (see OnStartTimedout()).
void VideoCaptureImpl::StartCaptureInternal() {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  state_ = VIDEO_CAPTURE_STATE_STARTING;
  OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STARTING");

  // NOTE(review): base::Unretained(this) assumes |startup_timeout_| is a
  // member timer that cannot outlive |this| — confirm against the header.
  if (base::FeatureList::IsEnabled(kTimeoutHangingVideoCaptureStarts)) {
    startup_timeout_.Start(FROM_HERE, kCaptureStartTimeout,
                           base::BindOnce(&VideoCaptureImpl::OnStartTimedout,
                                          base::Unretained(this)));
  // A fresh start attempt: allow RecordStartOutcomeUMA() to report again.
  start_outcome_reported_ = false;
  base::UmaHistogramBoolean("Media.VideoCapture.Start", true);

  GetVideoCaptureHost()->Start(device_id_, session_id_, params_,
                               observer_receiver_.BindNewPipeAndPassRemote());
// Fired when the startup deadline elapses with no response from the host;
// funnels a timeout error through the normal OnStateChanged() path.
void VideoCaptureImpl::OnStartTimedout() {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  OnLog("VideoCaptureImpl timed out during starting");

  OnStateChanged(media::mojom::blink::VideoCaptureResult::NewErrorCode(
      media::VideoCaptureError::kVideoCaptureImplTimedOutOnStart));
1233 void VideoCaptureImpl::OnDeviceSupportedFormats(
1234 VideoCaptureDeviceFormatsCallback callback,
1235 const Vector<media::VideoCaptureFormat>& supported_formats) {
1236 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1237 std::move(callback).Run(supported_formats);
1240 void VideoCaptureImpl::OnDeviceFormatsInUse(
1241 VideoCaptureDeviceFormatsCallback callback,
1242 const Vector<media::VideoCaptureFormat>& formats_in_use) {
1243 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1244 std::move(callback).Run(formats_in_use);
// Removes |client_id| from |clients| if present, giving the departing client
// a final STOPPED notification. The return value reports whether the client
// was found in this map.
bool VideoCaptureImpl::RemoveClient(int client_id, ClientInfoMap* clients) {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);

  const ClientInfoMap::iterator it = clients->find(client_id);
  if (it == clients->end())
  // Let the removed client observe a terminal STOPPED state.
  it->second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STOPPED);
// Returns the mojo interface to the browser-side capture host, lazily
// binding the pending remote on first use. A test-injected host, when set,
// takes precedence over the real one.
media::mojom::blink::VideoCaptureHost* VideoCaptureImpl::GetVideoCaptureHost() {
  DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
  if (video_capture_host_for_testing_)
    return video_capture_host_for_testing_;

  if (!video_capture_host_.is_bound())
    video_capture_host_.Bind(std::move(pending_video_capture_host_));
  return video_capture_host_.get();
// Reports UMA for how a start attempt ended, mapping |error_code| to a
// coarse outcome bucket. Guarded so each attempt is reported at most once.
void VideoCaptureImpl::RecordStartOutcomeUMA(
    media::VideoCaptureError error_code) {
  // Record the success or failure of starting only the first time we transition
  // into such a state, not eg when resuming after pausing.
  if (!start_outcome_reported_) {
    VideoCaptureStartOutcome outcome;
    switch (error_code) {
      case media::VideoCaptureError::kNone:
        outcome = VideoCaptureStartOutcome::kStarted;
      case media::VideoCaptureError::kVideoCaptureImplTimedOutOnStart:
        outcome = VideoCaptureStartOutcome::kTimedout;
        // Any other error counts as a generic failure.
        outcome = VideoCaptureStartOutcome::kFailed;
    base::UmaHistogramEnumeration("Media.VideoCapture.StartOutcome", outcome);
    base::UmaHistogramEnumeration("Media.VideoCapture.StartErrorCode",
    start_outcome_reported_ = true;
1294 void VideoCaptureImpl::DidFinishConsumingFrame(
1295 BufferFinishedCallback callback_to_io_thread) {
1296 // Note: This function may be called on any thread by the VideoFrame
1297 // destructor. |metadata| is still valid for read-access at this point.
1298 std::move(callback_to_io_thread).Run();
1301 void VideoCaptureImpl::ProcessFeedback(
1302 const media::VideoCaptureFeedback& feedback) {
1303 DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1304 feedback_ = feedback;
1307 void VideoCaptureImpl::RequirePremappedFrames() {
1308 require_premapped_frames_ = true;
// Builds the feedback payload used when releasing a buffer without explicit
// consumer feedback; it still carries the premapped-frames requirement.
media::VideoCaptureFeedback VideoCaptureImpl::DefaultFeedback() {
  media::VideoCaptureFeedback feedback;
  feedback.require_mapped_frame = require_premapped_frames_;
// Hands out a weak pointer to this instance; such pointers are invalidated
// when capture stops (see weak_factory_.InvalidateWeakPtrs() in
// StopCapture() and the STOPPED handling in OnStateChanged()).
base::WeakPtr<VideoCaptureImpl> VideoCaptureImpl::GetWeakPtr() {
1321 } // namespace blink