1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Notes about usage of this object by VideoCaptureImplManager.
6 //
7 // VideoCaptureImplManager accesses this object using an Unretained()
8 // binding and tasks posted on the IO thread. It is therefore important
9 // that VideoCaptureImpl never posts tasks to itself. All operations must
10 // be synchronous.
11
12 #include "third_party/blink/renderer/platform/video_capture/video_capture_impl.h"
13
14 #include <stddef.h>
15 #include <algorithm>
16 #include <memory>
17 #include <utility>
18
19 #include <GLES2/gl2extchromium.h>
20 #include "base/feature_list.h"
21 #include "base/functional/bind.h"
22 #include "base/functional/callback.h"
23 #include "base/functional/callback_helpers.h"
24 #include "base/logging.h"
25 #include "base/memory/raw_ptr.h"
26 #include "base/metrics/histogram_functions.h"
27 #include "base/task/bind_post_task.h"
28 #include "base/task/sequenced_task_runner.h"
29 #include "base/token.h"
30 #include "base/trace_event/trace_event.h"
31 #include "build/build_config.h"
32 #include "gpu/command_buffer/client/client_shared_image.h"
33 #include "gpu/command_buffer/client/shared_image_interface.h"
34 #include "gpu/command_buffer/common/shared_image_usage.h"
35 #include "gpu/ipc/common/gpu_memory_buffer_support.h"
36 #include "media/base/limits.h"
37 #include "media/base/media_switches.h"
38 #include "media/base/video_frame.h"
39 #include "media/capture/mojom/video_capture_buffer.mojom-blink.h"
40 #include "media/capture/mojom/video_capture_types.mojom-blink.h"
41 #include "media/capture/video_capture_types.h"
42 #include "media/video/gpu_video_accelerator_factories.h"
43 #include "third_party/blink/public/common/browser_interface_broker_proxy.h"
44 #include "third_party/blink/public/common/thread_safe_browser_interface_broker_proxy.h"
45 #include "third_party/blink/public/platform/platform.h"
46 #include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
47 #include "third_party/blink/renderer/platform/wtf/vector.h"
48
49 #if BUILDFLAG(IS_MAC)
50 #include "media/base/mac/video_frame_mac.h"
51 #endif  // BUILDFLAG(IS_MAC)
52
53 #if BUILDFLAG(IS_WIN)
54 #include "gpu/command_buffer/common/shared_image_capabilities.h"
55 #endif  // BUILDFLAG(IS_WIN)
56
57 namespace blink {
58
59 constexpr int kMaxFirstFrameLogs = 5;
60
61 BASE_FEATURE(kTimeoutHangingVideoCaptureStarts,
62              "TimeoutHangingVideoCaptureStarts",
63              base::FEATURE_ENABLED_BY_DEFAULT);
64
65 using VideoFrameBufferHandleType = media::mojom::blink::VideoBufferHandle::Tag;
66
67 // A collection of all types of handles that we use to reference a camera
68 // buffer backed by a GpuMemoryBuffer.
69 struct GpuMemoryBufferResources {
70   explicit GpuMemoryBufferResources(gfx::GpuMemoryBufferHandle handle)
71       : gpu_memory_buffer_handle(std::move(handle)) {}
72   // Stores the GpuMemoryBufferHandle when a new buffer is first registered.
73   // |gpu_memory_buffer_handle| is converted to |gpu_memory_buffer| below when
74   // the camera frame is ready for the first time.
75   gfx::GpuMemoryBufferHandle gpu_memory_buffer_handle;
76   // The GpuMemoryBuffer backing the camera frame.
77   std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer;
78   // The SharedImage created from |gpu_memory_buffer|.
79   gpu::Mailbox mailboxes[media::VideoFrame::kMaxPlanes];
80   // The release sync token for |mailboxes|.
81   gpu::SyncToken release_sync_token;
82 };
83
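// Per-buffer state shared between VideoCaptureImpl and the VideoFrames that
// wrap the buffer. It is ref-counted so that the shared memory mapping,
// mailboxes or GpuMemoryBuffer it owns stay valid until the last VideoFrame
// referencing the buffer has been destroyed.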
84 struct VideoCaptureImpl::BufferContext
85     : public base::RefCountedThreadSafe<BufferContext> {
86  public:
87   BufferContext(media::mojom::blink::VideoBufferHandlePtr buffer_handle,
88                 scoped_refptr<base::SequencedTaskRunner> media_task_runner)
89       : buffer_type_(buffer_handle->which()),
90         media_task_runner_(media_task_runner) {
91     switch (buffer_type_) {
92       case VideoFrameBufferHandleType::kUnsafeShmemRegion:
93         InitializeFromUnsafeShmemRegion(
94             std::move(buffer_handle->get_unsafe_shmem_region()));
95         break;
96       case VideoFrameBufferHandleType::kReadOnlyShmemRegion:
97         InitializeFromReadOnlyShmemRegion(
98             std::move(buffer_handle->get_read_only_shmem_region()));
99         break;
100       case VideoFrameBufferHandleType::kMailboxHandles:
101         InitializeFromMailbox(std::move(buffer_handle->get_mailbox_handles()));
102         break;
103       case VideoFrameBufferHandleType::kGpuMemoryBufferHandle:
104 #if !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_WIN)
105         // On macOS, an IOSurface passed as a GpuMemoryBufferHandle can be
106         // used by both the hardware and software paths.
107         // https://crbug.com/1125879
108         // On Windows, GMBs might be passed by the capture process even if
109         // acceleration is disabled during the capture.
110         CHECK(media_task_runner_);
111 #endif
112         InitializeFromGpuMemoryBufferHandle(
113             std::move(buffer_handle->get_gpu_memory_buffer_handle()));
114         break;
115     }
116   }
117   BufferContext(const BufferContext&) = delete;
118   BufferContext& operator=(const BufferContext&) = delete;
119
120   VideoFrameBufferHandleType buffer_type() const { return buffer_type_; }
121   const uint8_t* data() const { return data_; }
122   size_t data_size() const { return data_size_; }
123   const base::ReadOnlySharedMemoryRegion* read_only_shmem_region() const {
124     return &read_only_shmem_region_;
125   }
126   const Vector<gpu::MailboxHolder>& mailbox_holders() const {
127     return mailbox_holders_;
128   }
129   media::GpuVideoAcceleratorFactories* gpu_factories() const {
130     return gpu_factories_;
131   }
132   void SetGpuFactories(media::GpuVideoAcceleratorFactories* gpu_factories) {
133     gpu_factories_ = gpu_factories;
134   }
135   GpuMemoryBufferResources* gmb_resources() const {
136     return gmb_resources_.get();
137   }
138
139   gfx::GpuMemoryBufferHandle TakeGpuMemoryBufferHandle() {
140 #if BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_WIN)
141     // The same GpuMemoryBufferHandle will be reused repeatedly by the
142     // unaccelerated macOS path, and each of those uses calls this function,
143     // so this function must not invalidate the handle on macOS.
144     // https://crbug.com/1159722
145     // The handle is also reused repeatedly if the GPU process is unavailable
146     // in the Windows zero-copy path (e.g. due to repeated GPU process
147     // crashes).
148     return gmb_resources_->gpu_memory_buffer_handle.Clone();
149 #else
150     return std::move(gmb_resources_->gpu_memory_buffer_handle);
151 #endif
152   }
153
154   void SetGpuMemoryBuffer(
155       std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer) {
156     gmb_resources_->gpu_memory_buffer = std::move(gpu_memory_buffer);
157   }
158   gfx::GpuMemoryBuffer* GetGpuMemoryBuffer() {
159     return gmb_resources_->gpu_memory_buffer.get();
160   }
161
162   static void MailboxHolderReleased(
163       scoped_refptr<BufferContext> buffer_context,
164       const gpu::SyncToken& release_sync_token,
165       std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer) {
166     if (!buffer_context->media_task_runner_->RunsTasksInCurrentSequence()) {
167       buffer_context->media_task_runner_->PostTask(
168           FROM_HERE,
169           base::BindOnce(&BufferContext::MailboxHolderReleased, buffer_context,
170                          release_sync_token, std::move(gpu_memory_buffer)));
171       return;
172     }
173     buffer_context->gmb_resources_->release_sync_token = release_sync_token;
174     // Free |gpu_memory_buffer|.
175   }
176
177   static void DestroyTextureOnMediaThread(
178       media::GpuVideoAcceleratorFactories* gpu_factories,
179       gpu::Mailbox mailbox,
180       gpu::SyncToken release_sync_token) {
181     if (!mailbox.IsZero()) {
182       auto* sii = gpu_factories->SharedImageInterface();
183       if (!sii)
184         return;
185       sii->DestroySharedImage(release_sync_token, mailbox);
186     }
187   }
188
189   // Public because it may also be called after initialization, to wrap
190   // premapped GMBs when the GPU process dies on Windows.
191   void InitializeFromUnsafeShmemRegion(base::UnsafeSharedMemoryRegion region) {
192     DCHECK(region.IsValid());
193     backup_mapping_ = region.Map();
194     DCHECK(backup_mapping_.IsValid());
195     data_ = backup_mapping_.GetMemoryAsSpan<uint8_t>().data();
196     data_size_ = backup_mapping_.size();
197   }
198
199  private:
200   void InitializeFromReadOnlyShmemRegion(
201       base::ReadOnlySharedMemoryRegion region) {
202     DCHECK(region.IsValid());
203     read_only_mapping_ = region.Map();
204     DCHECK(read_only_mapping_.IsValid());
205     data_ = read_only_mapping_.GetMemoryAsSpan<uint8_t>().data();
206     data_size_ = read_only_mapping_.size();
207     read_only_shmem_region_ = std::move(region);
208   }
209
210   void InitializeFromMailbox(
211       media::mojom::blink::MailboxBufferHandleSetPtr mailbox_handles) {
212     DCHECK_EQ(media::VideoFrame::kMaxPlanes,
213               mailbox_handles->mailbox_holder.size());
214     mailbox_holders_ = std::move(mailbox_handles->mailbox_holder);
215   }
216
217   void InitializeFromGpuMemoryBufferHandle(
218       gfx::GpuMemoryBufferHandle gpu_memory_buffer_handle) {
219     gmb_resources_ = std::make_unique<GpuMemoryBufferResources>(
220         std::move(gpu_memory_buffer_handle));
221   }
222
223   friend class base::RefCountedThreadSafe<BufferContext>;
224   virtual ~BufferContext() {
225     if (!gmb_resources_)
226       return;
227     for (size_t plane = 0; plane < media::VideoFrame::kMaxPlanes; ++plane) {
228       if (!gmb_resources_->mailboxes[plane].IsSharedImage())
229         continue;
230       media_task_runner_->PostTask(
231           FROM_HERE,
232           base::BindOnce(&BufferContext::DestroyTextureOnMediaThread,
233                          gpu_factories_, gmb_resources_->mailboxes[plane],
234                          gmb_resources_->release_sync_token));
235     }
236   }
237
238   VideoFrameBufferHandleType buffer_type_;
239
240   // Only valid for |buffer_type_ == SHARED_BUFFER_HANDLE|.
241   base::WritableSharedMemoryMapping writable_mapping_;
242
243   // Only valid for |buffer_type_ == READ_ONLY_SHMEM_REGION|.
244   base::ReadOnlySharedMemoryRegion read_only_shmem_region_;
245   base::ReadOnlySharedMemoryMapping read_only_mapping_;
246
247   // Filled in by InitializeFromUnsafeShmemRegion(): used for
248   // |buffer_type_ == kUnsafeShmemRegion| and, on Windows, when the GMB
249   // comes premapped from the capturer.
250   base::WritableSharedMemoryMapping backup_mapping_;
251
252   // These point into one of the above mappings, which stays mapped for the
253   // lifetime of this object.
254   const uint8_t* data_ = nullptr;
255   size_t data_size_ = 0;
256
257   // Only valid for |buffer_type_ == MAILBOX_HANDLES|.
258   Vector<gpu::MailboxHolder> mailbox_holders_;
259
260   // The following is for |buffer_type == GPU_MEMORY_BUFFER_HANDLE|.
261
262   // Used to create SharedImages from |gpu_memory_buffer_|.
263   raw_ptr<media::GpuVideoAcceleratorFactories, ExperimentalRenderer>
264       gpu_factories_ = nullptr;
265   // The task runner that |gpu_factories_| runs on.
266   const scoped_refptr<base::SequencedTaskRunner> media_task_runner_;
267
268   std::unique_ptr<GpuMemoryBufferResources> gmb_resources_;
269 };
270
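// VideoFrameBufferPreparer turns a ReadyBuffer received from the capture host
// into a media::VideoFrame. Shared-memory-backed buffers are wrapped directly
// in Initialize(); GpuMemoryBuffer-backed buffers additionally require
// BindVideoFrameOnMediaThread() to create or refresh SharedImages on the media
// thread before the frame can be delivered to clients.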
271 VideoCaptureImpl::VideoFrameBufferPreparer::VideoFrameBufferPreparer(
272     VideoCaptureImpl& video_capture_impl,
273     media::mojom::blink::ReadyBufferPtr ready_buffer)
274     : video_capture_impl_(video_capture_impl),
275       buffer_id_(ready_buffer->buffer_id),
276       frame_info_(std::move(ready_buffer->info)) {}
277
278 int32_t VideoCaptureImpl::VideoFrameBufferPreparer::buffer_id() const {
279   return buffer_id_;
280 }
281
282 const media::mojom::blink::VideoFrameInfoPtr&
283 VideoCaptureImpl::VideoFrameBufferPreparer::frame_info() const {
284   return frame_info_;
285 }
286
287 scoped_refptr<media::VideoFrame>
288 VideoCaptureImpl::VideoFrameBufferPreparer::frame() const {
289   return frame_;
290 }
291
292 scoped_refptr<VideoCaptureImpl::BufferContext>
293 VideoCaptureImpl::VideoFrameBufferPreparer::buffer_context() const {
294   return buffer_context_;
295 }
296
297 bool VideoCaptureImpl::VideoFrameBufferPreparer::Initialize() {
298   // Prior to initializing, |frame_| and |gpu_memory_buffer_| are null.
299   DCHECK(!frame_ && !gpu_memory_buffer_);
300   const auto& iter = video_capture_impl_->client_buffers_.find(buffer_id_);
301   DCHECK(iter != video_capture_impl_->client_buffers_.end());
302   buffer_context_ = iter->second;
303   switch (buffer_context_->buffer_type()) {
304     case VideoFrameBufferHandleType::kUnsafeShmemRegion:
305       // The frame is backed by a writable (unsafe) shared memory handle, but
306       // as it is not sent cross-process, the region does not need to be
307       // attached to the frame. See also the case for kReadOnlyShmemRegion.
308       if (frame_info_->strides) {
309         CHECK(IsYuvPlanar(frame_info_->pixel_format) &&
310               (media::VideoFrame::NumPlanes(frame_info_->pixel_format) == 3))
311             << "Currently, only YUV formats support custom strides.";
312         uint8_t* y_data = const_cast<uint8_t*>(buffer_context_->data());
313         uint8_t* u_data =
314             y_data + (media::VideoFrame::Rows(
315                           media::VideoFrame::kYPlane, frame_info_->pixel_format,
316                           frame_info_->coded_size.height()) *
317                       frame_info_->strides->stride_by_plane[0]);
318         uint8_t* v_data =
319             u_data + (media::VideoFrame::Rows(
320                           media::VideoFrame::kUPlane, frame_info_->pixel_format,
321                           frame_info_->coded_size.height()) *
322                       frame_info_->strides->stride_by_plane[1]);
323         frame_ = media::VideoFrame::WrapExternalYuvData(
324             frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
325             gfx::Rect(frame_info_->visible_rect),
326             frame_info_->visible_rect.size(),
327             frame_info_->strides->stride_by_plane[0],
328             frame_info_->strides->stride_by_plane[1],
329             frame_info_->strides->stride_by_plane[2], y_data, u_data, v_data,
330             frame_info_->timestamp);
331       } else {
332         frame_ = media::VideoFrame::WrapExternalData(
333             frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
334             gfx::Rect(frame_info_->visible_rect),
335             frame_info_->visible_rect.size(),
336             const_cast<uint8_t*>(buffer_context_->data()),
337             buffer_context_->data_size(), frame_info_->timestamp);
338       }
339       break;
340     case VideoFrameBufferHandleType::kReadOnlyShmemRegion:
341       // As with the kUnsafeShmemRegion type, it is sufficient to just wrap
342       // the data without attaching the shared region to the frame.
343       frame_ = media::VideoFrame::WrapExternalData(
344           frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
345           gfx::Rect(frame_info_->visible_rect),
346           frame_info_->visible_rect.size(),
347           const_cast<uint8_t*>(buffer_context_->data()),
348           buffer_context_->data_size(), frame_info_->timestamp);
349       frame_->BackWithSharedMemory(buffer_context_->read_only_shmem_region());
350       break;
351     case VideoFrameBufferHandleType::kMailboxHandles: {
352       gpu::MailboxHolder mailbox_holder_array[media::VideoFrame::kMaxPlanes];
353       CHECK_EQ(media::VideoFrame::kMaxPlanes,
354                buffer_context_->mailbox_holders().size());
355       for (int i = 0; i < media::VideoFrame::kMaxPlanes; i++) {
356         mailbox_holder_array[i] = buffer_context_->mailbox_holders()[i];
357       }
358       frame_ = media::VideoFrame::WrapNativeTextures(
359           frame_info_->pixel_format, mailbox_holder_array,
360           media::VideoFrame::ReleaseMailboxCB(),
361           gfx::Size(frame_info_->coded_size),
362           gfx::Rect(frame_info_->visible_rect),
363           frame_info_->visible_rect.size(), frame_info_->timestamp);
364       break;
365     }
366     case VideoFrameBufferHandleType::kGpuMemoryBufferHandle: {
367 #if BUILDFLAG(IS_APPLE)
368       // On macOS, an IOSurface passed as a GpuMemoryBufferHandle can be
369       // used by both the hardware and software paths.
370       // https://crbug.com/1125879
371       if (!video_capture_impl_->gpu_factories_ ||
372           !video_capture_impl_->media_task_runner_) {
373         frame_ = media::VideoFrame::WrapUnacceleratedIOSurface(
374             buffer_context_->TakeGpuMemoryBufferHandle(),
375             gfx::Rect(frame_info_->visible_rect), frame_info_->timestamp);
376         break;
377       }
378 #endif
379 #if BUILDFLAG(IS_WIN)
380       // The associated shared memory region is mapped only once.
381       if (frame_info_->is_premapped && !buffer_context_->data()) {
382         auto gmb_handle = buffer_context_->TakeGpuMemoryBufferHandle();
383         buffer_context_->InitializeFromUnsafeShmemRegion(
384             std::move(gmb_handle.region));
385         DCHECK(buffer_context_->data());
386       }
387       // On Windows it can happen that the renderer process loses its GPU
388       // connection while the capturer process continues to produce
389       // GPU-backed frames.
390       if (!video_capture_impl_->gpu_factories_ ||
391           !video_capture_impl_->media_task_runner_ ||
392           video_capture_impl_->gmb_not_supported_) {
393         video_capture_impl_->RequirePremappedFrames();
394         if (!frame_info_->is_premapped || !buffer_context_->data()) {
395           // If the frame isn't premapped, can't do anything here.
396           return false;
397         }
398
399         frame_ = media::VideoFrame::WrapExternalData(
400             frame_info_->pixel_format, gfx::Size(frame_info_->coded_size),
401             gfx::Rect(frame_info_->visible_rect),
402             frame_info_->visible_rect.size(),
403             const_cast<uint8_t*>(buffer_context_->data()),
404             buffer_context_->data_size(), frame_info_->timestamp);
405
406         if (!frame_) {
407           return false;
408         }
409         break;
410       }
411 #endif
412       CHECK(video_capture_impl_->gpu_factories_);
413       CHECK(video_capture_impl_->media_task_runner_);
414       // Create GpuMemoryBuffer from handle.
415       if (!buffer_context_->GetGpuMemoryBuffer()) {
416         gfx::BufferFormat gfx_format;
417         switch (frame_info_->pixel_format) {
418           case media::VideoPixelFormat::PIXEL_FORMAT_NV12:
419             gfx_format = gfx::BufferFormat::YUV_420_BIPLANAR;
420             break;
421           default:
422             LOG(FATAL) << "Unsupported pixel format";
423             return false;
424         }
425         // The GpuMemoryBuffer is allocated and owned by the video capture
426         // buffer pool in the video capture service process, so we don't need
427         // to destroy the GpuMemoryBuffer here.
428         auto gmb =
429             video_capture_impl_->gpu_memory_buffer_support_
430                 ->CreateGpuMemoryBufferImplFromHandle(
431                     buffer_context_->TakeGpuMemoryBufferHandle(),
432                     gfx::Size(frame_info_->coded_size), gfx_format,
433                     gfx::BufferUsage::SCANOUT_VEA_CPU_READ, base::DoNothing(),
434                     video_capture_impl_->gpu_factories_
435                         ->GpuMemoryBufferManager(),
436                     video_capture_impl_->pool_);
437
438         // Keep one GpuMemoryBuffer for the current GpuMemoryBufferHandle
439         // alive, so that any associated structures are kept alive while this
440         // buffer id is still in use (e.g. DMA-buf handles on Linux/CrOS).
441         buffer_context_->SetGpuMemoryBuffer(std::move(gmb));
442       }
443       CHECK(buffer_context_->GetGpuMemoryBuffer());
444
445       auto buffer_handle = buffer_context_->GetGpuMemoryBuffer()->CloneHandle();
446 #if BUILDFLAG(IS_CHROMEOS)
447       is_webgpu_compatible_ =
448           buffer_handle.native_pixmap_handle.supports_zero_copy_webgpu_import;
449 #endif
450
451 #if BUILDFLAG(IS_MAC)
452       is_webgpu_compatible_ =
453           media::IOSurfaceIsWebGPUCompatible(buffer_handle.io_surface.get());
454 #endif
455       // No need to propagate the shared memory region further, as it's
456       // already exposed by |buffer_context_->data()|.
457       buffer_handle.region = base::UnsafeSharedMemoryRegion();
458       // The buffer_context_ might still have a mapped shared memory region.
459       // However, it contains valid data only if |is_premapped| is set.
460       uint8_t* premapped_data =
461           frame_info_->is_premapped
462               ? const_cast<uint8_t*>(buffer_context_->data())
463               : nullptr;
464
465       // Clone the handle into a GpuMemoryBuffer to later wrap in a VideoFrame.
466       gpu_memory_buffer_ =
467           video_capture_impl_->gpu_memory_buffer_support_
468               ->CreateGpuMemoryBufferImplFromHandle(
469                   std::move(buffer_handle),
470                   buffer_context_->GetGpuMemoryBuffer()->GetSize(),
471                   buffer_context_->GetGpuMemoryBuffer()->GetFormat(),
472                   gfx::BufferUsage::SCANOUT_VEA_CPU_READ, base::DoNothing(),
473                   video_capture_impl_->gpu_factories_->GpuMemoryBufferManager(),
474                   video_capture_impl_->pool_,
475                   base::span<uint8_t>(premapped_data,
476                                       buffer_context_->data_size()));
477       if (!gpu_memory_buffer_) {
478         LOG(ERROR) << "Failed to open GpuMemoryBuffer handle";
479         return false;
480       }
481     }
482   }
483   // After initializing, either |frame_| or |gpu_memory_buffer_| has been set.
484   DCHECK(frame_ || gpu_memory_buffer_);
485   return true;
486 }
487
488 bool VideoCaptureImpl::VideoFrameBufferPreparer::IsVideoFrameBound() const {
489   return frame_.get();
490 }
491
492 // Creates SharedImage mailboxes for |gpu_memory_buffer_| and wraps the
493 // mailboxes with the buffer handles in a GpuMemoryBuffer-backed VideoFrame.
494 // The consumer of the VideoFrame can access the data either through the
495 // mailboxes (e.g. display) or through the DMA-buf FDs (e.g. video encoder).
496 bool VideoCaptureImpl::VideoFrameBufferPreparer::BindVideoFrameOnMediaThread(
497     media::GpuVideoAcceleratorFactories* gpu_factories) {
498   DCHECK(gpu_factories);
499   DCHECK(!IsVideoFrameBound());
500   DCHECK_EQ(frame_info_->pixel_format, media::PIXEL_FORMAT_NV12);
501
502   bool should_recreate_shared_image = false;
503   if (gpu_factories != buffer_context_->gpu_factories()) {
504     DVLOG(1) << "GPU context changed; re-creating SharedImage objects";
505     buffer_context_->SetGpuFactories(gpu_factories);
506     should_recreate_shared_image = true;
507   }
508 #if BUILDFLAG(IS_WIN)
509   // If the renderer is running in D3D9 mode, e.g. due to driver bug
510   // workarounds, DXGI D3D11 textures won't be supported.
511   // This can't be checked from ::Initialize() since the media context
512   // provider can be accessed only on the media thread.
513   gpu::SharedImageInterface* shared_image_interface =
514       gpu_factories->SharedImageInterface();
515   if (!shared_image_interface ||
516       !shared_image_interface->GetCapabilities().shared_image_d3d) {
517     video_capture_impl_->RequirePremappedFrames();
518     video_capture_impl_->gmb_not_supported_ = true;
519     return false;
520   }
521 #endif
522
523   // Create GPU texture and bind GpuMemoryBuffer to the texture.
524   auto* sii = buffer_context_->gpu_factories()->SharedImageInterface();
525   if (!sii) {
526     DVLOG(1) << "GPU context lost";
527     return false;
528   }
529   // Don't check VideoFrameOutputFormat until we ensure the context has not
530   // been lost (if it is lost, then the format will be UNKNOWN).
531   const auto output_format =
532       buffer_context_->gpu_factories()->VideoFrameOutputFormat(
533           frame_info_->pixel_format);
534   DCHECK(
535       output_format ==
536           media::GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB ||
537       output_format ==
538           media::GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB);
539
540   std::vector<gfx::BufferPlane> planes;
541
542   uint32_t usage =
543       gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER |
544       gpu::SHARED_IMAGE_USAGE_DISPLAY_READ | gpu::SHARED_IMAGE_USAGE_SCANOUT;
545 #if BUILDFLAG(IS_APPLE)
546   usage |= gpu::SHARED_IMAGE_USAGE_MACOS_VIDEO_TOOLBOX;
547 #endif
548 #if BUILDFLAG(IS_CHROMEOS)
549   usage |= gpu::SHARED_IMAGE_USAGE_WEBGPU;
550 #endif
551
552   // The feature flags here are a little subtle:
553   // * IsMultiPlaneFormatForHardwareVideoEnabled() controls whether Multiplanar
554   //   SI is used (i.e., whether a single SharedImage is created via passing a
555   //   viz::MultiPlaneFormat rather than the legacy codepath of passing a
556   //   GMB).
557   // * kMultiPlaneVideoCaptureSharedImages controls whether planes are sampled
558   //   individually rather than using external sampling.
559   //
560   // These two flags are orthogonal:
561   // * If both flags are true, one SharedImage with format MultiPlaneFormat::
562   //   kNV12 will be created.
563   // * If using multiplane SI without per-plane sampling, one SharedImage with
564   //   format MultiPlaneFormat::kNV12 configured to use external sampling
565   //   will be created (this is supported only on Ozone-based platforms and
566   //   not expected to be requested on other platforms).
567   // * If using per-plane sampling without multiplane SI, one SharedImage will
568   //   be created for each plane via the legacy "pass GMB" entrypoint.
569   // * If both flags are false, one SharedImage will be created via the legacy
570   //   "pass GMB" entrypoint (this uses external sampling on the other side
571   //   based on the format of the GMB).
572   bool create_multiplanar_image =
573       media::IsMultiPlaneFormatForHardwareVideoEnabled();
574   bool use_per_plane_sampling =
575       base::FeatureList::IsEnabled(media::kMultiPlaneVideoCaptureSharedImages);
576 #if BUILDFLAG(IS_WIN) || BUILDFLAG(IS_MAC)
577   // External sampling isn't supported on Windows/Mac with Multiplane SI. (It
578   // isn't supported with legacy SI either, but the CHECK is restricted to
579   // Multiplane SI because the legacy SI flow is more nebulous and we want to
580   // limit any impact to the Multiplane SI flow.)
581   //
582   // NOTE: This CHECK would ideally be done if !BUILDFLAG(IS_OZONE), but this
583   // codepath is entered in tests for Android, which does not have
584   // kMultiPlaneVideoCaptureSharedImages set. This codepath is not entered in
585   // production for Android (see
586   // https://chromium-review.googlesource.com/c/chromium/src/+/4640009/comment/29c99ef9_587e49dc/
587   // for a detailed discussion).
588   CHECK(!create_multiplanar_image || use_per_plane_sampling);
589 #endif
590
591   if (create_multiplanar_image || !use_per_plane_sampling) {
592     planes.push_back(gfx::BufferPlane::DEFAULT);
593   } else {
594     // Using per-plane sampling without multiplane SI.
595     planes.push_back(gfx::BufferPlane::Y);
596     planes.push_back(gfx::BufferPlane::UV);
597   }
598   CHECK(planes.size() == 1 || !create_multiplanar_image);
599
600   for (size_t plane = 0; plane < planes.size(); ++plane) {
601     if (should_recreate_shared_image ||
602         buffer_context_->gmb_resources()->mailboxes[plane].IsZero()) {
603       auto multiplanar_si_format = viz::MultiPlaneFormat::kNV12;
604 #if BUILDFLAG(IS_OZONE)
605       if (!use_per_plane_sampling) {
606         multiplanar_si_format.SetPrefersExternalSampler();
607       }
608 #endif
609       CHECK_EQ(gpu_memory_buffer_->GetFormat(),
610                gfx::BufferFormat::YUV_420_BIPLANAR);
611       if (create_multiplanar_image) {
612         auto client_shared_image = sii->CreateSharedImage(
613             multiplanar_si_format, gpu_memory_buffer_->GetSize(),
614             frame_info_->color_space, kTopLeft_GrSurfaceOrigin,
615             kPremul_SkAlphaType, usage, "VideoCaptureFrameBuffer",
616             gpu_memory_buffer_->CloneHandle());
617         CHECK(client_shared_image);
618         buffer_context_->gmb_resources()->mailboxes[plane] =
619             client_shared_image->mailbox();
620       } else {
621         buffer_context_->gmb_resources()->mailboxes[plane] =
622             sii->CreateSharedImage(
623                 gpu_memory_buffer_.get(),
624                 buffer_context_->gpu_factories()->GpuMemoryBufferManager(),
625                 planes[plane], frame_info_->color_space,
626                 kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, usage,
627                 "VideoCaptureFrameBuffer");
628       }
629     } else {
630       sii->UpdateSharedImage(
631           buffer_context_->gmb_resources()->release_sync_token,
632           buffer_context_->gmb_resources()->mailboxes[plane]);
633     }
634   }
635
636   const unsigned texture_target =
637 #if BUILDFLAG(IS_LINUX)
638       // Explicitly use GL_TEXTURE_EXTERNAL_OES, as
639       // `media::VideoFrame::RequiresExternalSampler()` requires it for the
640       // NV12 format, whereas `ImageTextureTarget()` would return GL_TEXTURE_2D.
641       (frame_info_->pixel_format == media::PIXEL_FORMAT_NV12)
642           ? GL_TEXTURE_EXTERNAL_OES
643           :
644 #endif
645           buffer_context_->gpu_factories()->ImageTextureTarget(
646               gpu_memory_buffer_->GetFormat());
647
648   const gpu::SyncToken sync_token = sii->GenVerifiedSyncToken();
649
650   gpu::MailboxHolder mailbox_holder_array[media::VideoFrame::kMaxPlanes];
651   for (size_t plane = 0; plane < planes.size(); ++plane) {
652     DCHECK(!buffer_context_->gmb_resources()->mailboxes[plane].IsZero());
653     DCHECK(buffer_context_->gmb_resources()->mailboxes[plane].IsSharedImage());
654     mailbox_holder_array[plane] =
655         gpu::MailboxHolder(buffer_context_->gmb_resources()->mailboxes[plane],
656                            sync_token, texture_target);
657   }
658
659   const auto gmb_size = gpu_memory_buffer_->GetSize();
660   frame_ = media::VideoFrame::WrapExternalGpuMemoryBuffer(
661       gfx::Rect(frame_info_->visible_rect), gmb_size,
662       std::move(gpu_memory_buffer_), mailbox_holder_array,
663       base::BindOnce(&BufferContext::MailboxHolderReleased, buffer_context_),
664       frame_info_->timestamp);
665   if (!frame_) {
666     LOG(ERROR) << "Can't wrap GpuMemoryBuffer as VideoFrame";
667     return false;
668   }
669
670   // If we created a single multiplanar image, inform the VideoFrame that it
671   // should go down the normal SharedImageFormat codepath rather than the
672   // codepath used for legacy multiplanar formats.
673   if (create_multiplanar_image) {
674     frame_->set_shared_image_format_type(
675         use_per_plane_sampling
676             ? media::SharedImageFormatType::kSharedImageFormat
677             : media::SharedImageFormatType::kSharedImageFormatExternalSampler);
678   }
679
680   frame_->metadata().allow_overlay = true;
681   frame_->metadata().read_lock_fences_enabled = true;
682 #if BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_MAC)
683   frame_->metadata().is_webgpu_compatible = is_webgpu_compatible_;
684 #endif
685   return true;
686 }
687
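// Attaches a destruction observer that, once every client has finished
// consuming the frame, posts back to the IO thread so the buffer can be
// released to the capture host; also copies the color space and metadata
// onto the frame.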
688 void VideoCaptureImpl::VideoFrameBufferPreparer::Finalize() {
689   DCHECK(frame_);
690   frame_->AddDestructionObserver(
691       base::BindOnce(&VideoCaptureImpl::DidFinishConsumingFrame,
692                      base::BindPostTaskToCurrentDefault(base::BindOnce(
693                          &VideoCaptureImpl::OnAllClientsFinishedConsumingFrame,
694                          video_capture_impl_->weak_factory_.GetWeakPtr(),
695                          buffer_id_, buffer_context_))));
696   if (frame_info_->color_space.IsValid()) {
697     frame_->set_color_space(frame_info_->color_space);
698   }
699   frame_->metadata().MergeMetadataFrom(frame_info_->metadata);
700 }
701
702 // Information about a video capture client of ours.
703 struct VideoCaptureImpl::ClientInfo {
704   ClientInfo() = default;
705   ClientInfo(const ClientInfo& other) = default;
706   ~ClientInfo() = default;
707
708   media::VideoCaptureParams params;
709   VideoCaptureStateUpdateCB state_update_cb;
710   VideoCaptureDeliverFrameCB deliver_frame_cb;
711   VideoCaptureSubCaptureTargetVersionCB sub_capture_target_version_cb;
712   VideoCaptureNotifyFrameDroppedCB frame_dropped_cb;
713 };
714
715 VideoCaptureImpl::VideoCaptureImpl(
716     media::VideoCaptureSessionId session_id,
717     scoped_refptr<base::SequencedTaskRunner> main_task_runner,
718     BrowserInterfaceBrokerProxy* browser_interface_broker)
719     : device_id_(session_id),
720       session_id_(session_id),
721       video_capture_host_for_testing_(nullptr),
722       state_(blink::VIDEO_CAPTURE_STATE_STOPPED),
723       main_task_runner_(std::move(main_task_runner)),
724       gpu_memory_buffer_support_(new gpu::GpuMemoryBufferSupport()),
725       pool_(base::MakeRefCounted<base::UnsafeSharedMemoryPool>()) {
726   CHECK(!session_id.is_empty());
727   DCHECK(main_task_runner_->RunsTasksInCurrentSequence());
728   DETACH_FROM_THREAD(io_thread_checker_);
729
730   browser_interface_broker->GetInterface(
731       pending_video_capture_host_.InitWithNewPipeAndPassReceiver());
732
733   gpu_factories_ = Platform::Current()->GetGpuFactories();
734   if (gpu_factories_) {
735     media_task_runner_ = gpu_factories_->GetTaskRunner();
736   }
737   weak_this_ = weak_factory_.GetWeakPtr();
738 }
739
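// Invoked on the main task runner when binding a frame on the media thread
// fails, which typically means the GPU context was lost. Re-queries the
// current GpuVideoAcceleratorFactories and forwards the handle to the IO
// thread via SetGpuFactoriesHandleOnIOTaskRunner().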
740 void VideoCaptureImpl::OnGpuContextLost(
741     base::WeakPtr<VideoCaptureImpl> video_capture_impl) {
742   // Called on the main task runner.
743   auto* gpu_factories = Platform::Current()->GetGpuFactories();
744   Platform::Current()->GetIOTaskRunner()->PostTask(
745       FROM_HERE,
746       base::BindOnce(&VideoCaptureImpl::SetGpuFactoriesHandleOnIOTaskRunner,
747                      video_capture_impl, gpu_factories));
748 }
749
750 void VideoCaptureImpl::SetGpuFactoriesHandleOnIOTaskRunner(
751     media::GpuVideoAcceleratorFactories* gpu_factories) {
752   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
753   if (gpu_factories != gpu_factories_) {
754     LOG(ERROR) << "GPU factories handle changed; assuming GPU context lost";
755     gpu_factories_ = gpu_factories;
756   }
757 }
758
759 VideoCaptureImpl::~VideoCaptureImpl() {
760   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
761   if ((state_ == VIDEO_CAPTURE_STATE_STARTING ||
762        state_ == VIDEO_CAPTURE_STATE_STARTED) &&
763       GetVideoCaptureHost())
764     GetVideoCaptureHost()->Stop(device_id_);
765 }
766
767 void VideoCaptureImpl::SuspendCapture(bool suspend) {
768   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
769   if (suspend)
770     GetVideoCaptureHost()->Pause(device_id_);
771   else
772     GetVideoCaptureHost()->Resume(device_id_, session_id_, params_);
773 }
774
775 void VideoCaptureImpl::StartCapture(
776     int client_id,
777     const media::VideoCaptureParams& params,
778     const VideoCaptureStateUpdateCB& state_update_cb,
779     const VideoCaptureDeliverFrameCB& deliver_frame_cb,
780     const VideoCaptureSubCaptureTargetVersionCB& sub_capture_target_version_cb,
781     const VideoCaptureNotifyFrameDroppedCB& frame_dropped_cb) {
782   DVLOG(1) << __func__ << " |device_id_| = " << device_id_;
783   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
784   OnLog("VideoCaptureImpl got request to start capture.");
785
786   ClientInfo client_info;
787   client_info.params = params;
788   client_info.state_update_cb = state_update_cb;
789   client_info.deliver_frame_cb = deliver_frame_cb;
790   client_info.sub_capture_target_version_cb = sub_capture_target_version_cb;
791   client_info.frame_dropped_cb = frame_dropped_cb;
792
793   switch (state_) {
794     case VIDEO_CAPTURE_STATE_STARTING:
795     case VIDEO_CAPTURE_STATE_STARTED:
796       clients_[client_id] = client_info;
797       OnLog("VideoCaptureImpl capture is already started or starting.");
798       // TODO(sheu): Allowing resolution change will require that all
799       // outstanding clients of a capture session support resolution change.
800       DCHECK_EQ(params_.resolution_change_policy,
801                 params.resolution_change_policy);
802       return;
803     case VIDEO_CAPTURE_STATE_STOPPING:
804       clients_pending_on_restart_[client_id] = client_info;
805       DVLOG(1) << __func__ << " Got new resolution while stopping: "
806                << params.requested_format.frame_size.ToString();
807       return;
808     case VIDEO_CAPTURE_STATE_STOPPED:
809     case VIDEO_CAPTURE_STATE_ENDED:
810       clients_[client_id] = client_info;
811       params_ = params;
812       params_.requested_format.frame_rate =
813           std::min(params_.requested_format.frame_rate,
814                    static_cast<float>(media::limits::kMaxFramesPerSecond));
815
816       DVLOG(1) << "StartCapture: starting with first resolution "
817                << params_.requested_format.frame_size.ToString();
818       OnLog("VideoCaptureImpl starting capture.");
819       StartCaptureInternal();
820       return;
821     case VIDEO_CAPTURE_STATE_ERROR:
822       OnLog("VideoCaptureImpl is in error state.");
823       state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_ERROR);
824       return;
825     case VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED:
826       OnLog("VideoCaptureImpl is in system permissions error state.");
827       state_update_cb.Run(
828           blink::VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED);
829       return;
830     case VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY:
831       OnLog("VideoCaptureImpl is in camera busy error state.");
832       state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY);
833       return;
834     case VIDEO_CAPTURE_STATE_PAUSED:
835     case VIDEO_CAPTURE_STATE_RESUMED:
836       // The internal |state_| is never set to PAUSED/RESUMED since
837       // VideoCaptureImpl is not modified by those.
838       NOTREACHED();
839       return;
840   }
841 }
842
843 void VideoCaptureImpl::StopCapture(int client_id) {
844   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
845   // A client ID can be in only one client list.
846   // If this ID is in one of the lists, removing it from that list is enough;
847   // we don't have to run the second RemoveClient() call.
848   if (!RemoveClient(client_id, &clients_pending_on_restart_)) {
849     RemoveClient(client_id, &clients_);
850   }
851
852   if (!clients_.empty())
853     return;
854   DVLOG(1) << "StopCapture: No more client, stopping ...";
855   StopDevice();
856   client_buffers_.clear();
857   weak_factory_.InvalidateWeakPtrs();
858 }
859
860 void VideoCaptureImpl::RequestRefreshFrame() {
861   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
862   GetVideoCaptureHost()->RequestRefreshFrame(device_id_);
863 }
864
865 void VideoCaptureImpl::GetDeviceSupportedFormats(
866     VideoCaptureDeviceFormatsCallback callback) {
867   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
868   GetVideoCaptureHost()->GetDeviceSupportedFormats(
869       device_id_, session_id_,
870       base::BindOnce(&VideoCaptureImpl::OnDeviceSupportedFormats,
871                      weak_factory_.GetWeakPtr(), std::move(callback)));
872 }
873
874 void VideoCaptureImpl::GetDeviceFormatsInUse(
875     VideoCaptureDeviceFormatsCallback callback) {
876   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
877   GetVideoCaptureHost()->GetDeviceFormatsInUse(
878       device_id_, session_id_,
879       base::BindOnce(&VideoCaptureImpl::OnDeviceFormatsInUse,
880                      weak_factory_.GetWeakPtr(), std::move(callback)));
881 }
882
883 void VideoCaptureImpl::OnLog(const String& message) {
884   GetVideoCaptureHost()->OnLog(device_id_, message);
885 }
886
887 void VideoCaptureImpl::SetGpuMemoryBufferSupportForTesting(
888     std::unique_ptr<gpu::GpuMemoryBufferSupport> gpu_memory_buffer_support) {
889   gpu_memory_buffer_support_ = std::move(gpu_memory_buffer_support);
890 }
891
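// Handles state and error updates pushed by the browser-side VideoCaptureHost.
// Errors are mapped to the corresponding blink VIDEO_CAPTURE_STATE_ERROR_*
// value and broadcast to all clients; a STOPPED notification triggers a
// restart if clients are still attached or pending.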
892 void VideoCaptureImpl::OnStateChanged(
893     media::mojom::blink::VideoCaptureResultPtr result) {
894   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
895
896   // Stop the startup deadline timer as something has happened.
897   startup_timeout_.Stop();
898
899   if (result->which() ==
900       media::mojom::blink::VideoCaptureResult::Tag::kErrorCode) {
901     DVLOG(1) << __func__ << " Failed with an error.";
902     if (result->get_error_code() ==
903         media::VideoCaptureError::kWinMediaFoundationSystemPermissionDenied) {
904       state_ = VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED;
905       OnLog(
906           "VideoCaptureImpl changing state to "
907           "VIDEO_CAPTURE_STATE_ERROR_SYSTEM_PERMISSIONS_DENIED");
908     } else if (result->get_error_code() ==
909                media::VideoCaptureError::kWinMediaFoundationCameraBusy) {
910       state_ = VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY;
911       OnLog(
912           "VideoCaptureImpl changing state to "
913           "VIDEO_CAPTURE_STATE_ERROR_CAMERA_BUSY");
914     } else {
915       state_ = VIDEO_CAPTURE_STATE_ERROR;
916       OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_ERROR");
917     }
918     for (const auto& client : clients_)
919       client.second.state_update_cb.Run(state_);
920     clients_.clear();
921     RecordStartOutcomeUMA(result->get_error_code());
922     return;
923   }
924
925   media::mojom::VideoCaptureState state = result->get_state();
926   DVLOG(1) << __func__ << " state: " << state;
927   switch (state) {
928     case media::mojom::VideoCaptureState::STARTED:
929       OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STARTED");
930       state_ = VIDEO_CAPTURE_STATE_STARTED;
931       for (const auto& client : clients_)
932         client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STARTED);
933       // In case any frames were dropped before STARTED, always request a
934       // frame refresh to start the video call with.
935       // The capture device will decide whether it should refresh a frame.
936       RequestRefreshFrame();
937       RecordStartOutcomeUMA(media::VideoCaptureError::kNone);
938       break;
939     case media::mojom::VideoCaptureState::STOPPED:
940       OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STOPPED");
941       state_ = VIDEO_CAPTURE_STATE_STOPPED;
942       client_buffers_.clear();
943       weak_factory_.InvalidateWeakPtrs();
944       if (!clients_.empty() || !clients_pending_on_restart_.empty()) {
945         OnLog("VideoCaptureImpl restarting capture");
946         RestartCapture();
947       }
948       break;
949     case media::mojom::VideoCaptureState::PAUSED:
950       for (const auto& client : clients_)
951         client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_PAUSED);
952       break;
953     case media::mojom::VideoCaptureState::RESUMED:
954       for (const auto& client : clients_)
955         client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_RESUMED);
956       break;
957     case media::mojom::VideoCaptureState::ENDED:
958       OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_ENDED");
959       // We'll only notify the client that the stream has stopped.
960       for (const auto& client : clients_)
961         client.second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STOPPED);
962       clients_.clear();
963       state_ = VIDEO_CAPTURE_STATE_ENDED;
964       break;
965   }
966 }
967
968 void VideoCaptureImpl::OnNewBuffer(
969     int32_t buffer_id,
970     media::mojom::blink::VideoBufferHandlePtr buffer_handle) {
971   DVLOG(1) << __func__ << " buffer_id: " << buffer_id;
972   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
973
974   const bool inserted =
975       client_buffers_
976           .emplace(buffer_id, new BufferContext(std::move(buffer_handle),
977                                                 media_task_runner_))
978           .second;
979   DCHECK(inserted);
980 }
981
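// Called by the capture host when a previously registered buffer contains a
// new frame. The buffer is wrapped in a VideoFrame via VideoFrameBufferPreparer
// and, for GpuMemoryBuffer-backed buffers, a hop to the media thread binds
// SharedImages before the frame is delivered to clients.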
982 void VideoCaptureImpl::OnBufferReady(
983     media::mojom::blink::ReadyBufferPtr buffer) {
984   DVLOG(1) << __func__ << " buffer_id: " << buffer->buffer_id;
985   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
986
987   if (state_ != VIDEO_CAPTURE_STATE_STARTED) {
988     OnFrameDropped(
989         media::VideoCaptureFrameDropReason::kVideoCaptureImplNotInStartedState);
990     GetVideoCaptureHost()->ReleaseBuffer(device_id_, buffer->buffer_id,
991                                          DefaultFeedback());
992     return;
993   }
994
995   base::TimeTicks reference_time = *buffer->info->metadata.reference_time;
996
997   if (first_frame_ref_time_.is_null()) {
998     first_frame_ref_time_ = reference_time;
999     if (num_first_frame_logs_ < kMaxFirstFrameLogs) {
1000       OnLog("First frame received for this VideoCaptureImpl instance");
1001       num_first_frame_logs_++;
1002     } else if (num_first_frame_logs_ == kMaxFirstFrameLogs) {
1003       OnLog(
1004           "First frame received for this VideoCaptureImpl instance. This will "
1005           "not be logged anymore for this VideoCaptureImpl instance.");
1006       num_first_frame_logs_++;
1007     }
1008   }
1009
1010   // If the timestamp is not set, use the reference time to make a rough
1011   // estimate; this happens e.g. in ThreadSafeCaptureOracle::DidCaptureFrame().
1012   if (buffer->info->timestamp.is_zero())
1013     buffer->info->timestamp = reference_time - first_frame_ref_time_;
1014
1015   // If capture_begin_time was not set, use the reference time. This ensures
1016   // a captureTime is available for local sources for
1017   // requestVideoFrameCallback.
1018   if (!buffer->info->metadata.capture_begin_time)
1019     buffer->info->metadata.capture_begin_time = reference_time;
1020
1021   // TODO(qiangchen): Change the metric name to "reference_time" and
1022   // "timestamp", so that we have consistent naming everywhere.
1023   // Used by chrome/browser/media/cast_mirroring_performance_browsertest.cc
1024   TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
1025                        TRACE_EVENT_SCOPE_THREAD, "timestamp",
1026                        (reference_time - base::TimeTicks()).InMicroseconds(),
1027                        "time_delta", buffer->info->timestamp.InMicroseconds());
1028
1029   // Create and initialize a frame preparer for the incoming buffer; if this
1030   // fails, drop the frame and release the buffer.
1031   auto frame_preparer =
1032       std::make_unique<VideoFrameBufferPreparer>(*this, std::move(buffer));
1033   if (!frame_preparer->Initialize()) {
1034     OnFrameDropped(media::VideoCaptureFrameDropReason::
1035                        kVideoCaptureImplFailedToWrapDataAsMediaVideoFrame);
1036     GetVideoCaptureHost()->ReleaseBuffer(
1037         device_id_, frame_preparer->buffer_id(), DefaultFeedback());
1038     return;
1039   }
1040
1041   // If the video frame still needs to be bound, do a round trip to the media
1042   // thread; otherwise go directly to OnVideoFrameReady().
1043   if (!frame_preparer->IsVideoFrameBound()) {
1044     media_task_runner_->PostTask(
1045         FROM_HERE,
1046         base::BindOnce(&VideoCaptureImpl::BindVideoFrameOnMediaThread,
1047                        gpu_factories_, std::move(frame_preparer),
1048                        base::BindPostTaskToCurrentDefault(base::BindOnce(
1049                            &VideoCaptureImpl::OnVideoFrameReady,
1050                            weak_factory_.GetWeakPtr(), reference_time)),
1051                        base::BindPostTask(
1052                            main_task_runner_,
1053                            base::BindOnce(&VideoCaptureImpl::OnGpuContextLost,
1054                                           weak_factory_.GetWeakPtr()))));
1055     return;
1056   }
1057   OnVideoFrameReady(reference_time, std::move(frame_preparer));
1058 }
1059
1060 // static
1061 void VideoCaptureImpl::BindVideoFrameOnMediaThread(
1062     media::GpuVideoAcceleratorFactories* gpu_factories,
1063     std::unique_ptr<VideoFrameBufferPreparer> frame_preparer,
1064     base::OnceCallback<void(std::unique_ptr<VideoFrameBufferPreparer>)>
1065         on_frame_ready_callback,
1066     base::OnceCallback<void()> on_gpu_context_lost) {
1067   // This method should only be called when binding is needed, i.e. the frame is
1068   // a GPU frame.
1069   CHECK(!frame_preparer->IsVideoFrameBound());
1070   if (!frame_preparer->BindVideoFrameOnMediaThread(gpu_factories)) {
1071     // Bind failed.
1072     std::move(on_gpu_context_lost).Run();
1073     // Proceed to invoke |on_frame_ready_callback| even though we failed - it
1074     // takes care of dropping the frame.
1075   }
1076   std::move(on_frame_ready_callback).Run(std::move(frame_preparer));
1077 }
1078
1079 void VideoCaptureImpl::OnVideoFrameReady(
1080     base::TimeTicks reference_time,
1081     std::unique_ptr<VideoFrameBufferPreparer> frame_preparer) {
1082   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1083
1084   // If the frame is not bound and ready, we drop it.
1085   if (!frame_preparer->IsVideoFrameBound()) {
1086     OnFrameDropped(media::VideoCaptureFrameDropReason::
1087                        kVideoCaptureImplFailedToWrapDataAsMediaVideoFrame);
1088     // Release the buffer back to the capture host.
1089     GetVideoCaptureHost()->ReleaseBuffer(
1090         device_id_, frame_preparer->buffer_id(), DefaultFeedback());
1091     return;
1092   }
1093   // The buffer will be used. Finalize it.
1094   frame_preparer->Finalize();
1095
1096   // TODO(qiangchen): Dive into the full code path to let frame metadata hold
1097   // reference time rather than using an extra parameter.
1098   for (const auto& client : clients_) {
1099     client.second.deliver_frame_cb.Run(frame_preparer->frame(), reference_time);
1100   }
1101 }
1102
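// Called by the capture host when it retires a buffer. Drops our reference to
// the BufferContext so its mapping or GpuMemoryBuffer can be torn down once
// any outstanding frames are gone.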
1103 void VideoCaptureImpl::OnBufferDestroyed(int32_t buffer_id) {
1104   DVLOG(1) << __func__ << " buffer_id: " << buffer_id;
1105   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1106
1107   const auto& cb_iter = client_buffers_.find(buffer_id);
1108   if (cb_iter != client_buffers_.end()) {
1109     // If the BufferContext is non-null, GpuMemoryBuffer-backed frames may
1110     // hold more than one reference to it (an extra one can be held by
1111     // MailboxHolderReleased). Otherwise, only one reference should be held.
1112     DCHECK(!cb_iter->second.get() ||
1113            cb_iter->second->buffer_type() ==
1114                VideoFrameBufferHandleType::kGpuMemoryBufferHandle ||
1115            cb_iter->second->HasOneRef())
1116         << "Instructed to delete buffer we are still using.";
1117     client_buffers_.erase(cb_iter);
1118   }
1119 }
1120
1121 void VideoCaptureImpl::OnFrameDropped(
1122     media::VideoCaptureFrameDropReason reason) {
1123   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1124   for (const auto& client : clients_) {
1125     client.second.frame_dropped_cb.Run(reason);
1126   }
1127 }
1128
1129 void VideoCaptureImpl::OnNewSubCaptureTargetVersion(
1130     uint32_t sub_capture_target_version) {
1131   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1132
1133   for (const auto& client : clients_) {
1134     client.second.sub_capture_target_version_cb.Run(sub_capture_target_version);
1135   }
1136 }
1137
1138 constexpr base::TimeDelta VideoCaptureImpl::kCaptureStartTimeout;
1139
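// Runs on the IO thread once the last client has released a delivered
// VideoFrame; returns the underlying buffer to the capture host together with
// any accumulated consumer feedback.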
1140 void VideoCaptureImpl::OnAllClientsFinishedConsumingFrame(
1141     int buffer_id,
1142     scoped_refptr<BufferContext> buffer_context) {
1143   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1144
1145 // Subtle race note: It's important that the |buffer_context| argument be
1146 // std::move()'ed to this method and never copied. This is so that the caller,
1147 // DidFinishConsumingFrame(), does not implicitly retain a reference while it
1148 // is running the trampoline callback on another thread. This is necessary to
1149 // ensure the reference count on the BufferContext will be correct at the time
1150 // OnBufferDestroyed() is called. http://crbug.com/797851
1151 #if DCHECK_IS_ON()
1152   // The BufferContext should have exactly two references to it at this point,
1153   // one is this method's second argument and the other is from
1154   // |client_buffers_|.
1155   DCHECK(!buffer_context->HasOneRef());
1156   BufferContext* const buffer_raw_ptr = buffer_context.get();
1157   buffer_context = nullptr;
1158   // In the non-GMB case, there should be only one reference, from
1159   // |client_buffers_|. This DCHECK is invalid for GpuMemoryBuffer-backed
1160   // frames, because MailboxHolderReleased may hold on to a reference to
1161   // |buffer_context|.
1162   if (buffer_raw_ptr->buffer_type() !=
1163       VideoFrameBufferHandleType::kGpuMemoryBufferHandle) {
1164     DCHECK(buffer_raw_ptr->HasOneRef());
1165   }
1166 #else
1167   buffer_context = nullptr;
1168 #endif
1169
1170   if (require_premapped_frames_) {
1171     feedback_.require_mapped_frame = true;
1172   }
1173   GetVideoCaptureHost()->ReleaseBuffer(device_id_, buffer_id, feedback_);
1174   feedback_ = media::VideoCaptureFeedback();
1175 }
1176
1177 void VideoCaptureImpl::StopDevice() {
1178   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1179   if (state_ != VIDEO_CAPTURE_STATE_STARTING &&
1180       state_ != VIDEO_CAPTURE_STATE_STARTED)
1181     return;
1182   state_ = VIDEO_CAPTURE_STATE_STOPPING;
1183   OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STOPPING");
1184   GetVideoCaptureHost()->Stop(device_id_);
1185   params_.requested_format.frame_size.SetSize(0, 0);
1186 }
1187
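// Restarts capture after a STOPPED notification: merges the clients that
// arrived while stopping and sizes the requested format to the largest
// resolution requested by any client.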
1188 void VideoCaptureImpl::RestartCapture() {
1189   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1190   DCHECK_EQ(state_, VIDEO_CAPTURE_STATE_STOPPED);
1191
1192   int width = 0;
1193   int height = 0;
1194   clients_.insert(clients_pending_on_restart_.begin(),
1195                   clients_pending_on_restart_.end());
1196   clients_pending_on_restart_.clear();
1197   for (const auto& client : clients_) {
1198     width = std::max(width,
1199                      client.second.params.requested_format.frame_size.width());
1200     height = std::max(
1201         height, client.second.params.requested_format.frame_size.height());
1202   }
1203   params_.requested_format.frame_size.SetSize(width, height);
1204   DVLOG(1) << __func__ << " " << params_.requested_format.frame_size.ToString();
1205   StartCaptureInternal();
1206 }
1207
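// Sends the Start request to the VideoCaptureHost and, if
// kTimeoutHangingVideoCaptureStarts is enabled, arms a watchdog timer that
// reports kVideoCaptureImplTimedOutOnStart if no state update arrives within
// kCaptureStartTimeout.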
1208 void VideoCaptureImpl::StartCaptureInternal() {
1209   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1210   state_ = VIDEO_CAPTURE_STATE_STARTING;
1211   OnLog("VideoCaptureImpl changing state to VIDEO_CAPTURE_STATE_STARTING");
1212
1213   if (base::FeatureList::IsEnabled(kTimeoutHangingVideoCaptureStarts)) {
1214     startup_timeout_.Start(FROM_HERE, kCaptureStartTimeout,
1215                            base::BindOnce(&VideoCaptureImpl::OnStartTimedout,
1216                                           base::Unretained(this)));
1217   }
1218   start_outcome_reported_ = false;
1219   base::UmaHistogramBoolean("Media.VideoCapture.Start", true);
1220
1221   GetVideoCaptureHost()->Start(device_id_, session_id_, params_,
1222                                observer_receiver_.BindNewPipeAndPassRemote());
1223 }
1224
1225 void VideoCaptureImpl::OnStartTimedout() {
1226   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1227   OnLog("VideoCaptureImpl timed out during starting");
1228
1229   OnStateChanged(media::mojom::blink::VideoCaptureResult::NewErrorCode(
1230       media::VideoCaptureError::kVideoCaptureImplTimedOutOnStart));
1231 }
1232
1233 void VideoCaptureImpl::OnDeviceSupportedFormats(
1234     VideoCaptureDeviceFormatsCallback callback,
1235     const Vector<media::VideoCaptureFormat>& supported_formats) {
1236   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1237   std::move(callback).Run(supported_formats);
1238 }
1239
1240 void VideoCaptureImpl::OnDeviceFormatsInUse(
1241     VideoCaptureDeviceFormatsCallback callback,
1242     const Vector<media::VideoCaptureFormat>& formats_in_use) {
1243   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1244   std::move(callback).Run(formats_in_use);
1245 }
1246
1247 bool VideoCaptureImpl::RemoveClient(int client_id, ClientInfoMap* clients) {
1248   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1249
1250   const ClientInfoMap::iterator it = clients->find(client_id);
1251   if (it == clients->end())
1252     return false;
1253
1254   it->second.state_update_cb.Run(blink::VIDEO_CAPTURE_STATE_STOPPED);
1255   clients->erase(it);
1256   return true;
1257 }
1258
1259 media::mojom::blink::VideoCaptureHost* VideoCaptureImpl::GetVideoCaptureHost() {
1260   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1261   if (video_capture_host_for_testing_)
1262     return video_capture_host_for_testing_;
1263
1264   if (!video_capture_host_.is_bound())
1265     video_capture_host_.Bind(std::move(pending_video_capture_host_));
1266   return video_capture_host_.get();
1267 }
1268
1269 void VideoCaptureImpl::RecordStartOutcomeUMA(
1270     media::VideoCaptureError error_code) {
1271   // Record the success or failure of starting only the first time we
1272   // transition into such a state, not e.g. when resuming after pausing.
1273   if (!start_outcome_reported_) {
1274     VideoCaptureStartOutcome outcome;
1275     switch (error_code) {
1276       case media::VideoCaptureError::kNone:
1277         outcome = VideoCaptureStartOutcome::kStarted;
1278         break;
1279       case media::VideoCaptureError::kVideoCaptureImplTimedOutOnStart:
1280         outcome = VideoCaptureStartOutcome::kTimedout;
1281         break;
1282       default:
1283         outcome = VideoCaptureStartOutcome::kFailed;
1284         break;
1285     }
1286     base::UmaHistogramEnumeration("Media.VideoCapture.StartOutcome", outcome);
1287     base::UmaHistogramEnumeration("Media.VideoCapture.StartErrorCode",
1288                                   error_code);
1289     start_outcome_reported_ = true;
1290   }
1291 }
1292
1293 // static
1294 void VideoCaptureImpl::DidFinishConsumingFrame(
1295     BufferFinishedCallback callback_to_io_thread) {
1296   // Note: This function may be called on any thread by the VideoFrame
1297   // destructor. The frame's metadata is still valid for read access here.
1298   std::move(callback_to_io_thread).Run();
1299 }
1300
1301 void VideoCaptureImpl::ProcessFeedback(
1302     const media::VideoCaptureFeedback& feedback) {
1303   DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
1304   feedback_ = feedback;
1305 }
1306
1307 void VideoCaptureImpl::RequirePremappedFrames() {
1308   require_premapped_frames_ = true;
1309 }
1310
1311 media::VideoCaptureFeedback VideoCaptureImpl::DefaultFeedback() {
1312   media::VideoCaptureFeedback feedback;
1313   feedback.require_mapped_frame = require_premapped_frames_;
1314   return feedback;
1315 }
1316
1317 base::WeakPtr<VideoCaptureImpl> VideoCaptureImpl::GetWeakPtr() {
1318   return weak_this_;
1319 }
1320
1321 }  // namespace blink