Upload upstream chromium 120.0.6099.5
[platform/framework/web/chromium-efl.git] media/renderers/video_resource_updater.cc
1 // Copyright 2013 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/renderers/video_resource_updater.h"
6
7 #include <stddef.h>
8 #include <stdint.h>
9
10 #include <string>
11
12 #include "base/atomic_sequence_num.h"
13 #include "base/containers/contains.h"
14 #include "base/containers/cxx20_erase.h"
15 #include "base/functional/bind.h"
16 #include "base/logging.h"
17 #include "base/memory/raw_ptr.h"
18 #include "base/memory/shared_memory_mapping.h"
19 #include "base/memory/unsafe_shared_memory_region.h"
20 #include "base/ranges/algorithm.h"
21 #include "base/strings/stringprintf.h"
22 #include "base/task/single_thread_task_runner.h"
23 #include "base/trace_event/memory_dump_manager.h"
24 #include "base/trace_event/process_memory_dump.h"
25 #include "base/trace_event/trace_event.h"
26 #include "build/build_config.h"
27 #include "cc/base/math_util.h"
28 #include "cc/paint/skia_paint_canvas.h"
29 #include "components/viz/client/client_resource_provider.h"
30 #include "components/viz/client/shared_bitmap_reporter.h"
31 #include "components/viz/common/gpu/raster_context_provider.h"
32 #include "components/viz/common/quads/compositor_render_pass.h"
33 #include "components/viz/common/quads/texture_draw_quad.h"
34 #include "components/viz/common/quads/video_hole_draw_quad.h"
35 #include "components/viz/common/quads/yuv_video_draw_quad.h"
36 #include "components/viz/common/resources/bitmap_allocation.h"
37 #include "components/viz/common/resources/resource_sizes.h"
38 #include "components/viz/common/resources/shared_image_format_utils.h"
39 #include "gpu/GLES2/gl2extchromium.h"
40 #include "gpu/command_buffer/client/context_support.h"
41 #include "gpu/command_buffer/client/shared_image_interface.h"
42 #include "gpu/command_buffer/common/shared_image_capabilities.h"
43 #include "gpu/command_buffer/common/shared_image_trace_utils.h"
44 #include "gpu/command_buffer/common/shared_image_usage.h"
45 #include "media/base/format_utils.h"
46 #include "media/base/media_switches.h"
47 #include "media/base/wait_and_replace_sync_token_client.h"
48 #include "media/renderers/paint_canvas_video_renderer.h"
49 #include "media/renderers/resource_sync_token_client.h"
50 #include "media/video/half_float_maker.h"
51 #include "third_party/khronos/GLES2/gl2.h"
52 #include "third_party/khronos/GLES2/gl2ext.h"
53 #include "third_party/khronos/GLES3/gl3.h"
54 #include "third_party/libyuv/include/libyuv.h"
55 #include "third_party/skia/include/core/SkCanvas.h"
56 #include "ui/gfx/geometry/size_conversions.h"
57 #include "ui/gfx/geometry/skia_conversions.h"
58 #include "ui/gfx/video_types.h"
59 #include "ui/gl/gl_enums.h"
60 #include "ui/gl/trace_util.h"
61
62 #if defined(TIZEN_VIDEO_HOLE)
63 #include "components/viz/common/quads/solid_color_draw_quad.h"
64 #endif
65
66 namespace media {
67 namespace {
68
69 // Generates process-unique IDs to use for tracing video resources.
70 base::AtomicSequenceNumber g_next_video_resource_updater_id;
71
72 gfx::ProtectedVideoType ProtectedVideoTypeFromMetadata(
73     const VideoFrameMetadata& metadata) {
74   if (!metadata.protected_video) {
75     return gfx::ProtectedVideoType::kClear;
76   }
77
78   return metadata.hw_protected ? gfx::ProtectedVideoType::kHardwareProtected
79                                : gfx::ProtectedVideoType::kSoftwareProtected;
80 }
81
82 VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
83     const VideoFrame& frame,
84     GLuint target,
85     viz::SharedImageFormat si_formats[VideoFrame::kMaxPlanes],
86     bool use_stream_video_draw_quad) {
87   const VideoPixelFormat format = frame.format();
88   const size_t num_textures = frame.NumTextures();
89
90   if (frame.RequiresExternalSampler()) {
91     // The texture |target| can be 0 for Fuchsia.
92     DCHECK(target == 0 || target == GL_TEXTURE_EXTERNAL_OES)
93         << "Unsupported target " << gl::GLEnums::GetStringEnum(target);
94     DCHECK_EQ(num_textures, 1u);
95     absl::optional<gfx::BufferFormat> buffer_format =
96         VideoPixelFormatToGfxBufferFormat(format);
97     DCHECK(buffer_format.has_value());
98     if (frame.shared_image_format_type() == SharedImageFormatType::kLegacy) {
99       si_formats[0] =
100           viz::GetSinglePlaneSharedImageFormat(buffer_format.value());
101     } else {
102 #if BUILDFLAG(IS_OZONE)
103       CHECK_EQ(frame.shared_image_format_type(),
104                SharedImageFormatType::kSharedImageFormatExternalSampler);
105
106       // The format must be one of NV12/YV12/P016LE, as these are the only
107       // formats for which VideoFrame::RequiresExternalSampler() will return
108       // true.
109       // NOTE: If this is ever expanded to include NV12A, it will be necessary
110       // to decide whether the value returned in that case should be RGB (as is
111       // done for other values here) or RGBA (as is done for the handling of
112       // NV12A with per-plane sampling below).
113       switch (format) {
114         case PIXEL_FORMAT_NV12:
115           si_formats[0] = viz::MultiPlaneFormat::kNV12;
116           break;
117         case PIXEL_FORMAT_YV12:
118           si_formats[0] = viz::MultiPlaneFormat::kYV12;
119           break;
120         case PIXEL_FORMAT_P016LE:
121           si_formats[0] = viz::MultiPlaneFormat::kP010;
122           break;
123         default:
124           NOTREACHED_NORETURN();
125       }
126       si_formats[0].SetPrefersExternalSampler();
127 #else
128       // MultiplanarSharedImage with external sampling is supported only on
129       // Ozone, and VideoFrames with format type
130       // kSharedImageFormatExternalSampler should not be created on other
131       // platforms.
132       NOTREACHED_NORETURN();
133 #endif
134     }
135
136     return VideoFrameResourceType::RGB;
137   }
138
139   CHECK(!frame.RequiresExternalSampler());
140
141   switch (format) {
142     case PIXEL_FORMAT_ARGB:
143     case PIXEL_FORMAT_XRGB:
144     case PIXEL_FORMAT_ABGR:
145     case PIXEL_FORMAT_XBGR:
146     case PIXEL_FORMAT_BGRA:
147       DCHECK_EQ(num_textures, 1u);
148       // This maps VideoPixelFormat back to the GMB BufferFormat.
149       // NOTE: ABGR == RGBA and ARGB == BGRA; they differ only in byte order.
150       // See: VideoFormat function in gpu_memory_buffer_video_frame_pool
151       // https://cs.chromium.org/chromium/src/media/video/gpu_memory_buffer_video_frame_pool.cc?type=cs&g=0&l=281
152       si_formats[0] =
153           (format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_XBGR)
154               ? viz::SinglePlaneFormat::kRGBA_8888
155               : viz::SinglePlaneFormat::kBGRA_8888;
156
157       switch (target) {
158         case GL_TEXTURE_EXTERNAL_OES:
159           // `use_stream_video_draw_quad` is set on Android and `dcomp_surface`
160           // is used on Windows.
161           // TODO(sunnyps): It's odd to reuse the Android path on Windows. There
162           // could be other unknown assumptions in other parts of the rendering
163           // stack about stream video quads. Investigate alternative solutions.
164           if (use_stream_video_draw_quad || frame.metadata().dcomp_surface)
165             return VideoFrameResourceType::STREAM_TEXTURE;
166           [[fallthrough]];
167         case GL_TEXTURE_2D:
168         case GL_TEXTURE_RECTANGLE_ARB:
169           return (format == PIXEL_FORMAT_XRGB)
170                      ? VideoFrameResourceType::RGB
171                      : VideoFrameResourceType::RGBA_PREMULTIPLIED;
172         default:
173           NOTREACHED();
174           break;
175       }
176       break;
177     case PIXEL_FORMAT_XR30:
178     case PIXEL_FORMAT_XB30:
179       si_formats[0] = (format == PIXEL_FORMAT_XR30)
180                           ? viz::SinglePlaneFormat::kBGRA_1010102
181                           : viz::SinglePlaneFormat::kRGBA_1010102;
182       return VideoFrameResourceType::RGB;
183     case PIXEL_FORMAT_I420:
184       if (frame.shared_image_format_type() == SharedImageFormatType::kLegacy) {
185         DCHECK_EQ(num_textures, 3u);
186         si_formats[0] = viz::SinglePlaneFormat::kR_8;
187         si_formats[1] = viz::SinglePlaneFormat::kR_8;
188         si_formats[2] = viz::SinglePlaneFormat::kR_8;
189         return VideoFrameResourceType::YUV;
190       } else {
191         DCHECK_EQ(num_textures, 1u);
192         si_formats[0] = viz::MultiPlaneFormat::kI420;
193         return VideoFrameResourceType::RGB;
194       }
195
196     case PIXEL_FORMAT_NV12:
197       // |target| is set to 0 for Vulkan textures.
198       //
199       // TODO(https://crbug.com/1116101): Note that GL_TEXTURE_EXTERNAL_OES is
200       // allowed even for two-texture NV12 frames. This is intended to handle a
201       // couple of cases: a) when these textures are connected to the
202       // corresponding plane of the contents of an EGLStream using
203       // EGL_NV_stream_consumer_gltexture_yuv; b) when D3DImageBacking is used
204       // with GL_TEXTURE_EXTERNAL_OES (note that this case should be able to be
205       // migrated to GL_TEXTURE_2D after https://crrev.com/c/3856660).
206       DCHECK(target == 0 || target == GL_TEXTURE_EXTERNAL_OES ||
207              target == GL_TEXTURE_2D || target == GL_TEXTURE_RECTANGLE_ARB)
208           << "Unsupported target " << gl::GLEnums::GetStringEnum(target);
209       if (frame.shared_image_format_type() == SharedImageFormatType::kLegacy) {
210         DCHECK_EQ(num_textures, 2u);
211         si_formats[0] = viz::SinglePlaneFormat::kR_8;
212         si_formats[1] = viz::SinglePlaneFormat::kRG_88;
213         return VideoFrameResourceType::YUV;
214       } else {
215         DCHECK_EQ(num_textures, 1u);
216         si_formats[0] = viz::MultiPlaneFormat::kNV12;
217         return VideoFrameResourceType::RGB;
218       }
219
220     case PIXEL_FORMAT_NV12A:
221       if (frame.shared_image_format_type() == SharedImageFormatType::kLegacy) {
222         DCHECK_EQ(num_textures, 3u);
223         si_formats[0] = viz::SinglePlaneFormat::kR_8;
224         si_formats[1] = viz::SinglePlaneFormat::kRG_88;
225         si_formats[2] = viz::SinglePlaneFormat::kR_8;
226         return VideoFrameResourceType::YUVA;
227       } else {
228         DCHECK_EQ(num_textures, 1u);
229         si_formats[0] = viz::MultiPlaneFormat::kNV12A;
230         return VideoFrameResourceType::RGBA;
231       }
232
233     case PIXEL_FORMAT_P016LE:
234       if (frame.shared_image_format_type() == SharedImageFormatType::kLegacy) {
235         DCHECK_EQ(num_textures, 2u);
236         // TODO(mcasas): Support other formats such as e.g. P012.
237         si_formats[0] = viz::SinglePlaneFormat::kR_16;
238         // TODO(https://crbug.com/1233228): This needs to be
239         // gfx::BufferFormat::RG_1616.
240 #if BUILDFLAG(IS_MAC) || BUILDFLAG(IS_WIN)
241         si_formats[1] = viz::SinglePlaneFormat::kRG_1616;
242 #else
243         si_formats[1] = viz::SinglePlaneFormat::kRG_88;
244 #endif
245         return VideoFrameResourceType::YUV;
246       } else {
247         DCHECK_EQ(num_textures, 1u);
248         si_formats[0] = viz::MultiPlaneFormat::kP010;
249         return VideoFrameResourceType::RGB;
250       }
251
252     case PIXEL_FORMAT_RGBAF16:
253       DCHECK_EQ(num_textures, 1u);
254       si_formats[0] = viz::SinglePlaneFormat::kRGBA_F16;
255       return VideoFrameResourceType::RGBA;
256
257     case PIXEL_FORMAT_UYVY:
258       NOTREACHED();
259       [[fallthrough]];
260     case PIXEL_FORMAT_YV12:
261     case PIXEL_FORMAT_I422:
262     case PIXEL_FORMAT_I444:
263     case PIXEL_FORMAT_I420A:
264     case PIXEL_FORMAT_NV21:
265     case PIXEL_FORMAT_YUY2:
266     case PIXEL_FORMAT_RGB24:
267     case PIXEL_FORMAT_MJPEG:
268     case PIXEL_FORMAT_YUV420P9:
269     case PIXEL_FORMAT_YUV422P9:
270     case PIXEL_FORMAT_YUV444P9:
271     case PIXEL_FORMAT_YUV420P10:
272     case PIXEL_FORMAT_YUV422P10:
273     case PIXEL_FORMAT_YUV444P10:
274     case PIXEL_FORMAT_YUV420P12:
275     case PIXEL_FORMAT_YUV422P12:
276     case PIXEL_FORMAT_YUV444P12:
277     case PIXEL_FORMAT_Y16:
278     case PIXEL_FORMAT_I422A:
279     case PIXEL_FORMAT_I444A:
280     case PIXEL_FORMAT_YUV420AP10:
281     case PIXEL_FORMAT_YUV422AP10:
282     case PIXEL_FORMAT_YUV444AP10:
283     case PIXEL_FORMAT_UNKNOWN:
284 #if defined(TIZEN_TBM_SUPPORT)
285     case PIXEL_FORMAT_TBM_SURFACE:
286 #endif
287       break;
288   }
289   return VideoFrameResourceType::NONE;
290 }
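// Illustrative summary of the mapping above: a legacy two-texture NV12 frame
// gets si_formats[0] = viz::SinglePlaneFormat::kR_8 and si_formats[1] =
// viz::SinglePlaneFormat::kRG_88 and is reported as YUV, while a
// single-texture multiplanar NV12 frame gets si_formats[0] =
// viz::MultiPlaneFormat::kNV12 and is reported as RGB, because it is handled
// as a single resource downstream.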
291
292 // For frames that we receive in software format, determine the dimensions of
293 // each plane in the frame.
294 gfx::Size SoftwarePlaneDimension(VideoFrame* input_frame,
295                                  bool software_compositor,
296                                  size_t plane_index) {
297   if (software_compositor)
298     return input_frame->coded_size();
299
300   int plane_width = input_frame->columns(plane_index);
301   int plane_height = input_frame->rows(plane_index);
302   return gfx::Size(plane_width, plane_height);
303 }
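// For example, with GPU compositing a 1280x720 I420 frame reports 1280x720
// for plane 0 (Y) and 640x360 for planes 1 and 2 (U, V); with the software
// compositor every plane reports the full coded size, since the frame is
// later converted to a single RGBA bitmap.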
304
305 viz::SharedImageFormat GetRGBSharedImageFormat(VideoPixelFormat format) {
306 #if BUILDFLAG(IS_MAC)
307   // macOS IOSurfaces are always BGRA_8888.
308   return PaintCanvasVideoRenderer::GetRGBPixelsOutputFormat();
309 #else
310   // viz::SinglePlaneFormat::kRGBX_8888 and viz::SinglePlaneFormat::kBGRX_8888
311   // require upload as GL_RGB (3 bytes), while VideoFrame is always four bytes,
312   // so we can't upload directly from them.
313   switch (format) {
314     case PIXEL_FORMAT_XBGR:
315     case PIXEL_FORMAT_ABGR:
316       return viz::SinglePlaneFormat::kRGBA_8888;
317     case PIXEL_FORMAT_XRGB:
318     case PIXEL_FORMAT_ARGB:
319       return viz::SinglePlaneFormat::kBGRA_8888;
320     default:
321       NOTREACHED_NORETURN();
322   }
323 #endif
324 }
325
326 viz::SharedImageFormat GetSingleChannel8BitFormat(
327     const gpu::Capabilities& caps,
328     const gpu::SharedImageCapabilities& shared_image_caps) {
329   if (caps.texture_rg && !shared_image_caps.disable_r8_shared_images) {
330     return viz::SinglePlaneFormat::kR_8;
331   }
332
333   DCHECK(shared_image_caps.supports_luminance_shared_images);
334   return viz::SinglePlaneFormat::kLUMINANCE_8;
335 }
336
337 // Returns true if the input VideoFrame format can be stored directly in the
338 // provided output shared image format.
339 bool HasCompatibleFormat(VideoPixelFormat input_format,
340                          viz::SharedImageFormat output_format) {
341   if (input_format == PIXEL_FORMAT_XBGR)
342     return output_format == viz::SinglePlaneFormat::kRGBA_8888 ||
343            output_format == viz::SinglePlaneFormat::kRGBX_8888;
344   if (input_format == PIXEL_FORMAT_ABGR)
345     return output_format == viz::SinglePlaneFormat::kRGBA_8888;
346   if (input_format == PIXEL_FORMAT_XRGB)
347     return output_format == viz::SinglePlaneFormat::kBGRA_8888 ||
348            output_format == viz::SinglePlaneFormat::kBGRX_8888;
349   if (input_format == PIXEL_FORMAT_ARGB)
350     return output_format == viz::SinglePlaneFormat::kBGRA_8888;
351   return false;
352 }
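// For instance, HasCompatibleFormat(PIXEL_FORMAT_ABGR, kRGBA_8888) is true and
// the frame's pixels can be uploaded as-is, while
// HasCompatibleFormat(PIXEL_FORMAT_ABGR, kBGRA_8888) is false and the frame
// goes through the ConvertVideoFrameToRGBPixels() path in
// CreateForSoftwarePlanes() instead.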
353
354 // Returns true if kRasterInterfaceInVideoResourceUpdater is enabled.
355 bool CanUseRasterInterface() {
356   return base::FeatureList::IsEnabled(
357       media::kRasterInterfaceInVideoResourceUpdater);
358 }
359
360 class CopyingSyncTokenClient : public VideoFrame::SyncTokenClient {
361  public:
362   CopyingSyncTokenClient() = default;
363   CopyingSyncTokenClient(const CopyingSyncTokenClient&) = delete;
364   CopyingSyncTokenClient& operator=(const CopyingSyncTokenClient&) = delete;
365
366   ~CopyingSyncTokenClient() override = default;
367
368   void GenerateSyncToken(gpu::SyncToken* sync_token) override {
369     *sync_token = sync_token_;
370   }
371
372   void WaitSyncToken(const gpu::SyncToken& sync_token) override {
373     sync_token_ = sync_token;
374   }
375
376  private:
377   gpu::SyncToken sync_token_;
378 };
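// CreateForHardwarePlanes() below uses this client to snapshot a frame's
// current release sync token without generating a new one: the frame hands
// its existing token to WaitSyncToken(), which stores it, and
// GenerateSyncToken() returns that same token, so UpdateReleaseSyncToken()
// leaves the frame's token unchanged. Sketch of the pattern (names
// illustrative):
//
//   CopyingSyncTokenClient client;
//   gpu::SyncToken original_token = frame->UpdateReleaseSyncToken(&client);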
379
380 }  // namespace
381
382 VideoFrameExternalResources::VideoFrameExternalResources() = default;
383 VideoFrameExternalResources::~VideoFrameExternalResources() = default;
384
385 VideoFrameExternalResources::VideoFrameExternalResources(
386     VideoFrameExternalResources&& other) = default;
387 VideoFrameExternalResources& VideoFrameExternalResources::operator=(
388     VideoFrameExternalResources&& other) = default;
389
390 // Resource for a video plane allocated and owned by VideoResourceUpdater. There
391 // can be multiple plane resources for each video frame, depending on the
392 // format. These will be reused when possible.
393 class VideoResourceUpdater::PlaneResource {
394  public:
395   PlaneResource(uint32_t plane_resource_id,
396                 const gfx::Size& resource_size,
397                 viz::SharedImageFormat si_format,
398                 bool is_software)
399       : plane_resource_id_(plane_resource_id),
400         resource_size_(resource_size),
401         si_format_(si_format),
402         is_software_(is_software) {}
403
404   PlaneResource(const PlaneResource&) = delete;
405   PlaneResource& operator=(const PlaneResource&) = delete;
406
407   virtual ~PlaneResource() = default;
408
409   // Casts |this| to SoftwarePlaneResource for software compositing.
410   SoftwarePlaneResource* AsSoftware();
411
412   // Casts |this| to HardwarePlaneResource for GPU compositing.
413   HardwarePlaneResource* AsHardware();
414
415   // Returns true if this resource matches the unique identifiers of another
416   // VideoFrame resource.
417   bool Matches(VideoFrame::ID unique_frame_id, size_t plane_index) {
418     return has_unique_frame_id_and_plane_index_ &&
419            unique_frame_id_ == unique_frame_id && plane_index_ == plane_index;
420   }
421
422   // Sets the unique identifiers for this resource; this may only be called
423   // when there is a single reference to the resource (i.e. |ref_count_| == 1).
424   void SetUniqueId(VideoFrame::ID unique_frame_id, size_t plane_index) {
425     DCHECK_EQ(ref_count_, 1);
426     plane_index_ = plane_index;
427     unique_frame_id_ = unique_frame_id;
428     has_unique_frame_id_and_plane_index_ = true;
429   }
430
431   // Accessors for resource identifiers provided at construction time.
432   uint32_t plane_resource_id() const { return plane_resource_id_; }
433   const gfx::Size& resource_size() const { return resource_size_; }
434   viz::SharedImageFormat si_format() const { return si_format_; }
435
436   // Various methods for managing references. See |ref_count_| for details.
437   void add_ref() { ++ref_count_; }
438   void remove_ref() { --ref_count_; }
439   void clear_refs() { ref_count_ = 0; }
440   bool has_refs() const { return ref_count_ != 0; }
441
442  private:
443   const uint32_t plane_resource_id_;
444   const gfx::Size resource_size_;
445   const viz::SharedImageFormat si_format_;
446   const bool is_software_;
447
448   // The number of times this resource has been imported vs. the number of
449   // times this resource has been returned.
450   int ref_count_ = 0;
451
452   // These two members are used for identifying the data stored in this
453   // resource; they uniquely identify a VideoFrame plane.
454   VideoFrame::ID unique_frame_id_;
455   size_t plane_index_ = 0u;
456   // Indicates if the above two members have been set or not.
457   bool has_unique_frame_id_and_plane_index_ = false;
458 };
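// Reference counting convention used below: a resource gains a ref each time
// it is handed out (e.g. in CopyHardwarePlane() or CreateForSoftwarePlanes())
// and loses it when the RecycleResource() callback bound to the exported
// TransferableResource runs; only resources with no refs are eligible for
// recycling or deletion.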
459
460 class VideoResourceUpdater::SoftwarePlaneResource
461     : public VideoResourceUpdater::PlaneResource {
462  public:
463   SoftwarePlaneResource(uint32_t plane_resource_id,
464                         const gfx::Size& size,
465                         viz::SharedBitmapReporter* shared_bitmap_reporter)
466       : PlaneResource(plane_resource_id,
467                       size,
468                       viz::SinglePlaneFormat::kRGBA_8888,
469                       /*is_software=*/true),
470         shared_bitmap_reporter_(shared_bitmap_reporter),
471         shared_bitmap_id_(viz::SharedBitmap::GenerateId()) {
472     DCHECK(shared_bitmap_reporter_);
473
474     // Allocate SharedMemory and notify display compositor of the allocation.
475     base::MappedReadOnlyRegion shm =
476         viz::bitmap_allocation::AllocateSharedBitmap(
477             resource_size(), viz::SinglePlaneFormat::kRGBA_8888);
478     shared_mapping_ = std::move(shm.mapping);
479     shared_bitmap_reporter_->DidAllocateSharedBitmap(std::move(shm.region),
480                                                      shared_bitmap_id_);
481   }
482
483   SoftwarePlaneResource(const SoftwarePlaneResource&) = delete;
484   SoftwarePlaneResource& operator=(const SoftwarePlaneResource&) = delete;
485
486   ~SoftwarePlaneResource() override {
487     shared_bitmap_reporter_->DidDeleteSharedBitmap(shared_bitmap_id_);
488   }
489
490   const viz::SharedBitmapId& shared_bitmap_id() const {
491     return shared_bitmap_id_;
492   }
493   void* pixels() { return shared_mapping_.memory(); }
494
495   // Returns a memory dump GUID consistent across processes.
496   base::UnguessableToken GetSharedMemoryGuid() const {
497     return shared_mapping_.guid();
498   }
499
500  private:
501   const raw_ptr<viz::SharedBitmapReporter> shared_bitmap_reporter_;
502   const viz::SharedBitmapId shared_bitmap_id_;
503   base::WritableSharedMemoryMapping shared_mapping_;
504 };
505
506 class VideoResourceUpdater::HardwarePlaneResource
507     : public VideoResourceUpdater::PlaneResource {
508  public:
509   // Provides a RAII scope to access the HardwarePlaneResource as a texture on a
510   // GL context. This will wait on the sync token and provide the shared image
511   // access scope.
512   class ScopedTexture {
513    public:
514     ScopedTexture(gpu::gles2::GLES2Interface* gl,
515                   HardwarePlaneResource* resource)
516         : gl_(gl) {
517       texture_id_ = gl_->CreateAndTexStorage2DSharedImageCHROMIUM(
518           resource->mailbox().name);
519       gl_->BeginSharedImageAccessDirectCHROMIUM(
520           texture_id_, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
521     }
522
523     ~ScopedTexture() {
524       gl_->EndSharedImageAccessDirectCHROMIUM(texture_id_);
525       gl_->DeleteTextures(1, &texture_id_);
526     }
527
528     GLuint texture_id() const { return texture_id_; }
529
530    private:
531     raw_ptr<gpu::gles2::GLES2Interface> gl_;
532     GLuint texture_id_;
533   };
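  // Typical use, as in CopyHardwarePlane() below (names as used there): bind
  // the destination shared image as a GL texture for the duration of a copy
  // and let the destructor end the access and delete the temporary texture.
  //
  //   {
  //     HardwarePlaneResource::ScopedTexture scope(gl, hardware_resource);
  //     gl->CopySubTextureCHROMIUM(src_texture_id, /*source_level=*/0,
  //                                GL_TEXTURE_2D, scope.texture_id(),
  //                                /*dest_level=*/0, 0, 0, 0, 0, width, height,
  //                                false, false, false);
  //   }  // End of scope ends the shared image access.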
534
535   HardwarePlaneResource(uint32_t plane_resource_id,
536                         const gfx::Size& size,
537                         viz::SharedImageFormat format,
538                         const gfx::ColorSpace& color_space,
539                         bool use_gpu_memory_buffer_resources,
540                         viz::RasterContextProvider* context_provider)
541       : PlaneResource(plane_resource_id, size, format, /*is_software=*/false),
542         context_provider_(context_provider) {
543     DCHECK(context_provider_);
544     const gpu::Capabilities& caps = context_provider_->ContextCapabilities();
545     DCHECK(format.is_single_plane());
546     // TODO(hitawala): Add multiplanar support for software decode.
547     auto* sii = SharedImageInterface();
548     overlay_candidate_ =
549         use_gpu_memory_buffer_resources &&
550         sii->GetCapabilities().supports_scanout_shared_images &&
551         CanCreateGpuMemoryBufferForSinglePlaneSharedImageFormat(format);
552     uint32_t shared_image_usage =
553         gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_DISPLAY_READ;
554     if (overlay_candidate_) {
555       shared_image_usage |= gpu::SHARED_IMAGE_USAGE_SCANOUT;
556       texture_target_ = gpu::GetBufferTextureTarget(
557           gfx::BufferUsage::SCANOUT,
558           SinglePlaneSharedImageFormatToBufferFormat(format), caps);
559     }
560     mailbox_ = sii->CreateSharedImage(
561         format, size, color_space, kTopLeft_GrSurfaceOrigin,
562         kPremul_SkAlphaType, shared_image_usage, "VideoResourceUpdater",
563         gpu::kNullSurfaceHandle);
564     InterfaceBase()->WaitSyncTokenCHROMIUM(
565         sii->GenUnverifiedSyncToken().GetConstData());
566   }
567
568   HardwarePlaneResource(const HardwarePlaneResource&) = delete;
569   HardwarePlaneResource& operator=(const HardwarePlaneResource&) = delete;
570
571   ~HardwarePlaneResource() override {
572     gpu::SyncToken sync_token;
573     InterfaceBase()->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
574     SharedImageInterface()->DestroySharedImage(sync_token, mailbox_);
575   }
576
577   const gpu::Mailbox& mailbox() const { return mailbox_; }
578
579   GLenum texture_target() const { return texture_target_; }
580   bool overlay_candidate() const { return overlay_candidate_; }
581
582  private:
583   gpu::SharedImageInterface* SharedImageInterface() {
584     auto* sii = context_provider_->SharedImageInterface();
585     DCHECK(sii);
586     return sii;
587   }
588
589   gpu::gles2::GLES2Interface* ContextGL() {
590     auto* gl = context_provider_->ContextGL();
591     DCHECK(gl);
592     return gl;
593   }
594
595   gpu::raster::RasterInterface* RasterInterface() {
596     auto* ri = context_provider_->RasterInterface();
597     CHECK(ri);
598     return ri;
599   }
600
601   gpu::InterfaceBase* InterfaceBase() {
602     return CanUseRasterInterface()
603                ? static_cast<gpu::InterfaceBase*>(RasterInterface())
604                : static_cast<gpu::InterfaceBase*>(ContextGL());
605   }
606
607   const raw_ptr<viz::RasterContextProvider> context_provider_;
608   gpu::Mailbox mailbox_;
609   GLenum texture_target_ = GL_TEXTURE_2D;
610   bool overlay_candidate_ = false;
611 };
612
613 VideoResourceUpdater::SoftwarePlaneResource*
614 VideoResourceUpdater::PlaneResource::AsSoftware() {
615   DCHECK(is_software_);
616   return static_cast<SoftwarePlaneResource*>(this);
617 }
618
619 VideoResourceUpdater::HardwarePlaneResource*
620 VideoResourceUpdater::PlaneResource::AsHardware() {
621   DCHECK(!is_software_);
622   return static_cast<HardwarePlaneResource*>(this);
623 }
624
625 VideoResourceUpdater::VideoResourceUpdater(
626     viz::RasterContextProvider* context_provider,
627     viz::SharedBitmapReporter* shared_bitmap_reporter,
628     viz::ClientResourceProvider* resource_provider,
629     bool use_stream_video_draw_quad,
630     bool use_gpu_memory_buffer_resources,
631     int max_resource_size)
632     : context_provider_(context_provider),
633       shared_bitmap_reporter_(shared_bitmap_reporter),
634       resource_provider_(resource_provider),
635       use_stream_video_draw_quad_(use_stream_video_draw_quad),
636       use_gpu_memory_buffer_resources_(use_gpu_memory_buffer_resources),
637       max_resource_size_(max_resource_size),
638       tracing_id_(g_next_video_resource_updater_id.GetNext()) {
639   DCHECK(context_provider_ || shared_bitmap_reporter_);
640
641   base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider(
642       this, "media::VideoResourceUpdater",
643       base::SingleThreadTaskRunner::GetCurrentDefault());
644 }
645
646 VideoResourceUpdater::~VideoResourceUpdater() {
647   base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
648       this);
649 }
650
651 void VideoResourceUpdater::ObtainFrameResources(
652     scoped_refptr<VideoFrame> video_frame,
653     viz::ResourceId external_resource_id) {
654 #if defined(TIZEN_TBM_SUPPORT)
655   if (video_frame->IsTBMBackend()) {
656     frame_resources_.emplace_back(external_resource_id,
657                                   video_frame->coded_size());
658     frame_resource_type_ = VideoFrameResourceType::RGBA;
659     return;
660   }
661 #endif
662   if (video_frame->metadata().overlay_plane_id.has_value()) {
663     // This is a hole punching VideoFrame, there is nothing to display.
664     overlay_plane_id_ = *video_frame->metadata().overlay_plane_id;
665     frame_resource_type_ = VideoFrameResourceType::VIDEO_HOLE;
666     return;
667   }
668
669   VideoFrameExternalResources external_resources =
670       CreateExternalResourcesFromVideoFrame(video_frame);
671   frame_resource_type_ = external_resources.type;
672
673   if (external_resources.type == VideoFrameResourceType::YUV ||
674       external_resources.type == VideoFrameResourceType::YUVA) {
675     frame_resource_offset_ = external_resources.offset;
676     frame_resource_multiplier_ = external_resources.multiplier;
677     frame_bits_per_channel_ = external_resources.bits_per_channel;
678   }
679
680   DCHECK_EQ(external_resources.resources.size(),
681             external_resources.release_callbacks.size());
682   for (size_t i = 0; i < external_resources.resources.size(); ++i) {
683     viz::ResourceId resource_id = resource_provider_->ImportResource(
684         external_resources.resources[i],
685         std::move(external_resources.release_callbacks[i]));
686     frame_resources_.emplace_back(resource_id,
687                                   external_resources.resources[i].size);
688   }
689   TRACE_EVENT_INSTANT1("media", "VideoResourceUpdater::ObtainFrameResources",
690                        TRACE_EVENT_SCOPE_THREAD, "Timestamp",
691                        video_frame->timestamp().InMicroseconds());
692 }
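// Each resource imported above must eventually be removed again; callers are
// expected to pair ObtainFrameResources() with ReleaseFrameResources() below
// once the frame's quads have been drawn.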
693
694 void VideoResourceUpdater::ReleaseFrameResources() {
695   for (auto& frame_resource : frame_resources_)
696     resource_provider_->RemoveImportedResource(frame_resource.id);
697   frame_resources_.clear();
698 }
699
700 void VideoResourceUpdater::AppendQuads(
701     viz::CompositorRenderPass* render_pass,
702     scoped_refptr<VideoFrame> frame,
703     gfx::Transform transform,
704     gfx::Rect quad_rect,
705     gfx::Rect visible_quad_rect,
706     const gfx::MaskFilterInfo& mask_filter_info,
707     absl::optional<gfx::Rect> clip_rect,
708     bool contents_opaque,
709     float draw_opacity,
710     int sorting_context_id) {
711   DCHECK(frame.get());
712
713   viz::SharedQuadState* shared_quad_state =
714       render_pass->CreateAndAppendSharedQuadState();
715   shared_quad_state->SetAll(
716       transform, quad_rect, visible_quad_rect, mask_filter_info, clip_rect,
717       contents_opaque, draw_opacity, SkBlendMode::kSrcOver, sorting_context_id,
718       /*layer_id=*/0u, /*fast_rounded_corner=*/false);
719
720   bool needs_blending = !contents_opaque;
721
722   gfx::Rect visible_rect = frame->visible_rect();
723   gfx::Size coded_size = frame->coded_size();
724
725   const gfx::PointF uv_top_left(
726       static_cast<float>(visible_rect.x()) / coded_size.width(),
727       static_cast<float>(visible_rect.y()) / coded_size.height());
728
729   const gfx::PointF uv_bottom_right(
730       static_cast<float>(visible_rect.right()) / coded_size.width(),
731       static_cast<float>(visible_rect.bottom()) / coded_size.height());
732
733   switch (frame_resource_type_) {
734     case VideoFrameResourceType::VIDEO_HOLE: {
735       auto* video_hole_quad =
736           render_pass->CreateAndAppendDrawQuad<viz::VideoHoleDrawQuad>();
737       video_hole_quad->SetNew(shared_quad_state, quad_rect, visible_quad_rect,
738                               overlay_plane_id_);
739       break;
740     }
741     case VideoFrameResourceType::YUV:
742     case VideoFrameResourceType::YUVA: {
743       DCHECK_EQ(frame_resources_.size(),
744                 VideoFrame::NumPlanes(frame->format()));
745       if (frame->HasTextures()) {
746         if (frame_resource_type_ == VideoFrameResourceType::YUV) {
747           DCHECK(frame->format() == PIXEL_FORMAT_NV12 ||
748                  frame->format() == PIXEL_FORMAT_P016LE ||
749                  frame->format() == PIXEL_FORMAT_I420);
750         } else {
751           DCHECK_EQ(frame->format(), PIXEL_FORMAT_NV12A);
752         }
753       }
754
755       // Get the scaling factor of the YA texture relative to the UV texture.
756       const gfx::Size uv_sample_size =
757           VideoFrame::SampleSize(frame->format(), VideoFrame::kUPlane);
758
759       auto* yuv_video_quad =
760           render_pass->CreateAndAppendDrawQuad<viz::YUVVideoDrawQuad>();
761       viz::ResourceId v_plane_id;
762       viz::ResourceId a_plane_id;
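      // Plane-to-resource mapping: for YUV, a biplanar layout such as NV12 has
      // only two resources, so the UV resource (index 1) doubles as the V
      // plane and there is no alpha; a triplanar layout such as I420 uses
      // index 2 for V. For YUVA (NV12A), the alpha is index 2 when only three
      // resources exist and index 3 otherwise.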
763       if (frame_resource_type_ == VideoFrameResourceType::YUV) {
764         v_plane_id = frame_resources_.size() > 2 ? frame_resources_[2].id
765                                                  : frame_resources_[1].id;
766         a_plane_id = frame_resources_.size() > 3 ? frame_resources_[3].id
767                                                  : viz::kInvalidResourceId;
768       } else {
769         v_plane_id = frame_resources_.size() > 3 ? frame_resources_[2].id
770                                                  : frame_resources_[1].id;
771         a_plane_id = frame_resources_.size() > 3 ? frame_resources_[3].id
772                                                  : frame_resources_[2].id;
773       }
774       yuv_video_quad->SetNew(
775           shared_quad_state, quad_rect, visible_quad_rect, needs_blending,
776           coded_size, visible_rect, uv_sample_size, frame_resources_[0].id,
777           frame_resources_[1].id, v_plane_id, a_plane_id, frame->ColorSpace(),
778           frame_resource_offset_, frame_resource_multiplier_,
779           frame_bits_per_channel_,
780           ProtectedVideoTypeFromMetadata(frame->metadata()),
781           frame->hdr_metadata().value_or(gfx::HDRMetadata()));
782
783       for (viz::ResourceId resource_id : yuv_video_quad->resources) {
784         resource_provider_->ValidateResource(resource_id);
785       }
786       break;
787     }
788     case VideoFrameResourceType::RGBA:
789     case VideoFrameResourceType::RGBA_PREMULTIPLIED:
790     case VideoFrameResourceType::RGB:
791     case VideoFrameResourceType::STREAM_TEXTURE: {
792       DCHECK_EQ(frame_resources_.size(), 1u);
793       if (frame_resources_.size() < 1u)
794         break;
795       bool premultiplied_alpha =
796           frame_resource_type_ == VideoFrameResourceType::RGBA_PREMULTIPLIED;
797
798       float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
799       bool flipped = !frame->metadata().texture_origin_is_top_left;
800       bool nearest_neighbor = false;
801       gfx::ProtectedVideoType protected_video_type =
802           ProtectedVideoTypeFromMetadata(frame->metadata());
803       auto* texture_quad =
804           render_pass->CreateAndAppendDrawQuad<viz::TextureDrawQuad>();
805       texture_quad->SetNew(shared_quad_state, quad_rect, visible_quad_rect,
806                            needs_blending, frame_resources_[0].id,
807                            premultiplied_alpha, uv_top_left, uv_bottom_right,
808                            SkColors::kTransparent, opacity, flipped,
809                            nearest_neighbor, false, protected_video_type);
810       texture_quad->set_resource_size_in_pixels(coded_size);
811       // Set the is_stream_video flag for STREAM_TEXTURE. It is used downstream
812       // (e.g. in *_layer_overlay.cc).
813       texture_quad->is_stream_video =
814           frame_resource_type_ == VideoFrameResourceType::STREAM_TEXTURE;
815 #if BUILDFLAG(IS_WIN)
816       // Windows uses DComp surfaces to e.g. hold MediaFoundation videos, which
817       // must be promoted to overlay to be composited correctly.
818       if (frame->metadata().dcomp_surface) {
819         texture_quad->overlay_priority_hint = viz::OverlayPriority::kRequired;
820       }
821 #endif
822       texture_quad->is_video_frame = true;
823       texture_quad->hdr_metadata =
824           frame->hdr_metadata().value_or(gfx::HDRMetadata());
825       for (viz::ResourceId resource_id : texture_quad->resources) {
826         resource_provider_->ValidateResource(resource_id);
827       }
828
829       break;
830     }
831 #if defined(TIZEN_VIDEO_HOLE)
832     case VideoFrameResourceType::HOLE: {
833       DCHECK_EQ(frame_resources_.size(), 0u);
834       auto* solid_color_draw_quad =
835           render_pass->CreateAndAppendDrawQuad<viz::SolidColorDrawQuad>();
836
837       // Create a solid color quad with transparent black and force no
838       // blending / no anti-aliasing.
839       solid_color_draw_quad->SetAll(shared_quad_state, quad_rect,
840                                     visible_quad_rect, false,
841                                     SkColors::kTransparent, true);
842       break;
843     }
844 #endif
845     case VideoFrameResourceType::NONE:
846       NOTIMPLEMENTED();
847       break;
848   }
849 }
850
851 VideoFrameExternalResources
852 VideoResourceUpdater::CreateExternalResourcesFromVideoFrame(
853     scoped_refptr<VideoFrame> video_frame) {
854 #if defined(TIZEN_VIDEO_HOLE)
855   if (video_frame->storage_type() == media::VideoFrame::STORAGE_HOLE) {
856     VideoFrameExternalResources external_resources;
857     external_resources.type = VideoFrameResourceType::HOLE;
858     return external_resources;
859   }
860 #endif
861
862   if (video_frame->format() == PIXEL_FORMAT_UNKNOWN)
863     return VideoFrameExternalResources();
864   DCHECK(video_frame->HasTextures() || video_frame->IsMappable());
865   if (video_frame->HasTextures())
866     return CreateForHardwarePlanes(std::move(video_frame));
867   else
868     return CreateForSoftwarePlanes(std::move(video_frame));
869 }
870
871 viz::SharedImageFormat VideoResourceUpdater::YuvSharedImageFormat(
872     int bits_per_channel) {
873   DCHECK(context_provider_);
874   const auto& caps = context_provider_->ContextCapabilities();
875   const auto& shared_image_caps =
876       context_provider_->SharedImageInterface()->GetCapabilities();
877   if (caps.disable_one_component_textures)
878     return PaintCanvasVideoRenderer::GetRGBPixelsOutputFormat();
879   if (bits_per_channel <= 8) {
880     DCHECK(shared_image_caps.supports_luminance_shared_images ||
881            caps.texture_rg);
882     return GetSingleChannel8BitFormat(caps, shared_image_caps);
883   }
884   if (caps.texture_norm16 && shared_image_caps.supports_r16_shared_images) {
885     return viz::SinglePlaneFormat::kR_16;
886   }
887   if (caps.texture_half_float_linear &&
888       shared_image_caps.supports_luminance_shared_images) {
889     return viz::SinglePlaneFormat::kLUMINANCE_F16;
890   }
891   return GetSingleChannel8BitFormat(caps, shared_image_caps);
892 }
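// The selection above is a fallback ladder. For example, a 10-bit frame
// prefers kR_16 when texture_norm16 and R16 shared images are available, then
// kLUMINANCE_F16 when half-float luminance textures are supported, and
// otherwise drops back to a single-channel 8-bit format; 8-bit content goes
// straight to kR_8 or kLUMINANCE_8. If one-component textures are disabled
// entirely, an RGB format is returned and the caller converts the frame to
// RGB instead.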
893
894 bool VideoResourceUpdater::ReallocateUploadPixels(size_t needed_size) {
895   // Free the existing data first so that the memory can be reused, if
896   // possible. Note that the new array is purposely not initialized.
897   upload_pixels_.reset();
898   uint8_t* pixel_mem = nullptr;
899   // Fail if we can't support the required memory to upload pixels.
900   if (!base::UncheckedMalloc(needed_size,
901                              reinterpret_cast<void**>(&pixel_mem))) {
902     DLOG(ERROR) << "Unable to allocate enough memory required to "
903                    "upload pixels";
904     return false;
905   }
906   upload_pixels_.reset(pixel_mem);
907   upload_pixels_size_ = needed_size;
908   return true;
909 }
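// Callers (see CreateForSoftwarePlanes() below) invoke this only when
// |upload_pixels_size_| is smaller than the needed size, so the staging
// buffer only ever grows and is reused across frames.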
910
911 VideoResourceUpdater::PlaneResource*
912 VideoResourceUpdater::RecycleOrAllocateResource(
913     const gfx::Size& resource_size,
914     viz::SharedImageFormat si_format,
915     const gfx::ColorSpace& color_space,
916     VideoFrame::ID unique_id,
917     int plane_index) {
918   PlaneResource* recyclable_resource = nullptr;
919   for (auto& resource : all_resources_) {
920     // If the plane index is valid (positive, or 0, meaning all planes)
921     // then we are allowed to return a referenced resource that already
922     // contains the right frame data. It's safe to reuse it even if
923     // resource_provider_ holds some references to it, because those
924     // references are read-only.
925     if (plane_index != -1 && resource->Matches(unique_id, plane_index)) {
926       DCHECK(resource->resource_size() == resource_size);
927       DCHECK(resource->si_format() == si_format);
928       return resource.get();
929     }
930
931     // Otherwise check whether this is an unreferenced resource of the right
932     // format that we can recycle. Remember it, but don't return immediately,
933     // because we still want to find any reusable resources.
934     const bool in_use = resource->has_refs();
935
936     if (!in_use && resource->resource_size() == resource_size &&
937         resource->si_format() == si_format) {
938       recyclable_resource = resource.get();
939     }
940   }
941
942   if (recyclable_resource)
943     return recyclable_resource;
944
945   // There was nothing available to reuse or recycle. Allocate a new resource.
946   return AllocateResource(resource_size, si_format, color_space);
947 }
948
949 VideoResourceUpdater::PlaneResource* VideoResourceUpdater::AllocateResource(
950     const gfx::Size& plane_size,
951     viz::SharedImageFormat format,
952     const gfx::ColorSpace& color_space) {
953   const uint32_t plane_resource_id = next_plane_resource_id_++;
954
955   if (software_compositor()) {
956     DCHECK_EQ(format, viz::SinglePlaneFormat::kRGBA_8888);
957
958     all_resources_.push_back(std::make_unique<SoftwarePlaneResource>(
959         plane_resource_id, plane_size, shared_bitmap_reporter_));
960   } else {
961     all_resources_.push_back(std::make_unique<HardwarePlaneResource>(
962         plane_resource_id, plane_size, format, color_space,
963         use_gpu_memory_buffer_resources_, context_provider_));
964   }
965   return all_resources_.back().get();
966 }
967
968 void VideoResourceUpdater::CopyHardwarePlane(
969     VideoFrame* video_frame,
970     const gfx::ColorSpace& resource_color_space,
971     const gpu::MailboxHolder& mailbox_holder,
972     VideoFrameExternalResources* external_resources) {
973   const gfx::Size output_plane_resource_size = video_frame->coded_size();
974   // The copy needs to be a direct transfer of pixel data, so we use an RGBA8
975   // target to avoid loss of precision or dropping any alpha component.
976   constexpr viz::SharedImageFormat copy_si_format =
977       viz::SinglePlaneFormat::kRGBA_8888;
978
979   const VideoFrame::ID no_unique_id;
980   const int no_plane_index = -1;  // Do not recycle referenced textures.
981   PlaneResource* plane_resource = RecycleOrAllocateResource(
982       output_plane_resource_size, copy_si_format, resource_color_space,
983       no_unique_id, no_plane_index);
984   HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
985   hardware_resource->add_ref();
986
987   DCHECK_EQ(hardware_resource->texture_target(),
988             static_cast<GLenum>(GL_TEXTURE_2D));
989
990   if (CanUseRasterInterface()) {
991     auto* ri = RasterInterface();
992     ri->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
993
994     // This is only used on Android where all video mailboxes already use shared
995     // images.
996     CHECK(mailbox_holder.mailbox.IsSharedImage());
997     ri->CopySharedImage(
998         mailbox_holder.mailbox, hardware_resource->mailbox(), GL_TEXTURE_2D,
999         /*xoffset=*/0, /*yoffset=*/0, /*x=*/0, /*y=*/0,
1000         output_plane_resource_size.width(), output_plane_resource_size.height(),
1001         /*unpack_flip_y=*/false, /*unpack_premultiply_alpha=*/false);
1002   } else {
1003     auto* gl = ContextGL();
1004     gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
1005
1006     // This is only used on Android where all video mailboxes already use shared
1007     // images.
1008     DCHECK(mailbox_holder.mailbox.IsSharedImage());
1009     GLuint src_texture_id = gl->CreateAndTexStorage2DSharedImageCHROMIUM(
1010         mailbox_holder.mailbox.name);
1011     gl->BeginSharedImageAccessDirectCHROMIUM(
1012         src_texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
1013     {
1014       HardwarePlaneResource::ScopedTexture scope(gl, hardware_resource);
1015       gl->CopySubTextureCHROMIUM(
1016           src_texture_id, /*source_level=*/0, GL_TEXTURE_2D, scope.texture_id(),
1017           /*dest_level=*/0, /*xoffset=*/0, /*yoffset=*/0, /*x=*/0, /*y=*/0,
1018           output_plane_resource_size.width(),
1019           output_plane_resource_size.height(),
1020           /*unpack_flip_y=*/false, /*unpack_premultiply_alpha=*/false,
1021           /*unpack_unmultiply_alpha=*/false);
1022     }
1023     gl->EndSharedImageAccessDirectCHROMIUM(src_texture_id);
1024     gl->DeleteTextures(1, &src_texture_id);
1025   }
1026
1027   // Wait (if the existing token isn't null) and replace it with a new one.
1028   //
1029   // This path is currently only used with single mailbox frames. Assert this
1030   // here since this code isn't tuned for multiple planes; it should only update
1031   // the release token once.
1032   DCHECK_EQ(video_frame->NumTextures(), 1u);
1033   WaitAndReplaceSyncTokenClient client(InterfaceBase());
1034   gpu::SyncToken sync_token = video_frame->UpdateReleaseSyncToken(&client);
1035
1036   auto transferable_resource = viz::TransferableResource::MakeGpu(
1037       hardware_resource->mailbox(), GL_TEXTURE_2D, sync_token,
1038       output_plane_resource_size, copy_si_format,
1039       false /* is_overlay_candidate */,
1040       viz::TransferableResource::ResourceSource::kVideo);
1041   transferable_resource.color_space = resource_color_space;
1042   external_resources->resources.push_back(std::move(transferable_resource));
1043
1044   external_resources->release_callbacks.push_back(base::BindOnce(
1045       &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
1046       hardware_resource->plane_resource_id()));
1047 }
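// Both branches above perform the same logical copy into the recycled RGBA
// shared image; they differ only in the API used: CopySharedImage() on the
// RasterInterface when kRasterInterfaceInVideoResourceUpdater is enabled,
// versus CreateAndTexStorage2DSharedImageCHROMIUM() plus
// CopySubTextureCHROMIUM() on the GLES2 interface otherwise.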
1048
1049 VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
1050     scoped_refptr<VideoFrame> video_frame) {
1051   TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForHardwarePlanes");
1052   DCHECK(video_frame->HasTextures());
1053   if (!context_provider_) {
1054     return VideoFrameExternalResources();
1055   }
1056
1057   VideoFrameExternalResources external_resources;
1058   gfx::ColorSpace resource_color_space = video_frame->ColorSpace();
1059
1060   const bool copy_required = video_frame->metadata().copy_required;
1061
1062   GLuint target = video_frame->mailbox_holder(0).texture_target;
1063   // If |copy_required| then we will copy into a GL_TEXTURE_2D target.
1064   if (copy_required)
1065     target = GL_TEXTURE_2D;
1066
1067   viz::SharedImageFormat si_formats[VideoFrame::kMaxPlanes];
1068   external_resources.type = ExternalResourceTypeForHardwarePlanes(
1069       *video_frame, target, si_formats, use_stream_video_draw_quad_);
1070   external_resources.bits_per_channel = video_frame->BitDepth();
1071
1072   if (external_resources.type == VideoFrameResourceType::NONE) {
1073     DLOG(ERROR) << "Unsupported Texture format"
1074                 << VideoPixelFormatToString(video_frame->format());
1075     return external_resources;
1076   }
1077   absl::optional<gfx::ColorSpace> resource_color_space_when_sampled;
1078   if (external_resources.type == VideoFrameResourceType::RGB ||
1079       external_resources.type == VideoFrameResourceType::RGBA ||
1080       external_resources.type == VideoFrameResourceType::RGBA_PREMULTIPLIED) {
1081     resource_color_space_when_sampled =
1082         resource_color_space.GetAsFullRangeRGB();
1083   }
1084
1085   const size_t num_textures = video_frame->NumTextures();
1086   if (video_frame->shared_image_format_type() !=
1087       SharedImageFormatType::kLegacy) {
1088     DCHECK_EQ(num_textures, 1u);
1089   }
1090
1091   // Make a copy of the current release SyncToken so we know if it changes.
1092   CopyingSyncTokenClient client;
1093   auto original_release_token = video_frame->UpdateReleaseSyncToken(&client);
1094
1095   for (size_t i = 0; i < num_textures; ++i) {
1096     const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(i);
1097     if (mailbox_holder.mailbox.IsZero())
1098       break;
1099
1100     if (copy_required) {
1101       CopyHardwarePlane(
1102           video_frame.get(),
1103           resource_color_space_when_sampled.value_or(resource_color_space),
1104           mailbox_holder, &external_resources);
1105     } else {
1106       const size_t width = video_frame->columns(i);
1107       const size_t height = video_frame->rows(i);
1108       const gfx::Size plane_size(width, height);
1109       auto transfer_resource = viz::TransferableResource::MakeGpu(
1110           mailbox_holder.mailbox, mailbox_holder.texture_target,
1111           mailbox_holder.sync_token, plane_size, si_formats[i],
1112           video_frame->metadata().allow_overlay,
1113           viz::TransferableResource::ResourceSource::kVideo);
1114       transfer_resource.color_space = resource_color_space;
1115       transfer_resource.color_space_when_sampled =
1116           resource_color_space_when_sampled;
1117       transfer_resource.hdr_metadata =
1118           video_frame->hdr_metadata().value_or(gfx::HDRMetadata());
1119       if (video_frame->metadata().read_lock_fences_enabled) {
1120         transfer_resource.synchronization_type = viz::TransferableResource::
1121             SynchronizationType::kGpuCommandsCompleted;
1122       }
1123       transfer_resource.ycbcr_info = video_frame->ycbcr_info();
1124
1125 #if BUILDFLAG(IS_ANDROID)
1126       transfer_resource.is_backed_by_surface_texture =
1127           video_frame->metadata().texture_owner;
1128 #endif
1129
1130 #if BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_WIN)
1131       transfer_resource.wants_promotion_hint =
1132           video_frame->metadata().wants_promotion_hint;
1133 #endif
1134
1135       external_resources.resources.push_back(std::move(transfer_resource));
1136       external_resources.release_callbacks.push_back(base::BindOnce(
1137           &VideoResourceUpdater::ReturnTexture, weak_ptr_factory_.GetWeakPtr(),
1138           video_frame, original_release_token));
1139     }
1140   }
1141   return external_resources;
1142 }
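// In the common (no-copy) case the frame's existing mailboxes are wrapped
// directly in TransferableResources, so no pixel data is duplicated; only
// when metadata().copy_required is set does CopyHardwarePlane() above copy
// each plane into a pool-owned RGBA shared image.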
1143
1144 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
1145     scoped_refptr<VideoFrame> video_frame) {
1146   TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
1147   const VideoPixelFormat input_frame_format = video_frame->format();
1148
1149   size_t bits_per_channel = video_frame->BitDepth();
1150
1151   const bool is_rgb = input_frame_format == PIXEL_FORMAT_XBGR ||
1152                       input_frame_format == PIXEL_FORMAT_XRGB ||
1153                       input_frame_format == PIXEL_FORMAT_ABGR ||
1154                       input_frame_format == PIXEL_FORMAT_ARGB;
1155
1156   DCHECK(IsYuvPlanar(input_frame_format) ||
1157          input_frame_format == PIXEL_FORMAT_Y16 || is_rgb);
1158
1159   viz::SharedImageFormat output_si_format;
1160   absl::optional<viz::SharedImageFormat> subplane_si_format;
1161   gfx::ColorSpace output_color_space = video_frame->ColorSpace();
1162   bool texture_needs_rgb_conversion = false;
1163   if (is_rgb) {
1164     output_si_format = GetRGBSharedImageFormat(input_frame_format);
1165   } else if (input_frame_format == PIXEL_FORMAT_Y16) {
1166     // Unable to display directly as yuv planes so convert it to RGB.
1167     texture_needs_rgb_conversion = true;
1168   } else if (!software_compositor()) {
1169     // Can be composited directly from yuv planes.
1170     output_si_format = YuvSharedImageFormat(bits_per_channel);
1171
1172     // If GPU compositing is enabled but YuvSharedImageFormat() fell back to
1173     // viz::SinglePlaneFormat::kRGBA_8888 or kBGRA_8888 (a GPU driver bug
1174     // workaround disables one-component textures), then YUV frames must be
1175     // converted to RGB before texture upload.
1176     if (output_si_format == viz::SinglePlaneFormat::kRGBA_8888 ||
1177         output_si_format == viz::SinglePlaneFormat::kBGRA_8888) {
1178       texture_needs_rgb_conversion = true;
1179     }
1180
1181     // Some YUV resources have different sized planes. If we lack the proper
1182     // SharedImageFormat just convert to RGB. We could do something better like
1183     // unpacking to I420/I016, but texture_rg and r16 support should be pretty
1184     // universal and we expect these frames to be rare.
1185     if (input_frame_format == PIXEL_FORMAT_NV12) {
1186       if (output_si_format == viz::SinglePlaneFormat::kR_8) {
1187         subplane_si_format = viz::SinglePlaneFormat::kRG_88;
1188       } else {
1189         texture_needs_rgb_conversion = true;
1190       }
1191     } else if (input_frame_format == PIXEL_FORMAT_P016LE) {
1192       if (output_si_format == viz::SinglePlaneFormat::kR_16) {
1193         subplane_si_format = viz::SinglePlaneFormat::kRG_1616;
1194       } else {
1195         texture_needs_rgb_conversion = true;
1196       }
1197     } else {
1198       DCHECK_EQ(VideoFrame::BytesPerElement(input_frame_format, 0),
1199                 VideoFrame::BytesPerElement(input_frame_format, 1));
1200     }
1201   }
1202
1203   size_t output_plane_count = VideoFrame::NumPlanes(input_frame_format);
1204
1205   // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
1206   // conversion here. That involves an extra copy of each frame to a bitmap.
1207   // Obviously, this is suboptimal and should be addressed once ubercompositor
1208   // starts shaping up.
1209   if (software_compositor() || texture_needs_rgb_conversion) {
1210     output_si_format =
1211         software_compositor()
1212             ? viz::SinglePlaneFormat::kRGBA_8888
1213             : PaintCanvasVideoRenderer::GetRGBPixelsOutputFormat();
1214     output_plane_count = 1;
1215     bits_per_channel = 8;
1216
1217     // The YUV to RGB conversion will be performed when we convert
1218     // from single-channel textures to an RGBA texture via
1219     // ConvertVideoFrameToRGBPixels below.
1220     output_color_space = output_color_space.GetAsFullRangeRGB();
1221   }
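  // For example, a PIXEL_FORMAT_Y16 frame (or any frame under the software
  // compositor) leaves this block as a single RGBA/BGRA plane with 8 bits per
  // channel and a full-range RGB color space.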
1222
1223   std::vector<gfx::Size> outplane_plane_sizes;
1224   outplane_plane_sizes.reserve(output_plane_count);
1225   for (size_t i = 0; i < output_plane_count; ++i) {
1226     outplane_plane_sizes.push_back(
1227         SoftwarePlaneDimension(video_frame.get(), software_compositor(), i));
1228     const gfx::Size& output_plane_resource_size = outplane_plane_sizes.back();
1229     if (output_plane_resource_size.IsEmpty() ||
1230         output_plane_resource_size.width() > max_resource_size_ ||
1231         output_plane_resource_size.height() > max_resource_size_) {
1232       // This output plane has invalid geometry, so return empty external
1233       // resources.
1234       DLOG(ERROR)
1235           << "Video resource is too large to upload. Maximum dimension is "
1236           << max_resource_size_ << " and resource is "
1237           << output_plane_resource_size.ToString();
1238       return VideoFrameExternalResources();
1239     }
1240   }
1241
1242   // Delete recycled resources that are the wrong format or wrong size.
1243   auto can_delete_resource_fn =
1244       [output_si_format, subplane_si_format,
1245        &outplane_plane_sizes](const std::unique_ptr<PlaneResource>& resource) {
1246         // Resources that are still being used can't be deleted.
1247         if (resource->has_refs())
1248           return false;
1249
1250         return (resource->si_format() != output_si_format &&
1251                 resource->si_format() !=
1252                     subplane_si_format.value_or(output_si_format)) ||
1253                !base::Contains(outplane_plane_sizes, resource->resource_size());
1254       };
1255   base::EraseIf(all_resources_, can_delete_resource_fn);
1256
1257   // Recycle or allocate resources for each video plane.
1258   std::vector<PlaneResource*> plane_resources;
1259   plane_resources.reserve(output_plane_count);
1260   for (size_t i = 0; i < output_plane_count; ++i) {
1261     auto si_format = i == 0 ? output_si_format
1262                             : subplane_si_format.value_or(output_si_format);
1263     DCHECK(si_format.is_single_plane());
1264     plane_resources.push_back(RecycleOrAllocateResource(
1265         outplane_plane_sizes[i], si_format, output_color_space,
1266         video_frame->unique_id(), i));
1267     plane_resources.back()->add_ref();
1268   }
1269
1270   VideoFrameExternalResources external_resources;
1271
1272   external_resources.bits_per_channel = bits_per_channel;
1273
1274   if (software_compositor() || texture_needs_rgb_conversion || is_rgb) {
1275     DCHECK_EQ(plane_resources.size(), 1u);
1276     PlaneResource* plane_resource = plane_resources[0];
1277
1278     if (!plane_resource->Matches(video_frame->unique_id(), 0)) {
1279       // We need to transfer data from |video_frame| to the plane resource.
1280       if (software_compositor()) {
1281         DCHECK_EQ(plane_resource->si_format(),
1282                   viz::SinglePlaneFormat::kRGBA_8888);
1283
1284         if (!video_renderer_)
1285           video_renderer_ = std::make_unique<PaintCanvasVideoRenderer>();
1286
1287         SoftwarePlaneResource* software_resource = plane_resource->AsSoftware();
1288
1289         // We know the format is RGBA_8888 from the check above.
1290         SkImageInfo info = SkImageInfo::MakeN32Premul(
1291             gfx::SizeToSkISize(software_resource->resource_size()));
1292
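        // installPixels() wraps the shared-memory bitmap without copying, and
        // Paint() below draws into it with kSrc so the frame replaces any stale
        // contents rather than blending with them.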
1293         SkBitmap sk_bitmap;
1294         sk_bitmap.installPixels(info, software_resource->pixels(),
1295                                 info.minRowBytes());
1296         // This is the software path, so |canvas| and |video_frame| are always
1297         // backed by software memory.
1298         cc::SkiaPaintCanvas canvas(sk_bitmap);
1299         cc::PaintFlags flags;
1300         flags.setBlendMode(SkBlendMode::kSrc);
1301         flags.setFilterQuality(cc::PaintFlags::FilterQuality::kLow);
1302
1303         // Note that PaintCanvasVideoRenderer::Copy would copy to the origin,
1304         // not |video_frame->visible_rect|, so call Paint instead.
1305         // https://crbug.com/1090435
1306         video_renderer_->Paint(video_frame, &canvas,
1307                                gfx::RectF(video_frame->visible_rect()), flags,
1308                                media::kNoTransformation, nullptr);
1309       } else {
1310         HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
1311         size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(
1312             video_frame->coded_size().width(), output_si_format);
1313         const gfx::Size& plane_size = hardware_resource->resource_size();
1314
1315         // Note: Strides may be negative in case of bottom-up layouts.
1316         const int stride = video_frame->stride(VideoFrame::kARGBPlane);
1317         const bool has_compatible_stride =
1318             stride > 0 && static_cast<size_t>(stride) == bytes_per_row;
1319
1320         const uint8_t* source_pixels = nullptr;
1321         if (HasCompatibleFormat(input_frame_format, output_si_format) &&
1322             has_compatible_stride) {
1323           // We can pass the pixels through as-is when the texture format matches.
1324           // Since we always copy the entire coded area, we don't have to worry
1325           // about the origin.
1326           source_pixels = video_frame->data(VideoFrame::kARGBPlane);
1327         } else {
1328           size_t needed_size =
1329               bytes_per_row * video_frame->coded_size().height();
1330           if (upload_pixels_size_ < needed_size) {
1331             if (!ReallocateUploadPixels(needed_size)) {
1332               // Fail here if memory reallocation fails.
1333               return VideoFrameExternalResources();
1334             }
1335           }
1336
1337           // PaintCanvasVideoRenderer writes to the origin, so offset the upload
1338           // buffer by the visible rect's origin; we upload at coded size and pass
1339           // the visible rect on to the compositor. It'd save a few bytes not to do this...
1340           auto* dest_ptr = upload_pixels_.get() +
1341                            video_frame->visible_rect().y() * bytes_per_row +
1342                            video_frame->visible_rect().x() * sizeof(uint32_t);
1343           PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
1344               video_frame.get(), dest_ptr, bytes_per_row);
1345           source_pixels = upload_pixels_.get();
1346         }
1347
1348         // Copy pixels into texture.
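        // When the raster interface is available, upload with WritePixels()
        // into the shared image mailbox; otherwise fall back to GLES2 and
        // TexSubImage2D() on the bound texture.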
1349         if (CanUseRasterInterface()) {
1350           auto* ri = RasterInterface();
1351           auto color_type = viz::ToClosestSkColorType(
1352               /*gpu_compositing=*/true, output_si_format, /*plane_index=*/0);
1353           SkImageInfo info =
1354               SkImageInfo::Make(plane_size.width(), plane_size.height(),
1355                                 color_type, kPremul_SkAlphaType);
1356           SkPixmap pixmap = SkPixmap(info, source_pixels, bytes_per_row);
1357           ri->WritePixels(hardware_resource->mailbox(), /*dst_x_offset=*/0,
1358                           /*dst_y_offset=*/0, /*dst_plane_index=*/0,
1359                           hardware_resource->texture_target(), pixmap);
1360         } else {
1361           auto* gl = ContextGL();
1362           HardwarePlaneResource::ScopedTexture scope(gl, hardware_resource);
1363           gl->BindTexture(hardware_resource->texture_target(),
1364                           scope.texture_id());
1365           gl->TexSubImage2D(
1366               hardware_resource->texture_target(), /*level=*/0, /*xoffset=*/0,
1367               /*yoffset=*/0, plane_size.width(), plane_size.height(),
1368               viz::SharedImageFormatRestrictedSinglePlaneUtils::ToGLDataFormat(
1369                   output_si_format),
1370               viz::SharedImageFormatRestrictedSinglePlaneUtils::ToGLDataType(
1371                   output_si_format),
1372               source_pixels);
1373         }
1374       }
1375       plane_resource->SetUniqueId(video_frame->unique_id(), 0);
1376     }
1377
1378     viz::TransferableResource transferable_resource;
1379     if (software_compositor()) {
1380       SoftwarePlaneResource* software_resource = plane_resource->AsSoftware();
1381       external_resources.type = VideoFrameResourceType::RGBA_PREMULTIPLIED;
1382       transferable_resource = viz::TransferableResource::MakeSoftware(
1383           software_resource->shared_bitmap_id(),
1384           software_resource->resource_size(), plane_resource->si_format(),
1385           viz::TransferableResource::ResourceSource::kVideo);
1386     } else {
1387       HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
1388       external_resources.type = VideoFrameResourceType::RGBA;
1389       gpu::SyncToken sync_token;
1390       InterfaceBase()->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
1391       transferable_resource = viz::TransferableResource::MakeGpu(
1392           hardware_resource->mailbox(), hardware_resource->texture_target(),
1393           sync_token, hardware_resource->resource_size(), output_si_format,
1394           hardware_resource->overlay_candidate(),
1395           viz::TransferableResource::ResourceSource::kVideo);
1396     }
1397
1398     transferable_resource.color_space = output_color_space;
1399     external_resources.resources.push_back(std::move(transferable_resource));
1400     external_resources.release_callbacks.push_back(base::BindOnce(
1401         &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
1402         plane_resource->plane_resource_id()));
1403
1404     return external_resources;
1405   }
1406
1407   const auto yuv_si_format = output_si_format;
1408   DCHECK(yuv_si_format.is_single_plane());
1409   DCHECK(yuv_si_format == viz::SinglePlaneFormat::kLUMINANCE_F16 ||
1410          yuv_si_format == viz::SinglePlaneFormat::kR_16 ||
1411          yuv_si_format == viz::SinglePlaneFormat::kLUMINANCE_8 ||
1412          yuv_si_format == viz::SinglePlaneFormat::kR_8)
1413       << yuv_si_format.ToString();
1414
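  // For high bit depth content the compositor needs an offset/multiplier to
  // recover the original value range, whether the planes are uploaded as half
  // float (LUMINANCE_F16) or as 16-bit integer (R_16).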
1415   std::unique_ptr<HalfFloatMaker> half_float_maker;
1416   if (yuv_si_format == viz::SinglePlaneFormat::kLUMINANCE_F16) {
1417     half_float_maker = HalfFloatMaker::NewHalfFloatMaker(bits_per_channel);
1418     external_resources.offset = half_float_maker->Offset();
1419     external_resources.multiplier = half_float_maker->Multiplier();
1420   } else if (yuv_si_format == viz::SinglePlaneFormat::kR_16) {
1421     external_resources.multiplier = 65535.0f / ((1 << bits_per_channel) - 1);
1422     external_resources.offset = 0;
1423   }
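  // Illustrative example: for 10-bit content uploaded into an R_16 resource,
  // the multiplier is 65535.0f / ((1 << 10) - 1) = 65535.0f / 1023 (about 64),
  // which rescales the maximum 10-bit sample to full-scale 16 bits.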
1424
1425   // We need to transfer data from |video_frame| to the plane resources.
1426   for (size_t i = 0; i < plane_resources.size(); ++i) {
1427     HardwarePlaneResource* plane_resource = plane_resources[i]->AsHardware();
1428
1429     // Skip the transfer if this |video_frame|'s plane has been processed.
1430     if (plane_resource->Matches(video_frame->unique_id(), i))
1431       continue;
1432
1433     const viz::SharedImageFormat plane_si_format = plane_resource->si_format();
1434     DCHECK(plane_si_format == yuv_si_format ||
1435            plane_si_format == subplane_si_format.value_or(yuv_si_format));
1436
1437     // |video_stride_bytes| is the stride in bytes of the |video_frame| plane we
1438     // are uploading (it may include padding beyond the visible pixels).
1439     const int video_stride_bytes = video_frame->stride(i);
1440
1441     // |resource_size_pixels| is the size of the destination resource.
1442     const gfx::Size resource_size_pixels = plane_resource->resource_size();
1443
1444     const size_t bytes_per_row =
1445         viz::ResourceSizes::CheckedWidthInBytes<size_t>(
1446             resource_size_pixels.width(), plane_si_format);
1447
1448     // Use 4-byte row alignment (the OpenGL default) for upload performance,
1449     // assuming GL_UNPACK_ALIGNMENT has not been changed from its default.
1450     constexpr size_t kDefaultUnpackAlignment = 4;
1451     const size_t upload_image_stride = cc::MathUtil::CheckedRoundUp<size_t>(
1452         bytes_per_row, kDefaultUnpackAlignment);
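    // e.g. (illustrative) an R_8 plane 10 pixels wide has bytes_per_row = 10,
    // which rounds up to an upload stride of 12 bytes.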
1453
1454     const size_t resource_bit_depth =
1455         static_cast<size_t>(plane_si_format.BitsPerPixel());
1456
1457     // Data downshifting is needed if the resource bit depth is not enough.
1458     const bool needs_bit_downshifting = bits_per_channel > resource_bit_depth;
1459
1460     // We need to convert the incoming data if we're transferring to half float
1461     // or if it needs a bit downshift.
1462     const bool needs_conversion =
1463         plane_si_format == viz::SinglePlaneFormat::kLUMINANCE_F16 ||
1464         needs_bit_downshifting;
1465
1466     constexpr size_t kDefaultUnpackRowLength = 0;
1467     GLuint unpack_row_length = kDefaultUnpackRowLength;
1468     GLuint unpack_alignment = kDefaultUnpackAlignment;
1469
1470     const uint8_t* pixels;
1471     int pixels_stride_in_bytes;
1472
1473     if (!needs_conversion) {
1474       // Stride adaptation is needed if source and destination strides are
1475       // different but they have the same bit depth.
1476       const bool needs_stride_adaptation =
1477           (bits_per_channel == resource_bit_depth) &&
1478           (upload_image_stride != static_cast<size_t>(video_stride_bytes));
1479       if (needs_stride_adaptation) {
1480         const int bytes_per_element =
1481             VideoFrame::BytesPerElement(video_frame->format(), i);
1482         // Stride is aligned to VideoFrameLayout::kFrameAddressAlignment (32)
1483         // which should be divisible by pixel size for YUV formats (1, 2 or 4).
1484         DCHECK_EQ(video_stride_bytes % bytes_per_element, 0);
1485         // Unpack row length is in pixels not bytes.
1486         unpack_row_length = video_stride_bytes / bytes_per_element;
1487         // Use a non-standard alignment only if necessary.
1488         if (video_stride_bytes % kDefaultUnpackAlignment != 0)
1489           unpack_alignment = bytes_per_element;
1490       }
1491       pixels = video_frame->data(i);
1492       pixels_stride_in_bytes = video_stride_bytes;
1493     } else {
1494       // Avoid malloc for each frame/plane if possible.
1495       const size_t needed_size =
1496           upload_image_stride * resource_size_pixels.height();
1497       if (upload_pixels_size_ < needed_size) {
1498         if (!ReallocateUploadPixels(needed_size)) {
1499           // Fail here if memory reallocation fails.
1500           return VideoFrameExternalResources();
1501         }
1502       }
1503
1504       if (plane_si_format == viz::SinglePlaneFormat::kLUMINANCE_F16) {
1505         for (int row = 0; row < resource_size_pixels.height(); ++row) {
1506           uint16_t* dst = reinterpret_cast<uint16_t*>(
1507               &upload_pixels_[upload_image_stride * row]);
1508           const uint16_t* src = reinterpret_cast<const uint16_t*>(
1509               video_frame->data(i) + (video_stride_bytes * row));
1510           half_float_maker->MakeHalfFloats(src, bytes_per_row / 2, dst);
1511         }
1512       } else if (needs_bit_downshifting) {
1513         DCHECK(plane_si_format == viz::SinglePlaneFormat::kLUMINANCE_8 ||
1514                plane_si_format == viz::SinglePlaneFormat::kR_8);
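        // |scale| is a 16.16 fixed-point factor for libyuv::Convert16To8Plane();
        // e.g. for 10-bit input, scale = 0x10000 >> 2 = 0x4000, which keeps the
        // top 8 of the 10 significant bits.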
1515         const int scale = 0x10000 >> (bits_per_channel - 8);
1516         libyuv::Convert16To8Plane(
1517             reinterpret_cast<const uint16_t*>(video_frame->data(i)),
1518             video_stride_bytes / 2, upload_pixels_.get(), upload_image_stride,
1519             scale, bytes_per_row, resource_size_pixels.height());
1520       } else {
1521         NOTREACHED();
1522       }
1523
1524       pixels = upload_pixels_.get();
1525       pixels_stride_in_bytes = upload_image_stride;
1526     }
1527
1528     // Copy pixels into texture. TexSubImage2D() is applicable because
1529     // |yuv_si_format| is LUMINANCE_F16, R16_EXT, LUMINANCE_8 or RED_8.
1530     if (CanUseRasterInterface()) {
1531       auto* ri = RasterInterface();
1532       auto color_type = viz::ToClosestSkColorType(
1533           /*gpu_compositing=*/true, plane_si_format, /*plane_index=*/0);
1534       SkImageInfo info = SkImageInfo::Make(resource_size_pixels.width(),
1535                                            resource_size_pixels.height(),
1536                                            color_type, kPremul_SkAlphaType);
1537       SkPixmap pixmap = SkPixmap(info, pixels, pixels_stride_in_bytes);
1538       ri->WritePixels(plane_resource->mailbox(), /*dst_x_offset=*/0,
1539                       /*dst_y_offset=*/0, /*dst_plane_index=*/0,
1540                       plane_resource->texture_target(), pixmap);
1541     } else {
1542       auto* gl = ContextGL();
1543       HardwarePlaneResource::ScopedTexture scope(gl, plane_resource);
1544
1545       gl->BindTexture(plane_resource->texture_target(), scope.texture_id());
1546
1547       gl->PixelStorei(GL_UNPACK_ROW_LENGTH, unpack_row_length);
1548       gl->PixelStorei(GL_UNPACK_ALIGNMENT, unpack_alignment);
1549       gl->TexSubImage2D(
1550           plane_resource->texture_target(), /*level=*/0, /*xoffset=*/0,
1551           /*yoffset=*/0, resource_size_pixels.width(),
1552           resource_size_pixels.height(),
1553           viz::SharedImageFormatRestrictedSinglePlaneUtils::ToGLDataFormat(
1554               plane_si_format),
1555           viz::SharedImageFormatRestrictedSinglePlaneUtils::ToGLDataType(
1556               plane_si_format),
1557           pixels);
1558       gl->PixelStorei(GL_UNPACK_ROW_LENGTH, kDefaultUnpackRowLength);
1559       gl->PixelStorei(GL_UNPACK_ALIGNMENT, kDefaultUnpackAlignment);
1560     }
1561
1562     plane_resource->SetUniqueId(video_frame->unique_id(), i);
1563   }
1564
1565   // Set the sync token; otherwise the resource is assumed to be synchronized.
1566   gpu::SyncToken sync_token;
1567   InterfaceBase()->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
1568
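  // Wrap each plane's shared image in a TransferableResource and register a
  // release callback so the plane resource can be recycled once the compositor
  // no longer references it.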
1569   for (size_t i = 0; i < plane_resources.size(); ++i) {
1570     HardwarePlaneResource* plane_resource = plane_resources[i]->AsHardware();
1571     auto transferable_resource = viz::TransferableResource::MakeGpu(
1572         plane_resource->mailbox(), plane_resource->texture_target(), sync_token,
1573         plane_resource->resource_size(),
1574         i == 0 ? output_si_format
1575                : subplane_si_format.value_or(output_si_format),
1576         plane_resource->overlay_candidate(),
1577         viz::TransferableResource::ResourceSource::kVideo);
1578     transferable_resource.color_space = output_color_space;
1579     external_resources.resources.push_back(std::move(transferable_resource));
1580     external_resources.release_callbacks.push_back(base::BindOnce(
1581         &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
1582         plane_resource->plane_resource_id()));
1583   }
1584
1585   external_resources.type = VideoFrameResourceType::YUV;
1586   return external_resources;
1587 }
1588
1589 gpu::gles2::GLES2Interface* VideoResourceUpdater::ContextGL() {
1590   // This is the last usage of ContextGL() in RasterContextProvider. Delete the
1591   // function and its friend entry from RasterContextProvider if removing this.
1592   auto* gl = context_provider_->ContextGL();
1593   DCHECK(gl);
1594   return gl;
1595 }
1596
1597 gpu::raster::RasterInterface* VideoResourceUpdater::RasterInterface() {
1598   auto* ri = context_provider_->RasterInterface();
1599   CHECK(ri);
1600   return ri;
1601 }
1602
1603 gpu::InterfaceBase* VideoResourceUpdater::InterfaceBase() {
1604   return CanUseRasterInterface()
1605              ? static_cast<gpu::InterfaceBase*>(RasterInterface())
1606              : static_cast<gpu::InterfaceBase*>(ContextGL());
1607 }
1608
1609 void VideoResourceUpdater::ReturnTexture(
1610     scoped_refptr<VideoFrame> video_frame,
1611     const gpu::SyncToken& original_release_token,
1612     const gpu::SyncToken& new_release_token,
1613     bool lost_resource) {
1614   // Note: This method is called for each plane texture in the frame, which
1615   // means it may end up receiving the same `new_release_token` multiple times.
1616
1617   if (lost_resource) {
1618     return;
1619   }
1620
1621   if (!new_release_token.HasData()) {
1622     return;
1623   }
1624
1625   ResourceSyncTokenClient client(InterfaceBase(), original_release_token,
1626                                  new_release_token);
1627   video_frame->UpdateReleaseSyncToken(&client);
1628 }
1629
1630 void VideoResourceUpdater::RecycleResource(uint32_t plane_resource_id,
1631                                            const gpu::SyncToken& sync_token,
1632                                            bool lost_resource) {
1633   auto resource_it = base::ranges::find(all_resources_, plane_resource_id,
1634                                         &PlaneResource::plane_resource_id);
1635   if (resource_it == all_resources_.end())
1636     return;
1637
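  // Wait on the returned sync token so that any commands which consumed this
  // resource are ordered before it is reused.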
1638   if (context_provider_ && sync_token.HasData()) {
1639     InterfaceBase()->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
1640   }
1641
1642   if (lost_resource) {
1643     all_resources_.erase(resource_it);
1644   } else {
1645     (*resource_it)->remove_ref();
1646   }
1647 }
1648
1649 bool VideoResourceUpdater::OnMemoryDump(
1650     const base::trace_event::MemoryDumpArgs& args,
1651     base::trace_event::ProcessMemoryDump* pmd) {
1652   for (auto& resource : all_resources_) {
1653     std::string dump_name =
1654         base::StringPrintf("cc/video_memory/updater_%d/resource_%d",
1655                            tracing_id_, resource->plane_resource_id());
1656     base::trace_event::MemoryAllocatorDump* dump =
1657         pmd->CreateAllocatorDump(dump_name);
1658
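    // Report the estimated size of the backing allocation for this plane.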
1659     const uint64_t total_bytes =
1660         resource->si_format().EstimatedSizeInBytes(resource->resource_size());
1661     dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
1662                     base::trace_event::MemoryAllocatorDump::kUnitsBytes,
1663                     total_bytes);
1664
1665     // The importance value assigned to the GUID here must be greater than the
1666     // importance value assigned elsewhere so that resource ownership is
1667     // attributed to VideoResourceUpdater.
1668     constexpr int kImportance = 2;
1669
1670     // Resources are shared across processes and require a shared GUID to
1671     // prevent double counting the memory.
1672     if (software_compositor()) {
1673       base::UnguessableToken shm_guid =
1674           resource->AsSoftware()->GetSharedMemoryGuid();
1675       pmd->CreateSharedMemoryOwnershipEdge(dump->guid(), shm_guid, kImportance);
1676     } else {
1677       base::trace_event::MemoryAllocatorDumpGuid guid =
1678           gpu::GetSharedImageGUIDForTracing(resource->AsHardware()->mailbox());
1679       pmd->CreateSharedGlobalAllocatorDump(guid);
1680       pmd->AddOwnershipEdge(dump->guid(), guid, kImportance);
1681     }
1682   }
1683
1684   return true;
1685 }
1686
1687 VideoResourceUpdater::FrameResource::FrameResource() = default;
1688
1689 VideoResourceUpdater::FrameResource::FrameResource(viz::ResourceId id,
1690                                                    const gfx::Size& size)
1691     : id(id), size_in_pixels(size) {}
1692
1693 }  // namespace media