media/filters/vpx_video_decoder.cc (upstream Chromium 108.0.5359.1, platform/framework/web/chromium-efl.git)
// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/vpx_video_decoder.h"

#include <stddef.h>
#include <stdint.h>

#include <algorithm>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/feature_list.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/sys_byteorder.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
#include "media/base/video_aspect_ratio.h"
#include "media/filters/frame_buffer_pool.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8dx.h"
#include "third_party/libvpx/source/libvpx/vpx/vpx_decoder.h"
#include "third_party/libvpx/source/libvpx/vpx/vpx_frame_buffer.h"

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/planar_functions.h"

namespace media {

// Returns the number of threads.
static int GetVpxVideoDecoderThreadCount(const VideoDecoderConfig& config) {
  // vp8a doesn't really need more threads.
  int desired_threads = limits::kMinVideoDecodeThreads;

  // For VP9 decoding increase the number of decode threads to equal the
  // maximum number of tiles possible for higher resolution streams.
  if (config.codec() == VideoCodec::kVP9) {
    const int width = config.coded_size().width();
    if (width >= 3840)
      desired_threads = 16;
    else if (width >= 2560)
      desired_threads = 8;
    else if (width >= 1280)
      desired_threads = 4;
  }

  return VideoDecoder::GetRecommendedThreadCount(desired_threads);
}

static std::unique_ptr<vpx_codec_ctx> InitializeVpxContext(
    const VideoDecoderConfig& config) {
  auto context = std::make_unique<vpx_codec_ctx>();
  vpx_codec_dec_cfg_t vpx_config = {0};
  vpx_config.w = config.coded_size().width();
  vpx_config.h = config.coded_size().height();
  vpx_config.threads = GetVpxVideoDecoderThreadCount(config);

  vpx_codec_err_t status = vpx_codec_dec_init(context.get(),
                                              config.codec() == VideoCodec::kVP9
                                                  ? vpx_codec_vp9_dx()
                                                  : vpx_codec_vp8_dx(),
                                              &vpx_config, 0 /* flags */);
  if (status == VPX_CODEC_OK)
    return context;

  DLOG(ERROR) << "vpx_codec_dec_init() failed: "
              << vpx_codec_error(context.get());
  return nullptr;
}

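// GetVP9FrameBuffer() and ReleaseVP9FrameBuffer() below are the external
// frame buffer callbacks handed to libvpx through
// vpx_codec_set_frame_buffer_functions() in ConfigureDecoder(); they let VP9
// decode directly into memory owned by the FrameBufferPool instead of
// libvpx-internal buffers.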
static int32_t GetVP9FrameBuffer(void* user_priv,
                                 size_t min_size,
                                 vpx_codec_frame_buffer* fb) {
  DCHECK(user_priv);
  DCHECK(fb);
  FrameBufferPool* pool = static_cast<FrameBufferPool*>(user_priv);
  fb->data = pool->GetFrameBuffer(min_size, &fb->priv);
  fb->size = min_size;
  return fb->data ? 0 : VPX_CODEC_MEM_ERROR;
}

static int32_t ReleaseVP9FrameBuffer(void* user_priv,
                                     vpx_codec_frame_buffer* fb) {
  DCHECK(user_priv);
  DCHECK(fb);
  if (!fb->priv)
    return -1;

  FrameBufferPool* pool = static_cast<FrameBufferPool*>(user_priv);
  pool->ReleaseFrameBuffer(fb->priv);
  return 0;
}

// static
SupportedVideoDecoderConfigs VpxVideoDecoder::SupportedConfigs() {
  SupportedVideoDecoderConfigs supported_configs;
  supported_configs.emplace_back(/*profile_min=*/VP8PROFILE_ANY,
                                 /*profile_max=*/VP8PROFILE_ANY,
                                 /*coded_size_min=*/kDefaultSwDecodeSizeMin,
                                 /*coded_size_max=*/kDefaultSwDecodeSizeMax,
                                 /*allow_encrypted=*/false,
                                 /*require_encrypted=*/false);

  supported_configs.emplace_back(/*profile_min=*/VP9PROFILE_PROFILE0,
                                 /*profile_max=*/VP9PROFILE_PROFILE2,
                                 /*coded_size_min=*/kDefaultSwDecodeSizeMin,
                                 /*coded_size_max=*/kDefaultSwDecodeSizeMax,
                                 /*allow_encrypted=*/false,
                                 /*require_encrypted=*/false);
  return supported_configs;
}

VpxVideoDecoder::VpxVideoDecoder(OffloadState offload_state)
    : bind_callbacks_(offload_state == OffloadState::kNormal) {
  DETACH_FROM_SEQUENCE(sequence_checker_);
}

VpxVideoDecoder::~VpxVideoDecoder() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  CloseDecoder();
}

VideoDecoderType VpxVideoDecoder::GetDecoderType() const {
  return VideoDecoderType::kVpx;
}

void VpxVideoDecoder::Initialize(const VideoDecoderConfig& config,
                                 bool /* low_delay */,
                                 CdmContext* /* cdm_context */,
                                 InitCB init_cb,
                                 const OutputCB& output_cb,
                                 const WaitingCB& /* waiting_cb */) {
  DVLOG(1) << __func__ << ": " << config.AsHumanReadableString();
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(config.IsValidConfig());

  CloseDecoder();

  InitCB bound_init_cb = bind_callbacks_ ? BindToCurrentLoop(std::move(init_cb))
                                         : std::move(init_cb);
  if (config.is_encrypted()) {
    std::move(bound_init_cb)
        .Run(DecoderStatus::Codes::kUnsupportedEncryptionMode);
    return;
  }

  if (!ConfigureDecoder(config)) {
    std::move(bound_init_cb).Run(DecoderStatus::Codes::kUnsupportedConfig);
    return;
  }

  // Success!
  config_ = config;
  state_ = DecoderState::kNormal;
  output_cb_ = output_cb;
  std::move(bound_init_cb).Run(DecoderStatus::Codes::kOk);
}

void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
                             DecodeCB decode_cb) {
  DVLOG(3) << __func__ << ": " << buffer->AsHumanReadableString();
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(buffer);
  DCHECK(decode_cb);
  DCHECK_NE(state_, DecoderState::kUninitialized)
      << "Called Decode() before successful Initialize()";

  DecodeCB bound_decode_cb = bind_callbacks_
                                 ? BindToCurrentLoop(std::move(decode_cb))
                                 : std::move(decode_cb);

  if (state_ == DecoderState::kError) {
    std::move(bound_decode_cb).Run(DecoderStatus::Codes::kFailed);
    return;
  }

  if (state_ == DecoderState::kDecodeFinished) {
    std::move(bound_decode_cb).Run(DecoderStatus::Codes::kOk);
    return;
  }

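  // There is no queued output to flush at end of stream: each VpxDecode()
  // call below synchronously produces at most one frame, so an EOS buffer
  // just transitions the decoder to kDecodeFinished.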
  if (state_ == DecoderState::kNormal && buffer->end_of_stream()) {
    state_ = DecoderState::kDecodeFinished;
    std::move(bound_decode_cb).Run(DecoderStatus::Codes::kOk);
    return;
  }

  scoped_refptr<VideoFrame> video_frame;
  if (!VpxDecode(buffer.get(), &video_frame)) {
    state_ = DecoderState::kError;
    std::move(bound_decode_cb).Run(DecoderStatus::Codes::kFailed);
    return;
  }

  // We might get a successful VpxDecode but not a frame if only a partial
  // decode happened.
  if (video_frame) {
    video_frame->metadata().power_efficient = false;
    output_cb_.Run(video_frame);
  }

  // VideoDecoderShim expects |decode_cb| call after |output_cb_|.
  std::move(bound_decode_cb).Run(DecoderStatus::Codes::kOk);
}

void VpxVideoDecoder::Reset(base::OnceClosure reset_cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  state_ = DecoderState::kNormal;

  if (bind_callbacks_)
    BindToCurrentLoop(std::move(reset_cb)).Run();
  else
    std::move(reset_cb).Run();

  // Allow Initialize() to be called on another thread now.
  DETACH_FROM_SEQUENCE(sequence_checker_);
}

bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  if (config.codec() != VideoCodec::kVP8 && config.codec() != VideoCodec::kVP9)
    return false;

#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
  // When enabled, ffmpeg handles VP8 that doesn't have alpha, and
  // VpxVideoDecoder will handle VP8 with alpha. FFvp8 is being deprecated.
  // See http://crbug.com/992235.
  if (base::FeatureList::IsEnabled(kFFmpegDecodeOpaqueVP8) &&
      config.codec() == VideoCodec::kVP8 &&
      config.alpha_mode() == VideoDecoderConfig::AlphaMode::kIsOpaque) {
    return false;
  }
#endif

  DCHECK(!vpx_codec_);
  vpx_codec_ = InitializeVpxContext(config);
  if (!vpx_codec_)
    return false;

  // Configure VP9 to decode on our buffers to skip a data copy on
  // decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
  // copy the A plane.
  if (config.codec() == VideoCodec::kVP9) {
    DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
           VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);

    DCHECK(!memory_pool_);
    memory_pool_ = new FrameBufferPool();

    if (vpx_codec_set_frame_buffer_functions(
            vpx_codec_.get(), &GetVP9FrameBuffer, &ReleaseVP9FrameBuffer,
            memory_pool_.get())) {
      DLOG(ERROR) << "Failed to configure external buffers. "
                  << vpx_codec_error(vpx_codec_.get());
      return false;
    }

    vpx_codec_err_t status =
        vpx_codec_control(vpx_codec_.get(), VP9D_SET_LOOP_FILTER_OPT, 1);
    if (status != VPX_CODEC_OK) {
      DLOG(ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. "
                  << vpx_codec_error(vpx_codec_.get());
      return false;
    }
  }

  if (config.alpha_mode() == VideoDecoderConfig::AlphaMode::kIsOpaque)
    return true;

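  // Non-opaque streams carry the alpha channel as a second, independently
  // encoded VP8/VP9 stream in the DecoderBuffer side data (see
  // DecodeAlphaPlane()), so a separate decoder context is needed for it.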
  DCHECK(!vpx_codec_alpha_);
  vpx_codec_alpha_ = InitializeVpxContext(config);
  return !!vpx_codec_alpha_;
}

void VpxVideoDecoder::CloseDecoder() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);

  // Note: The vpx_codec_destroy() calls below don't release the memory
  // allocated for vpx_codec_ctx; they just release internal allocations, so we
  // still need std::unique_ptr to release the structure memory.
  if (vpx_codec_)
    vpx_codec_destroy(vpx_codec_.get());

  if (vpx_codec_alpha_)
    vpx_codec_destroy(vpx_codec_alpha_.get());

  vpx_codec_.reset();
  vpx_codec_alpha_.reset();

  if (memory_pool_) {
    memory_pool_->Shutdown();
    memory_pool_ = nullptr;
  }
}

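// Detach() is only valid when callbacks are not bound to the current loop
// (OffloadState::kOffloaded); it closes the decoder and detaches the sequence
// checker so a subsequent Initialize() may run on a different sequence.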
void VpxVideoDecoder::Detach() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(!bind_callbacks_);

  CloseDecoder();
  DETACH_FROM_SEQUENCE(sequence_checker_);
}

bool VpxVideoDecoder::VpxDecode(const DecoderBuffer* buffer,
                                scoped_refptr<VideoFrame>* video_frame) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(video_frame);
  DCHECK(!buffer->end_of_stream());

  {
    TRACE_EVENT1("media", "vpx_codec_decode", "buffer",
                 buffer->AsHumanReadableString());
    vpx_codec_err_t status =
        vpx_codec_decode(vpx_codec_.get(), buffer->data(), buffer->data_size(),
                         nullptr /* user_priv */, 0 /* deadline */);
    if (status != VPX_CODEC_OK) {
      DLOG(ERROR) << "vpx_codec_decode() error: "
                  << vpx_codec_err_to_string(status);
      return false;
    }
  }

  // Gets pointer to decoded data.
  vpx_codec_iter_t iter = NULL;
  const vpx_image_t* vpx_image = vpx_codec_get_frame(vpx_codec_.get(), &iter);
  if (!vpx_image) {
    *video_frame = nullptr;
    return true;
  }

  const vpx_image_t* vpx_image_alpha = nullptr;
  const auto alpha_decode_status =
      DecodeAlphaPlane(vpx_image, &vpx_image_alpha, buffer);
  if (alpha_decode_status == kAlphaPlaneError) {
    return false;
  } else if (alpha_decode_status == kNoAlphaPlaneData) {
    *video_frame = nullptr;
    return true;
  }

  if (!CopyVpxImageToVideoFrame(vpx_image, vpx_image_alpha, video_frame))
    return false;

  if (vpx_image_alpha && config_.codec() == VideoCodec::kVP8) {
    libyuv::CopyPlane(
        vpx_image_alpha->planes[VPX_PLANE_Y],
        vpx_image_alpha->stride[VPX_PLANE_Y],
        (*video_frame)->GetWritableVisibleData(VideoFrame::kAPlane),
        (*video_frame)->stride(VideoFrame::kAPlane),
        (*video_frame)->visible_rect().width(),
        (*video_frame)->visible_rect().height());
  }

  (*video_frame)->set_timestamp(buffer->timestamp());
  (*video_frame)->set_hdr_metadata(config_.hdr_metadata());

  // Prefer the color space from the config if available. It generally comes
  // from the color tag which is more expressive than the vp8 and vp9 bitstream.
  if (config_.color_space_info().IsSpecified()) {
    (*video_frame)
        ->set_color_space(config_.color_space_info().ToGfxColorSpace());
    return true;
  }

  auto primaries = gfx::ColorSpace::PrimaryID::INVALID;
  auto transfer = gfx::ColorSpace::TransferID::INVALID;
  auto matrix = gfx::ColorSpace::MatrixID::INVALID;
  auto range = vpx_image->range == VPX_CR_FULL_RANGE
                   ? gfx::ColorSpace::RangeID::FULL
                   : gfx::ColorSpace::RangeID::LIMITED;

  switch (vpx_image->cs) {
    case VPX_CS_BT_601:
    case VPX_CS_SMPTE_170:
      primaries = gfx::ColorSpace::PrimaryID::SMPTE170M;
      transfer = gfx::ColorSpace::TransferID::SMPTE170M;
      matrix = gfx::ColorSpace::MatrixID::SMPTE170M;
      break;
    case VPX_CS_SMPTE_240:
      primaries = gfx::ColorSpace::PrimaryID::SMPTE240M;
      transfer = gfx::ColorSpace::TransferID::SMPTE240M;
      matrix = gfx::ColorSpace::MatrixID::SMPTE240M;
      break;
    case VPX_CS_BT_709:
      primaries = gfx::ColorSpace::PrimaryID::BT709;
      transfer = gfx::ColorSpace::TransferID::BT709;
      matrix = gfx::ColorSpace::MatrixID::BT709;
      break;
    case VPX_CS_BT_2020:
      primaries = gfx::ColorSpace::PrimaryID::BT2020;
      if (vpx_image->bit_depth >= 12)
        transfer = gfx::ColorSpace::TransferID::BT2020_12;
      else if (vpx_image->bit_depth >= 10)
        transfer = gfx::ColorSpace::TransferID::BT2020_10;
      else
        transfer = gfx::ColorSpace::TransferID::BT709;
      matrix = gfx::ColorSpace::MatrixID::BT2020_NCL;  // is this right?
      break;
    case VPX_CS_SRGB:
      primaries = gfx::ColorSpace::PrimaryID::BT709;
      transfer = gfx::ColorSpace::TransferID::SRGB;
      matrix = gfx::ColorSpace::MatrixID::GBR;
      break;
    default:
      break;
  }

  // TODO(ccameron): Set a color space even for unspecified values.
  if (primaries != gfx::ColorSpace::PrimaryID::INVALID) {
    (*video_frame)
        ->set_color_space(gfx::ColorSpace(primaries, transfer, matrix, range));
  }

  return true;
}

VpxVideoDecoder::AlphaDecodeStatus VpxVideoDecoder::DecodeAlphaPlane(
    const struct vpx_image* vpx_image,
    const struct vpx_image** vpx_image_alpha,
    const DecoderBuffer* buffer) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  if (!vpx_codec_alpha_ || buffer->side_data_size() < 8) {
    return kAlphaPlaneProcessed;
  }

  // First 8 bytes of side data is |side_data_id| in big endian.
  const uint64_t side_data_id = base::NetToHost64(
      *(reinterpret_cast<const uint64_t*>(buffer->side_data())));
  if (side_data_id != 1) {
    return kAlphaPlaneProcessed;
  }

  // Try and decode buffer->side_data() minus the first 8 bytes as a full
  // frame.
  {
    TRACE_EVENT1("media", "vpx_codec_decode_alpha", "buffer",
                 buffer->AsHumanReadableString());
    vpx_codec_err_t status =
        vpx_codec_decode(vpx_codec_alpha_.get(), buffer->side_data() + 8,
                         buffer->side_data_size() - 8, nullptr /* user_priv */,
                         0 /* deadline */);
    if (status != VPX_CODEC_OK) {
      DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: "
                  << vpx_codec_error(vpx_codec_alpha_.get());
      return kAlphaPlaneError;
    }
  }

  vpx_codec_iter_t iter_alpha = NULL;
  *vpx_image_alpha = vpx_codec_get_frame(vpx_codec_alpha_.get(), &iter_alpha);
  if (!(*vpx_image_alpha)) {
    return kNoAlphaPlaneData;
  }

  if ((*vpx_image_alpha)->d_h != vpx_image->d_h ||
      (*vpx_image_alpha)->d_w != vpx_image->d_w) {
    DLOG(ERROR) << "The alpha plane dimensions are not the same as the "
                   "image dimensions.";
    return kAlphaPlaneError;
  }

  return kAlphaPlaneProcessed;
}

bool VpxVideoDecoder::CopyVpxImageToVideoFrame(
    const struct vpx_image* vpx_image,
    const struct vpx_image* vpx_image_alpha,
    scoped_refptr<VideoFrame>* video_frame) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(vpx_image);

  VideoPixelFormat codec_format;
  switch (vpx_image->fmt) {
    case VPX_IMG_FMT_I420:
      codec_format = vpx_image_alpha ? PIXEL_FORMAT_I420A : PIXEL_FORMAT_I420;
      break;

    case VPX_IMG_FMT_I422:
      codec_format = PIXEL_FORMAT_I422;
      break;

    case VPX_IMG_FMT_I444:
      codec_format = PIXEL_FORMAT_I444;
      break;

    case VPX_IMG_FMT_I42016:
      switch (vpx_image->bit_depth) {
        case 10:
          codec_format = PIXEL_FORMAT_YUV420P10;
          break;
        case 12:
          codec_format = PIXEL_FORMAT_YUV420P12;
          break;
        default:
          DLOG(ERROR) << "Unsupported bit depth: " << vpx_image->bit_depth;
          return false;
      }
      break;

    case VPX_IMG_FMT_I42216:
      switch (vpx_image->bit_depth) {
        case 10:
          codec_format = PIXEL_FORMAT_YUV422P10;
          break;
        case 12:
          codec_format = PIXEL_FORMAT_YUV422P12;
          break;
        default:
          DLOG(ERROR) << "Unsupported bit depth: " << vpx_image->bit_depth;
          return false;
      }
      break;

    case VPX_IMG_FMT_I44416:
      switch (vpx_image->bit_depth) {
        case 10:
          codec_format = PIXEL_FORMAT_YUV444P10;
          break;
        case 12:
          codec_format = PIXEL_FORMAT_YUV444P12;
          break;
        default:
          DLOG(ERROR) << "Unsupported bit depth: " << vpx_image->bit_depth;
          return false;
      }
      break;

    default:
      DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt;
      return false;
  }

  // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct
  // coded width is necessary to allow coalesced memory access, which may avoid
  // frame copies. Setting the correct coded height, however, does not have any
  // benefit, and only risks copying too much data.
  const gfx::Size coded_size(vpx_image->w, vpx_image->d_h);
  const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h);
  // Compute natural size by scaling visible size by *pixel* aspect ratio. Note
  // that we could instead use vpx_image r_w and r_h, but doing so would allow
  // pixel aspect ratio to change on a per-frame basis which would make
  // vpx_video_decoder inconsistent with decoders where changes to
  // pixel aspect ratio are not surfaced (e.g. Android MediaCodec).
  const gfx::Size natural_size =
      config_.aspect_ratio().GetNaturalSize(gfx::Rect(visible_size));

  if (memory_pool_) {
    DCHECK_EQ(VideoCodec::kVP9, config_.codec());
    if (vpx_image_alpha) {
      size_t alpha_plane_size =
          vpx_image_alpha->stride[VPX_PLANE_Y] * vpx_image_alpha->d_h;
      uint8_t* alpha_plane = memory_pool_->AllocateAlphaPlaneForFrameBuffer(
          alpha_plane_size, vpx_image->fb_priv);
      if (!alpha_plane)  // In case of OOM, abort copy.
        return false;
      libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
                        vpx_image_alpha->stride[VPX_PLANE_Y], alpha_plane,
                        vpx_image_alpha->stride[VPX_PLANE_Y],
                        vpx_image_alpha->d_w, vpx_image_alpha->d_h);
      *video_frame = VideoFrame::WrapExternalYuvaData(
          codec_format, coded_size, gfx::Rect(visible_size), natural_size,
          vpx_image->stride[VPX_PLANE_Y], vpx_image->stride[VPX_PLANE_U],
          vpx_image->stride[VPX_PLANE_V], vpx_image_alpha->stride[VPX_PLANE_Y],
          vpx_image->planes[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_U],
          vpx_image->planes[VPX_PLANE_V], alpha_plane, kNoTimestamp);
    } else {
      *video_frame = VideoFrame::WrapExternalYuvData(
          codec_format, coded_size, gfx::Rect(visible_size), natural_size,
          vpx_image->stride[VPX_PLANE_Y], vpx_image->stride[VPX_PLANE_U],
          vpx_image->stride[VPX_PLANE_V], vpx_image->planes[VPX_PLANE_Y],
          vpx_image->planes[VPX_PLANE_U], vpx_image->planes[VPX_PLANE_V],
          kNoTimestamp);
    }
    if (!(*video_frame))
      return false;

    video_frame->get()->AddDestructionObserver(
        memory_pool_->CreateFrameCallback(vpx_image->fb_priv));
    return true;
  }

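  // Without the external frame buffer pool (the VP8 path), the decoded planes
  // must be copied out of libvpx's internal buffers into a frame obtained
  // from |frame_pool_|.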
  *video_frame = frame_pool_.CreateFrame(codec_format, visible_size,
                                         gfx::Rect(visible_size), natural_size,
                                         kNoTimestamp);
  if (!(*video_frame))
    return false;

  for (int plane = 0; plane < 3; plane++) {
    libyuv::CopyPlane(vpx_image->planes[plane], vpx_image->stride[plane],
                      (*video_frame)->GetWritableVisibleData(plane),
                      (*video_frame)->stride(plane),
                      (*video_frame)->row_bytes(plane),
                      (*video_frame)->rows(plane));
  }

  return true;
}

}  // namespace media