1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "media/base/video_frame.h"
10 #include "base/callback_helpers.h"
11 #include "base/logging.h"
12 #include "base/memory/aligned_memory.h"
13 #include "base/strings/string_piece.h"
14 #include "media/base/limits.h"
15 #include "media/base/video_util.h"
16 #include "third_party/skia/include/core/SkBitmap.h"
21 scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
22 VideoFrame::Format format,
23 const gfx::Size& coded_size,
24 const gfx::Rect& visible_rect,
25 const gfx::Size& natural_size,
26 base::TimeDelta timestamp) {
27 DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
28 scoped_refptr<VideoFrame> frame(new VideoFrame(
29 format, coded_size, visible_rect, natural_size, timestamp));
31 case VideoFrame::RGB32:
32 frame->AllocateRGB(4u);
34 case VideoFrame::YV12:
35 case VideoFrame::YV12A:
36 case VideoFrame::YV16:
37 case VideoFrame::I420:
41 LOG(FATAL) << "Unsupported frame format: " << format;
47 std::string VideoFrame::FormatToString(VideoFrame::Format format) {
49 case VideoFrame::UNKNOWN:
51 case VideoFrame::RGB32:
53 case VideoFrame::YV12:
55 case VideoFrame::YV16:
57 case VideoFrame::EMPTY:
59 case VideoFrame::I420:
61 case VideoFrame::NATIVE_TEXTURE:
62 return "NATIVE_TEXTURE";
63 #if defined(GOOGLE_TV)
64 case VideoFrame::HOLE:
67 case VideoFrame::YV12A:
70 NOTREACHED() << "Invalid videoframe format provided: " << format;
75 bool VideoFrame::IsValidConfig(VideoFrame::Format format,
76 const gfx::Size& coded_size,
77 const gfx::Rect& visible_rect,
78 const gfx::Size& natural_size) {
79 return (format != VideoFrame::UNKNOWN &&
80 !coded_size.IsEmpty() &&
81 coded_size.GetArea() <= limits::kMaxCanvas &&
82 coded_size.width() <= limits::kMaxDimension &&
83 coded_size.height() <= limits::kMaxDimension &&
84 !visible_rect.IsEmpty() &&
85 visible_rect.x() >= 0 && visible_rect.y() >= 0 &&
86 visible_rect.right() <= coded_size.width() &&
87 visible_rect.bottom() <= coded_size.height() &&
88 !natural_size.IsEmpty() &&
89 natural_size.GetArea() <= limits::kMaxCanvas &&
90 natural_size.width() <= limits::kMaxDimension &&
91 natural_size.height() <= limits::kMaxDimension);
95 scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
96 const scoped_refptr<MailboxHolder>& mailbox_holder,
97 uint32 texture_target,
98 const gfx::Size& coded_size,
99 const gfx::Rect& visible_rect,
100 const gfx::Size& natural_size,
101 base::TimeDelta timestamp,
102 const ReadPixelsCB& read_pixels_cb,
103 const base::Closure& no_longer_needed_cb) {
104 scoped_refptr<VideoFrame> frame(new VideoFrame(
105 NATIVE_TEXTURE, coded_size, visible_rect, natural_size, timestamp));
106 frame->texture_mailbox_holder_ = mailbox_holder;
107 frame->texture_target_ = texture_target;
108 frame->read_pixels_cb_ = read_pixels_cb;
109 frame->no_longer_needed_cb_ = no_longer_needed_cb;
114 void VideoFrame::ReadPixelsFromNativeTexture(const SkBitmap& pixels) {
115 DCHECK_EQ(format_, NATIVE_TEXTURE);
116 if (!read_pixels_cb_.is_null())
117 read_pixels_cb_.Run(pixels);
121 scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
123 const gfx::Size& coded_size,
124 const gfx::Rect& visible_rect,
125 const gfx::Size& natural_size,
128 base::SharedMemoryHandle handle,
129 base::TimeDelta timestamp,
130 const base::Closure& no_longer_needed_cb) {
131 if (data_size < AllocationSize(format, coded_size))
136 scoped_refptr<VideoFrame> frame(new VideoFrame(
137 format, coded_size, visible_rect, natural_size, timestamp));
138 frame->shared_memory_handle_ = handle;
139 frame->strides_[kYPlane] = coded_size.width();
140 frame->strides_[kUPlane] = coded_size.width() / 2;
141 frame->strides_[kVPlane] = coded_size.width() / 2;
142 frame->data_[kYPlane] = data;
143 frame->data_[kUPlane] = data + coded_size.GetArea();
144 frame->data_[kVPlane] = data + (coded_size.GetArea() * 5 / 4);
145 frame->no_longer_needed_cb_ = no_longer_needed_cb;
155 scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
157 const gfx::Size& coded_size,
158 const gfx::Rect& visible_rect,
159 const gfx::Size& natural_size,
166 base::TimeDelta timestamp,
167 const base::Closure& no_longer_needed_cb) {
168 DCHECK(format == YV12 || format == YV16 || format == I420) << format;
169 scoped_refptr<VideoFrame> frame(new VideoFrame(
170 format, coded_size, visible_rect, natural_size, timestamp));
171 frame->strides_[kYPlane] = y_stride;
172 frame->strides_[kUPlane] = u_stride;
173 frame->strides_[kVPlane] = v_stride;
174 frame->data_[kYPlane] = y_data;
175 frame->data_[kUPlane] = u_data;
176 frame->data_[kVPlane] = v_data;
177 frame->no_longer_needed_cb_ = no_longer_needed_cb;
182 scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
183 return new VideoFrame(
184 VideoFrame::EMPTY, gfx::Size(), gfx::Rect(), gfx::Size(),
189 scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
190 const gfx::Size& size,
191 uint8 y, uint8 u, uint8 v,
192 base::TimeDelta timestamp) {
193 DCHECK(IsValidConfig(VideoFrame::YV12, size, gfx::Rect(size), size));
194 scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
195 VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
196 FillYUV(frame.get(), y, u, v);
201 scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
202 const uint8 kBlackY = 0x00;
203 const uint8 kBlackUV = 0x80;
204 const base::TimeDelta kZero;
205 return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero);
208 #if defined(GOOGLE_TV)
// This block and other blocks wrapped around #if defined(GOOGLE_TV) are not
// maintained by the general compositor team. Please contact the following
// people instead:
//
// wonsik@chromium.org
// ycheo@chromium.org
217 scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
218 const gfx::Size& size) {
219 DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
220 scoped_refptr<VideoFrame> frame(new VideoFrame(
221 VideoFrame::HOLE, size, gfx::Rect(size), size, base::TimeDelta()));
227 size_t VideoFrame::NumPlanes(Format format) {
229 case VideoFrame::NATIVE_TEXTURE:
230 #if defined(GOOGLE_TV)
231 case VideoFrame::HOLE:
234 case VideoFrame::RGB32:
236 case VideoFrame::YV12:
237 case VideoFrame::YV16:
238 case VideoFrame::I420:
240 case VideoFrame::YV12A:
242 case VideoFrame::EMPTY:
243 case VideoFrame::UNKNOWN:
246 NOTREACHED() << "Unsupported video frame format: " << format;
250 static inline size_t RoundUp(size_t value, size_t alignment) {
251 // Check that |alignment| is a power of 2.
252 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
253 return ((value + (alignment - 1)) & ~(alignment-1));
257 size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
259 case VideoFrame::RGB32:
260 return coded_size.GetArea() * 4;
261 case VideoFrame::YV12:
262 case VideoFrame::I420: {
263 const size_t rounded_size =
264 RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
265 return rounded_size * 3 / 2;
267 case VideoFrame::YV12A: {
268 const size_t rounded_size =
269 RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
270 return rounded_size * 5 / 2;
272 case VideoFrame::YV16: {
273 const size_t rounded_size =
274 RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
275 return rounded_size * 2;
277 case VideoFrame::UNKNOWN:
278 case VideoFrame::EMPTY:
279 case VideoFrame::NATIVE_TEXTURE:
280 #if defined(GOOGLE_TV)
281 case VideoFrame::HOLE:
285 NOTREACHED() << "Unsupported video frame format: " << format;
289 // Release data allocated by AllocateRGB() or AllocateYUV().
290 static void ReleaseData(uint8* data) {
292 base::AlignedFree(data);
295 void VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
296 // Round up to align at least at a 16-byte boundary for each row.
297 // This is sufficient for MMX and SSE2 reads (movq/movdqa).
298 size_t bytes_per_row = RoundUp(coded_size_.width(),
299 kFrameSizeAlignment) * bytes_per_pixel;
300 size_t aligned_height = RoundUp(coded_size_.height(), kFrameSizeAlignment);
301 strides_[VideoFrame::kRGBPlane] = bytes_per_row;
302 data_[VideoFrame::kRGBPlane] = reinterpret_cast<uint8*>(
303 base::AlignedAlloc(bytes_per_row * aligned_height + kFrameSizePadding,
304 kFrameAddressAlignment));
305 no_longer_needed_cb_ = base::Bind(&ReleaseData, data_[VideoFrame::kRGBPlane]);
306 DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
307 COMPILE_ASSERT(0 == VideoFrame::kRGBPlane, RGB_data_must_be_index_0);
310 void VideoFrame::AllocateYUV() {
311 DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16 ||
312 format_ == VideoFrame::YV12A || format_ == VideoFrame::I420);
313 // Align Y rows at least at 16 byte boundaries. The stride for both
314 // YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for
315 // U and V applies to two rows of Y (one byte of UV for 4 bytes of Y), so in
316 // the case of YV12 the strides are identical for the same width surface, but
317 // the number of bytes allocated for YV12 is 1/2 the amount for U & V as
318 // YV16. We also round the height of the surface allocated to be an even
319 // number to avoid any potential of faulting by code that attempts to access
320 // the Y values of the final row, but assumes that the last row of U & V
321 // applies to a full two rows of Y. YV12A is the same as YV12, but with an
322 // additional alpha plane that has the same size and alignment as the Y plane.
324 size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
325 kFrameSizeAlignment);
326 size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
327 kFrameSizeAlignment);
328 // The *2 here is because some formats (e.g. h264) allow interlaced coding,
329 // and then the size needs to be a multiple of two macroblocks (vertically).
330 // See libavcodec/utils.c:avcodec_align_dimensions2().
331 size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
333 (format_ == VideoFrame::YV12 || format_ == VideoFrame::YV12A ||
334 format_ == VideoFrame::I420)
337 size_t y_bytes = y_height * y_stride;
338 size_t uv_bytes = uv_height * uv_stride;
339 size_t a_bytes = format_ == VideoFrame::YV12A ? y_bytes : 0;
341 // The extra line of UV being allocated is because h264 chroma MC
342 // overreads by one line in some cases, see libavcodec/utils.c:
343 // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
344 // put_h264_chroma_mc4_ssse3().
345 uint8* data = reinterpret_cast<uint8*>(
347 y_bytes + (uv_bytes * 2 + uv_stride) + a_bytes + kFrameSizePadding,
348 kFrameAddressAlignment));
349 no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
350 COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
351 data_[VideoFrame::kYPlane] = data;
352 data_[VideoFrame::kUPlane] = data + y_bytes;
353 data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
354 strides_[VideoFrame::kYPlane] = y_stride;
355 strides_[VideoFrame::kUPlane] = uv_stride;
356 strides_[VideoFrame::kVPlane] = uv_stride;
357 if (format_ == YV12A) {
358 data_[VideoFrame::kAPlane] = data + y_bytes + (2 * uv_bytes);
359 strides_[VideoFrame::kAPlane] = y_stride;
363 VideoFrame::VideoFrame(VideoFrame::Format format,
364 const gfx::Size& coded_size,
365 const gfx::Rect& visible_rect,
366 const gfx::Size& natural_size,
367 base::TimeDelta timestamp)
369 coded_size_(coded_size),
370 visible_rect_(visible_rect),
371 natural_size_(natural_size),
373 shared_memory_handle_(base::SharedMemory::NULLHandle()),
374 timestamp_(timestamp) {
375 memset(&strides_, 0, sizeof(strides_));
376 memset(&data_, 0, sizeof(data_));
379 VideoFrame::~VideoFrame() {
380 if (!no_longer_needed_cb_.is_null())
381 base::ResetAndReturn(&no_longer_needed_cb_).Run();
384 bool VideoFrame::IsValidPlane(size_t plane) const {
385 return (plane < NumPlanes(format_));
388 int VideoFrame::stride(size_t plane) const {
389 DCHECK(IsValidPlane(plane));
390 return strides_[plane];
393 int VideoFrame::row_bytes(size_t plane) const {
394 DCHECK(IsValidPlane(plane));
395 int width = coded_size_.width();
403 if (plane == kAPlane)
409 if (plane == kYPlane)
411 return RoundUp(width, 2) / 2;
417 // Intentionally leave out non-production formats.
418 NOTREACHED() << "Unsupported video frame format: " << format_;
422 int VideoFrame::rows(size_t plane) const {
423 DCHECK(IsValidPlane(plane));
424 int height = coded_size_.height();
431 if (plane == kAPlane)
436 if (plane == kYPlane)
438 return RoundUp(height, 2) / 2;
444 // Intentionally leave out non-production formats.
445 NOTREACHED() << "Unsupported video frame format: " << format_;
449 uint8* VideoFrame::data(size_t plane) const {
450 DCHECK(IsValidPlane(plane));
454 const scoped_refptr<VideoFrame::MailboxHolder>& VideoFrame::texture_mailbox()
456 DCHECK_EQ(format_, NATIVE_TEXTURE);
457 return texture_mailbox_holder_;
460 uint32 VideoFrame::texture_target() const {
461 DCHECK_EQ(format_, NATIVE_TEXTURE);
462 return texture_target_;
465 base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
466 return shared_memory_handle_;
469 bool VideoFrame::IsEndOfStream() const {
470 return format_ == VideoFrame::EMPTY;
473 void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
474 for (int plane = 0; plane < kMaxPlanes; ++plane) {
475 if (!IsValidPlane(plane))
477 for (int row = 0; row < rows(plane); ++row) {
478 base::MD5Update(context, base::StringPiece(
479 reinterpret_cast<char*>(data(plane) + stride(plane) * row),
485 VideoFrame::MailboxHolder::MailboxHolder(
486 const gpu::Mailbox& mailbox,
488 const TextureNoLongerNeededCallback& release_callback)
490 sync_point_(sync_point),
491 release_callback_(release_callback) {}
493 VideoFrame::MailboxHolder::~MailboxHolder() {
494 if (!release_callback_.is_null())
495 release_callback_.Run(sync_point_);