1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 // The bulk of this file is support code; sorry about that. Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 // to be written sequentially and wait for the decode client to see certain
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
19 #include <sys/types.h>
24 // Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
25 // gtest uses as struct names (inside a namespace). This means that
26 // #include'ing gtest after anything that pulls in X.h fails to compile.
27 // This is http://code.google.com/p/googletest/issues/detail?id=371
28 #include "testing/gtest/include/gtest/gtest.h"
30 #include "base/at_exit.h"
31 #include "base/bind.h"
32 #include "base/command_line.h"
33 #include "base/file_util.h"
34 #include "base/files/file.h"
35 #include "base/format_macros.h"
37 #include "base/message_loop/message_loop_proxy.h"
38 #include "base/process/process.h"
39 #include "base/stl_util.h"
40 #include "base/strings/string_number_conversions.h"
41 #include "base/strings/string_split.h"
42 #include "base/strings/stringize_macros.h"
43 #include "base/strings/stringprintf.h"
44 #include "base/strings/utf_string_conversions.h"
45 #include "base/synchronization/condition_variable.h"
46 #include "base/synchronization/lock.h"
47 #include "base/synchronization/waitable_event.h"
48 #include "base/threading/thread.h"
49 #include "content/common/gpu/media/rendering_helper.h"
50 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
51 #include "content/public/common/content_switches.h"
52 #include "media/filters/h264_parser.h"
53 #include "ui/gfx/codec/png_codec.h"
56 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
57 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
58 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
59 #include "content/common/gpu/media/v4l2_video_device.h"
60 #elif (defined(OS_CHROMEOS) || defined(OS_LINUX)) && defined(ARCH_CPU_X86_FAMILY)
61 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
62 #include "content/common/gpu/media/vaapi_wrapper.h"
64 #include "ui/gl/gl_implementation.h"
67 #error The VideoAccelerator tests are not supported on this platform.
using media::VideoDecodeAccelerator;

// Values optionally filled in from flags; see main() below.
// The syntax of multiple test videos is:
//   test-video1;test-video2;test-video3
// where only the first video is required and other optional videos would be
// decoded by concurrent decoders.
// The syntax of each test-video is:
//   filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender
// where only the first field is required. Value details:
// - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8 stream.
// - |width| and |height| are in pixels.
// - |numframes| is the number of picture frames in the file.
// - |numfragments| NALU (h264) or frame (VP8) count in the stream.
// - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
//   expected to be achieved with and without rendering to the screen, resp.
//   (the latter tests just decode speed).
// - |profile| is the media::VideoCodecProfile set during Initialization.
// An empty value for a numeric field means "ignore".
const base::FilePath::CharType* g_test_video_data =
    // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
    FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");

// The file path of the test output log. This is used to communicate the test
// results to CrOS autotests. We can enable the log and specify the filename by
// the "--output_log" switch.
const base::FilePath::CharType* g_output_log = NULL;

// Target rendering rate; 0 means render as fast as possible.
// The value is set by the switch "--rendering_fps".
double g_rendering_fps = 0;

// Disable rendering, the value is set by the switch "--disable_rendering".
bool g_disable_rendering = false;

// Magic constants for differentiating the reasons for NotifyResetDone being
// called. All negative, so they can never collide with a real frame number.
// Reset() just after calling Decode() with a fragment containing config info.
RESET_AFTER_FIRST_CONFIG_INFO = -4,
START_OF_STREAM_RESET = -3,
MID_STREAM_RESET = -2,
END_OF_STREAM_RESET = -1

// Upper bound on the frame number at which a mid-stream reset is exercised.
const int kMaxResetAfterFrameNum = 100;
// After this many frames, ReusePictureBuffer() calls are delayed by
// |kReuseDelay| to exercise late buffer reuse.
const int kMaxFramesToDelayReuse = 64;
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
// Simulate WebRTC and call VDA::Decode 30 times per second.
const int kWebRtcDecodeCallsPerSecond = 30;
// Per-video parameters parsed from the test-video spec (see the syntax
// documented above g_test_video_data), plus the raw stream bytes once read.
struct TestVideoFile {
  explicit TestVideoFile(base::FilePath::StringType file_name)
      : file_name(file_name),
        min_fps_no_render(-1),
        profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
        reset_after_frame_num(END_OF_STREAM_RESET) {

  base::FilePath::StringType file_name;
  // Minimum acceptable decode-only fps; -1 means "ignore".
  int min_fps_no_render;
  media::VideoCodecProfile profile;
  // Frame number to Reset() after, or one of the negative magic constants.
  int reset_after_frame_num;
  // Raw encoded stream contents read from |file_name|.
  std::string data_str;
// Presumed minimal display size.
// We subtract one pixel from the width because some ARM chromebooks do not
// support two fullscreen app running at the same time. See crbug.com/270064.
const gfx::Size kThumbnailsDisplaySize(1366 - 1, 768);
// Size of the page onto which thumbnails are composited for MD5 checking.
const gfx::Size kThumbnailsPageSize(1600, 1200);
// Size of each individual rendered thumbnail.
const gfx::Size kThumbnailSize(160, 120);
// An MD5 digest rendered as hex is always 32 characters.
const int kMD5StringLength = 32;
// Read in golden MD5s for the thumbnailed rendering of this video.
// The goldens live next to the video file, with ".md5" appended to its name,
// one MD5 per line. Lines that are empty or start with '#' are skipped;
// every remaining line is CHECKed to be a well-formed 32-char hex MD5.
void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
                             std::vector<std::string>* md5_strings) {
  base::FilePath filepath(video_file->file_name);
  filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
  std::string all_md5s;
  base::ReadFileToString(filepath, &all_md5s);
  base::SplitString(all_md5s, '\n', md5_strings);
  // Check these are legitimate MD5s.
  for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
       md5_string != md5_strings->end(); ++md5_string) {
    // Ignore the empty string added by SplitString
    if (!md5_string->length())
    // Skip comment lines.
    if (md5_string->at(0) == '#')
    CHECK_EQ(static_cast<int>(md5_string->length()),
             kMD5StringLength) << *md5_string;
    bool hex_only = std::count_if(md5_string->begin(),
                                  md5_string->end(), isxdigit) ==
    CHECK(hex_only) << *md5_string;
  // At least one golden MD5 must be present for the test to be meaningful.
  CHECK_GE(md5_strings->size(), 1U) << all_md5s;
// State of the GLRenderingVDAClient below. Order matters here as the test
// makes assumptions about it (states are cascaded through in DeleteDecoder(),
// and numeric comparisons like CHECK_LT(state_, CS_RESET) rely on the order).
CS_MAX,  // Must be last entry.
// A wrapper client that throttles the PictureReady callbacks to a given rate.
// It may drops or queues frame to deliver them on time.
class ThrottlingVDAClient : public VideoDecodeAccelerator::Client,
                            public base::SupportsWeakPtr<ThrottlingVDAClient> {
  // Callback invoked whan the picture is dropped and should be reused for
  // the decoder again.
  typedef base::Callback<void(int32 picture_buffer_id)> ReusePictureCB;

  // Does not take ownership of |client|, which must outlive this object.
  ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
                      ReusePictureCB reuse_picture_cb);
  virtual ~ThrottlingVDAClient();

  // VideoDecodeAccelerator::Client implementation
  virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                     const gfx::Size& dimensions,
                                     uint32 texture_target) OVERRIDE;
  virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
  virtual void PictureReady(const media::Picture& picture) OVERRIDE;
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
  virtual void NotifyFlushDone() OVERRIDE;
  virtual void NotifyResetDone() OVERRIDE;
  virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;

  // Total pictures seen so far (counted on arrival, not on delivery).
  int num_decoded_frames() { return num_decoded_frames_; }

  // Delivers the head of |pending_pictures_| to |client_| (or drops it if
  // late). |version| guards against stale posted tasks after a reset.
  void CallClientPictureReady(int version);

  VideoDecodeAccelerator::Client* client_;  // Not owned.
  ReusePictureCB reuse_picture_cb_;
  // Deadline for delivering the next frame; null until the first delivery.
  base::TimeTicks next_frame_delivered_time_;
  // Interval between deliveries, derived from the target fps.
  base::TimeDelta frame_duration_;

  int num_decoded_frames_;

  // Frames waiting to be delivered on schedule.
  std::deque<media::Picture> pending_pictures_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ThrottlingVDAClient);
ThrottlingVDAClient::ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
                                         ReusePictureCB reuse_picture_cb)
    : reuse_picture_cb_(reuse_picture_cb),
      num_decoded_frames_(0),
  // One frame delivery every 1/fps seconds.
  frame_duration_ = base::TimeDelta::FromSeconds(1) / fps;
255 ThrottlingVDAClient::~ThrottlingVDAClient() {}
// Pass-through: buffer provisioning is not throttled.
void ThrottlingVDAClient::ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                                const gfx::Size& dimensions,
                                                uint32 texture_target) {
  client_->ProvidePictureBuffers(
      requested_num_of_buffers, dimensions, texture_target);
// Pass-through: dismissals are forwarded unchanged.
void ThrottlingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
  client_->DismissPictureBuffer(picture_buffer_id);
// Queues the picture and, if the queue was empty, schedules delivery so
// frames reach |client_| at the configured rate instead of immediately.
void ThrottlingVDAClient::PictureReady(const media::Picture& picture) {
  ++num_decoded_frames_;

  if (pending_pictures_.empty()) {
    // Deliver immediately before the first frame; otherwise wait until the
    // next scheduled delivery time.
    base::TimeDelta delay =
        next_frame_delivered_time_.is_null()
            : next_frame_delivered_time_ - base::TimeTicks::Now();
    base::MessageLoop::current()->PostDelayedTask(
        base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
  pending_pictures_.push_back(picture);
// Delivers (or drops, if already past its deadline) the oldest pending
// picture, then reschedules itself while pictures remain queued.
void ThrottlingVDAClient::CallClientPictureReady(int version) {
  // Just return if we have reset the decoder
  if (version != stream_version_)

  base::TimeTicks now = base::TimeTicks::Now();

  // First delivery establishes the schedule baseline.
  if (next_frame_delivered_time_.is_null())
    next_frame_delivered_time_ = now;

  if (next_frame_delivered_time_ + frame_duration_ < now) {
    // Too late, drop the frame
    reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
    client_->PictureReady(pending_pictures_.front());

  pending_pictures_.pop_front();
  next_frame_delivered_time_ += frame_duration_;
  if (!pending_pictures_.empty()) {
    base::MessageLoop::current()->PostDelayedTask(
        base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
        next_frame_delivered_time_ - base::TimeTicks::Now());
// Pass-through.
void ThrottlingVDAClient::NotifyInitializeDone() {
  client_->NotifyInitializeDone();
// Pass-through.
void ThrottlingVDAClient::NotifyEndOfBitstreamBuffer(
    int32 bitstream_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(bitstream_buffer_id);
// Defers the flush notification until all pending pictures have been
// delivered on schedule, so the client never sees FlushDone before the
// frames that precede it.
void ThrottlingVDAClient::NotifyFlushDone() {
  if (!pending_pictures_.empty()) {
    // Re-post ourselves to run after the delivery queue has drained.
    base::MessageLoop::current()->PostDelayedTask(
        base::Bind(&ThrottlingVDAClient::NotifyFlushDone,
                   base::Unretained(this)),
        next_frame_delivered_time_ - base::TimeTicks::Now());
  client_->NotifyFlushDone();
// Drops all queued pictures (returning their buffers via the reuse callback)
// and clears the delivery schedule before forwarding the reset notification.
void ThrottlingVDAClient::NotifyResetDone() {
  while (!pending_pictures_.empty()) {
    reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
    pending_pictures_.pop_front();
  next_frame_delivered_time_ = base::TimeTicks();
  client_->NotifyResetDone();
// Pass-through.
void ThrottlingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
// Client that can accept callbacks from a VideoDecodeAccelerator and is used by
// the tests to drive decoding and verify behavior.
class GLRenderingVDAClient
    : public VideoDecodeAccelerator::Client,
      public base::SupportsWeakPtr<GLRenderingVDAClient> {
  // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
  // |num_play_throughs| indicates how many times to play through the video.
  // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
  // Reset() should be done after that frame number is delivered, or
  // END_OF_STREAM_RESET to indicate no mid-stream Reset().
  // |delete_decoder_state| indicates when the underlying decoder should be
  // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
  // calls have been made, N>=0 means interpret as ClientState.
  // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
  // last play-through (governed by |num_play_throughs|).
  // |rendering_fps| indicates the target rendering fps. 0 means no target fps
  // and it would render as fast as possible.
  // |suppress_rendering| indicates GL rendering is suppressed or not.
  // After |delay_reuse_after_frame_num| frame has been delivered, the client
  // will start delaying the call to ReusePictureBuffer() for kReuseDelay.
  // |decode_calls_per_second| is the number of VDA::Decode calls per second.
  // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
  GLRenderingVDAClient(RenderingHelper* rendering_helper,
                       int rendering_window_id,
                       ClientStateNotification<ClientState>* note,
                       const std::string& encoded_data,
                       int num_in_flight_decodes,
                       int num_play_throughs,
                       int reset_after_frame_num,
                       int delete_decoder_state,
                       media::VideoCodecProfile profile,
                       double rendering_fps,
                       bool suppress_rendering,
                       int delay_reuse_after_frame_num,
                       int decode_calls_per_second);
  virtual ~GLRenderingVDAClient();
  // Instantiates the platform-specific VDA, initializes it, and kicks off
  // decoding.
  void CreateAndStartDecoder();

  // VideoDecodeAccelerator::Client implementation.
  // The heart of the Client.
  virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                     const gfx::Size& dimensions,
                                     uint32 texture_target) OVERRIDE;
  virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
  virtual void PictureReady(const media::Picture& picture) OVERRIDE;
  // Simple state changes.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
  virtual void NotifyFlushDone() OVERRIDE;
  virtual void NotifyResetDone() OVERRIDE;
  virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;

  // Writes per-frame delivery timing (deltas between frames) to |output|.
  void OutputFrameDeliveryTimes(base::File* output);

  // Called by the throttling client when it drops a frame; returns the
  // buffer to the decoder for reuse.
  void NotifyFrameDropped(int32 picture_buffer_id);

  // Simple getters for inspecting the state of the Client.
  int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
  int num_skipped_fragments() { return num_skipped_fragments_; }
  int num_queued_fragments() { return num_queued_fragments_; }
  int num_decoded_frames();
  double frames_per_second();
  // Return the median of the decode time in milliseconds.
  int decode_time_median();
  bool decoder_deleted() { return !decoder_.get(); }

  typedef std::map<int, media::PictureBuffer*> PictureBufferById;

  // Advances |state_| and notifies |note_|; may trigger decoder deletion.
  void SetState(ClientState new_state);

  // Delete the associated decoder helper.
  void DeleteDecoder();

  // Compute & return the first encoded bytes (including a start frame) to send
  // to the decoder, starting at |start_pos| and returning one fragment. Skips
  // to the first decodable position.
  std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
  // Compute & return the encoded bytes of next fragment to send to the decoder
  // (based on |start_pos|).
  std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
  // Helpers for GetBytesForNextFragment above.
  void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);  // For h.264.
  std::string GetBytesForNextFrame(
      size_t start_pos, size_t* end_pos);  // For VP8.

  // Request decode of the next fragment in the encoded data.
  void DecodeNextFragment();

  RenderingHelper* rendering_helper_;  // Not owned.
  int rendering_window_id_;
  std::string encoded_data_;
  const int num_in_flight_decodes_;
  // Number of Decode() calls issued but not yet acknowledged.
  int outstanding_decodes_;
  size_t encoded_data_next_pos_to_decode_;
  int next_bitstream_buffer_id_;
  ClientStateNotification<ClientState>* note_;  // Not owned.
  scoped_ptr<VideoDecodeAccelerator> decoder_;
  // Texture ids created for the decoder and not yet deleted.
  std::set<int> outstanding_texture_ids_;
  int remaining_play_throughs_;
  int reset_after_frame_num_;
  int delete_decoder_state_;
  int num_skipped_fragments_;
  int num_queued_fragments_;
  int num_decoded_frames_;
  int num_done_bitstream_buffers_;
  PictureBufferById picture_buffers_by_id_;
  base::TimeTicks initialize_done_ticks_;
  media::VideoCodecProfile profile_;
  GLenum texture_target_;
  bool suppress_rendering_;
  std::vector<base::TimeTicks> frame_delivery_times_;
  int delay_reuse_after_frame_num_;
  // Non-NULL iff a target rendering fps was requested; wraps |this| as the
  // decoder's client to pace PictureReady callbacks.
  scoped_ptr<ThrottlingVDAClient> throttling_client_;
  // A map from bitstream buffer id to the decode start time of the buffer.
  std::map<int, base::TimeTicks> decode_start_time_;
  // The decode time of all decoded frames.
  std::vector<base::TimeDelta> decode_time_;
  // The number of VDA::Decode calls per second. This is to simulate webrtc.
  int decode_calls_per_second_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
GLRenderingVDAClient::GLRenderingVDAClient(
    RenderingHelper* rendering_helper,
    int rendering_window_id,
    ClientStateNotification<ClientState>* note,
    const std::string& encoded_data,
    int num_in_flight_decodes,
    int num_play_throughs,
    int reset_after_frame_num,
    int delete_decoder_state,
    media::VideoCodecProfile profile,
    double rendering_fps,
    bool suppress_rendering,
    int delay_reuse_after_frame_num,
    int decode_calls_per_second)
    : rendering_helper_(rendering_helper),
      rendering_window_id_(rendering_window_id),
      encoded_data_(encoded_data),
      num_in_flight_decodes_(num_in_flight_decodes),
      outstanding_decodes_(0),
      encoded_data_next_pos_to_decode_(0),
      next_bitstream_buffer_id_(0),
      remaining_play_throughs_(num_play_throughs),
      reset_after_frame_num_(reset_after_frame_num),
      delete_decoder_state_(delete_decoder_state),
      num_skipped_fragments_(0),
      num_queued_fragments_(0),
      num_decoded_frames_(0),
      num_done_bitstream_buffers_(0),
      suppress_rendering_(suppress_rendering),
      delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
      decode_calls_per_second_(decode_calls_per_second) {
  // Validate constructor contract (see class comment).
  CHECK_GT(num_in_flight_decodes, 0);
  CHECK_GT(num_play_throughs, 0);
  CHECK_GE(rendering_fps, 0);
  // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
  if (decode_calls_per_second_ > 0)
    CHECK_EQ(1, num_in_flight_decodes_);

  // Default to H264 baseline if no profile provided.
  profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
                  : media::H264PROFILE_BASELINE);

  // Only interpose the throttling wrapper when a target fps was requested;
  // dropped frames come back to us via NotifyFrameDropped().
  if (rendering_fps > 0)
    throttling_client_.reset(new ThrottlingVDAClient(
        base::Bind(&GLRenderingVDAClient::NotifyFrameDropped,
                   base::Unretained(this))));
GLRenderingVDAClient::~GLRenderingVDAClient() {
  DeleteDecoder();  // Clean up in case of expected error.
  CHECK(decoder_deleted());
  // Owned PictureBuffer objects are heap-allocated in ProvidePictureBuffers.
  STLDeleteValues(&picture_buffers_by_id_);
  SetState(CS_DESTROYED);
542 static bool DoNothingReturnTrue() { return true; }
// Instantiates the platform-appropriate VDA (DXVA on Windows, V4L2 on ARM
// Chrome OS, VA-API on x86 Chrome OS/Linux), wires up the (possibly
// throttled) client, and initializes decoding with |profile_|.
void GLRenderingVDAClient::CreateAndStartDecoder() {
  CHECK(decoder_deleted());
  CHECK(!decoder_.get());

  // When throttling, the decoder talks to the throttling wrapper, which in
  // turn forwards to |this|.
  VideoDecodeAccelerator::Client* client = this;
  base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
  if (throttling_client_) {
    client = throttling_client_.get();
    weak_client = throttling_client_->AsWeakPtr();
      new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue)));
#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
  scoped_ptr<V4L2Device> device = V4L2Device::Create();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
  decoder_.reset(new V4L2VideoDecodeAccelerator(
      static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
      base::Bind(&DoNothingReturnTrue),
      base::MessageLoopProxy::current()));
#elif (defined(OS_CHROMEOS) || defined(OS_LINUX)) && defined(ARCH_CPU_X86_FAMILY)
  CHECK_EQ(gfx::kGLImplementationDesktopGL, gfx::GetGLImplementation())
      << "Hardware video decode does not work with OSMesa";
  decoder_.reset(new VaapiVideoDecodeAccelerator(
      static_cast<Display*>(rendering_helper_->GetGLDisplay()),
      base::Bind(&DoNothingReturnTrue)));
  CHECK(decoder_.get());
  SetState(CS_DECODER_SET);
  // SetState may have deleted the decoder (per |delete_decoder_state_|).
  if (decoder_deleted())
  CHECK(decoder_->Initialize(profile_, client));
// Creates |requested_num_of_buffers| GL textures (via the rendering helper)
// and hands the corresponding PictureBuffers to the decoder.
void GLRenderingVDAClient::ProvidePictureBuffers(
    uint32 requested_num_of_buffers,
    const gfx::Size& dimensions,
    uint32 texture_target) {
  if (decoder_deleted())
  std::vector<media::PictureBuffer> buffers;

  texture_target_ = texture_target;
  for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
    // Ids are assigned sequentially; existing map size is the next free id.
    uint32 id = picture_buffers_by_id_.size();
    // Texture creation happens on the rendering thread; wait for completion.
    base::WaitableEvent done(false, false);
    rendering_helper_->CreateTexture(
        rendering_window_id_, texture_target_, &texture_id, &done);
    CHECK(outstanding_texture_ids_.insert(texture_id).second);
    media::PictureBuffer* buffer =
        new media::PictureBuffer(id, dimensions, texture_id);
    CHECK(picture_buffers_by_id_.insert(std::make_pair(id, buffer)).second);
    buffers.push_back(*buffer);
  decoder_->AssignPictureBuffers(buffers);
// Releases the texture and bookkeeping for a buffer the decoder no longer
// needs.
void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
  PictureBufferById::iterator it =
      picture_buffers_by_id_.find(picture_buffer_id);
  CHECK(it != picture_buffers_by_id_.end());
  CHECK_EQ(outstanding_texture_ids_.erase(it->second->texture_id()), 1U);
  rendering_helper_->DeleteTexture(it->second->texture_id());
  picture_buffers_by_id_.erase(it);
// Records timing for the delivered picture, optionally renders it, triggers a
// mid-stream reset at the configured frame, and returns the buffer to the
// decoder (possibly after a delay, to exercise late reuse).
void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
  // We shouldn't be getting pictures delivered after Reset has completed.
  CHECK_LT(state_, CS_RESET);

  if (decoder_deleted())

  base::TimeTicks now = base::TimeTicks::Now();
  frame_delivery_times_.push_back(now);
  // Save the decode time of this picture.
  std::map<int, base::TimeTicks>::iterator it =
      decode_start_time_.find(picture.bitstream_buffer_id());
  ASSERT_NE(decode_start_time_.end(), it);
  decode_time_.push_back(now - it->second);
  decode_start_time_.erase(it);

  CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
  ++num_decoded_frames_;

  // Mid-stream reset applies only to the last play-through per constructor
  if (remaining_play_throughs_ == 1 &&
      reset_after_frame_num_ == num_decoded_frames()) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    // Re-start decoding from the beginning of the stream to avoid needing to
    // know how to find I-frames and so on in this test.
    encoded_data_next_pos_to_decode_ = 0;

  media::PictureBuffer* picture_buffer =
      picture_buffers_by_id_[picture.picture_buffer_id()];
  CHECK(picture_buffer);
  if (!suppress_rendering_) {
    rendering_helper_->RenderTexture(texture_target_,
                                     picture_buffer->texture_id());

  if (num_decoded_frames() > delay_reuse_after_frame_num_) {
    // Delay reuse by kReuseDelay to exercise the decoder's handling of
    // late-returned buffers.
    base::MessageLoop::current()->PostDelayedTask(
        base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
                   decoder_->AsWeakPtr(),
                   picture.picture_buffer_id()),
    decoder_->ReusePictureBuffer(picture.picture_buffer_id());
// Marks initialization complete and primes the pipeline with
// |num_in_flight_decodes_| Decode() calls (or schedules a start-of-stream
// reset first if requested).
void GLRenderingVDAClient::NotifyInitializeDone() {
  SetState(CS_INITIALIZED);
  initialize_done_ticks_ = base::TimeTicks::Now();

  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = MID_STREAM_RESET;

  for (int i = 0; i < num_in_flight_decodes_; ++i)
    DecodeNextFragment();
  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
// Bookkeeps the completed Decode() and immediately feeds the next fragment
// (unless pacing via |decode_calls_per_second_| is active, in which case the
// timer in DecodeNextFragment drives progress instead).
void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
    int32 bitstream_buffer_id) {
  // TODO(fischman): this test currently relies on this notification to make
  // forward progress during a Reset(). But the VDA::Reset() API doesn't
  // guarantee this, so stop relying on it (and remove the notifications from
  // VaapiVideoDecodeAccelerator::FinishReset()).
  ++num_done_bitstream_buffers_;
  --outstanding_decodes_;
  if (decode_calls_per_second_ == 0)
    DecodeNextFragment();
// Advances through the flushed state, finishes this play-through, and moves
// on to resetting. Re-checks decoder_deleted() because SetState may delete
// the decoder as a side effect.
void GLRenderingVDAClient::NotifyFlushDone() {
  if (decoder_deleted())
  SetState(CS_FLUSHED);
  --remaining_play_throughs_;
  DCHECK_GE(remaining_play_throughs_, 0);
  if (decoder_deleted())
  SetState(CS_RESETTING);
// Handles completion of a Reset(): resumes decoding after a mid/start-of-
// stream reset, restarts the next play-through, or finishes up.
void GLRenderingVDAClient::NotifyResetDone() {
  if (decoder_deleted())

  if (reset_after_frame_num_ == MID_STREAM_RESET) {
    // Mid-stream reset consumed; continue decoding with a single fragment.
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    DecodeNextFragment();
  } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    for (int i = 0; i < num_in_flight_decodes_; ++i)
      DecodeNextFragment();

  if (remaining_play_throughs_) {
    // Rewind the stream and play again, reusing the initialize path.
    encoded_data_next_pos_to_decode_ = 0;
    NotifyInitializeDone();

  if (!decoder_deleted())
735 void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
// Writes the frame count followed by the inter-frame delivery delta (in
// microseconds) for each decoded frame to |output|.
void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
  std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
                                     frame_delivery_times_.size());
  output->WriteAtCurrentPos(s.data(), s.length());
  // |t0| rolls forward so each line reports the delta from the previous
  // frame (the first line is relative to initialization completing).
  base::TimeTicks t0 = initialize_done_ticks_;
  for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
    s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
                           (frame_delivery_times_[i] - t0).InMicroseconds());
    t0 = frame_delivery_times_[i];
    output->WriteAtCurrentPos(s.data(), s.length());
// Invoked by ThrottlingVDAClient for frames it dropped; return the buffer
// straight back to the decoder.
void GLRenderingVDAClient::NotifyFrameDropped(int32 picture_buffer_id) {
  decoder_->ReusePictureBuffer(picture_buffer_id);
// Returns true iff |encoded| holds a 4-byte Annex B start code
// (0x00 0x00 0x00 0x01) beginning at |pos|. Callers must ensure that
// |pos| + 3 is a valid index into |encoded|.
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  for (size_t i = 0; i < 3; ++i) {
    if (encoded[pos + i] != 0)
      return false;
  }
  return encoded[pos + 3] == 1;
}
// Notifies the test (via |note_|) of the state change, and deletes the
// decoder if this is the state at which deletion was requested.
void GLRenderingVDAClient::SetState(ClientState new_state) {
  note_->Notify(new_state);
  // Deletion applies only after the final play-through (see class comment).
  if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
    CHECK(!decoder_deleted());
// Destroys the decoder, frees stream data and textures, and fast-forwards
// the client state machine to completion.
void GLRenderingVDAClient::DeleteDecoder() {
  if (decoder_deleted())
  // Destroy() is the VDA's self-deleting teardown; release ownership first.
  decoder_.release()->Destroy();
  STLClearObject(&encoded_data_);
  for (std::set<int>::iterator it = outstanding_texture_ids_.begin();
       it != outstanding_texture_ids_.end(); ++it) {
    rendering_helper_->DeleteTexture(*it);
  outstanding_texture_ids_.clear();
  // Cascade through the rest of the states to simplify test code below.
  for (int i = state_ + 1; i < CS_MAX; ++i)
    SetState(static_cast<ClientState>(i));
// Returns the first decodable fragment at or after |start_pos|. For H.264
// this skips NALUs until an SPS is found (so the decoder always starts with
// config info); for VP8 the first frame is always usable.
std::string GLRenderingVDAClient::GetBytesForFirstFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    while (*end_pos + 4 < encoded_data_.size()) {
      // nal_unit_type lives in the low 5 bits of the byte after the 4-byte
      // start code; 0x7 is an SPS.
      if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS start frame
        return GetBytesForNextFragment(*end_pos, end_pos);
      GetBytesForNextNALU(*end_pos, end_pos);
      num_skipped_fragments_++;
    // No SPS found; report nothing consumed.
    *end_pos = start_pos;
    return std::string();
  DCHECK_LE(profile_, media::VP8PROFILE_MAX);
  return GetBytesForNextFragment(start_pos, end_pos);
// Returns the next fragment starting at |start_pos|: one NALU for H.264 or
// one IVF frame for VP8. |*end_pos| receives the position just past it.
std::string GLRenderingVDAClient::GetBytesForNextFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    GetBytesForNextNALU(*end_pos, end_pos);
    if (start_pos != *end_pos) {
      num_queued_fragments_++;
    return encoded_data_.substr(start_pos, *end_pos - start_pos);
  DCHECK_LE(profile_, media::VP8PROFILE_MAX);
  return GetBytesForNextFrame(start_pos, end_pos);
// Advances |*end_pos| past one Annex B NALU starting at |start_pos| (which
// must point at a 00 00 00 01 start code), stopping at the next start code
// or at end-of-stream.
void GLRenderingVDAClient::GetBytesForNextNALU(
    size_t start_pos, size_t* end_pos) {
  *end_pos = start_pos;
  // Not enough bytes left for even a start code.
  if (*end_pos + 4 > encoded_data_.size())
  CHECK(LookingAtNAL(encoded_data_, start_pos));
  while (*end_pos + 4 <= encoded_data_.size() &&
         !LookingAtNAL(encoded_data_, *end_pos)) {
  // Tail too short to contain another start code: consume to end-of-stream.
  if (*end_pos + 3 >= encoded_data_.size())
    *end_pos = encoded_data_.size();
// Returns one VP8 frame from the IVF container starting at |start_pos|.
// Each IVF frame record is a 12-byte header (first 4 bytes = little-endian
// frame size) followed by the frame payload.
std::string GLRenderingVDAClient::GetBytesForNextFrame(
    size_t start_pos, size_t* end_pos) {
  // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
  start_pos = 32;  // Skip IVF header.
  *end_pos = start_pos;
  uint32 frame_size = *reinterpret_cast<uint32*>(&encoded_data_[*end_pos]);
  *end_pos += 12;  // Skip frame header.
  bytes.append(encoded_data_.substr(*end_pos, frame_size));
  *end_pos += frame_size;
  num_queued_fragments_++;
// Returns true if the fragment in [data, data+size) carries stream
// configuration info: an SPS NALU for H.264, or a keyframe for VP8 (bit 0 of
// the first byte clear).
static bool FragmentHasConfigInfo(const uint8* data, size_t size,
                                  media::VideoCodecProfile profile) {
  if (profile >= media::H264PROFILE_MIN &&
      profile <= media::H264PROFILE_MAX) {
    media::H264Parser parser;
    parser.SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != media::H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
    return nalu.nal_unit_type == media::H264NALU::kSPS;
  } else if (profile >= media::VP8PROFILE_MIN &&
             profile <= media::VP8PROFILE_MAX) {
    return (size > 0 && !(data[0] & 0x01));
  // Shouldn't happen at this point.
  LOG(FATAL) << "Invalid profile: " << profile;
// Extracts the next fragment from |encoded_data_|, ships it to the decoder in
// a fresh shared-memory buffer, and handles end-of-stream flushing, the
// reset-after-config-info case, -N decoder deletion, and WebRTC-style paced
// decoding.
void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
  // End of stream: once all in-flight decodes drain, flush.
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    if (outstanding_decodes_ == 0) {
      SetState(CS_FLUSHING);
  std::string next_fragment_bytes;
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  size_t next_fragment_size = next_fragment_bytes.size();

  // Call Reset() just after Decode() if the fragment contains config info.
  // This tests how the VDA behaves when it gets a reset request before it has
  // a chance to ProvidePictureBuffers().
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
      reset_after_frame_num_ = END_OF_STREAM_RESET;

  // Populate the shared memory buffer w/ the fragment, duplicate its handle,
  // and hand it off to the decoder.
  base::SharedMemory shm;
  CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  CHECK(shm.ShareToProcess(base::Process::Current().handle(), &dup_handle));
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  // Record decode start time, matched up in PictureReady for timing stats.
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  // delete_decoder_state_ < 0 means: delete after -N Decode() calls.
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
    encoded_data_next_pos_to_decode_ = end_pos;

  // WebRTC simulation: self-schedule the next Decode() at a fixed rate.
  if (decode_calls_per_second_ > 0) {
    base::MessageLoop::current()->PostDelayedTask(
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
// When throttling, the wrapper sees every PictureReady first, so its count is
// authoritative; otherwise use our own tally.
int GLRenderingVDAClient::num_decoded_frames() {
  return throttling_client_ ? throttling_client_->num_decoded_frames()
                            : num_decoded_frames_;
// Average decode rate measured from initialization-done to the last frame
// delivery; guards against a zero-length interval.
double GLRenderingVDAClient::frames_per_second() {
  base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
  if (delta.InSecondsF() == 0)
  return num_decoded_frames() / delta.InSecondsF();
// Median of recorded per-frame decode times, in milliseconds. For an even
// count, averages the two middle samples. NOTE: sorts |decode_time_| in place.
int GLRenderingVDAClient::decode_time_median() {
  if (decode_time_.size() == 0)
  std::sort(decode_time_.begin(), decode_time_.end());
  int index = decode_time_.size() / 2;
  if (decode_time_.size() % 2 != 0)
    return decode_time_[index].InMilliseconds();

  return (decode_time_[index] + decode_time_[index - 1]).InMilliseconds() / 2;
// gtest fixture shared by all decode-accelerator tests. Owns the test video
// files, the RenderingHelper, and a dedicated rendering thread; helper
// methods below marshal work onto that thread and block until it completes.
// NOTE(review): access specifiers (e.g. public:/protected:) and the closing
// "};" are elided from this excerpt (listing lines 963, 967, etc.).
962 class VideoDecodeAcceleratorTest : public ::testing::Test {
964 VideoDecodeAcceleratorTest();
965 virtual void SetUp();
966 virtual void TearDown();
968 // Parse |data| into its constituent parts, set the various output fields
969 // accordingly, and read in video stream. CHECK-fails on unexpected or
970 // missing required data. Unspecified optional fields are set to -1.
971 void ParseAndReadTestVideoData(base::FilePath::StringType data,
972 std::vector<TestVideoFile*>* test_video_files);
974 // Update the parameters of |test_video_files| according to
975 // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
976 // frames should be adjusted if decoder is reset in the middle of the stream.
977 void UpdateTestVideoFileParams(
978 size_t num_concurrent_decoders,
980 std::vector<TestVideoFile*>* test_video_files);
982 void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
983 void CreateAndStartDecoder(GLRenderingVDAClient* client,
984 ClientStateNotification<ClientState>* note);
985 void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
986 void WaitUntilIdle();
987 void OutputLogFile(const base::FilePath::CharType* log_path,
988 const std::string& content);
990 std::vector<TestVideoFile*> test_video_files_;
991 RenderingHelper rendering_helper_;
992 scoped_refptr<base::MessageLoopProxy> rendering_loop_proxy_;
995 base::Thread rendering_thread_;
996 // Required for Thread to work. Not used otherwise.
997 base::ShadowingAtExitManager at_exit_manager_;
999 DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
// Names the rendering thread; the thread is actually started in SetUp().
1002 VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest()
1003 : rendering_thread_("GLRenderingVDAClientThread") {}
// Reads the test video data named on the command line, then starts the
// rendering thread and caches its message-loop proxy for later PostTask use.
// NOTE(review): preprocessor guards (presumably #if defined(OS_WIN)/#endif,
// listing lines 1011 and 1015) around the TYPE_UI assignment are elided from
// this excerpt; as written, line 1014 would unconditionally override line
// 1010 — confirm against the full source.
1005 void VideoDecodeAcceleratorTest::SetUp() {
1006 ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
1008 // Initialize the rendering thread.
1009 base::Thread::Options options;
1010 options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
1012 // For windows the decoding thread initializes the media foundation decoder
1013 // which uses COM. We need the thread to be a UI thread.
1014 options.message_loop_type = base::MessageLoop::TYPE_UI;
1017 rendering_thread_.StartWithOptions(options);
1018 rendering_loop_proxy_ = rendering_thread_.message_loop_proxy();
// Tears down in rendering-thread order: delete the test video files, then
// uninitialize the RenderingHelper, then stop the thread itself.
// NOTE(review): several argument lines (e.g. FROM_HERE, the &done argument
// to UnInitialize) and a presumed done.Wait() before Stop() are elided from
// this excerpt (listing lines 1023, 1026, 1029, 1032-1034).
1021 void VideoDecodeAcceleratorTest::TearDown() {
1022 rendering_loop_proxy_->PostTask(
1024 base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
1025 &test_video_files_));
1027 base::WaitableEvent done(false, false);
1028 rendering_loop_proxy_->PostTask(
1030 base::Bind(&RenderingHelper::UnInitialize,
1031 base::Unretained(&rendering_helper_),
1035 rendering_thread_.Stop();
// Parses |data| as a ';'-separated list of test-video entries, each a
// ':'-separated record of up to 8 fields:
//   filename:width:height:num_frames:num_fragments:min_fps_render:
//   min_fps_no_render:profile
// For each entry, builds a TestVideoFile, reads the whole video file into
// memory (CHECK-fails if unreadable), and appends it to |test_video_files|.
// NOTE(review): a few lines are elided from this excerpt, including the
// declaration of the local |profile| used at lines 1064-1065 (presumably
// "int profile = -1;" around listing line 1062) and the guards/braces at
// 1066, 1071, 1073-1075.
1038 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
1039 base::FilePath::StringType data,
1040 std::vector<TestVideoFile*>* test_video_files) {
1041 std::vector<base::FilePath::StringType> entries;
1042 base::SplitString(data, ';', &entries);
1043 CHECK_GE(entries.size(), 1U) << data;
1044 for (size_t index = 0; index < entries.size(); ++index) {
1045 std::vector<base::FilePath::StringType> fields;
1046 base::SplitString(entries[index], ':', &fields);
1047 CHECK_GE(fields.size(), 1U) << entries[index];
1048 CHECK_LE(fields.size(), 8U) << entries[index];
1049 TestVideoFile* video_file = new TestVideoFile(fields[0]);
1050 if (!fields[1].empty())
1051 CHECK(base::StringToInt(fields[1], &video_file->width));
1052 if (!fields[2].empty())
1053 CHECK(base::StringToInt(fields[2], &video_file->height));
1054 if (!fields[3].empty())
1055 CHECK(base::StringToInt(fields[3], &video_file->num_frames));
1056 if (!fields[4].empty())
1057 CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
1058 if (!fields[5].empty())
1059 CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
1060 if (!fields[6].empty())
1061 CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
1063 if (!fields[7].empty())
1064 CHECK(base::StringToInt(fields[7], &profile));
1065 video_file->profile = static_cast<media::VideoCodecProfile>(profile);
1067 // Read in the video data.
1068 base::FilePath filepath(video_file->file_name);
1069 CHECK(base::ReadFileToString(filepath, &video_file->data_str))
1070 << "test_video_file: " << filepath.MaybeAsASCII();
1072 test_video_files->push_back(video_file);
// Adjusts each test video's expectations for the chosen reset point and for
// decoder sharing: a mid-stream reset replays frames (so the expected count
// grows by reset_after_frame_num), and per-decoder minimum-fps thresholds
// are divided by the number of concurrent decoders.
// NOTE(review): the |reset_point| parameter declaration (listing line 979)
// and the else-branch header between lines 1089 and 1091 are elided from
// this excerpt — line 1091 presumably sits in an "else" arm for non-EOS
// reset points; confirm against the full source.
1076 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
1077 size_t num_concurrent_decoders,
1079 std::vector<TestVideoFile*>* test_video_files) {
1080 for (size_t i = 0; i < test_video_files->size(); i++) {
1081 TestVideoFile* video_file = (*test_video_files)[i];
1082 if (reset_point == MID_STREAM_RESET) {
1083 // Reset should not go beyond the last frame;
1084 // reset in the middle of the stream for short videos.
1085 video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
1086 if (video_file->num_frames <= video_file->reset_after_frame_num)
1087 video_file->reset_after_frame_num = video_file->num_frames / 2;
1089 video_file->num_frames += video_file->reset_after_frame_num;
1091 video_file->reset_after_frame_num = reset_point;
1094 if (video_file->min_fps_render != -1)
1095 video_file->min_fps_render /= num_concurrent_decoders;
1096 if (video_file->min_fps_no_render != -1)
1097 video_file->min_fps_no_render /= num_concurrent_decoders;
// Initializes the RenderingHelper on the rendering thread and blocks the
// calling (test) thread until that initialization completes.
// NOTE(review): the FROM_HERE argument, the trailing Bind arguments
// (presumably helper_params and &done), and the done.Wait() call are elided
// from this excerpt (listing lines 1105, 1108-1112).
1101 void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
1102 const RenderingHelperParams& helper_params) {
1103 base::WaitableEvent done(false, false);
1104 rendering_loop_proxy_->PostTask(
1106 base::Bind(&RenderingHelper::Initialize,
1107 base::Unretained(&rendering_helper_),
// Asks |client| to create and start its decoder on the rendering thread,
// then waits until |note| reports CS_DECODER_SET, i.e. the decoder exists.
// NOTE(review): the FROM_HERE argument to PostTask (listing line 1117) is
// elided from this excerpt.
1113 void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
1114 GLRenderingVDAClient* client,
1115 ClientStateNotification<ClientState>* note) {
1116 rendering_loop_proxy_->PostTask(
1118 base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
1119 base::Unretained(client)));
1120 ASSERT_EQ(note->Wait(), CS_DECODER_SET);
// Drains state notifications from |note| until CS_DESTROYED is seen, bounded
// by CS_MAX iterations so a misbehaving client cannot hang the test forever.
// NOTE(review): the loop-exit statement after the CS_DESTROYED check
// (presumably "break;"/"return;" at listing line 1127) and the closing
// braces are elided from this excerpt.
1123 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
1124 ClientStateNotification<ClientState>* note) {
1125 for (int i = 0; i < CS_MAX; i++) {
1126 if (note->Wait() == CS_DESTROYED)
// Flushes the rendering thread: posts a task that merely signals |done|, so
// returning implies every previously-posted task has run.
// NOTE(review): the FROM_HERE argument and the done.Wait() call (listing
// lines 1134, 1136-1138) are elided from this excerpt.
1131 void VideoDecodeAcceleratorTest::WaitUntilIdle() {
1132 base::WaitableEvent done(false, false);
1133 rendering_loop_proxy_->PostTask(
1135 base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
// Writes |content| to |log_path|, truncating any existing file
// (FLAG_CREATE_ALWAYS). The base::File closes itself on destruction.
1139 void VideoDecodeAcceleratorTest::OutputLogFile(
1140 const base::FilePath::CharType* log_path,
1141 const std::string& content) {
1142 base::File file(base::FilePath(log_path),
1143 base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1144 file.WriteAtCurrentPos(content.data(), content.length());
// Value-parameterized fixture; the 7-tuple parameter is, in order:
1148 // - Number of concurrent decoders.
1149 // - Number of concurrent in-flight Decode() calls per decoder.
1150 // - Number of play-throughs.
1151 // - reset_after_frame_num: see GLRenderingVDAClient ctor.
1152 // - delete_decoder_phase: see GLRenderingVDAClient ctor.
1153 // - whether to test slow rendering by delaying ReusePictureBuffer().
1154 // - whether the video frames are rendered as thumbnails.
// NOTE(review): the class body and closing "};" (listing lines 1159-1160)
// are elided from this excerpt.
1155 class VideoDecodeAcceleratorParamTest
1156 : public VideoDecodeAcceleratorTest,
1157 public ::testing::WithParamInterface<
1158 Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
1161 // Helper so that gtest failures emit a more readable version of the tuple than
1162 // its byte representation.
// NOTE(review): the first parameter declaration (presumably
// "::std::ostream& os," at listing line 1164) is elided from this excerpt;
// the body streams into |os|.
1163 ::std::ostream& operator<<(
1165 const Tuple7<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
1166 return os << t.a << ", " << t.b << ", " << t.c << ", " << t.d << ", " << t.e
1167 << ", " << t.f << ", " << t.g;
1170 // Wait for |note| to report a state and if it's not |expected_state| then
1171 // assert |client| has deleted its decoder.
// A deleted decoder stops emitting state transitions, so any state is
// acceptable once deletion has happened; otherwise the mismatch is a
// test failure.
// NOTE(review): the closing brace (listing line 1181) is elided from this
// excerpt.
1172 static void AssertWaitForStateOrDeleted(
1173 ClientStateNotification<ClientState>* note,
1174 GLRenderingVDAClient* client,
1175 ClientState expected_state) {
1176 ClientState state = note->Wait();
1177 if (state == expected_state) return;
1178 ASSERT_TRUE(client->decoder_deleted())
1179 << "Decoder not deleted but Wait() returned " << state
1180 << ", instead of " << expected_state;
1183 // We assert a minimal number of concurrent decoders we expect to succeed.
1184 // Different platforms can support more concurrent decoders, so we don't assert
1185 // failure above this.
// Used in the ResourceExhaustion instantiation below via the
// "kMinSupportedNumConcurrentDecoders + 0" idiom to promote the enum to int.
1186 enum { kMinSupportedNumConcurrentDecoders = 3 };
1188 // Test the most straightforward case possible: data is decoded from a single
1189 // chunk and rendered to the screen.
// Outline: (1) unpack the 7-tuple parameter, (2) configure and initialize
// the RenderingHelper (thumbnail layout or one window per decoder),
// (3) start all decoders, (4) walk each client through the
// INITIALIZED -> FLUSHING -> FLUSHED -> RESETTING -> RESET -> DESTROYED
// state sequence per play-through, (5) check frame counts, fragment
// accounting, fps thresholds, and (for thumbnails) the golden MD5, then
// (6) clean everything up on the rendering thread.
// NOTE(review): this excerpt elides many listing lines — most
// GLRenderingVDAClient ctor arguments (1247-1248, 1251, 1254-1255,
// 1257-1258, 1260), FROM_HERE arguments, numerous closing braces, and
// some declarations (e.g. |alpha_solid| used at line 1344, presumably
// declared near line 1338). Margin numbers show the gaps; consult the
// full source before editing.
1190 TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
1191 const size_t num_concurrent_decoders = GetParam().a;
1192 const size_t num_in_flight_decodes = GetParam().b;
1193 const int num_play_throughs = GetParam().c;
1194 const int reset_point = GetParam().d;
1195 const int delete_decoder_state = GetParam().e;
1196 bool test_reuse_delay = GetParam().f;
1197 const bool render_as_thumbnails = GetParam().g;
1199 UpdateTestVideoFileParams(
1200 num_concurrent_decoders, reset_point, &test_video_files_);
1202 // Suppress GL rendering for all tests when the "--disable_rendering" flag is set.
1203 const bool suppress_rendering = g_disable_rendering;
1205 std::vector<ClientStateNotification<ClientState>*>
1206 notes(num_concurrent_decoders, NULL);
1207 std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);
1209 RenderingHelperParams helper_params;
1210 helper_params.num_windows = num_concurrent_decoders;
1211 helper_params.render_as_thumbnails = render_as_thumbnails;
1212 if (render_as_thumbnails) {
1213 // Only one decoder is supported with thumbnail rendering.
1214 CHECK_EQ(num_concurrent_decoders, 1U);
1215 gfx::Size frame_size(test_video_files_[0]->width,
1216 test_video_files_[0]->height);
1217 helper_params.frame_dimensions.push_back(frame_size);
1218 helper_params.window_dimensions.push_back(kThumbnailsDisplaySize);
1219 helper_params.thumbnails_page_size = kThumbnailsPageSize;
1220 helper_params.thumbnail_size = kThumbnailSize;
1222 for (size_t index = 0; index < test_video_files_.size(); ++index) {
1223 gfx::Size frame_size(test_video_files_[index]->width,
1224 test_video_files_[index]->height);
1225 helper_params.frame_dimensions.push_back(frame_size);
1226 helper_params.window_dimensions.push_back(frame_size);
1229 InitializeRenderingHelper(helper_params);
1231 // First kick off all the decoders.
1232 for (size_t index = 0; index < num_concurrent_decoders; ++index) {
1233 TestVideoFile* video_file =
1234 test_video_files_[index % test_video_files_.size()];
1235 ClientStateNotification<ClientState>* note =
1236 new ClientStateNotification<ClientState>();
1237 notes[index] = note;
1239 int delay_after_frame_num = std::numeric_limits<int>::max();
1240 if (test_reuse_delay &&
1241 kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
1242 delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
1245 GLRenderingVDAClient* client =
1246 new GLRenderingVDAClient(&rendering_helper_,
1249 video_file->data_str,
1250 num_in_flight_decodes,
1252 video_file->reset_after_frame_num,
1253 delete_decoder_state,
1256 video_file->profile,
1259 delay_after_frame_num,
1261 clients[index] = client;
1263 CreateAndStartDecoder(client, note);
1265 // Then wait for all the decodes to finish.
1266 // Only check performance & correctness later if we play through only once.
1267 bool skip_performance_and_correctness_checks = num_play_throughs > 1;
1268 for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1269 ClientStateNotification<ClientState>* note = notes[i];
1270 ClientState state = note->Wait();
1271 if (state != CS_INITIALIZED) {
1272 skip_performance_and_correctness_checks = true;
1273 // We expect initialization to fail only when more than the supported
1274 // number of decoders is instantiated. Assert here that something else
1275 // didn't trigger failure.
1276 ASSERT_GT(num_concurrent_decoders,
1277 static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
1280 ASSERT_EQ(state, CS_INITIALIZED);
1281 for (int n = 0; n < num_play_throughs; ++n) {
1282 // For play-throughs other than the first, we expect initialization to
1283 // succeed unconditionally.
1285 ASSERT_NO_FATAL_FAILURE(
1286 AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
1288 // InitializeDone kicks off decoding inside the client, so we just need to
1290 ASSERT_NO_FATAL_FAILURE(
1291 AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
1292 ASSERT_NO_FATAL_FAILURE(
1293 AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
1294 // FlushDone requests Reset().
1295 ASSERT_NO_FATAL_FAILURE(
1296 AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
1298 ASSERT_NO_FATAL_FAILURE(
1299 AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
1300 // ResetDone requests Destroy().
1301 ASSERT_NO_FATAL_FAILURE(
1302 AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
1304 // Finally assert that decoding went as expected.
1305 for (size_t i = 0; i < num_concurrent_decoders &&
1306 !skip_performance_and_correctness_checks; ++i) {
1307 // We can only make performance/correctness assertions if the decoder was
1308 // allowed to finish.
1309 if (delete_decoder_state < CS_FLUSHED)
1311 GLRenderingVDAClient* client = clients[i];
1312 TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
1313 if (video_file->num_frames > 0) {
1314 // Expect the decoded frames may be more than the video frames as frames
1315 // could still be returned until resetting done.
1316 if (video_file->reset_after_frame_num > 0)
1317 EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
1319 EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
1321 if (reset_point == END_OF_STREAM_RESET) {
1322 EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
1323 client->num_queued_fragments());
1324 EXPECT_EQ(client->num_done_bitstream_buffers(),
1325 client->num_queued_fragments());
1327 VLOG(0) << "Decoder " << i << " fps: " << client->frames_per_second();
1328 if (!render_as_thumbnails) {
1329 int min_fps = suppress_rendering ?
1330 video_file->min_fps_no_render : video_file->min_fps_render;
1331 if (min_fps > 0 && !test_reuse_delay)
1332 EXPECT_GT(client->frames_per_second(), min_fps);
1336 if (render_as_thumbnails) {
1337 std::vector<unsigned char> rgb;
1339 base::WaitableEvent done(false, false);
1340 rendering_loop_proxy_->PostTask(
1342 base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
1343 base::Unretained(&rendering_helper_),
1344 &rgb, &alpha_solid, &done));
1347 std::vector<std::string> golden_md5s;
1348 std::string md5_string = base::MD5String(
1349 base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
1350 ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
1351 std::vector<std::string>::iterator match =
1352 find(golden_md5s.begin(), golden_md5s.end(), md5_string);
1353 if (match == golden_md5s.end()) {
1354 // Convert raw RGB into PNG for export.
1355 std::vector<unsigned char> png;
1356 gfx::PNGCodec::Encode(&rgb[0],
1357 gfx::PNGCodec::FORMAT_RGB,
1358 kThumbnailsPageSize,
1359 kThumbnailsPageSize.width() * 3,
1361 std::vector<gfx::PNGCodec::Comment>(),
1364 LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
1366 base::FilePath filepath(test_video_files_[0]->file_name);
1367 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
1368 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
1369 int num_bytes = base::WriteFile(filepath,
1370 reinterpret_cast<char*>(&png[0]),
1372 ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
1374 ASSERT_NE(match, golden_md5s.end());
1375 EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
1378 // Output the frame delivery time to a file.
1379 // We can only make performance/correctness assertions if the decoder was
1380 // allowed to finish.
1381 if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
1382 base::File output_file(
1383 base::FilePath(g_output_log),
1384 base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1385 for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1386 clients[i]->OutputFrameDeliveryTimes(&output_file);
1390 rendering_loop_proxy_->PostTask(
1392 base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*> >,
1394 rendering_loop_proxy_->PostTask(
1396 base::Bind(&STLDeleteElements<
1397 std::vector<ClientStateNotification<ClientState>*> >,
// Test-case instantiations. Tuple fields map to TestSimpleDecode's params:
// (decoders, in-flight decodes, play-throughs, reset point,
//  delete-decoder state, slow-reuse delay, render-as-thumbnails).
// NOTE(review): the "::testing::Values(" lines between each name and its
// MakeTuple list (e.g. listing lines 1405, 1411, 1417-1418) are elided from
// this excerpt.
1402 // Test that replay after EOS works fine.
1403 INSTANTIATE_TEST_CASE_P(
1404 ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
1406 MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));
1408 // Test that Reset() before the first Decode() works fine.
1409 INSTANTIATE_TEST_CASE_P(
1410 ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
1412 MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));
1414 // Test Reset() immediately after Decode() containing config info.
1415 INSTANTIATE_TEST_CASE_P(
1416 ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
1419 1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));
1421 // Test that Reset() mid-stream works fine and doesn't affect decoding even when
1422 // Decode() calls are made during the reset.
1423 INSTANTIATE_TEST_CASE_P(
1424 MidStreamReset, VideoDecodeAcceleratorParamTest,
1426 MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));
// Test slow rendering: the reuse-delay param (tuple field f) is true here.
1428 INSTANTIATE_TEST_CASE_P(
1429 SlowRendering, VideoDecodeAcceleratorParamTest,
1431 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));
1433 // Test that Destroy() mid-stream works fine (primarily this is testing that no
1435 INSTANTIATE_TEST_CASE_P(
1436 TearDownTiming, VideoDecodeAcceleratorParamTest,
1438 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
1439 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
1440 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
1441 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
1442 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
1443 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1444 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1445 static_cast<ClientState>(-1), false, false),
1446 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1447 static_cast<ClientState>(-10), false, false),
1448 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1449 static_cast<ClientState>(-100), false, false)));
1451 // Test that decoding various variation works with multiple in-flight decodes.
1452 INSTANTIATE_TEST_CASE_P(
1453 DecodeVariations, VideoDecodeAcceleratorParamTest,
1455 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1456 MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1458 MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));
1460 // Find out how many concurrent decoders can go before we exhaust system
1462 INSTANTIATE_TEST_CASE_P(
1463 ResourceExhaustion, VideoDecodeAcceleratorParamTest,
1465 // +0 hack below to promote enum to int.
1466 MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
1467 END_OF_STREAM_RESET, CS_RESET, false, false),
1468 MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
1469 END_OF_STREAM_RESET, CS_RESET, false, false)));
1471 // Thumbnailing test
1472 INSTANTIATE_TEST_CASE_P(
1473 Thumbnail, VideoDecodeAcceleratorParamTest,
1475 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1477 // Measure the median of the decode time when VDA::Decode is called 30 times per
// (continuation of the above comment, listing line 1478, is elided —
// presumably "second.", matching kWebRtcDecodeCallsPerSecond below.)
// Drives a single client at a fixed Decode() call rate, waits for the full
// decode to finish, asserts the median per-buffer decode time is positive,
// and optionally appends it to the --output_log file.
// NOTE(review): most GLRenderingVDAClient ctor argument lines (1493-1494,
// 1496-1497, 1499, 1503-1504) and several closing braces are elided from
// this excerpt.
1479 TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
1480 RenderingHelperParams helper_params;
1481 helper_params.num_windows = 1;
1482 helper_params.render_as_thumbnails = false;
1483 gfx::Size frame_size(test_video_files_[0]->width,
1484 test_video_files_[0]->height);
1485 helper_params.frame_dimensions.push_back(frame_size);
1486 helper_params.window_dimensions.push_back(frame_size);
1487 InitializeRenderingHelper(helper_params);
1489 ClientStateNotification<ClientState>* note =
1490 new ClientStateNotification<ClientState>();
1491 GLRenderingVDAClient* client =
1492 new GLRenderingVDAClient(&rendering_helper_,
1495 test_video_files_[0]->data_str,
1498 test_video_files_[0]->reset_after_frame_num,
1500 test_video_files_[0]->width,
1501 test_video_files_[0]->height,
1502 test_video_files_[0]->profile,
1505 std::numeric_limits<int>::max(),
1506 kWebRtcDecodeCallsPerSecond);
1507 CreateAndStartDecoder(client, note);
1508 WaitUntilDecodeFinish(note);
1510 int decode_time_median = client->decode_time_median();
1511 std::string output_string =
1512 base::StringPrintf("Decode time median: %d ms", decode_time_median);
1513 VLOG(0) << output_string;
1514 ASSERT_GT(decode_time_median, 0);
1516 if (g_output_log != NULL)
1517 OutputLogFile(g_output_log, output_string);
1519 rendering_loop_proxy_->DeleteSoon(FROM_HERE, client);
1520 rendering_loop_proxy_->DeleteSoon(FROM_HERE, note);
1524 // TODO(fischman, vrk): add more tests! In particular:
1525 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1526 // - Test alternate configurations
1527 // - Test failure conditions.
1528 // - Test frame size changes mid-stream
1531 } // namespace content
1533 int main(int argc, char **argv) {
1534 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
1535 CommandLine::Init(argc, argv);
1537 // Needed to enable DVLOG through --vmodule.
1538 logging::LoggingSettings settings;
1539 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
1540 CHECK(logging::InitLogging(settings));
1542 CommandLine* cmd_line = CommandLine::ForCurrentProcess();
1545 CommandLine::SwitchMap switches = cmd_line->GetSwitches();
1546 for (CommandLine::SwitchMap::const_iterator it = switches.begin();
1547 it != switches.end(); ++it) {
1548 if (it->first == "test_video_data") {
1549 content::g_test_video_data = it->second.c_str();
1552 // TODO(wuchengli): remove frame_deliver_log after CrOS test get updated.
1553 // See http://crosreview.com/175426.
1554 if (it->first == "frame_delivery_log" || it->first == "output_log") {
1555 content::g_output_log = it->second.c_str();
1558 if (it->first == "rendering_fps") {
1559 // On Windows, CommandLine::StringType is wstring. We need to convert
1560 // it to std::string first
1561 std::string input(it->second.begin(), it->second.end());
1562 CHECK(base::StringToDouble(input, &content::g_rendering_fps));
1565 if (it->first == "disable_rendering") {
1566 content::g_disable_rendering = true;
1569 if (it->first == "v" || it->first == "vmodule")
1571 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
1574 base::ShadowingAtExitManager at_exit_manager;
1576 return RUN_ALL_TESTS();