1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // The bulk of this file is support code; sorry about that.  Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
8 //   Win/EGL.
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 //   to be written sequentially and wait for the decode client to see certain
12 //   state transitions.
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
15 //   infrastructure.
16
17 #include <fcntl.h>
18 #include <sys/stat.h>
19 #include <sys/types.h>
20 #include <algorithm>
21 #include <deque>
22 #include <map>
23
24 // Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
25 // gtest uses as struct names (inside a namespace).  This means that
26 // #include'ing gtest after anything that pulls in X.h fails to compile.
27 // This is http://code.google.com/p/googletest/issues/detail?id=371
28 #include "testing/gtest/include/gtest/gtest.h"
29
30 #include "base/at_exit.h"
31 #include "base/bind.h"
32 #include "base/command_line.h"
33 #include "base/file_util.h"
34 #include "base/files/file.h"
35 #include "base/format_macros.h"
36 #include "base/md5.h"
37 #include "base/message_loop/message_loop_proxy.h"
38 #include "base/process/process.h"
39 #include "base/stl_util.h"
40 #include "base/strings/string_number_conversions.h"
41 #include "base/strings/string_split.h"
42 #include "base/strings/stringize_macros.h"
43 #include "base/strings/stringprintf.h"
44 #include "base/strings/utf_string_conversions.h"
45 #include "base/synchronization/condition_variable.h"
46 #include "base/synchronization/lock.h"
47 #include "base/synchronization/waitable_event.h"
48 #include "base/threading/thread.h"
49 #include "content/common/gpu/media/rendering_helper.h"
50 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
51 #include "content/public/common/content_switches.h"
52 #include "media/filters/h264_parser.h"
53 #include "ui/gfx/codec/png_codec.h"
54
55 #if defined(OS_WIN)
56 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
57 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
58 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
59 #include "content/common/gpu/media/v4l2_video_device.h"
60 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
61 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
62 #include "content/common/gpu/media/vaapi_wrapper.h"
63 #if defined(USE_X11)
64 #include "ui/gl/gl_implementation.h"
65 #endif  // USE_X11
66 #else
67 #error The VideoAccelerator tests are not supported on this platform.
68 #endif  // OS_WIN
69
70 using media::VideoDecodeAccelerator;
71
72 namespace content {
73 namespace {
74
75 // Values optionally filled in from flags; see main() below.
76 // The syntax of multiple test videos is:
77 //  test-video1;test-video2;test-video3
78 // where only the first video is required and other optional videos would be
79 // decoded by concurrent decoders.
80 // The syntax of each test-video is:
81 //  filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender:profile
82 // where only the first field is required.  Value details:
83 // - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8 stream.
84 // - |width| and |height| are in pixels.
85 // - |numframes| is the number of picture frames in the file.
86 // - |numfragments| is the NALU (h264) or frame (VP8) count in the stream.
87 // - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
88 //   expected to be achieved with and without rendering to the screen, resp.
89 //   (the latter tests just decode speed).
90 // - |profile| is the media::VideoCodecProfile set during Initialization.
91 // An empty value for a numeric field means "ignore".
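// Example (the value of the --test_video_data switch parsed in main() below):
//   test-25fps.h264:320:240:250:258:50:175:1;test-25fps.vp8:320:240:250:250:50:175:11
// decodes the H.264 stream and, with a concurrent decoder, the VP8 stream.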
92 const base::FilePath::CharType* g_test_video_data =
93     // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
94     FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");
95
96 // The file path of the test output log. This is used to communicate the test
97 // results to CrOS autotests. We can enable the log and specify the filename by
98 // the "--output_log" switch.
99 const base::FilePath::CharType* g_output_log = NULL;
100
101 // The value is set by the switch "--rendering_fps".
102 double g_rendering_fps = 60;
103
104 // Magic constants for differentiating the reasons for NotifyResetDone being
105 // called.
106 enum ResetPoint {
107   // Reset() just after calling Decode() with a fragment containing config info.
108   RESET_AFTER_FIRST_CONFIG_INFO = -4,
109   START_OF_STREAM_RESET = -3,  // Reset() before any Decode().
110   MID_STREAM_RESET = -2,       // Reset() part-way through the stream.
111   END_OF_STREAM_RESET = -1     // Reset() only after the stream is flushed.
112 };
113
114 const int kMaxResetAfterFrameNum = 100;
115 const int kMaxFramesToDelayReuse = 64;
116 const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
117 // Simulate WebRTC and call VDA::Decode 30 times per second.
118 const int kWebRtcDecodeCallsPerSecond = 30;
119
120 struct TestVideoFile {
121   explicit TestVideoFile(base::FilePath::StringType file_name)
122       : file_name(file_name),
123         width(-1),
124         height(-1),
125         num_frames(-1),
126         num_fragments(-1),
127         min_fps_render(-1),
128         min_fps_no_render(-1),
129         profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
130         reset_after_frame_num(END_OF_STREAM_RESET) {
131   }
132
133   base::FilePath::StringType file_name;
134   int width;
135   int height;
136   int num_frames;
137   int num_fragments;
138   int min_fps_render;
139   int min_fps_no_render;
140   media::VideoCodecProfile profile;
141   int reset_after_frame_num;
142   std::string data_str;
143 };
144
145 const gfx::Size kThumbnailsPageSize(1600, 1200);
146 const gfx::Size kThumbnailSize(160, 120);
147 const int kMD5StringLength = 32;
148
149 // Read in the golden MD5s for the thumbnailed rendering of this video.
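// The golden file is the video file name with a ".md5" extension appended; it
// contains one 32-character hex digest per line, and blank lines and lines
// starting with '#' are ignored.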
150 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
151                              std::vector<std::string>* md5_strings) {
152   base::FilePath filepath(video_file->file_name);
153   filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
154   std::string all_md5s;
155   base::ReadFileToString(filepath, &all_md5s);
156   base::SplitString(all_md5s, '\n', md5_strings);
157   // Check these are legitimate MD5s.
158   for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
159       md5_string != md5_strings->end(); ++md5_string) {
160       // Ignore empty strings added by SplitString.
161       if (!md5_string->length())
162         continue;
163       // Ignore comments
164       if (md5_string->at(0) == '#')
165         continue;
166
167       CHECK_EQ(static_cast<int>(md5_string->length()),
168                kMD5StringLength) << *md5_string;
169       bool hex_only = std::count_if(md5_string->begin(),
170                                     md5_string->end(), isxdigit) ==
171                                     kMD5StringLength;
172       CHECK(hex_only) << *md5_string;
173   }
174   CHECK_GE(md5_strings->size(), 1U) << all_md5s;
175 }
176
177 // State of the GLRenderingVDAClient below.  Order matters here as the test
178 // makes assumptions about it.
179 enum ClientState {
180   CS_CREATED = 0,
181   CS_DECODER_SET = 1,
182   CS_INITIALIZED = 2,
183   CS_FLUSHING = 3,
184   CS_FLUSHED = 4,
185   CS_RESETTING = 5,
186   CS_RESET = 6,
187   CS_ERROR = 7,
188   CS_DESTROYED = 8,
189   CS_MAX,  // Must be last entry.
190 };
191
192 // Client that can accept callbacks from a VideoDecodeAccelerator and is used by
193 // the TESTs below.
194 class GLRenderingVDAClient
195     : public VideoDecodeAccelerator::Client,
196       public RenderingHelper::Client,
197       public base::SupportsWeakPtr<GLRenderingVDAClient> {
198  public:
199   // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
200   // |*this|.
201   // |num_play_throughs| indicates how many times to play through the video.
202   // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
203   // Reset() should be done after that frame number is delivered, or
204   // END_OF_STREAM_RESET to indicate no mid-stream Reset().
205   // |delete_decoder_state| indicates when the underlying decoder should be
206   // Destroy()'d and deleted.  It can take the values: N<0 means delete after
207   // -N Decode() calls have been made; N>=0 means interpret N as a ClientState.
208   // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
209   // last play-through (governed by |num_play_throughs|).
210   // |suppress_rendering| indicates whether GL rendering is suppressed.
211   // After frame number |delay_reuse_after_frame_num| has been delivered, the
212   // client will start delaying each ReusePictureBuffer() call by kReuseDelay.
213   // |decode_calls_per_second| is the number of VDA::Decode calls per second.
214   // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
215   GLRenderingVDAClient(RenderingHelper* rendering_helper,
216                        ClientStateNotification<ClientState>* note,
217                        const std::string& encoded_data,
218                        int num_in_flight_decodes,
219                        int num_play_throughs,
220                        int reset_after_frame_num,
221                        int delete_decoder_state,
222                        int frame_width,
223                        int frame_height,
224                        media::VideoCodecProfile profile,
225                        bool suppress_rendering,
226                        int delay_reuse_after_frame_num,
227                        int decode_calls_per_second,
228                        bool render_as_thumbnails);
229   virtual ~GLRenderingVDAClient();
230   void CreateAndStartDecoder();
231
232   // VideoDecodeAccelerator::Client implementation.
233   // The heart of the Client.
234   virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
235                                      const gfx::Size& dimensions,
236                                      uint32 texture_target) OVERRIDE;
237   virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
238   virtual void PictureReady(const media::Picture& picture) OVERRIDE;
239   // Simple state changes.
240   virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
241   virtual void NotifyFlushDone() OVERRIDE;
242   virtual void NotifyResetDone() OVERRIDE;
243   virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;
244
245   // RenderingHelper::Client implementation.
246   virtual void RenderContent(RenderingHelper*) OVERRIDE;
247   virtual const gfx::Size& GetWindowSize() OVERRIDE;
248
249   void OutputFrameDeliveryTimes(base::File* output);
250
251   void NotifyFrameDropped(int32 picture_buffer_id);
252
253   // Simple getters for inspecting the state of the Client.
254   int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
255   int num_skipped_fragments() { return num_skipped_fragments_; }
256   int num_queued_fragments() { return num_queued_fragments_; }
257   int num_decoded_frames() { return num_decoded_frames_; }
258   double frames_per_second();
259   // Return the median of the decode time of all decoded frames.
260   base::TimeDelta decode_time_median();
261   bool decoder_deleted() { return !decoder_.get(); }
262
263  private:
264   typedef std::map<int, media::PictureBuffer*> PictureBufferById;
265
266   void SetState(ClientState new_state);
267   void FinishInitialization();
268   void ReturnPicture(int32 picture_buffer_id);
269
270   // Delete the associated decoder helper.
271   void DeleteDecoder();
272
273   // Compute & return the first encoded bytes (including stream config info,
274   // e.g. an SPS for H.264) to send to the decoder, starting at |start_pos| and
275   // returning one fragment.  Skips to the first decodable position.
276   std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
277   // Compute & return the encoded bytes of next fragment to send to the decoder
278   // (based on |start_pos|).
279   std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
280   // Helpers for GetBytesForNextFragment above.
281   void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);  // For h.264.
282   std::string GetBytesForNextFrame(
283       size_t start_pos, size_t* end_pos);  // For VP8.
284
285   // Request decode of the next fragment in the encoded data.
286   void DecodeNextFragment();
287
288   RenderingHelper* rendering_helper_;
289   gfx::Size frame_size_;
290   std::string encoded_data_;
291   const int num_in_flight_decodes_;
292   int outstanding_decodes_;
293   size_t encoded_data_next_pos_to_decode_;
294   int next_bitstream_buffer_id_;
295   ClientStateNotification<ClientState>* note_;
296   scoped_ptr<VideoDecodeAccelerator> decoder_;
297   scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
298       weak_decoder_factory_;
299   std::set<int> outstanding_texture_ids_;
300   int remaining_play_throughs_;
301   int reset_after_frame_num_;
302   int delete_decoder_state_;
303   ClientState state_;
304   int num_skipped_fragments_;
305   int num_queued_fragments_;
306   int num_decoded_frames_;
307   int num_done_bitstream_buffers_;
308   PictureBufferById picture_buffers_by_id_;
309   base::TimeTicks initialize_done_ticks_;
310   media::VideoCodecProfile profile_;
311   GLenum texture_target_;
312   bool suppress_rendering_;
313   std::vector<base::TimeTicks> frame_delivery_times_;
314   int delay_reuse_after_frame_num_;
315   // A map from bitstream buffer id to the decode start time of the buffer.
316   std::map<int, base::TimeTicks> decode_start_time_;
317   // The decode time of all decoded frames.
318   std::vector<base::TimeDelta> decode_time_;
319   // The number of VDA::Decode calls per second, used to simulate WebRTC.
320   int decode_calls_per_second_;
321   bool render_as_thumbnails_;
322   bool pending_picture_updated_;
323   std::deque<int32> pending_picture_buffer_ids_;
324
325   DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
326 };
327
328 GLRenderingVDAClient::GLRenderingVDAClient(
329     RenderingHelper* rendering_helper,
330     ClientStateNotification<ClientState>* note,
331     const std::string& encoded_data,
332     int num_in_flight_decodes,
333     int num_play_throughs,
334     int reset_after_frame_num,
335     int delete_decoder_state,
336     int frame_width,
337     int frame_height,
338     media::VideoCodecProfile profile,
339     bool suppress_rendering,
340     int delay_reuse_after_frame_num,
341     int decode_calls_per_second,
342     bool render_as_thumbnails)
343     : rendering_helper_(rendering_helper),
344       frame_size_(frame_width, frame_height),
345       encoded_data_(encoded_data),
346       num_in_flight_decodes_(num_in_flight_decodes),
347       outstanding_decodes_(0),
348       encoded_data_next_pos_to_decode_(0),
349       next_bitstream_buffer_id_(0),
350       note_(note),
351       remaining_play_throughs_(num_play_throughs),
352       reset_after_frame_num_(reset_after_frame_num),
353       delete_decoder_state_(delete_decoder_state),
354       state_(CS_CREATED),
355       num_skipped_fragments_(0),
356       num_queued_fragments_(0),
357       num_decoded_frames_(0),
358       num_done_bitstream_buffers_(0),
359       texture_target_(0),
360       suppress_rendering_(suppress_rendering),
361       delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
362       decode_calls_per_second_(decode_calls_per_second),
363       render_as_thumbnails_(render_as_thumbnails),
364       pending_picture_updated_(true) {
365   CHECK_GT(num_in_flight_decodes, 0);
366   CHECK_GT(num_play_throughs, 0);
367   // Only one in-flight decode is supported if |decode_calls_per_second_| > 0.
368   if (decode_calls_per_second_ > 0)
369     CHECK_EQ(1, num_in_flight_decodes_);
370
371   // Default to H264 baseline if no profile provided.
372   profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
373                   ? profile
374                   : media::H264PROFILE_BASELINE);
375 }
376
377 GLRenderingVDAClient::~GLRenderingVDAClient() {
378   DeleteDecoder();  // Clean up in case of expected error.
379   CHECK(decoder_deleted());
380   STLDeleteValues(&picture_buffers_by_id_);
381   SetState(CS_DESTROYED);
382 }
383
384 static bool DoNothingReturnTrue() { return true; }
385
386 void GLRenderingVDAClient::CreateAndStartDecoder() {
387   CHECK(decoder_deleted());
388   CHECK(!decoder_.get());
389
390   VideoDecodeAccelerator::Client* client = this;
391   base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
392 #if defined(OS_WIN)
393   decoder_.reset(
394       new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue)));
395 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
396
397   scoped_ptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
398   if (!device.get()) {
399     NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
400     return;
401   }
402   decoder_.reset(new V4L2VideoDecodeAccelerator(
403       static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
404       static_cast<EGLContext>(rendering_helper_->GetGLContext()),
405       weak_client,
406       base::Bind(&DoNothingReturnTrue),
407       device.Pass(),
408       base::MessageLoopProxy::current()));
409 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
410   CHECK_EQ(gfx::kGLImplementationDesktopGL, gfx::GetGLImplementation())
411       << "Hardware video decode does not work with OSMesa";
412   decoder_.reset(new VaapiVideoDecodeAccelerator(
413       static_cast<Display*>(rendering_helper_->GetGLDisplay()),
414       base::Bind(&DoNothingReturnTrue)));
415 #endif  // OS_WIN
416   CHECK(decoder_.get());
417   weak_decoder_factory_.reset(
418       new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
419   SetState(CS_DECODER_SET);
420   if (decoder_deleted())
421     return;
422
423   CHECK(decoder_->Initialize(profile_, client));
424   FinishInitialization();
425 }
426
427 void GLRenderingVDAClient::ProvidePictureBuffers(
428     uint32 requested_num_of_buffers,
429     const gfx::Size& dimensions,
430     uint32 texture_target) {
431   if (decoder_deleted())
432     return;
433   std::vector<media::PictureBuffer> buffers;
434
435   texture_target_ = texture_target;
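  // Ask the rendering helper for one GL texture per requested buffer, blocking
  // until each texture is ready before wrapping it in a PictureBuffer.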
436   for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
437     uint32 id = picture_buffers_by_id_.size();
438     uint32 texture_id;
439     base::WaitableEvent done(false, false);
440     rendering_helper_->CreateTexture(
441         texture_target_, &texture_id, dimensions, &done);
442     done.Wait();
443     CHECK(outstanding_texture_ids_.insert(texture_id).second);
444     media::PictureBuffer* buffer =
445         new media::PictureBuffer(id, dimensions, texture_id);
446     CHECK(picture_buffers_by_id_.insert(std::make_pair(id, buffer)).second);
447     buffers.push_back(*buffer);
448   }
449   decoder_->AssignPictureBuffers(buffers);
450 }
451
452 void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
453   PictureBufferById::iterator it =
454       picture_buffers_by_id_.find(picture_buffer_id);
455   CHECK(it != picture_buffers_by_id_.end());
456   CHECK_EQ(outstanding_texture_ids_.erase(it->second->texture_id()), 1U);
457   rendering_helper_->DeleteTexture(it->second->texture_id());
458   delete it->second;
459   picture_buffers_by_id_.erase(it);
460 }
461
462 void GLRenderingVDAClient::RenderContent(RenderingHelper*) {
463   CHECK(!render_as_thumbnails_);
464
465   // No decoded texture for rendering yet, just skip.
466   if (pending_picture_buffer_ids_.size() == 0)
467     return;
468
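  // The front of |pending_picture_buffer_ids_| is the picture currently on
  // screen; it is only returned for reuse once a newer picture has been
  // rendered (see PictureReady()).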
469   int32 buffer_id = pending_picture_buffer_ids_.front();
470   media::PictureBuffer* picture_buffer = picture_buffers_by_id_[buffer_id];
471
472   CHECK(picture_buffer);
473   if (!pending_picture_updated_) {
474     // Frame dropped, just redraw the last texture.
475     rendering_helper_->RenderTexture(texture_target_,
476                                      picture_buffer->texture_id());
477     return;
478   }
479
480   base::TimeTicks now = base::TimeTicks::Now();
481   frame_delivery_times_.push_back(now);
482
483   rendering_helper_->RenderTexture(texture_target_,
484                                    picture_buffer->texture_id());
485
486   if (pending_picture_buffer_ids_.size() == 1) {
487     pending_picture_updated_ = false;
488   } else {
489     pending_picture_buffer_ids_.pop_front();
490     ReturnPicture(buffer_id);
491   }
492 }
493
494 const gfx::Size& GLRenderingVDAClient::GetWindowSize() {
495   return render_as_thumbnails_ ? kThumbnailsPageSize : frame_size_;
496 }
497
498 void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
499   // We shouldn't be getting pictures delivered after Reset has completed.
500   CHECK_LT(state_, CS_RESET);
501
502   if (decoder_deleted())
503     return;
504
505   base::TimeTicks now = base::TimeTicks::Now();
506   // Save the decode time of this picture.
507   std::map<int, base::TimeTicks>::iterator it =
508       decode_start_time_.find(picture.bitstream_buffer_id());
509   ASSERT_NE(decode_start_time_.end(), it);
510   decode_time_.push_back(now - it->second);
511   decode_start_time_.erase(it);
512
513   CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
514   ++num_decoded_frames_;
515
516   // Mid-stream reset applies only to the last play-through per constructor
517   // comment.
518   if (remaining_play_throughs_ == 1 &&
519       reset_after_frame_num_ == num_decoded_frames_) {
520     reset_after_frame_num_ = MID_STREAM_RESET;
521     decoder_->Reset();
522     // Re-start decoding from the beginning of the stream to avoid needing to
523     // know how to find I-frames and so on in this test.
524     encoded_data_next_pos_to_decode_ = 0;
525   }
526
527   if (render_as_thumbnails_) {
528     frame_delivery_times_.push_back(now);
529     media::PictureBuffer* picture_buffer =
530         picture_buffers_by_id_[picture.picture_buffer_id()];
531     CHECK(picture_buffer);
532     rendering_helper_->RenderThumbnail(texture_target_,
533                                        picture_buffer->texture_id());
534     ReturnPicture(picture.picture_buffer_id());
535   } else if (!suppress_rendering_) {
536     // Keep the picture for rendering.
537     pending_picture_buffer_ids_.push_back(picture.picture_buffer_id());
538     if (pending_picture_buffer_ids_.size() > 1 && !pending_picture_updated_) {
539       ReturnPicture(pending_picture_buffer_ids_.front());
540       pending_picture_buffer_ids_.pop_front();
541       pending_picture_updated_ = true;
542     }
543   } else {
544     frame_delivery_times_.push_back(now);
545     ReturnPicture(picture.picture_buffer_id());
546   }
547 }
548
549 void GLRenderingVDAClient::ReturnPicture(int32 picture_buffer_id) {
550   if (decoder_deleted())
551     return;
552   if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
553     base::MessageLoop::current()->PostDelayedTask(
554         FROM_HERE,
555         base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
556                    weak_decoder_factory_->GetWeakPtr(),
557                    picture_buffer_id),
558         kReuseDelay);
559   } else {
560     decoder_->ReusePictureBuffer(picture_buffer_id);
561   }
562 }
563
564 void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
565     int32 bitstream_buffer_id) {
566   // TODO(fischman): this test currently relies on this notification to make
567   // forward progress during a Reset().  But the VDA::Reset() API doesn't
568   // guarantee this, so stop relying on it (and remove the notifications from
569   // VaapiVideoDecodeAccelerator::FinishReset()).
570   ++num_done_bitstream_buffers_;
571   --outstanding_decodes_;
572   if (decode_calls_per_second_ == 0)
573     DecodeNextFragment();
574 }
575
576 void GLRenderingVDAClient::NotifyFlushDone() {
577   if (decoder_deleted())
578     return;
579
580   SetState(CS_FLUSHED);
581   --remaining_play_throughs_;
582   DCHECK_GE(remaining_play_throughs_, 0);
583   if (decoder_deleted())
584     return;
585   decoder_->Reset();
586   SetState(CS_RESETTING);
587 }
588
589 void GLRenderingVDAClient::NotifyResetDone() {
590   if (decoder_deleted())
591     return;
592
593   // Return all pending picture buffers to the decoder for reuse.
594   while (!pending_picture_buffer_ids_.empty()) {
595     decoder_->ReusePictureBuffer(pending_picture_buffer_ids_.front());
596     pending_picture_buffer_ids_.pop_front();
597   }
598   pending_picture_updated_ = true;
599
600   if (reset_after_frame_num_ == MID_STREAM_RESET) {
601     reset_after_frame_num_ = END_OF_STREAM_RESET;
602     DecodeNextFragment();
603     return;
604   } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
605     reset_after_frame_num_ = END_OF_STREAM_RESET;
606     for (int i = 0; i < num_in_flight_decodes_; ++i)
607       DecodeNextFragment();
608     return;
609   }
610
611   if (remaining_play_throughs_) {
612     encoded_data_next_pos_to_decode_ = 0;
613     FinishInitialization();
614     return;
615   }
616
617   SetState(CS_RESET);
618   if (!decoder_deleted())
619     DeleteDecoder();
620 }
621
622 void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
623   SetState(CS_ERROR);
624 }
625
626 void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
627   std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
628                                      frame_delivery_times_.size());
629   output->WriteAtCurrentPos(s.data(), s.length());
630   base::TimeTicks t0 = initialize_done_ticks_;
631   for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
632     s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
633                            i,
634                            (frame_delivery_times_[i] - t0).InMicroseconds());
635     t0 = frame_delivery_times_[i];
636     output->WriteAtCurrentPos(s.data(), s.length());
637   }
638 }
639
640 void GLRenderingVDAClient::NotifyFrameDropped(int32 picture_buffer_id) {
641   decoder_->ReusePictureBuffer(picture_buffer_id);
642 }
643
644 static bool LookingAtNAL(const std::string& encoded, size_t pos) {
645   return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
646       encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
647 }
648
649 void GLRenderingVDAClient::SetState(ClientState new_state) {
650   note_->Notify(new_state);
651   state_ = new_state;
652   if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
653     CHECK(!decoder_deleted());
654     DeleteDecoder();
655   }
656 }
657
658 void GLRenderingVDAClient::FinishInitialization() {
659   SetState(CS_INITIALIZED);
660   initialize_done_ticks_ = base::TimeTicks::Now();
661
662   if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
663     reset_after_frame_num_ = MID_STREAM_RESET;
664     decoder_->Reset();
665     return;
666   }
667
668   for (int i = 0; i < num_in_flight_decodes_; ++i)
669     DecodeNextFragment();
670   DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
671 }
672
673 void GLRenderingVDAClient::DeleteDecoder() {
674   if (decoder_deleted())
675     return;
676   weak_decoder_factory_.reset();
677   decoder_.reset();
678   STLClearObject(&encoded_data_);
679   for (std::set<int>::iterator it = outstanding_texture_ids_.begin();
680        it != outstanding_texture_ids_.end(); ++it) {
681     rendering_helper_->DeleteTexture(*it);
682   }
683   outstanding_texture_ids_.clear();
684   // Cascade through the rest of the states to simplify test code below.
685   for (int i = state_ + 1; i < CS_MAX; ++i)
686     SetState(static_cast<ClientState>(i));
687 }
688
689 std::string GLRenderingVDAClient::GetBytesForFirstFragment(
690     size_t start_pos, size_t* end_pos) {
691   if (profile_ < media::H264PROFILE_MAX) {
692     *end_pos = start_pos;
693     while (*end_pos + 4 < encoded_data_.size()) {
694       if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS NALU.
695         return GetBytesForNextFragment(*end_pos, end_pos);
696       GetBytesForNextNALU(*end_pos, end_pos);
697       num_skipped_fragments_++;
698     }
699     *end_pos = start_pos;
700     return std::string();
701   }
702   DCHECK_LE(profile_, media::VP8PROFILE_MAX);
703   return GetBytesForNextFragment(start_pos, end_pos);
704 }
705
706 std::string GLRenderingVDAClient::GetBytesForNextFragment(
707     size_t start_pos, size_t* end_pos) {
708   if (profile_ < media::H264PROFILE_MAX) {
709     *end_pos = start_pos;
710     GetBytesForNextNALU(*end_pos, end_pos);
711     if (start_pos != *end_pos) {
712       num_queued_fragments_++;
713     }
714     return encoded_data_.substr(start_pos, *end_pos - start_pos);
715   }
716   DCHECK_LE(profile_, media::VP8PROFILE_MAX);
717   return GetBytesForNextFrame(start_pos, end_pos);
718 }
719
720 void GLRenderingVDAClient::GetBytesForNextNALU(
721     size_t start_pos, size_t* end_pos) {
722   *end_pos = start_pos;
723   if (*end_pos + 4 > encoded_data_.size())
724     return;
725   CHECK(LookingAtNAL(encoded_data_, start_pos));
726   *end_pos += 4;
727   while (*end_pos + 4 <= encoded_data_.size() &&
728          !LookingAtNAL(encoded_data_, *end_pos)) {
729     ++*end_pos;
730   }
731   if (*end_pos + 3 >= encoded_data_.size())
732     *end_pos = encoded_data_.size();
733 }
734
735 std::string GLRenderingVDAClient::GetBytesForNextFrame(
736     size_t start_pos, size_t* end_pos) {
737   // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
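  // An IVF stream begins with a 32-byte file header; each frame is preceded by
  // a 12-byte frame header whose first 4 bytes hold the frame size in bytes
  // (little-endian, which the cast below assumes matches the host).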
738   std::string bytes;
739   if (start_pos == 0)
740     start_pos = 32;  // Skip IVF header.
741   *end_pos = start_pos;
742   uint32 frame_size = *reinterpret_cast<uint32*>(&encoded_data_[*end_pos]);
743   *end_pos += 12;  // Skip frame header.
744   bytes.append(encoded_data_.substr(*end_pos, frame_size));
745   *end_pos += frame_size;
746   num_queued_fragments_++;
747   return bytes;
748 }
749
750 static bool FragmentHasConfigInfo(const uint8* data, size_t size,
751                                   media::VideoCodecProfile profile) {
752   if (profile >= media::H264PROFILE_MIN &&
753       profile <= media::H264PROFILE_MAX) {
754     media::H264Parser parser;
755     parser.SetStream(data, size);
756     media::H264NALU nalu;
757     media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
758     if (result != media::H264Parser::kOk) {
759       // Let the VDA figure out there's something wrong with the stream.
760       return false;
761     }
762
763     return nalu.nal_unit_type == media::H264NALU::kSPS;
764   } else if (profile >= media::VP8PROFILE_MIN &&
765              profile <= media::VP8PROFILE_MAX) {
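    // In VP8 the lowest bit of the first byte of the frame tag is 0 for key
    // frames, which are the frames that carry configuration info.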
766     return (size > 0 && !(data[0] & 0x01));
767   }
768   // Shouldn't happen at this point.
769   LOG(FATAL) << "Invalid profile: " << profile;
770   return false;
771 }
772
773 void GLRenderingVDAClient::DecodeNextFragment() {
774   if (decoder_deleted())
775     return;
776   if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
777     if (outstanding_decodes_ == 0) {
778       decoder_->Flush();
779       SetState(CS_FLUSHING);
780     }
781     return;
782   }
783   size_t end_pos;
784   std::string next_fragment_bytes;
785   if (encoded_data_next_pos_to_decode_ == 0) {
786     next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
787   } else {
788     next_fragment_bytes =
789         GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
790   }
791   size_t next_fragment_size = next_fragment_bytes.size();
792
793   // Call Reset() just after Decode() if the fragment contains config info.
794   // This tests how the VDA behaves when it gets a reset request before it has
795   // a chance to ProvidePictureBuffers().
796   bool reset_here = false;
797   if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
798     reset_here = FragmentHasConfigInfo(
799         reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
800         next_fragment_size,
801         profile_);
802     if (reset_here)
803       reset_after_frame_num_ = END_OF_STREAM_RESET;
804   }
805
806   // Populate the shared memory buffer w/ the fragment, duplicate its handle,
807   // and hand it off to the decoder.
808   base::SharedMemory shm;
809   CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
810   memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
811   base::SharedMemoryHandle dup_handle;
812   CHECK(shm.ShareToProcess(base::Process::Current().handle(), &dup_handle));
813   media::BitstreamBuffer bitstream_buffer(
814       next_bitstream_buffer_id_, dup_handle, next_fragment_size);
815   decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
816   // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
817   next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
818   decoder_->Decode(bitstream_buffer);
819   ++outstanding_decodes_;
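  // A negative |delete_decoder_state_| means "delete the decoder after -N
  // Decode() calls"; see the constructor comment.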
820   if (!remaining_play_throughs_ &&
821       -delete_decoder_state_ == next_bitstream_buffer_id_) {
822     DeleteDecoder();
823   }
824
825   if (reset_here) {
826     reset_after_frame_num_ = MID_STREAM_RESET;
827     decoder_->Reset();
828     // Restart from the beginning to re-Decode() the SPS we just sent.
829     encoded_data_next_pos_to_decode_ = 0;
830   } else {
831     encoded_data_next_pos_to_decode_ = end_pos;
832   }
833
834   if (decode_calls_per_second_ > 0) {
835     base::MessageLoop::current()->PostDelayedTask(
836         FROM_HERE,
837         base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
838         base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
839   }
840 }
841
842 double GLRenderingVDAClient::frames_per_second() {
843   base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
844   return num_decoded_frames_ / delta.InSecondsF();
845 }
846
847 base::TimeDelta GLRenderingVDAClient::decode_time_median() {
848   if (decode_time_.size() == 0)
849     return base::TimeDelta();
850   std::sort(decode_time_.begin(), decode_time_.end());
851   int index = decode_time_.size() / 2;
852   if (decode_time_.size() % 2 != 0)
853     return decode_time_[index];
854
855   return (decode_time_[index] + decode_time_[index - 1]) / 2;
856 }
857
858 class VideoDecodeAcceleratorTest : public ::testing::Test {
859  protected:
860   VideoDecodeAcceleratorTest();
861   virtual void SetUp();
862   virtual void TearDown();
863
864   // Parse |data| into its constituent parts, set the various output fields
865   // accordingly, and read in the video stream.  CHECK-fails on unexpected or
866   // missing required data.  Unspecified optional fields are set to -1.
867   void ParseAndReadTestVideoData(base::FilePath::StringType data,
868                                  std::vector<TestVideoFile*>* test_video_files);
869
870   // Update the parameters of |test_video_files| according to
871   // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
872   // frames should be adjusted if decoder is reset in the middle of the stream.
873   void UpdateTestVideoFileParams(
874       size_t num_concurrent_decoders,
875       int reset_point,
876       std::vector<TestVideoFile*>* test_video_files);
877
878   void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
879   void CreateAndStartDecoder(GLRenderingVDAClient* client,
880                              ClientStateNotification<ClientState>* note);
881   void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
882   void WaitUntilIdle();
883   void OutputLogFile(const base::FilePath::CharType* log_path,
884                      const std::string& content);
885
886   std::vector<TestVideoFile*> test_video_files_;
887   RenderingHelper rendering_helper_;
888   scoped_refptr<base::MessageLoopProxy> rendering_loop_proxy_;
889
890  private:
891   base::Thread rendering_thread_;
892   // Required for Thread to work.  Not used otherwise.
893   base::ShadowingAtExitManager at_exit_manager_;
894
895   DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
896 };
897
898 VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest()
899     : rendering_thread_("GLRenderingVDAClientThread") {}
900
901 void VideoDecodeAcceleratorTest::SetUp() {
902   ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
903
904   // Initialize the rendering thread.
905   base::Thread::Options options;
906   options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
907 #if defined(OS_WIN)
908   // On Windows the decoding thread initializes the Media Foundation decoder,
909   // which uses COM.  We need the thread to be a UI thread.
910   options.message_loop_type = base::MessageLoop::TYPE_UI;
911 #endif  // OS_WIN
912
913   rendering_thread_.StartWithOptions(options);
914   rendering_loop_proxy_ = rendering_thread_.message_loop_proxy();
915 }
916
917 void VideoDecodeAcceleratorTest::TearDown() {
918   rendering_loop_proxy_->PostTask(
919       FROM_HERE,
920       base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
921                  &test_video_files_));
922
923   base::WaitableEvent done(false, false);
924   rendering_loop_proxy_->PostTask(
925       FROM_HERE,
926       base::Bind(&RenderingHelper::UnInitialize,
927                  base::Unretained(&rendering_helper_),
928                  &done));
929   done.Wait();
930
931   rendering_thread_.Stop();
932 }
933
934 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
935     base::FilePath::StringType data,
936     std::vector<TestVideoFile*>* test_video_files) {
937   std::vector<base::FilePath::StringType> entries;
938   base::SplitString(data, ';', &entries);
939   CHECK_GE(entries.size(), 1U) << data;
940   for (size_t index = 0; index < entries.size(); ++index) {
941     std::vector<base::FilePath::StringType> fields;
942     base::SplitString(entries[index], ':', &fields);
943     CHECK_GE(fields.size(), 1U) << entries[index];
944     CHECK_LE(fields.size(), 8U) << entries[index];
945     TestVideoFile* video_file = new TestVideoFile(fields[0]);
946     if (!fields[1].empty())
947       CHECK(base::StringToInt(fields[1], &video_file->width));
948     if (!fields[2].empty())
949       CHECK(base::StringToInt(fields[2], &video_file->height));
950     if (!fields[3].empty())
951       CHECK(base::StringToInt(fields[3], &video_file->num_frames));
952     if (!fields[4].empty())
953       CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
954     if (!fields[5].empty())
955       CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
956     if (!fields[6].empty())
957       CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
958     int profile = -1;
959     if (!fields[7].empty())
960       CHECK(base::StringToInt(fields[7], &profile));
961     video_file->profile = static_cast<media::VideoCodecProfile>(profile);
962
963     // Read in the video data.
964     base::FilePath filepath(video_file->file_name);
965     CHECK(base::ReadFileToString(filepath, &video_file->data_str))
966         << "test_video_file: " << filepath.MaybeAsASCII();
967
968     test_video_files->push_back(video_file);
969   }
970 }
971
972 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
973     size_t num_concurrent_decoders,
974     int reset_point,
975     std::vector<TestVideoFile*>* test_video_files) {
976   for (size_t i = 0; i < test_video_files->size(); i++) {
977     TestVideoFile* video_file = (*test_video_files)[i];
978     if (reset_point == MID_STREAM_RESET) {
979       // Reset should not go beyond the last frame;
980       // reset in the middle of the stream for short videos.
981       video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
982       if (video_file->num_frames <= video_file->reset_after_frame_num)
983         video_file->reset_after_frame_num = video_file->num_frames / 2;
984
985       video_file->num_frames += video_file->reset_after_frame_num;
986     } else {
987       video_file->reset_after_frame_num = reset_point;
988     }
989
990     if (video_file->min_fps_render != -1)
991       video_file->min_fps_render /= num_concurrent_decoders;
992     if (video_file->min_fps_no_render != -1)
993       video_file->min_fps_no_render /= num_concurrent_decoders;
994   }
995 }
996
997 void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
998     const RenderingHelperParams& helper_params) {
999   base::WaitableEvent done(false, false);
1000   rendering_loop_proxy_->PostTask(
1001       FROM_HERE,
1002       base::Bind(&RenderingHelper::Initialize,
1003                  base::Unretained(&rendering_helper_),
1004                  helper_params,
1005                  &done));
1006   done.Wait();
1007 }
1008
1009 void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
1010     GLRenderingVDAClient* client,
1011     ClientStateNotification<ClientState>* note) {
1012   rendering_loop_proxy_->PostTask(
1013       FROM_HERE,
1014       base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
1015                  base::Unretained(client)));
1016   ASSERT_EQ(note->Wait(), CS_DECODER_SET);
1017 }
1018
1019 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
1020     ClientStateNotification<ClientState>* note) {
1021   for (int i = 0; i < CS_MAX; i++) {
1022     if (note->Wait() == CS_DESTROYED)
1023       break;
1024   }
1025 }
1026
1027 void VideoDecodeAcceleratorTest::WaitUntilIdle() {
1028   base::WaitableEvent done(false, false);
1029   rendering_loop_proxy_->PostTask(
1030       FROM_HERE,
1031       base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
1032   done.Wait();
1033 }
1034
1035 void VideoDecodeAcceleratorTest::OutputLogFile(
1036     const base::FilePath::CharType* log_path,
1037     const std::string& content) {
1038   base::File file(base::FilePath(log_path),
1039                   base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1040   file.WriteAtCurrentPos(content.data(), content.length());
1041 }
1042
1043 // Test parameters:
1044 // - Number of concurrent decoders.
1045 // - Number of concurrent in-flight Decode() calls per decoder.
1046 // - Number of play-throughs.
1047 // - reset_after_frame_num: see GLRenderingVDAClient ctor.
1048 // - delete_decoder_state: see GLRenderingVDAClient ctor.
1049 // - whether to test slow rendering by delaying ReusePictureBuffer().
1050 // - whether the video frames are rendered as thumbnails.
1051 class VideoDecodeAcceleratorParamTest
1052     : public VideoDecodeAcceleratorTest,
1053       public ::testing::WithParamInterface<
1054         Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
1055 };
1056
1057 // Helper so that gtest failures emit a more readable version of the tuple than
1058 // its byte representation.
1059 ::std::ostream& operator<<(
1060     ::std::ostream& os,
1061     const Tuple7<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
1062   return os << t.a << ", " << t.b << ", " << t.c << ", " << t.d << ", " << t.e
1063             << ", " << t.f << ", " << t.g;
1064 }
1065
1066 // Wait for |note| to report a state and if it's not |expected_state| then
1067 // assert |client| has deleted its decoder.
1068 static void AssertWaitForStateOrDeleted(
1069     ClientStateNotification<ClientState>* note,
1070     GLRenderingVDAClient* client,
1071     ClientState expected_state) {
1072   ClientState state = note->Wait();
1073   if (state == expected_state) return;
1074   ASSERT_TRUE(client->decoder_deleted())
1075       << "Decoder not deleted but Wait() returned " << state
1076       << ", instead of " << expected_state;
1077 }
1078
1079 // We assert a minimum number of concurrent decoders that we expect to succeed.
1080 // Different platforms can support more concurrent decoders, so we don't assert
1081 // failure above this.
1082 enum { kMinSupportedNumConcurrentDecoders = 3 };
1083
1084 // Test the most straightforward case possible: data is decoded from a single
1085 // chunk and rendered to the screen.
1086 TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
1087   const size_t num_concurrent_decoders = GetParam().a;
1088   const size_t num_in_flight_decodes = GetParam().b;
1089   const int num_play_throughs = GetParam().c;
1090   const int reset_point = GetParam().d;
1091   const int delete_decoder_state = GetParam().e;
1092   bool test_reuse_delay = GetParam().f;
1093   const bool render_as_thumbnails = GetParam().g;
1094
1095   UpdateTestVideoFileParams(
1096       num_concurrent_decoders, reset_point, &test_video_files_);
1097
1098   // Suppress GL rendering for all tests when "--rendering_fps" is 0.
1099   const bool suppress_rendering = g_rendering_fps == 0;
1100
1101   std::vector<ClientStateNotification<ClientState>*>
1102       notes(num_concurrent_decoders, NULL);
1103   std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);
1104
1105   RenderingHelperParams helper_params;
1106   helper_params.rendering_fps = g_rendering_fps;
1107   helper_params.render_as_thumbnails = render_as_thumbnails;
1108   if (render_as_thumbnails) {
1109     // Only one decoder is supported with thumbnail rendering.
1110     CHECK_EQ(num_concurrent_decoders, 1U);
1111     helper_params.thumbnails_page_size = kThumbnailsPageSize;
1112     helper_params.thumbnail_size = kThumbnailSize;
1113   }
1114
1115   // First kick off all the decoders.
1116   for (size_t index = 0; index < num_concurrent_decoders; ++index) {
1117     TestVideoFile* video_file =
1118         test_video_files_[index % test_video_files_.size()];
1119     ClientStateNotification<ClientState>* note =
1120         new ClientStateNotification<ClientState>();
1121     notes[index] = note;
1122
1123     int delay_after_frame_num = std::numeric_limits<int>::max();
1124     if (test_reuse_delay &&
1125         kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
1126       delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
1127     }
1128
1129     GLRenderingVDAClient* client =
1130         new GLRenderingVDAClient(&rendering_helper_,
1131                                  note,
1132                                  video_file->data_str,
1133                                  num_in_flight_decodes,
1134                                  num_play_throughs,
1135                                  video_file->reset_after_frame_num,
1136                                  delete_decoder_state,
1137                                  video_file->width,
1138                                  video_file->height,
1139                                  video_file->profile,
1140                                  suppress_rendering,
1141                                  delay_after_frame_num,
1142                                  0,
1143                                  render_as_thumbnails);
1144
1145     clients[index] = client;
1146     helper_params.clients.push_back(client->AsWeakPtr());
1147   }
1148
1149   InitializeRenderingHelper(helper_params);
1150
1151   for (size_t index = 0; index < num_concurrent_decoders; ++index) {
1152     CreateAndStartDecoder(clients[index], notes[index]);
1153   }
1154
1155   // Then wait for all the decodes to finish.
1156   // Only check performance & correctness later if we play through only once.
1157   bool skip_performance_and_correctness_checks = num_play_throughs > 1;
1158   for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1159     ClientStateNotification<ClientState>* note = notes[i];
1160     ClientState state = note->Wait();
1161     if (state != CS_INITIALIZED) {
1162       skip_performance_and_correctness_checks = true;
1163       // We expect initialization to fail only when more than the supported
1164       // number of decoders is instantiated.  Assert here that something else
1165       // didn't trigger failure.
1166       ASSERT_GT(num_concurrent_decoders,
1167                 static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
1168       continue;
1169     }
1170     ASSERT_EQ(state, CS_INITIALIZED);
1171     for (int n = 0; n < num_play_throughs; ++n) {
1172       // For play-throughs other than the first, we expect initialization to
1173       // succeed unconditionally.
1174       if (n > 0) {
1175         ASSERT_NO_FATAL_FAILURE(
1176             AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
1177       }
1178       // InitializeDone kicks off decoding inside the client, so we just need to
1179       // wait for Flush.
1180       ASSERT_NO_FATAL_FAILURE(
1181           AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
1182       ASSERT_NO_FATAL_FAILURE(
1183           AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
1184       // FlushDone requests Reset().
1185       ASSERT_NO_FATAL_FAILURE(
1186           AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
1187     }
1188     ASSERT_NO_FATAL_FAILURE(
1189         AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
1190     // ResetDone requests Destroy().
1191     ASSERT_NO_FATAL_FAILURE(
1192         AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
1193   }
1194   // Finally assert that decoding went as expected.
1195   for (size_t i = 0; i < num_concurrent_decoders &&
1196            !skip_performance_and_correctness_checks; ++i) {
1197     // We can only make performance/correctness assertions if the decoder was
1198     // allowed to finish.
1199     if (delete_decoder_state < CS_FLUSHED)
1200       continue;
1201     GLRenderingVDAClient* client = clients[i];
1202     TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
1203     if (video_file->num_frames > 0) {
1204       // The number of decoded frames may exceed the number of frames in the
1205       // video, since frames can still be delivered until the reset completes.
1206       if (video_file->reset_after_frame_num > 0)
1207         EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
1208       else
1209         EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
1210     }
1211     if (reset_point == END_OF_STREAM_RESET) {
1212       EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
1213                 client->num_queued_fragments());
1214       EXPECT_EQ(client->num_done_bitstream_buffers(),
1215                 client->num_queued_fragments());
1216     }
1217     LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
1218     if (!render_as_thumbnails) {
1219       int min_fps = suppress_rendering ?
1220           video_file->min_fps_no_render : video_file->min_fps_render;
1221       if (min_fps > 0 && !test_reuse_delay)
1222         EXPECT_GT(client->frames_per_second(), min_fps);
1223     }
1224   }
1225
1226   if (render_as_thumbnails) {
1227     std::vector<unsigned char> rgb;
1228     bool alpha_solid;
1229     base::WaitableEvent done(false, false);
1230     rendering_loop_proxy_->PostTask(
1231       FROM_HERE,
1232       base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
1233                  base::Unretained(&rendering_helper_),
1234                  &rgb, &alpha_solid, &done));
1235     done.Wait();
1236
1237     std::vector<std::string> golden_md5s;
1238     std::string md5_string = base::MD5String(
1239         base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
1240     ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
1241     std::vector<std::string>::iterator match =
1242         find(golden_md5s.begin(), golden_md5s.end(), md5_string);
1243     if (match == golden_md5s.end()) {
1244       // Convert raw RGB into PNG for export.
1245       std::vector<unsigned char> png;
1246       gfx::PNGCodec::Encode(&rgb[0],
1247                             gfx::PNGCodec::FORMAT_RGB,
1248                             kThumbnailsPageSize,
1249                             kThumbnailsPageSize.width() * 3,
1250                             true,
1251                             std::vector<gfx::PNGCodec::Comment>(),
1252                             &png);
1253
1254       LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
1255
1256       base::FilePath filepath(test_video_files_[0]->file_name);
1257       filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
1258       filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
1259       int num_bytes = base::WriteFile(filepath,
1260                                            reinterpret_cast<char*>(&png[0]),
1261                                            png.size());
1262       ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
1263     }
1264     ASSERT_NE(match, golden_md5s.end());
1265     EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
1266   }
1267
1268   // Output the frame delivery times to a file.
1269   // We can only make performance/correctness assertions if the decoder was
1270   // allowed to finish.
1271   if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
1272     base::File output_file(
1273         base::FilePath(g_output_log),
1274         base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1275     for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1276       clients[i]->OutputFrameDeliveryTimes(&output_file);
1277     }
1278   }
1279
1280   rendering_loop_proxy_->PostTask(
1281       FROM_HERE,
1282       base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*> >,
1283                  &clients));
1284   rendering_loop_proxy_->PostTask(
1285       FROM_HERE,
1286       base::Bind(&STLDeleteElements<
1287                       std::vector<ClientStateNotification<ClientState>*> >,
1288                  &notes));
1289   WaitUntilIdle();
1290 };
1291
1292 // Test that replay after EOS works fine.
1293 INSTANTIATE_TEST_CASE_P(
1294     ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
1295     ::testing::Values(
1296         MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));
1297
1298 // Test that Reset() before the first Decode() works fine.
1299 INSTANTIATE_TEST_CASE_P(
1300     ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
1301     ::testing::Values(
1302         MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));
1303
1304 // Test Reset() immediately after Decode() containing config info.
1305 INSTANTIATE_TEST_CASE_P(
1306     ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
1307     ::testing::Values(
1308         MakeTuple(
1309             1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));
1310
1311 // Test that Reset() mid-stream works fine and doesn't affect decoding even when
1312 // Decode() calls are made during the reset.
1313 INSTANTIATE_TEST_CASE_P(
1314     MidStreamReset, VideoDecodeAcceleratorParamTest,
1315     ::testing::Values(
1316         MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));
1317
1318 INSTANTIATE_TEST_CASE_P(
1319     SlowRendering, VideoDecodeAcceleratorParamTest,
1320     ::testing::Values(
1321         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));
1322
1323 // Test that Destroy() mid-stream works fine (primarily this is testing that no
1324 // crashes occur).
1325 INSTANTIATE_TEST_CASE_P(
1326     TearDownTiming, VideoDecodeAcceleratorParamTest,
1327     ::testing::Values(
1328         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
1329         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
1330         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
1331         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
1332         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
1333         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1334         MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1335                   static_cast<ClientState>(-1), false, false),
1336         MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1337                   static_cast<ClientState>(-10), false, false),
1338         MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1339                   static_cast<ClientState>(-100), false, false)));
1340
1341 // Test that decoding works with various numbers of in-flight Decode() calls.
1342 INSTANTIATE_TEST_CASE_P(
1343     DecodeVariations, VideoDecodeAcceleratorParamTest,
1344     ::testing::Values(
1345         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1346         MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1347         // Tests queuing.
1348         MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));
1349
1350 // Find out how many concurrent decoders can go before we exhaust system
1351 // resources.
1352 INSTANTIATE_TEST_CASE_P(
1353     ResourceExhaustion, VideoDecodeAcceleratorParamTest,
1354     ::testing::Values(
1355         // +0 hack below to promote enum to int.
1356         MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
1357                   END_OF_STREAM_RESET, CS_RESET, false, false),
1358         MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
1359                   END_OF_STREAM_RESET, CS_RESET, false, false)));
1360
1361 // Thumbnailing test
1362 INSTANTIATE_TEST_CASE_P(
1363     Thumbnail, VideoDecodeAcceleratorParamTest,
1364     ::testing::Values(
1365         MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1366
1367 // Measure the median of the decode time when VDA::Decode is called 30 times per
1368 // second.
1369 TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
1370   RenderingHelperParams helper_params;
1371
1372   // Disable rendering by setting the rendering_fps = 0.
1373   helper_params.rendering_fps = 0;
1374   helper_params.render_as_thumbnails = false;
1375
1376   ClientStateNotification<ClientState>* note =
1377       new ClientStateNotification<ClientState>();
1378   GLRenderingVDAClient* client =
1379       new GLRenderingVDAClient(&rendering_helper_,
1380                                note,
1381                                test_video_files_[0]->data_str,
1382                                1,
1383                                1,
1384                                test_video_files_[0]->reset_after_frame_num,
1385                                CS_RESET,
1386                                test_video_files_[0]->width,
1387                                test_video_files_[0]->height,
1388                                test_video_files_[0]->profile,
1389                                true,
1390                                std::numeric_limits<int>::max(),
1391                                kWebRtcDecodeCallsPerSecond,
1392                                false /* render_as_thumbnail */);
1393   helper_params.clients.push_back(client->AsWeakPtr());
1394   InitializeRenderingHelper(helper_params);
1395   CreateAndStartDecoder(client, note);
1396   WaitUntilDecodeFinish(note);
1397
1398   base::TimeDelta decode_time_median = client->decode_time_median();
1399   std::string output_string =
1400       base::StringPrintf("Decode time median: %" PRId64 " us",
1401                          decode_time_median.InMicroseconds());
1402   LOG(INFO) << output_string;
1403
1404   if (g_output_log != NULL)
1405     OutputLogFile(g_output_log, output_string);
1406
1407   rendering_loop_proxy_->DeleteSoon(FROM_HERE, client);
1408   rendering_loop_proxy_->DeleteSoon(FROM_HERE, note);
1409   WaitUntilIdle();
1410 };
1411
1412 // TODO(fischman, vrk): add more tests!  In particular:
1413 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1414 // - Test alternate configurations
1415 // - Test failure conditions.
1416 // - Test frame size changes mid-stream
1417
1418 }  // namespace
1419 }  // namespace content
1420
1421 int main(int argc, char **argv) {
1422   testing::InitGoogleTest(&argc, argv);  // Removes gtest-specific args.
1423   base::CommandLine::Init(argc, argv);
1424
1425   // Needed to enable DVLOG through --vmodule.
1426   logging::LoggingSettings settings;
1427   settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
1428   CHECK(logging::InitLogging(settings));
1429
1430   const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
1431   DCHECK(cmd_line);
1432
1433   base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
1434   for (CommandLine::SwitchMap::const_iterator it = switches.begin();
1435        it != switches.end(); ++it) {
1436     if (it->first == "test_video_data") {
1437       content::g_test_video_data = it->second.c_str();
1438       continue;
1439     }
1440     // The output log for VDA performance test.
1441     if (it->first == "output_log") {
1442       content::g_output_log = it->second.c_str();
1443       continue;
1444     }
1445     if (it->first == "rendering_fps") {
1446       // On Windows, CommandLine::StringType is wstring.  We need to convert
1447       // it to std::string first.
1448       std::string input(it->second.begin(), it->second.end());
1449       CHECK(base::StringToDouble(input, &content::g_rendering_fps));
1450       continue;
1451     }
1452     // TODO(owenlin): Remove this flag once it is not used in autotest.
1453     if (it->first == "disable_rendering") {
1454       content::g_rendering_fps = 0;
1455       continue;
1456     }
1457     if (it->first == "v" || it->first == "vmodule")
1458       continue;
1459     LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
1460   }
1461
1462   base::ShadowingAtExitManager at_exit_manager;
1463   content::RenderingHelper::InitializeOneOff();
1464
1465   return RUN_ALL_TESTS();
1466 }