#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
+#include <algorithm>
#include <deque>
+#include <map>
// Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
// gtest uses as struct names (inside a namespace). This means that
#include "base/at_exit.h"
#include "base/bind.h"
#include "base/command_line.h"
-#include "base/file_util.h"
+#include "base/files/file.h"
+#include "base/files/file_util.h"
#include "base/format_macros.h"
#include "base/md5.h"
#include "base/message_loop/message_loop_proxy.h"
-#include "base/platform_file.h"
#include "base/process/process.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "content/common/gpu/media/rendering_helper.h"
#include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
#include "content/public/common/content_switches.h"
+#include "media/filters/h264_parser.h"
#include "ui/gfx/codec/png_codec.h"
#if defined(OS_WIN)
#include "content/common/gpu/media/dxva_video_decode_accelerator.h"
-#elif defined(OS_CHROMEOS)
-#if defined(ARCH_CPU_ARMEL)
-#include "content/common/gpu/media/exynos_video_decode_accelerator.h"
-#elif defined(ARCH_CPU_X86_FAMILY)
+#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
+#include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
+#include "content/common/gpu/media/v4l2_video_device.h"
+#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
#include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
#include "content/common/gpu/media/vaapi_wrapper.h"
-#endif // ARCH_CPU_ARMEL
+#if defined(USE_X11)
+#include "ui/gl/gl_implementation.h"
+#endif // USE_X11
#else
#error The VideoAccelerator tests are not supported on this platform.
#endif // OS_WIN
// FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");
-// The path of the frame delivery time log. We can enable the log and specify
-// the filename by the "--frame_delivery_log" switch.
-const base::FilePath::CharType* g_frame_delivery_log = NULL;
+// The file path of the test output log. This is used to communicate the test
+// results to CrOS autotests. We can enable the log and specify the filename by
+// the "--output_log" switch.
+const base::FilePath::CharType* g_output_log = NULL;
// The value is set by the switch "--rendering_fps".
-double g_rendering_fps = 0;
-
-// Disable rendering, the value is set by the switch "--disable_rendering".
-bool g_disable_rendering = false;
+double g_rendering_fps = 60;
// Magic constants for differentiating the reasons for NotifyResetDone being
// called.
enum ResetPoint {
+ // Reset() just after calling Decode() with a fragment containing config info.
+ RESET_AFTER_FIRST_CONFIG_INFO = -4,
START_OF_STREAM_RESET = -3,
MID_STREAM_RESET = -2,
END_OF_STREAM_RESET = -1
const int kMaxResetAfterFrameNum = 100;
const int kMaxFramesToDelayReuse = 64;
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
+// Simulate WebRTC and call VDA::Decode 30 times per second.
+const int kWebRtcDecodeCallsPerSecond = 30;
struct TestVideoFile {
explicit TestVideoFile(base::FilePath::StringType file_name)
num_fragments(-1),
min_fps_render(-1),
min_fps_no_render(-1),
- profile(-1),
+ profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
reset_after_frame_num(END_OF_STREAM_RESET) {
}
int num_fragments;
int min_fps_render;
int min_fps_no_render;
- int profile;
+ media::VideoCodecProfile profile;
int reset_after_frame_num;
std::string data_str;
};
-// Presumed minimal display size.
-// We subtract one pixel from the width because some ARM chromebooks do not
-// support two fullscreen app running at the same time. See crbug.com/270064.
-const gfx::Size kThumbnailsDisplaySize(1366 - 1, 768);
const gfx::Size kThumbnailsPageSize(1600, 1200);
const gfx::Size kThumbnailSize(160, 120);
const int kMD5StringLength = 32;
-// Parse |data| into its constituent parts, set the various output fields
-// accordingly, and read in video stream. CHECK-fails on unexpected or
-// missing required data. Unspecified optional fields are set to -1.
-void ParseAndReadTestVideoData(base::FilePath::StringType data,
- size_t num_concurrent_decoders,
- int reset_point,
- std::vector<TestVideoFile*>* test_video_files) {
- std::vector<base::FilePath::StringType> entries;
- base::SplitString(data, ';', &entries);
- CHECK_GE(entries.size(), 1U) << data;
- for (size_t index = 0; index < entries.size(); ++index) {
- std::vector<base::FilePath::StringType> fields;
- base::SplitString(entries[index], ':', &fields);
- CHECK_GE(fields.size(), 1U) << entries[index];
- CHECK_LE(fields.size(), 8U) << entries[index];
- TestVideoFile* video_file = new TestVideoFile(fields[0]);
- if (!fields[1].empty())
- CHECK(base::StringToInt(fields[1], &video_file->width));
- if (!fields[2].empty())
- CHECK(base::StringToInt(fields[2], &video_file->height));
- if (!fields[3].empty()) {
- CHECK(base::StringToInt(fields[3], &video_file->num_frames));
- // If we reset mid-stream and start playback over, account for frames
- // that are decoded twice in our expectations.
- if (video_file->num_frames > 0 && reset_point == MID_STREAM_RESET) {
- // Reset should not go beyond the last frame; reset after the first
- // frame for short videos.
- video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
- if (video_file->num_frames <= kMaxResetAfterFrameNum)
- video_file->reset_after_frame_num = 1;
- video_file->num_frames += video_file->reset_after_frame_num;
- } else {
- video_file->reset_after_frame_num = reset_point;
- }
- }
- if (!fields[4].empty())
- CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
- if (!fields[5].empty()) {
- CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
- video_file->min_fps_render /= num_concurrent_decoders;
- }
- if (!fields[6].empty()) {
- CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
- video_file->min_fps_no_render /= num_concurrent_decoders;
- }
- if (!fields[7].empty())
- CHECK(base::StringToInt(fields[7], &video_file->profile));
-
- // Read in the video data.
- base::FilePath filepath(video_file->file_name);
- CHECK(base::ReadFileToString(filepath, &video_file->data_str))
- << "test_video_file: " << filepath.MaybeAsASCII();
-
- test_video_files->push_back(video_file);
- }
-}
-
// Read in golden MD5s for the thumbnailed rendering of this video
void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
std::vector<std::string>* md5_strings) {
// Ignore the empty string added by SplitString
if (!md5_string->length())
continue;
+ // Ignore comments
+ if (md5_string->at(0) == '#')
+ continue;
CHECK_EQ(static_cast<int>(md5_string->length()),
kMD5StringLength) << *md5_string;
CS_MAX, // Must be last entry.
};
-// A wrapper client that throttles the PictureReady callbacks to a given rate.
-// It may drops or queues frame to deliver them on time.
-class ThrottlingVDAClient : public VideoDecodeAccelerator::Client,
- public base::SupportsWeakPtr<ThrottlingVDAClient> {
- public:
- // Callback invoked whan the picture is dropped and should be reused for
- // the decoder again.
- typedef base::Callback<void(int32 picture_buffer_id)> ReusePictureCB;
-
- ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
- double fps,
- ReusePictureCB reuse_picture_cb);
- virtual ~ThrottlingVDAClient();
-
- // VideoDecodeAccelerator::Client implementation
- virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
- const gfx::Size& dimensions,
- uint32 texture_target) OVERRIDE;
- virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
- virtual void PictureReady(const media::Picture& picture) OVERRIDE;
- virtual void NotifyInitializeDone() OVERRIDE;
- virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
- virtual void NotifyFlushDone() OVERRIDE;
- virtual void NotifyResetDone() OVERRIDE;
- virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;
-
- int num_decoded_frames() { return num_decoded_frames_; }
-
- private:
-
- void CallClientPictureReady(int version);
-
- VideoDecodeAccelerator::Client* client_;
- ReusePictureCB reuse_picture_cb_;
- base::TimeTicks next_frame_delivered_time_;
- base::TimeDelta frame_duration_;
-
- int num_decoded_frames_;
- int stream_version_;
- std::deque<media::Picture> pending_pictures_;
-
- DISALLOW_IMPLICIT_CONSTRUCTORS(ThrottlingVDAClient);
-};
-
-ThrottlingVDAClient::ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
- double fps,
- ReusePictureCB reuse_picture_cb)
- : client_(client),
- reuse_picture_cb_(reuse_picture_cb),
- num_decoded_frames_(0),
- stream_version_(0) {
- CHECK(client_);
- CHECK_GT(fps, 0);
- frame_duration_ = base::TimeDelta::FromSeconds(1) / fps;
-}
-
-ThrottlingVDAClient::~ThrottlingVDAClient() {}
-
-void ThrottlingVDAClient::ProvidePictureBuffers(uint32 requested_num_of_buffers,
- const gfx::Size& dimensions,
- uint32 texture_target) {
- client_->ProvidePictureBuffers(
- requested_num_of_buffers, dimensions, texture_target);
-}
-
-void ThrottlingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
- client_->DismissPictureBuffer(picture_buffer_id);
-}
-
-void ThrottlingVDAClient::PictureReady(const media::Picture& picture) {
- ++num_decoded_frames_;
-
- if (pending_pictures_.empty()) {
- base::TimeDelta delay =
- next_frame_delivered_time_.is_null()
- ? base::TimeDelta()
- : next_frame_delivered_time_ - base::TimeTicks::Now();
- base::MessageLoop::current()->PostDelayedTask(
- FROM_HERE,
- base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
- AsWeakPtr(),
- stream_version_),
- delay);
- }
- pending_pictures_.push_back(picture);
-}
-
-void ThrottlingVDAClient::CallClientPictureReady(int version) {
- // Just return if we have reset the decoder
- if (version != stream_version_)
- return;
-
- base::TimeTicks now = base::TimeTicks::Now();
-
- if (next_frame_delivered_time_.is_null())
- next_frame_delivered_time_ = now;
-
- if (next_frame_delivered_time_ + frame_duration_ < now) {
- // Too late, drop the frame
- reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
- } else {
- client_->PictureReady(pending_pictures_.front());
- }
-
- pending_pictures_.pop_front();
- next_frame_delivered_time_ += frame_duration_;
- if (!pending_pictures_.empty()) {
- base::MessageLoop::current()->PostDelayedTask(
- FROM_HERE,
- base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
- AsWeakPtr(),
- stream_version_),
- next_frame_delivered_time_ - base::TimeTicks::Now());
- }
-}
-
-void ThrottlingVDAClient::NotifyInitializeDone() {
- client_->NotifyInitializeDone();
-}
-
-void ThrottlingVDAClient::NotifyEndOfBitstreamBuffer(
- int32 bitstream_buffer_id) {
- client_->NotifyEndOfBitstreamBuffer(bitstream_buffer_id);
-}
-
-void ThrottlingVDAClient::NotifyFlushDone() {
- if (!pending_pictures_.empty()) {
- base::MessageLoop::current()->PostDelayedTask(
- FROM_HERE,
- base::Bind(&ThrottlingVDAClient::NotifyFlushDone,
- base::Unretained(this)),
- next_frame_delivered_time_ - base::TimeTicks::Now());
- return;
- }
- client_->NotifyFlushDone();
-}
-
-void ThrottlingVDAClient::NotifyResetDone() {
- ++stream_version_;
- while (!pending_pictures_.empty()) {
- reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
- pending_pictures_.pop_front();
- }
- next_frame_delivered_time_ = base::TimeTicks();
- client_->NotifyResetDone();
-}
-
-void ThrottlingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
- client_->NotifyError(error);
-}
-
// Client that can accept callbacks from a VideoDecodeAccelerator and is used by
// the TESTs below.
class GLRenderingVDAClient
: public VideoDecodeAccelerator::Client,
public base::SupportsWeakPtr<GLRenderingVDAClient> {
public:
+  // |window_id| is the window ID of the client, used to identify the
+  // rendering area in the |rendering_helper|.
// Doesn't take ownership of |rendering_helper| or |note|, which must outlive
// |*this|.
// |num_play_throughs| indicates how many times to play through the video.
// calls have been made, N>=0 means interpret as ClientState.
// Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
// last play-through (governed by |num_play_throughs|).
- // |rendering_fps| indicates the target rendering fps. 0 means no target fps
- // and it would render as fast as possible.
- // |suppress_rendering| indicates GL rendering is suppressed or not.
+  // |suppress_rendering| indicates GL rendering is suppressed or not.
// After |delay_reuse_after_frame_num| frame has been delivered, the client
// will start delaying the call to ReusePictureBuffer() for kReuseDelay.
- GLRenderingVDAClient(RenderingHelper* rendering_helper,
- int rendering_window_id,
+ // |decode_calls_per_second| is the number of VDA::Decode calls per second.
+ // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
+ GLRenderingVDAClient(size_t window_id,
+ RenderingHelper* rendering_helper,
ClientStateNotification<ClientState>* note,
const std::string& encoded_data,
int num_in_flight_decodes,
int delete_decoder_state,
int frame_width,
int frame_height,
- int profile,
- double rendering_fps,
+ media::VideoCodecProfile profile,
bool suppress_rendering,
- int delay_reuse_after_frame_num);
+ int delay_reuse_after_frame_num,
+ int decode_calls_per_second,
+ bool render_as_thumbnails);
virtual ~GLRenderingVDAClient();
- void CreateDecoder();
+ void CreateAndStartDecoder();
// VideoDecodeAccelerator::Client implementation.
// The heart of the Client.
virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
virtual void PictureReady(const media::Picture& picture) OVERRIDE;
// Simple state changes.
- virtual void NotifyInitializeDone() OVERRIDE;
virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
virtual void NotifyFlushDone() OVERRIDE;
virtual void NotifyResetDone() OVERRIDE;
virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;
- void OutputFrameDeliveryTimes(base::PlatformFile output);
-
- void NotifyFrameDropped(int32 picture_buffer_id);
+ void OutputFrameDeliveryTimes(base::File* output);
// Simple getters for inspecting the state of the Client.
int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
int num_skipped_fragments() { return num_skipped_fragments_; }
int num_queued_fragments() { return num_queued_fragments_; }
- int num_decoded_frames();
+ int num_decoded_frames() { return num_decoded_frames_; }
double frames_per_second();
+ // Return the median of the decode time of all decoded frames.
+ base::TimeDelta decode_time_median();
bool decoder_deleted() { return !decoder_.get(); }
private:
typedef std::map<int, media::PictureBuffer*> PictureBufferById;
void SetState(ClientState new_state);
+ void FinishInitialization();
+ void ReturnPicture(int32 picture_buffer_id);
// Delete the associated decoder helper.
void DeleteDecoder();
// Request decode of the next fragment in the encoded data.
void DecodeNextFragment();
+ size_t window_id_;
RenderingHelper* rendering_helper_;
- int rendering_window_id_;
+ gfx::Size frame_size_;
std::string encoded_data_;
const int num_in_flight_decodes_;
int outstanding_decodes_;
int next_bitstream_buffer_id_;
ClientStateNotification<ClientState>* note_;
scoped_ptr<VideoDecodeAccelerator> decoder_;
+ scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
+ weak_decoder_factory_;
std::set<int> outstanding_texture_ids_;
int remaining_play_throughs_;
int reset_after_frame_num_;
int num_done_bitstream_buffers_;
PictureBufferById picture_buffers_by_id_;
base::TimeTicks initialize_done_ticks_;
- int profile_;
+ media::VideoCodecProfile profile_;
+ GLenum texture_target_;
bool suppress_rendering_;
std::vector<base::TimeTicks> frame_delivery_times_;
int delay_reuse_after_frame_num_;
- scoped_ptr<ThrottlingVDAClient> throttling_client_;
+ // A map from bitstream buffer id to the decode start time of the buffer.
+ std::map<int, base::TimeTicks> decode_start_time_;
+  // The decode times of all decoded frames.
+ std::vector<base::TimeDelta> decode_time_;
+  // The number of VDA::Decode calls per second. This is to simulate WebRTC.
+ int decode_calls_per_second_;
+ bool render_as_thumbnails_;
+ // The number of frames that are not returned from rendering_helper_. We
+  // check this count to ensure all frames are rendered before entering the
+ // CS_RESET state.
+ int frames_at_render_;
DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};
GLRenderingVDAClient::GLRenderingVDAClient(
+ size_t window_id,
RenderingHelper* rendering_helper,
- int rendering_window_id,
ClientStateNotification<ClientState>* note,
const std::string& encoded_data,
int num_in_flight_decodes,
int delete_decoder_state,
int frame_width,
int frame_height,
- int profile,
- double rendering_fps,
+ media::VideoCodecProfile profile,
bool suppress_rendering,
- int delay_reuse_after_frame_num)
- : rendering_helper_(rendering_helper),
- rendering_window_id_(rendering_window_id),
+ int delay_reuse_after_frame_num,
+ int decode_calls_per_second,
+ bool render_as_thumbnails)
+ : window_id_(window_id),
+ rendering_helper_(rendering_helper),
+ frame_size_(frame_width, frame_height),
encoded_data_(encoded_data),
num_in_flight_decodes_(num_in_flight_decodes),
outstanding_decodes_(0),
num_queued_fragments_(0),
num_decoded_frames_(0),
num_done_bitstream_buffers_(0),
- profile_(profile),
+ texture_target_(0),
suppress_rendering_(suppress_rendering),
- delay_reuse_after_frame_num_(delay_reuse_after_frame_num) {
+ delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
+ decode_calls_per_second_(decode_calls_per_second),
+ render_as_thumbnails_(render_as_thumbnails),
+ frames_at_render_(0) {
CHECK_GT(num_in_flight_decodes, 0);
CHECK_GT(num_play_throughs, 0);
- CHECK_GE(rendering_fps, 0);
- if (rendering_fps > 0)
- throttling_client_.reset(new ThrottlingVDAClient(
- this,
- rendering_fps,
- base::Bind(&GLRenderingVDAClient::NotifyFrameDropped,
- base::Unretained(this))));
+ // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
+ if (decode_calls_per_second_ > 0)
+ CHECK_EQ(1, num_in_flight_decodes_);
+
+ // Default to H264 baseline if no profile provided.
+ profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
+ ? profile
+ : media::H264PROFILE_BASELINE);
}
GLRenderingVDAClient::~GLRenderingVDAClient() {
static bool DoNothingReturnTrue() { return true; }
-void GLRenderingVDAClient::CreateDecoder() {
+void GLRenderingVDAClient::CreateAndStartDecoder() {
CHECK(decoder_deleted());
CHECK(!decoder_.get());
VideoDecodeAccelerator::Client* client = this;
base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
- if (throttling_client_) {
- client = throttling_client_.get();
- weak_client = throttling_client_->AsWeakPtr();
- }
#if defined(OS_WIN)
decoder_.reset(
- new DXVAVideoDecodeAccelerator(client, base::Bind(&DoNothingReturnTrue)));
-#elif defined(OS_CHROMEOS)
-#if defined(ARCH_CPU_ARMEL)
- decoder_.reset(new ExynosVideoDecodeAccelerator(
+ new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue)));
+#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
+
+ scoped_ptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
+ if (!device.get()) {
+ NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
+ return;
+ }
+ decoder_.reset(new V4L2VideoDecodeAccelerator(
static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
static_cast<EGLContext>(rendering_helper_->GetGLContext()),
- client,
weak_client,
base::Bind(&DoNothingReturnTrue),
+ device.Pass(),
base::MessageLoopProxy::current()));
-#elif defined(ARCH_CPU_X86_FAMILY)
+#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
+ CHECK_EQ(gfx::kGLImplementationDesktopGL, gfx::GetGLImplementation())
+ << "Hardware video decode does not work with OSMesa";
decoder_.reset(new VaapiVideoDecodeAccelerator(
static_cast<Display*>(rendering_helper_->GetGLDisplay()),
- static_cast<GLXContext>(rendering_helper_->GetGLContext()),
- client,
base::Bind(&DoNothingReturnTrue)));
-#endif // ARCH_CPU_ARMEL
#endif // OS_WIN
CHECK(decoder_.get());
+ weak_decoder_factory_.reset(
+ new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
SetState(CS_DECODER_SET);
if (decoder_deleted())
return;
- // Configure the decoder.
- media::VideoCodecProfile profile = media::H264PROFILE_BASELINE;
- if (profile_ != -1)
- profile = static_cast<media::VideoCodecProfile>(profile_);
- CHECK(decoder_->Initialize(profile));
+ CHECK(decoder_->Initialize(profile_, client));
+ FinishInitialization();
}
void GLRenderingVDAClient::ProvidePictureBuffers(
return;
std::vector<media::PictureBuffer> buffers;
+ texture_target_ = texture_target;
for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
uint32 id = picture_buffers_by_id_.size();
uint32 texture_id;
base::WaitableEvent done(false, false);
rendering_helper_->CreateTexture(
- rendering_window_id_, texture_target, &texture_id, &done);
+ texture_target_, &texture_id, dimensions, &done);
done.Wait();
CHECK(outstanding_texture_ids_.insert(texture_id).second);
media::PictureBuffer* buffer =
if (decoder_deleted())
return;
- frame_delivery_times_.push_back(base::TimeTicks::Now());
+ base::TimeTicks now = base::TimeTicks::Now();
+
+ frame_delivery_times_.push_back(now);
+
+ // Save the decode time of this picture.
+ std::map<int, base::TimeTicks>::iterator it =
+ decode_start_time_.find(picture.bitstream_buffer_id());
+ ASSERT_NE(decode_start_time_.end(), it);
+ decode_time_.push_back(now - it->second);
+ decode_start_time_.erase(it);
CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
++num_decoded_frames_;
// Mid-stream reset applies only to the last play-through per constructor
// comment.
if (remaining_play_throughs_ == 1 &&
- reset_after_frame_num_ == num_decoded_frames()) {
+ reset_after_frame_num_ == num_decoded_frames_) {
reset_after_frame_num_ = MID_STREAM_RESET;
decoder_->Reset();
// Re-start decoding from the beginning of the stream to avoid needing to
media::PictureBuffer* picture_buffer =
picture_buffers_by_id_[picture.picture_buffer_id()];
CHECK(picture_buffer);
- if (!suppress_rendering_) {
- rendering_helper_->RenderTexture(picture_buffer->texture_id());
- }
- if (num_decoded_frames() > delay_reuse_after_frame_num_) {
- base::MessageLoop::current()->PostDelayedTask(
- FROM_HERE,
- base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
- decoder_->AsWeakPtr(),
- picture.picture_buffer_id()),
- kReuseDelay);
- } else {
- decoder_->ReusePictureBuffer(picture.picture_buffer_id());
+ scoped_refptr<VideoFrameTexture> video_frame =
+ new VideoFrameTexture(texture_target_,
+ picture_buffer->texture_id(),
+ base::Bind(&GLRenderingVDAClient::ReturnPicture,
+ AsWeakPtr(),
+ picture.picture_buffer_id()));
+ ++frames_at_render_;
+
+ if (render_as_thumbnails_) {
+ rendering_helper_->RenderThumbnail(video_frame->texture_target(),
+ video_frame->texture_id());
+ } else if (!suppress_rendering_) {
+ rendering_helper_->QueueVideoFrame(window_id_, video_frame);
}
}
-void GLRenderingVDAClient::NotifyInitializeDone() {
- SetState(CS_INITIALIZED);
- initialize_done_ticks_ = base::TimeTicks::Now();
+void GLRenderingVDAClient::ReturnPicture(int32 picture_buffer_id) {
+ if (decoder_deleted())
+ return;
- if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
- decoder_->Reset();
+ --frames_at_render_;
+ if (frames_at_render_ == 0 && state_ == CS_RESETTING) {
+ SetState(CS_RESET);
+ DeleteDecoder();
return;
}
- for (int i = 0; i < num_in_flight_decodes_; ++i)
- DecodeNextFragment();
- DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
+ if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
+ base::MessageLoop::current()->PostDelayedTask(
+ FROM_HERE,
+ base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
+ weak_decoder_factory_->GetWeakPtr(),
+ picture_buffer_id),
+ kReuseDelay);
+ } else {
+ decoder_->ReusePictureBuffer(picture_buffer_id);
+ }
}
void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
// VaapiVideoDecodeAccelerator::FinishReset()).
++num_done_bitstream_buffers_;
--outstanding_decodes_;
- DecodeNextFragment();
+ if (decode_calls_per_second_ == 0)
+ DecodeNextFragment();
}
void GLRenderingVDAClient::NotifyFlushDone() {
if (decoder_deleted())
return;
+
SetState(CS_FLUSHED);
--remaining_play_throughs_;
DCHECK_GE(remaining_play_throughs_, 0);
if (remaining_play_throughs_) {
encoded_data_next_pos_to_decode_ = 0;
- NotifyInitializeDone();
+ FinishInitialization();
return;
}
- SetState(CS_RESET);
- if (!decoder_deleted())
+ rendering_helper_->Flush(window_id_);
+
+ if (frames_at_render_ == 0) {
+ SetState(CS_RESET);
DeleteDecoder();
+ }
}
void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
SetState(CS_ERROR);
}
-void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::PlatformFile output) {
+void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
frame_delivery_times_.size());
- base::WritePlatformFileAtCurrentPos(output, s.data(), s.length());
+ output->WriteAtCurrentPos(s.data(), s.length());
base::TimeTicks t0 = initialize_done_ticks_;
for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
i,
(frame_delivery_times_[i] - t0).InMicroseconds());
t0 = frame_delivery_times_[i];
- base::WritePlatformFileAtCurrentPos(output, s.data(), s.length());
+ output->WriteAtCurrentPos(s.data(), s.length());
}
}
-void GLRenderingVDAClient::NotifyFrameDropped(int32 picture_buffer_id) {
- decoder_->ReusePictureBuffer(picture_buffer_id);
-}
-
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
}
}
+// Records that initialization completed and starts decoding. If a
+// start-of-stream reset was requested, issues Reset() now instead (converting
+// the reset point to MID_STREAM_RESET so the stream is replayed after the
+// reset completes — confirm against NotifyResetDone()); otherwise queues the
+// initial batch of in-flight Decode() calls.
+void GLRenderingVDAClient::FinishInitialization() {
+  SetState(CS_INITIALIZED);
+  initialize_done_ticks_ = base::TimeTicks::Now();
+
+  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
+    reset_after_frame_num_ = MID_STREAM_RESET;
+    decoder_->Reset();
+    return;
+  }
+
+  // Prime the pipeline with |num_in_flight_decodes_| outstanding decodes.
+  for (int i = 0; i < num_in_flight_decodes_; ++i)
+    DecodeNextFragment();
+  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
+}
+
void GLRenderingVDAClient::DeleteDecoder() {
if (decoder_deleted())
return;
- decoder_.release()->Destroy();
+ weak_decoder_factory_.reset();
+ decoder_.reset();
STLClearObject(&encoded_data_);
for (std::set<int>::iterator it = outstanding_texture_ids_.begin();
it != outstanding_texture_ids_.end(); ++it) {
return bytes;
}
+// Returns true if the fragment |data| of |size| bytes carries codec
+// configuration info for |profile|: for H.264, the first NALU is an SPS; for
+// VP8, the low bit of the first byte is clear (a keyframe). Used to find the
+// spot where RESET_AFTER_FIRST_CONFIG_INFO should inject a Reset(). CHECK-fails
+// on any other profile.
+static bool FragmentHasConfigInfo(const uint8* data, size_t size,
+                                  media::VideoCodecProfile profile) {
+  if (profile >= media::H264PROFILE_MIN &&
+      profile <= media::H264PROFILE_MAX) {
+    media::H264Parser parser;
+    parser.SetStream(data, size);
+    media::H264NALU nalu;
+    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
+    if (result != media::H264Parser::kOk) {
+      // Let the VDA figure out there's something wrong with the stream.
+      return false;
+    }
+
+    return nalu.nal_unit_type == media::H264NALU::kSPS;
+  } else if (profile >= media::VP8PROFILE_MIN &&
+             profile <= media::VP8PROFILE_MAX) {
+    // VP8 frame header: low bit of the first byte clear means keyframe.
+    return (size > 0 && !(data[0] & 0x01));
+  }
+  // Shouldn't happen at this point.
+  LOG(FATAL) << "Invalid profile: " << profile;
+  return false;
+}
+
void GLRenderingVDAClient::DecodeNextFragment() {
if (decoder_deleted())
return;
}
size_t next_fragment_size = next_fragment_bytes.size();
+ // Call Reset() just after Decode() if the fragment contains config info.
+ // This tests how the VDA behaves when it gets a reset request before it has
+ // a chance to ProvidePictureBuffers().
+ bool reset_here = false;
+ if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
+ reset_here = FragmentHasConfigInfo(
+ reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
+ next_fragment_size,
+ profile_);
+ if (reset_here)
+ reset_after_frame_num_ = END_OF_STREAM_RESET;
+ }
+
// Populate the shared memory buffer w/ the fragment, duplicate its handle,
// and hand it off to the decoder.
base::SharedMemory shm;
CHECK(shm.ShareToProcess(base::Process::Current().handle(), &dup_handle));
media::BitstreamBuffer bitstream_buffer(
next_bitstream_buffer_id_, dup_handle, next_fragment_size);
+ decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
// Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
decoder_->Decode(bitstream_buffer);
++outstanding_decodes_;
- encoded_data_next_pos_to_decode_ = end_pos;
-
if (!remaining_play_throughs_ &&
-delete_decoder_state_ == next_bitstream_buffer_id_) {
DeleteDecoder();
}
-}
-int GLRenderingVDAClient::num_decoded_frames() {
- return throttling_client_ ? throttling_client_->num_decoded_frames()
- : num_decoded_frames_;
+ if (reset_here) {
+ reset_after_frame_num_ = MID_STREAM_RESET;
+ decoder_->Reset();
+ // Restart from the beginning to re-Decode() the SPS we just sent.
+ encoded_data_next_pos_to_decode_ = 0;
+ } else {
+ encoded_data_next_pos_to_decode_ = end_pos;
+ }
+
+ if (decode_calls_per_second_ > 0) {
+ base::MessageLoop::current()->PostDelayedTask(
+ FROM_HERE,
+ base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
+ base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
+ }
}
double GLRenderingVDAClient::frames_per_second() {
  base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
-  if (delta.InSecondsF() == 0)
-    return 0;
-  return num_decoded_frames() / delta.InSecondsF();
+  // Guard against division by zero when the last frame delivery lands in the
+  // same tick as initialization completion (delta == 0).
+  if (delta.InSecondsF() == 0)
+    return 0;
+  return num_decoded_frames_ / delta.InSecondsF();
+}
+
+base::TimeDelta GLRenderingVDAClient::decode_time_median() {
+  // Returns the median of the recorded per-bitstream-buffer decode times, or
+  // a zero TimeDelta when nothing has been decoded.
+  // Note: sorts |decode_time_| in place.
+  if (decode_time_.empty())
+    return base::TimeDelta();
+  std::sort(decode_time_.begin(), decode_time_.end());
+  size_t index = decode_time_.size() / 2;
+  if (decode_time_.size() % 2 != 0)
+    return decode_time_[index];
+
+  // Even number of samples: average the two middle elements.
+  return (decode_time_[index] + decode_time_[index - 1]) / 2;
+}
+
+class VideoDecodeAcceleratorTest : public ::testing::Test {
+ protected:
+  VideoDecodeAcceleratorTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Parse |data| into its constituent parts, set the various output fields
+  // accordingly, and read in video stream. CHECK-fails on unexpected or
+  // missing required data. Unspecified optional fields are set to -1.
+  void ParseAndReadTestVideoData(base::FilePath::StringType data,
+                                 std::vector<TestVideoFile*>* test_video_files);
+
+  // Update the parameters of |test_video_files| according to
+  // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
+  // frames should be adjusted if decoder is reset in the middle of the stream.
+  void UpdateTestVideoFileParams(
+      size_t num_concurrent_decoders,
+      int reset_point,
+      std::vector<TestVideoFile*>* test_video_files);
+
+  // Initialize |rendering_helper_| on the rendering thread; blocks until the
+  // initialization completes.
+  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
+  // Post decoder creation for |client| to the rendering thread, then block on
+  // |note| until it reports CS_DECODER_SET.
+  void CreateAndStartDecoder(GLRenderingVDAClient* client,
+                             ClientStateNotification<ClientState>* note);
+  // Drain state notifications from |note| until CS_DESTROYED is observed.
+  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
+  // Block until all tasks already posted to the rendering thread have run.
+  void WaitUntilIdle();
+  // Create (truncating) a file at |log_path| and write |content| to it.
+  void OutputLogFile(const base::FilePath::CharType* log_path,
+                     const std::string& content);
+
+  // Test videos parsed from |g_test_video_data| in SetUp(); owned here and
+  // deleted on the rendering thread in TearDown().
+  std::vector<TestVideoFile*> test_video_files_;
+  RenderingHelper rendering_helper_;
+  // Proxy for posting tasks to |rendering_thread_|.
+  scoped_refptr<base::MessageLoopProxy> rendering_loop_proxy_;
+
+ private:
+  base::Thread rendering_thread_;
+  // Required for Thread to work. Not used otherwise.
+  base::ShadowingAtExitManager at_exit_manager_;
+
+  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
+};
+
+// Names the rendering thread for debugging/tracing; the thread itself is
+// started in SetUp(), not here.
+VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest()
+    : rendering_thread_("GLRenderingVDAClientThread") {}
+
+void VideoDecodeAcceleratorTest::SetUp() {
+  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
+
+  // Initialize the rendering thread.
+  base::Thread::Options options;
+  options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
+#if defined(OS_WIN)
+  // For windows the decoding thread initializes the media foundation decoder
+  // which uses COM. We need the thread to be a UI thread.
+  options.message_loop_type = base::MessageLoop::TYPE_UI;
+#endif  // OS_WIN
+
+  // Fail fast if the thread cannot start; every test depends on it.
+  CHECK(rendering_thread_.StartWithOptions(options));
+  rendering_loop_proxy_ = rendering_thread_.message_loop_proxy();
+}
+
+void VideoDecodeAcceleratorTest::TearDown() {
+  // Delete the test video files on the rendering thread, serialized behind
+  // any tasks already queued there that might still reference them.
+  rendering_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
+                 &test_video_files_));
+
+  // Uninitialize the rendering helper on its own thread and block until it
+  // finishes before stopping that thread.
+  base::WaitableEvent done(false, false);
+  rendering_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&RenderingHelper::UnInitialize,
+                 base::Unretained(&rendering_helper_),
+                 &done));
+  done.Wait();
+
+  rendering_thread_.Stop();
+}
+
+void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
+    base::FilePath::StringType data,
+    std::vector<TestVideoFile*>* test_video_files) {
+  // |data| is a ';'-delimited list of entries, each a ':'-delimited tuple:
+  // filename:width:height:num_frames:num_fragments:min_fps_render:
+  // min_fps_no_render:profile. Only the filename is required.
+  std::vector<base::FilePath::StringType> entries;
+  base::SplitString(data, ';', &entries);
+  CHECK_GE(entries.size(), 1U) << data;
+  for (size_t index = 0; index < entries.size(); ++index) {
+    std::vector<base::FilePath::StringType> fields;
+    base::SplitString(entries[index], ':', &fields);
+    CHECK_GE(fields.size(), 1U) << entries[index];
+    CHECK_LE(fields.size(), 8U) << entries[index];
+    TestVideoFile* video_file = new TestVideoFile(fields[0]);
+    // Guard every access with fields.size(): entries may have fewer than 8
+    // fields (only the name is required), so unconditional indexing would
+    // read out of bounds.
+    if (fields.size() > 1 && !fields[1].empty())
+      CHECK(base::StringToInt(fields[1], &video_file->width));
+    if (fields.size() > 2 && !fields[2].empty())
+      CHECK(base::StringToInt(fields[2], &video_file->height));
+    if (fields.size() > 3 && !fields[3].empty())
+      CHECK(base::StringToInt(fields[3], &video_file->num_frames));
+    if (fields.size() > 4 && !fields[4].empty())
+      CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
+    if (fields.size() > 5 && !fields[5].empty())
+      CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
+    if (fields.size() > 6 && !fields[6].empty())
+      CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
+    // Profile defaults to -1 ("unspecified") when the field is absent/empty.
+    int profile = -1;
+    if (fields.size() > 7 && !fields[7].empty())
+      CHECK(base::StringToInt(fields[7], &profile));
+    video_file->profile = static_cast<media::VideoCodecProfile>(profile);
+
+    // Read in the video data.
+    base::FilePath filepath(video_file->file_name);
+    CHECK(base::ReadFileToString(filepath, &video_file->data_str))
+        << "test_video_file: " << filepath.MaybeAsASCII();
+
+    test_video_files->push_back(video_file);
+  }
+}
+
+void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
+    size_t num_concurrent_decoders,
+    int reset_point,
+    std::vector<TestVideoFile*>* test_video_files) {
+  // Adjust each file's reset point and FPS expectations for this test run.
+  for (size_t file_index = 0; file_index < test_video_files->size();
+       ++file_index) {
+    TestVideoFile* video_file = (*test_video_files)[file_index];
+    if (reset_point != MID_STREAM_RESET) {
+      video_file->reset_after_frame_num = reset_point;
+    } else {
+      // Reset should not go beyond the last frame; reset in the middle of
+      // the stream for short videos.
+      video_file->reset_after_frame_num =
+          (video_file->num_frames > kMaxResetAfterFrameNum)
+              ? kMaxResetAfterFrameNum
+              : video_file->num_frames / 2;
+      // Raise the expected frame count by the number of frames decoded
+      // before the reset.
+      video_file->num_frames += video_file->reset_after_frame_num;
+    }
+
+    // With several decoders running concurrently, each one is only expected
+    // to reach a proportional share of the single-decoder minimum FPS.
+    if (video_file->min_fps_render != -1)
+      video_file->min_fps_render /= num_concurrent_decoders;
+    if (video_file->min_fps_no_render != -1)
+      video_file->min_fps_no_render /= num_concurrent_decoders;
+  }
+}
+
+void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
+    const RenderingHelperParams& helper_params) {
+  // RenderingHelper::Initialize runs on the rendering thread; post the call
+  // there and block until it signals completion.
+  base::WaitableEvent initialized(false, false);
+  rendering_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&RenderingHelper::Initialize,
+                 base::Unretained(&rendering_helper_),
+                 helper_params,
+                 &initialized));
+  initialized.Wait();
+}
+
+void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
+    GLRenderingVDAClient* client,
+    ClientStateNotification<ClientState>* note) {
+  // Kick off decoder creation on the rendering thread, then block on |note|
+  // until the client reports that its decoder has been set.
+  rendering_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
+                 base::Unretained(client)));
+  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
+}
+
+void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
+    ClientStateNotification<ClientState>* note) {
+  // Drain state notifications until the client reports CS_DESTROYED. The
+  // loop is bounded by CS_MAX, the number of distinct client states, which
+  // caps how many notifications are consumed.
+  for (int i = 0; i < CS_MAX; i++) {
+    if (note->Wait() == CS_DESTROYED)
+      break;
+  }
+}
+
+void VideoDecodeAcceleratorTest::WaitUntilIdle() {
+  // Post a signal task and wait on it: by the time it runs, every task
+  // queued on the rendering thread before this call has completed.
+  base::WaitableEvent idle(false, false);
+  rendering_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&idle)));
+  idle.Wait();
+}
+
+void VideoDecodeAcceleratorTest::OutputLogFile(
+    const base::FilePath::CharType* log_path,
+    const std::string& content) {
+  // Create (or truncate) |log_path| and write |content| to it.
+  // NOTE(review): the results of the File constructor and WriteAtCurrentPos()
+  // are ignored, so a failed open or short write is silent -- confirm this
+  // best-effort behavior is intended for the perf log.
+  base::File file(base::FilePath(log_path),
+                  base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
+  file.WriteAtCurrentPos(content.data(), content.length());
}
// Test parameters:
// - delete_decoder_phase: see GLRenderingVDAClient ctor.
// - whether to test slow rendering by delaying ReusePictureBuffer().
// - whether the video frames are rendered as thumbnails.
-class VideoDecodeAcceleratorTest
-    : public ::testing::TestWithParam<
-          Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
+// Parameterized fixture: inherits the shared SetUp/TearDown machinery from
+// VideoDecodeAcceleratorTest and adds the Tuple7 parameters described above.
+class VideoDecodeAcceleratorParamTest
+    : public VideoDecodeAcceleratorTest,
+      public ::testing::WithParamInterface<
+          Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
};
// Helper so that gtest failures emit a more readable version of the tuple than
// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
-TEST_P(VideoDecodeAcceleratorTest, TestSimpleDecode) {
- // Required for Thread to work. Not used otherwise.
- base::ShadowingAtExitManager at_exit_manager;
-
+TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
const size_t num_concurrent_decoders = GetParam().a;
const size_t num_in_flight_decodes = GetParam().b;
const int num_play_throughs = GetParam().c;
bool test_reuse_delay = GetParam().f;
const bool render_as_thumbnails = GetParam().g;
- std::vector<TestVideoFile*> test_video_files;
- ParseAndReadTestVideoData(g_test_video_data,
- num_concurrent_decoders,
- reset_point,
- &test_video_files);
+ UpdateTestVideoFileParams(
+ num_concurrent_decoders, reset_point, &test_video_files_);
- // Suppress GL rendering for all tests when the "--disable_rendering" is set.
- const bool suppress_rendering = g_disable_rendering;
+ // Suppress GL rendering for all tests when the "--rendering_fps" is 0.
+ const bool suppress_rendering = g_rendering_fps == 0;
std::vector<ClientStateNotification<ClientState>*>
notes(num_concurrent_decoders, NULL);
std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);
- // Initialize the rendering helper.
- base::Thread rendering_thread("GLRenderingVDAClientThread");
- base::Thread::Options options;
- options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
-#if defined(OS_WIN)
- // For windows the decoding thread initializes the media foundation decoder
- // which uses COM. We need the thread to be a UI thread.
- options.message_loop_type = base::MessageLoop::TYPE_UI;
-#endif // OS_WIN
-
- rendering_thread.StartWithOptions(options);
- RenderingHelper rendering_helper;
-
- base::WaitableEvent done(false, false);
RenderingHelperParams helper_params;
- helper_params.num_windows = num_concurrent_decoders;
+ helper_params.rendering_fps = g_rendering_fps;
helper_params.render_as_thumbnails = render_as_thumbnails;
if (render_as_thumbnails) {
// Only one decoder is supported with thumbnail rendering
CHECK_EQ(num_concurrent_decoders, 1U);
- gfx::Size frame_size(test_video_files[0]->width,
- test_video_files[0]->height);
- helper_params.frame_dimensions.push_back(frame_size);
- helper_params.window_dimensions.push_back(kThumbnailsDisplaySize);
helper_params.thumbnails_page_size = kThumbnailsPageSize;
helper_params.thumbnail_size = kThumbnailSize;
- } else {
- for (size_t index = 0; index < test_video_files.size(); ++index) {
- gfx::Size frame_size(test_video_files[index]->width,
- test_video_files[index]->height);
- helper_params.frame_dimensions.push_back(frame_size);
- helper_params.window_dimensions.push_back(frame_size);
- }
}
- rendering_thread.message_loop()->PostTask(
- FROM_HERE,
- base::Bind(&RenderingHelper::Initialize,
- base::Unretained(&rendering_helper),
- helper_params,
- &done));
- done.Wait();
// First kick off all the decoders.
for (size_t index = 0; index < num_concurrent_decoders; ++index) {
TestVideoFile* video_file =
- test_video_files[index % test_video_files.size()];
+ test_video_files_[index % test_video_files_.size()];
ClientStateNotification<ClientState>* note =
new ClientStateNotification<ClientState>();
notes[index] = note;
}
GLRenderingVDAClient* client =
- new GLRenderingVDAClient(&rendering_helper,
- index,
+ new GLRenderingVDAClient(index,
+ &rendering_helper_,
note,
video_file->data_str,
num_in_flight_decodes,
video_file->width,
video_file->height,
video_file->profile,
- g_rendering_fps,
suppress_rendering,
- delay_after_frame_num);
+ delay_after_frame_num,
+ 0,
+ render_as_thumbnails);
+
clients[index] = client;
+ helper_params.window_sizes.push_back(
+ render_as_thumbnails
+ ? kThumbnailsPageSize
+ : gfx::Size(video_file->width, video_file->height));
+ }
- rendering_thread.message_loop()->PostTask(
- FROM_HERE,
- base::Bind(&GLRenderingVDAClient::CreateDecoder,
- base::Unretained(client)));
+ InitializeRenderingHelper(helper_params);
- ASSERT_EQ(note->Wait(), CS_DECODER_SET);
+ for (size_t index = 0; index < num_concurrent_decoders; ++index) {
+ CreateAndStartDecoder(clients[index], notes[index]);
}
+
// Then wait for all the decodes to finish.
// Only check performance & correctness later if we play through only once.
bool skip_performance_and_correctness_checks = num_play_throughs > 1;
if (delete_decoder_state < CS_FLUSHED)
continue;
GLRenderingVDAClient* client = clients[i];
- TestVideoFile* video_file = test_video_files[i % test_video_files.size()];
+ TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
if (video_file->num_frames > 0) {
// Expect the decoded frames may be more than the video frames as frames
// could still be returned until resetting done.
if (render_as_thumbnails) {
std::vector<unsigned char> rgb;
bool alpha_solid;
- rendering_thread.message_loop()->PostTask(
+ base::WaitableEvent done(false, false);
+ rendering_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
- base::Unretained(&rendering_helper),
+ base::Unretained(&rendering_helper_),
&rgb, &alpha_solid, &done));
done.Wait();
std::vector<std::string> golden_md5s;
std::string md5_string = base::MD5String(
base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
- ReadGoldenThumbnailMD5s(test_video_files[0], &golden_md5s);
+ ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
std::vector<std::string>::iterator match =
find(golden_md5s.begin(), golden_md5s.end(), md5_string);
if (match == golden_md5s.end()) {
LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
- base::FilePath filepath(test_video_files[0]->file_name);
+ base::FilePath filepath(test_video_files_[0]->file_name);
filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
- int num_bytes = file_util::WriteFile(filepath,
+ int num_bytes = base::WriteFile(filepath,
reinterpret_cast<char*>(&png[0]),
png.size());
ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
// Output the frame delivery time to file
// We can only make performance/correctness assertions if the decoder was
// allowed to finish.
- if (g_frame_delivery_log != NULL && delete_decoder_state >= CS_FLUSHED) {
- base::PlatformFile output_file = base::CreatePlatformFile(
- base::FilePath(g_frame_delivery_log),
- base::PLATFORM_FILE_CREATE_ALWAYS | base::PLATFORM_FILE_WRITE,
- NULL,
- NULL);
+ if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
+ base::File output_file(
+ base::FilePath(g_output_log),
+ base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
for (size_t i = 0; i < num_concurrent_decoders; ++i) {
- clients[i]->OutputFrameDeliveryTimes(output_file);
+ clients[i]->OutputFrameDeliveryTimes(&output_file);
}
- base::ClosePlatformFile(output_file);
}
- rendering_thread.message_loop()->PostTask(
+ rendering_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*> >,
&clients));
- rendering_thread.message_loop()->PostTask(
+ rendering_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(&STLDeleteElements<
- std::vector<ClientStateNotification<ClientState>*> >,
- ¬es));
- rendering_thread.message_loop()->PostTask(
- FROM_HERE,
- base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
- &test_video_files));
- rendering_thread.message_loop()->PostTask(
- FROM_HERE,
- base::Bind(&RenderingHelper::UnInitialize,
- base::Unretained(&rendering_helper),
- &done));
- done.Wait();
- rendering_thread.Stop();
+ std::vector<ClientStateNotification<ClientState>*> >,
+ ¬es));
+ WaitUntilIdle();
};
// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
- ReplayAfterEOS, VideoDecodeAcceleratorTest,
+ ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));
-// This hangs on Exynos, preventing further testing and wasting test machine
-// time.
-// TODO(ihf): Enable again once http://crbug.com/269754 is fixed.
-#if defined(ARCH_CPU_X86_FAMILY)
// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
- ResetBeforeDecode, VideoDecodeAcceleratorTest,
+ ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));
-#endif // ARCH_CPU_X86_FAMILY
+
+// Test Reset() immediately after Decode() containing config info.
+INSTANTIATE_TEST_CASE_P(
+ ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
+ ::testing::Values(
+ MakeTuple(
+ 1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));
// Test that Reset() mid-stream works fine and doesn't affect decoding even when
// Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
- MidStreamReset, VideoDecodeAcceleratorTest,
+ MidStreamReset, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));
INSTANTIATE_TEST_CASE_P(
- SlowRendering, VideoDecodeAcceleratorTest,
+ SlowRendering, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));
// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
INSTANTIATE_TEST_CASE_P(
- TearDownTiming, VideoDecodeAcceleratorTest,
+ TearDownTiming, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
// Test that decoding various variation works with multiple in-flight decodes.
INSTANTIATE_TEST_CASE_P(
- DecodeVariations, VideoDecodeAcceleratorTest,
+ DecodeVariations, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
// Find out how many concurrent decoders can go before we exhaust system
// resources.
INSTANTIATE_TEST_CASE_P(
- ResourceExhaustion, VideoDecodeAcceleratorTest,
+ ResourceExhaustion, VideoDecodeAcceleratorParamTest,
::testing::Values(
// +0 hack below to promote enum to int.
MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
// Thumbnailing test
INSTANTIATE_TEST_CASE_P(
- Thumbnail, VideoDecodeAcceleratorTest,
+ Thumbnail, VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
+// Measure the median of the decode time when VDA::Decode is called 30 times per
+// second.
+TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
+  RenderingHelperParams helper_params;
+
+  // Disable rendering by setting the rendering_fps = 0.
+  helper_params.rendering_fps = 0;
+  helper_params.render_as_thumbnails = false;
+
+  ClientStateNotification<ClientState>* note =
+      new ClientStateNotification<ClientState>();
+  // Single client decoding the first test video, with Decode() throttled to
+  // kWebRtcDecodeCallsPerSecond. See the GLRenderingVDAClient constructor
+  // for the meaning of the positional arguments.
+  GLRenderingVDAClient* client =
+      new GLRenderingVDAClient(0,
+                               &rendering_helper_,
+                               note,
+                               test_video_files_[0]->data_str,
+                               1,
+                               1,
+                               test_video_files_[0]->reset_after_frame_num,
+                               CS_RESET,
+                               test_video_files_[0]->width,
+                               test_video_files_[0]->height,
+                               test_video_files_[0]->profile,
+                               true,
+                               std::numeric_limits<int>::max(),
+                               kWebRtcDecodeCallsPerSecond,
+                               false /* render_as_thumbnail */);
+  helper_params.window_sizes.push_back(
+      gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
+  InitializeRenderingHelper(helper_params);
+  CreateAndStartDecoder(client, note);
+  WaitUntilDecodeFinish(note);
+
+  base::TimeDelta decode_time_median = client->decode_time_median();
+  std::string output_string =
+      base::StringPrintf("Decode time median: %" PRId64 " us",
+                         decode_time_median.InMicroseconds());
+  LOG(INFO) << output_string;
+
+  // Write the measurement to the log file when --output_log was given.
+  if (g_output_log != NULL)
+    OutputLogFile(g_output_log, output_string);
+
+  // Delete |client| and |note| on the rendering thread, then wait for those
+  // deletions to run before the fixture tears down.
+  rendering_loop_proxy_->DeleteSoon(FROM_HERE, client);
+  rendering_loop_proxy_->DeleteSoon(FROM_HERE, note);
+  WaitUntilIdle();
+};
+
// TODO(fischman, vrk): add more tests! In particular:
// - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
// - Test alternate configurations
int main(int argc, char **argv) {
testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
- CommandLine::Init(argc, argv);
+ base::CommandLine::Init(argc, argv);
// Needed to enable DVLOG through --vmodule.
logging::LoggingSettings settings;
settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
- settings.dcheck_state =
- logging::ENABLE_DCHECK_FOR_NON_OFFICIAL_RELEASE_BUILDS;
CHECK(logging::InitLogging(settings));
- CommandLine* cmd_line = CommandLine::ForCurrentProcess();
+ const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
DCHECK(cmd_line);
- CommandLine::SwitchMap switches = cmd_line->GetSwitches();
+ base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
if (it->first == "test_video_data") {
content::g_test_video_data = it->second.c_str();
continue;
}
- if (it->first == "frame_delivery_log") {
- content::g_frame_delivery_log = it->second.c_str();
+ // The output log for VDA performance test.
+ if (it->first == "output_log") {
+ content::g_output_log = it->second.c_str();
continue;
}
if (it->first == "rendering_fps") {
CHECK(base::StringToDouble(input, &content::g_rendering_fps));
continue;
}
+ // TODO(owenlin): Remove this flag once it is not used in autotest.
if (it->first == "disable_rendering") {
- content::g_disable_rendering = true;
+ content::g_rendering_fps = 0;
continue;
}
if (it->first == "v" || it->first == "vmodule")
}
base::ShadowingAtExitManager at_exit_manager;
-
-#if defined(OS_WIN)
- content::DXVAVideoDecodeAccelerator::PreSandboxInitialization();
-#elif defined(OS_CHROMEOS)
-#if defined(ARCH_CPU_ARMEL)
- content::ExynosVideoDecodeAccelerator::PreSandboxInitialization();
-#elif defined(ARCH_CPU_X86_FAMILY)
- content::VaapiWrapper::PreSandboxInitialization();
-#endif // ARCH_CPU_ARMEL
-#endif // OS_CHROMEOS
+ content::RenderingHelper::InitializeOneOff();
return RUN_ALL_TESTS();
}