/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include <stdio.h>

#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/video_coding/codecs/test_framework/unit_test.h"
#include "webrtc/modules/video_coding/codecs/test_framework/video_source.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
// NOTE(review): the original file wraps everything below in
// "namespace webrtc {" (closed at the end of the file); the opening
// line was lost in this chunk — restore it when fixing the truncation.

// Upper bounds, in milliseconds, on how long the busy-wait helpers in
// TestVp8Impl poll for an encode/decode callback before giving up.
enum { kMaxWaitEncTimeMs = 100 };
enum { kMaxWaitDecTimeMs = 25 };

// Fixed RTP timestamp and NTP capture time stamped onto the input frame so
// the tests can verify they propagate through encode/decode unchanged.
static const uint32_t kTestTimestamp = 123;
static const int64_t kTestNtpTimeMs = 456;
28 // TODO(mikhal): Replace these with mocks.
29 class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
31 Vp8UnitTestEncodeCompleteCallback(VideoFrame* frame,
32 unsigned int decoderSpecificSize,
33 void* decoderSpecificInfo)
34 : encoded_video_frame_(frame),
35 encode_complete_(false) {}
36 int Encoded(EncodedImage& encodedImage,
37 const CodecSpecificInfo* codecSpecificInfo,
38 const RTPFragmentationHeader*);
39 bool EncodeComplete();
40 // Note that this only makes sense if an encode has been completed
41 VideoFrameType EncodedFrameType() const {return encoded_frame_type_;}
44 VideoFrame* encoded_video_frame_;
45 bool encode_complete_;
46 VideoFrameType encoded_frame_type_;
49 int Vp8UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
50 const CodecSpecificInfo* codecSpecificInfo,
51 const RTPFragmentationHeader* fragmentation) {
52 encoded_video_frame_->VerifyAndAllocate(encodedImage._size);
53 encoded_video_frame_->CopyFrame(encodedImage._size, encodedImage._buffer);
54 encoded_video_frame_->SetLength(encodedImage._length);
55 // TODO(mikhal): Update frame type API.
56 // encoded_video_frame_->SetFrameType(encodedImage._frameType);
57 encoded_video_frame_->SetWidth(encodedImage._encodedWidth);
58 encoded_video_frame_->SetHeight(encodedImage._encodedHeight);
59 encoded_video_frame_->SetTimeStamp(encodedImage._timeStamp);
60 encode_complete_ = true;
61 encoded_frame_type_ = encodedImage._frameType;
65 bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() {
66 if (encode_complete_) {
67 encode_complete_ = false;
73 class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
75 explicit Vp8UnitTestDecodeCompleteCallback(I420VideoFrame* frame)
76 : decoded_video_frame_(frame),
77 decode_complete(false) {}
78 int Decoded(webrtc::I420VideoFrame& frame);
79 bool DecodeComplete();
81 I420VideoFrame* decoded_video_frame_;
85 bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
86 if (decode_complete) {
87 decode_complete = false;
93 int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) {
94 decoded_video_frame_->CopyFrame(image);
95 decode_complete = true;
99 class TestVp8Impl : public ::testing::Test {
101 virtual void SetUp() {
102 encoder_.reset(VP8Encoder::Create());
103 decoder_.reset(VP8Decoder::Create());
104 memset(&codec_inst_, 0, sizeof(codec_inst_));
105 encode_complete_callback_.reset(new
106 Vp8UnitTestEncodeCompleteCallback(&encoded_video_frame_, 0, NULL));
107 decode_complete_callback_.reset(new
108 Vp8UnitTestDecodeCompleteCallback(&decoded_video_frame_));
109 encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get());
110 decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get());
111 // Using a QCIF image (aligned stride (u,v planes) > width).
112 // Processing only one frame.
113 const VideoSource source(test::ResourcePath("paris_qcif", "yuv"), kQCIF);
114 length_source_frame_ = source.GetFrameLength();
115 source_buffer_.reset(new uint8_t[length_source_frame_]);
116 source_file_ = fopen(source.GetFileName().c_str(), "rb");
117 ASSERT_TRUE(source_file_ != NULL);
119 ASSERT_EQ(fread(source_buffer_.get(), 1, length_source_frame_,
120 source_file_), length_source_frame_);
121 codec_inst_.width = source.GetWidth();
122 codec_inst_.height = source.GetHeight();
123 codec_inst_.maxFramerate = source.GetFrameRate();
124 // Setting aligned stride values.
127 Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv);
128 EXPECT_EQ(stride_y, 176);
129 EXPECT_EQ(stride_uv, 96);
131 input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
132 stride_y, stride_uv, stride_uv);
133 input_frame_.set_timestamp(kTestTimestamp);
134 // Using ConvertToI420 to add stride to the image.
135 EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
136 codec_inst_.width, codec_inst_.height,
137 0, kRotateNone, &input_frame_));
140 void SetUpEncodeDecode() {
141 codec_inst_.startBitrate = 300;
142 codec_inst_.maxBitrate = 4000;
143 codec_inst_.qpMax = 56;
144 codec_inst_.codecSpecific.VP8.denoisingOn = true;
146 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
147 encoder_->InitEncode(&codec_inst_, 1, 1440));
148 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
151 int WaitForEncodedFrame() const {
152 int64_t startTime = TickTime::MillisecondTimestamp();
153 while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs) {
154 if (encode_complete_callback_->EncodeComplete()) {
155 return encoded_video_frame_.Length();
161 int WaitForDecodedFrame() const {
162 int64_t startTime = TickTime::MillisecondTimestamp();
163 while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs) {
164 if (decode_complete_callback_->DecodeComplete()) {
165 return CalcBufferSize(kI420, decoded_video_frame_.width(),
166 decoded_video_frame_.height());
172 void VideoFrameToEncodedImage(VideoFrame& frame, EncodedImage &image) {
173 image._buffer = frame.Buffer();
174 image._length = frame.Length();
175 image._size = frame.Size();
176 image._timeStamp = frame.TimeStamp();
177 image._encodedWidth = frame.Width();
178 image._encodedHeight = frame.Height();
179 image._completeFrame = true;
182 scoped_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_;
183 scoped_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
184 scoped_ptr<uint8_t[]> source_buffer_;
186 I420VideoFrame input_frame_;
187 scoped_ptr<VideoEncoder> encoder_;
188 scoped_ptr<VideoDecoder> decoder_;
189 VideoFrame encoded_video_frame_;
190 I420VideoFrame decoded_video_frame_;
191 unsigned int length_source_frame_;
192 VideoCodec codec_inst_;
195 // Disabled on MemorySanitizer as it's breaking on generic libvpx.
196 // https://code.google.com/p/webrtc/issues/detail?id=3904
197 #if defined(MEMORY_SANITIZER)
198 TEST_F(TestVp8Impl, DISABLED_BaseUnitTest) {
200 TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(BaseUnitTest)) {
202 // TODO(mikhal): Remove dependency. Move all test code here.
203 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
205 unittest.SetEncoder(encoder_.get());
206 unittest.SetDecoder(decoder_.get());
212 TEST_F(TestVp8Impl, EncoderParameterTest) {
213 strncpy(codec_inst_.plName, "VP8", 31);
214 codec_inst_.plType = 126;
215 codec_inst_.maxBitrate = 0;
216 codec_inst_.minBitrate = 0;
217 codec_inst_.width = 1440;
218 codec_inst_.height = 1080;
219 codec_inst_.maxFramerate = 30;
220 codec_inst_.startBitrate = 300;
221 codec_inst_.qpMax = 56;
222 codec_inst_.codecSpecific.VP8.complexity = kComplexityNormal;
223 codec_inst_.codecSpecific.VP8.numberOfTemporalLayers = 1;
224 // Calls before InitEncode().
225 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
227 EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
228 encoder_->SetRates(bit_rate, codec_inst_.maxFramerate));
230 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
231 encoder_->InitEncode(&codec_inst_, 1, 1440));
233 // Decoder parameter tests.
234 // Calls before InitDecode().
235 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
236 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
239 TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
241 encoder_->Encode(input_frame_, NULL, NULL);
242 EXPECT_GT(WaitForEncodedFrame(), 0);
243 EncodedImage encodedImage;
244 VideoFrameToEncodedImage(encoded_video_frame_, encodedImage);
245 // First frame should be a key frame.
246 encodedImage._frameType = kKeyFrame;
247 encodedImage.ntp_time_ms_ = kTestNtpTimeMs;
248 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encodedImage, false, NULL));
249 EXPECT_GT(WaitForDecodedFrame(), 0);
250 // Compute PSNR on all planes (faster than SSIM).
251 EXPECT_GT(I420PSNR(&input_frame_, &decoded_video_frame_), 36);
252 EXPECT_EQ(kTestTimestamp, decoded_video_frame_.timestamp());
253 EXPECT_EQ(kTestNtpTimeMs, decoded_video_frame_.ntp_time_ms());
256 TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) {
258 encoder_->Encode(input_frame_, NULL, NULL);
259 EXPECT_GT(WaitForEncodedFrame(), 0);
260 EncodedImage encodedImage;
261 VideoFrameToEncodedImage(encoded_video_frame_, encodedImage);
262 // Setting complete to false -> should return an error.
263 encodedImage._completeFrame = false;
264 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
265 decoder_->Decode(encodedImage, false, NULL));
266 // Setting complete back to true. Forcing a delta frame.
267 encodedImage._frameType = kDeltaFrame;
268 encodedImage._completeFrame = true;
269 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
270 decoder_->Decode(encodedImage, false, NULL));
271 // Now setting a key frame.
272 encodedImage._frameType = kKeyFrame;
273 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
274 decoder_->Decode(encodedImage, false, NULL));
275 EXPECT_GT(I420PSNR(&input_frame_, &decoded_video_frame_), 36);
278 } // namespace webrtc