2 * Copyright (c) 2012 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
13 #include "third_party/googletest/src/include/gtest/gtest.h"
15 #include "test/clear_system_state.h"
16 #include "test/register_state_check.h"
18 #include "vpx/vpx_integer.h"
19 #include "./vpx_config.h"
20 #include "vpx_mem/vpx_mem.h"
21 #if CONFIG_VP8_ENCODER
22 # include "./vp8_rtcd.h"
23 # include "vp8/common/variance.h"
25 #if CONFIG_VP9_ENCODER
26 # include "./vp9_rtcd.h"
27 # include "vp9/encoder/vp9_variance.h"
29 #include "test/acm_random.h"
33 using ::std::tr1::get;
34 using ::std::tr1::make_tuple;
35 using ::std::tr1::tuple;
36 using libvpx_test::ACMRandom;
// Scalar reference implementation of variance over a (1 << l2w) x (1 << l2h)
// block, with both buffers laid out contiguously at stride w.  Optimized
// variance functions under test are compared against this result.
// NOTE(review): this chunk is missing interior lines (the se/sse accumulator
// declarations, the per-pixel accumulation, the *sse_ptr store, and closing
// braces) — presumably se sums diff and sse sums diff*diff; confirm against
// the full file before editing.
38 static unsigned int variance_ref(const uint8_t *ref, const uint8_t *src,
39 int l2w, int l2h, unsigned int *sse_ptr) {
42 const int w = 1 << l2w, h = 1 << l2h;
43 for (int y = 0; y < h; y++) {
44 for (int x = 0; x < w; x++) {
// Signed per-pixel difference between reference and source.
45 int diff = ref[w * y + x] - src[w * y + x];
// variance = SSE - (sum of errors)^2 / N, where N == w * h == 1 << (l2w + l2h).
// The int64_t cast guards the se * se product against 32-bit overflow.
51 return sse - (((int64_t) se * se) >> (l2w + l2h));
// Scalar reference implementation of sub-pixel variance: bilinearly
// interpolates the reference block at (xoff, yoff) in 1/16-pel units, then
// computes variance against src.  The reference buffer uses stride (w + 1)
// and must supply one extra row and column for the interpolation taps.
// NOTE(review): interior lines (accumulator declarations, accumulation,
// *sse_ptr store, closing braces) are missing from this chunk — confirm
// against the full file before editing.
54 static unsigned int subpel_variance_ref(const uint8_t *ref, const uint8_t *src,
55 int l2w, int l2h, int xoff, int yoff,
56 unsigned int *sse_ptr) {
59 const int w = 1 << l2w, h = 1 << l2h;
60 for (int y = 0; y < h; y++) {
61 for (int x = 0; x < w; x++) {
62 // bilinear interpolation at a 16th pel step
// Four neighboring reference pixels (stride is w + 1, hence the extra
// row/column requirement).
63 const int a1 = ref[(w + 1) * (y + 0) + x + 0];
64 const int a2 = ref[(w + 1) * (y + 0) + x + 1];
65 const int b1 = ref[(w + 1) * (y + 1) + x + 0];
66 const int b2 = ref[(w + 1) * (y + 1) + x + 1];
// Horizontal filter on both rows, then vertical blend; "+ 8 >> 4" is
// round-to-nearest for the 1/16-pel weights.
67 const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
68 const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
69 const int r = a + (((b - a) * yoff + 8) >> 4);
70 int diff = r - src[w * y + x];
// Same closed form as variance_ref: SSE minus squared-mean term.
76 return sse - (((int64_t) se * se) >> (l2w + l2h));
// Parameterized fixture for whole-pel variance functions.  The tuple is
// (log2 width, log2 height, function under test).
// NOTE(review): the "class VarianceTest" declaration line (and access
// specifiers) are missing from this chunk; only the base-class clause and
// selected member lines are visible.
79 template<typename VarianceFunctionType>
81 : public ::testing::TestWithParam<tuple<int, int, VarianceFunctionType> > {
// Decode the (log2w, log2h, fn) tuple and allocate the src/ref blocks.
83 virtual void SetUp() {
84 const tuple<int, int, VarianceFunctionType>& params = this->GetParam();
85 log2width_ = get<0>(params);
86 width_ = 1 << log2width_;
87 log2height_ = get<1>(params);
88 height_ = 1 << log2height_;
89 variance_ = get<2>(params);
// Deterministic seed so failures reproduce across runs.
91 rnd(ACMRandom::DeterministicSeed());
92 block_size_ = width_ * height_;
93 src_ = new uint8_t[block_size_];
94 ref_ = new uint8_t[block_size_];
95 ASSERT_TRUE(src_ != NULL);
96 ASSERT_TRUE(ref_ != NULL);
// NOTE(review): the delete[] lines for src_/ref_ are missing from this
// chunk — presumably TearDown frees both buffers; confirm in full file.
99 virtual void TearDown() {
102 libvpx_test::ClearSystemState();
// Test bodies are defined out-of-line below.
108 void OneQuarterTest();
113 int width_, log2width_;
114 int height_, log2height_;
116 VarianceFunctionType variance_;
// For every pair of flat blocks (src all i, ref all j) the variance must be
// exactly zero — a pure DC offset contributes to SSE but not to variance.
119 template<typename VarianceFunctionType>
120 void VarianceTest<VarianceFunctionType>::ZeroTest() {
121 for (int i = 0; i <= 255; ++i) {
122 memset(src_, i, block_size_);
123 for (int j = 0; j <= 255; ++j) {
124 memset(ref_, j, block_size_);
// REGISTER_STATE_CHECK verifies the function restores platform register
// state (e.g. x86 MMX/EMMS discipline) around the call.
127 REGISTER_STATE_CHECK(var = variance_(src_, width_, ref_, width_, &sse));
// NOTE(review): failure message lacks a space before "ref values:";
// cosmetic only, left untouched in this documentation-only pass.
128 EXPECT_EQ(0u, var) << "src values: " << i << "ref values: " << j;
// Compare the function under test against the scalar variance_ref() on
// random 8-bit data: both the variance and the returned SSE must match.
133 template<typename VarianceFunctionType>
134 void VarianceTest<VarianceFunctionType>::RefTest() {
// Ten independent random trials.
135 for (int i = 0; i < 10; ++i) {
136 for (int j = 0; j < block_size_; j++) {
137 src_[j] = rnd.Rand8();
138 ref_[j] = rnd.Rand8();
140 unsigned int sse1, sse2;
142 REGISTER_STATE_CHECK(var1 = variance_(src_, width_, ref_, width_, &sse1));
// NOTE(review): the continuation line passing log2height_ and &sse2 is
// missing from this chunk.
143 const unsigned int var2 = variance_ref(src_, ref_, log2width_,
145 EXPECT_EQ(sse1, sse2);
146 EXPECT_EQ(var1, var2);
// Known-answer test: src is all 255; ref is half 255 / half 0.  Half the
// pixels differ by the maximum 255, so the analytic variance is exactly
// block_size * 255^2 / 4 — one quarter of the maximum possible SSE.
150 template<typename VarianceFunctionType>
151 void VarianceTest<VarianceFunctionType>::OneQuarterTest() {
152 memset(src_, 255, block_size_);
153 const int half = block_size_ / 2;
154 memset(ref_, 255, half);
155 memset(ref_ + half, 0, half);
158 REGISTER_STATE_CHECK(var = variance_(src_, width_, ref_, width_, &sse));
159 const unsigned int expected = block_size_ * 255 * 255 / 4;
160 EXPECT_EQ(expected, var);
163 #if CONFIG_VP9_ENCODER
// Scalar reference for the VP9 "subpel average" variance: bilinearly
// interpolate ref at (xoff, yoff) in 1/16-pel units, average the result with
// second_pred (rounding up), then compute variance against src.
// NOTE(review): the signature lines carrying src/l2w/l2h/xoff/yoff, the se
// declaration, the accumulation and *sse_ptr store, and closing braces are
// missing from this chunk — confirm against the full file before editing.
165 unsigned int subpel_avg_variance_ref(const uint8_t *ref,
167 const uint8_t *second_pred,
170 unsigned int *sse_ptr) {
172 unsigned int sse = 0;
173 const int w = 1 << l2w, h = 1 << l2h;
174 for (int y = 0; y < h; y++) {
175 for (int x = 0; x < w; x++) {
176 // bilinear interpolation at a 16th pel step
// Four neighbors at stride (w + 1); one extra row/column is required.
177 const int a1 = ref[(w + 1) * (y + 0) + x + 0];
178 const int a2 = ref[(w + 1) * (y + 0) + x + 1];
179 const int b1 = ref[(w + 1) * (y + 1) + x + 0];
180 const int b2 = ref[(w + 1) * (y + 1) + x + 1];
181 const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
182 const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
183 const int r = a + (((b - a) * yoff + 8) >> 4);
// Average the interpolated pixel with the second predictor ("+ 1 >> 1"
// rounds up) before differencing against src.
184 int diff = ((r + second_pred[w * y + x] + 1) >> 1) - src[w * y + x];
190 return sse - (((int64_t) se * se) >> (l2w + l2h));
// Parameterized fixture for sub-pixel variance functions.  The tuple is
// (log2 width, log2 height, function under test).
193 template<typename SubpelVarianceFunctionType>
194 class SubpelVarianceTest
195 : public ::testing::TestWithParam<tuple<int, int,
196 SubpelVarianceFunctionType> > {
198 virtual void SetUp() {
199 const tuple<int, int, SubpelVarianceFunctionType>& params =
201 log2width_ = get<0>(params);
202 width_ = 1 << log2width_;
203 log2height_ = get<1>(params);
204 height_ = 1 << log2height_;
205 subpel_variance_ = get<2>(params);
// Deterministic seed so failures reproduce across runs.
207 rnd(ACMRandom::DeterministicSeed());
208 block_size_ = width_ * height_;
// src_/sec_ are 16-byte aligned for SIMD implementations; ref_ carries
// extra bytes for the bilinear filter's one-pixel border.
209 src_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
210 sec_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
211 ref_ = new uint8_t[block_size_ + width_ + height_ + 1];
212 ASSERT_TRUE(src_ != NULL);
213 ASSERT_TRUE(sec_ != NULL);
214 ASSERT_TRUE(ref_ != NULL);
// NOTE(review): the vpx_free/delete[] lines are missing from this chunk —
// presumably TearDown releases src_, sec_ and ref_; confirm in full file.
217 virtual void TearDown() {
221 libvpx_test::ClearSystemState();
231 int width_, log2width_;
232 int height_, log2height_;
234 SubpelVarianceFunctionType subpel_variance_;
// Sweep all 16x16 sub-pixel offsets with random data and compare the
// function under test against subpel_variance_ref() for both var and SSE.
237 template<typename SubpelVarianceFunctionType>
238 void SubpelVarianceTest<SubpelVarianceFunctionType>::RefTest() {
239 for (int x = 0; x < 16; ++x) {
240 for (int y = 0; y < 16; ++y) {
241 for (int j = 0; j < block_size_; j++) {
242 src_[j] = rnd.Rand8();
// The reference buffer includes the extra interpolation border bytes.
244 for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
245 ref_[j] = rnd.Rand8();
247 unsigned int sse1, sse2;
// ref_ stride is width_ + 1, matching subpel_variance_ref's layout.
249 REGISTER_STATE_CHECK(var1 = subpel_variance_(ref_, width_ + 1, x, y,
250 src_, width_, &sse1));
251 const unsigned int var2 = subpel_variance_ref(ref_, src_, log2width_,
252 log2height_, x, y, &sse2);
253 EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
254 EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
// Explicit specialization for the "avg" variant: same 16x16 offset sweep,
// but the function also consumes a second predictor (sec_) and is checked
// against subpel_avg_variance_ref().
// NOTE(review): the "template<>" marker line for this specialization is
// missing from this chunk; confirm in the full file.
260 void SubpelVarianceTest<vp9_subp_avg_variance_fn_t>::RefTest() {
261 for (int x = 0; x < 16; ++x) {
262 for (int y = 0; y < 16; ++y) {
263 for (int j = 0; j < block_size_; j++) {
264 src_[j] = rnd.Rand8();
265 sec_[j] = rnd.Rand8();
// ref_ includes the extra interpolation border bytes.
267 for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
268 ref_[j] = rnd.Rand8();
270 unsigned int sse1, sse2;
272 REGISTER_STATE_CHECK(var1 = subpel_variance_(ref_, width_ + 1, x, y,
273 src_, width_, &sse1, sec_));
// NOTE(review): the continuation line passing x, y and &sse2 is missing
// from this chunk.
274 const unsigned int var2 = subpel_avg_variance_ref(ref_, src_, sec_,
275 log2width_, log2height_,
277 EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
278 EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
283 #endif // CONFIG_VP9_ENCODER
285 // -----------------------------------------------------------------------------
// VP8 variance test registrations.  Tuples are (log2 width, log2 height, fn),
// e.g. (4, 4) == 16x16.
// NOTE(review): the first INSTANTIATE's name line (presumably
// "C, VP8VarianceTest,") and the per-ISA "#if HAVE_NEON/HAVE_MMX/HAVE_SSE2"
// guard lines appear to be missing from this chunk — confirm in full file.
290 #if CONFIG_VP8_ENCODER
291 typedef VarianceTest<vp8_variance_fn_t> VP8VarianceTest;
293 TEST_P(VP8VarianceTest, Zero) { ZeroTest(); }
294 TEST_P(VP8VarianceTest, Ref) { RefTest(); }
295 TEST_P(VP8VarianceTest, OneQuarter) { OneQuarterTest(); }
// C reference implementations.
297 INSTANTIATE_TEST_CASE_P(
299 ::testing::Values(make_tuple(2, 2, vp8_variance4x4_c),
300 make_tuple(3, 3, vp8_variance8x8_c),
301 make_tuple(3, 4, vp8_variance8x16_c),
302 make_tuple(4, 3, vp8_variance16x8_c),
303 make_tuple(4, 4, vp8_variance16x16_c)));
// ARM NEON implementations (no 4x4 variant).
306 INSTANTIATE_TEST_CASE_P(
307 NEON, VP8VarianceTest,
308 ::testing::Values(make_tuple(3, 3, vp8_variance8x8_neon),
309 make_tuple(3, 4, vp8_variance8x16_neon),
310 make_tuple(4, 3, vp8_variance16x8_neon),
311 make_tuple(4, 4, vp8_variance16x16_neon)));
// x86 MMX implementations.
315 INSTANTIATE_TEST_CASE_P(
316 MMX, VP8VarianceTest,
317 ::testing::Values(make_tuple(2, 2, vp8_variance4x4_mmx),
318 make_tuple(3, 3, vp8_variance8x8_mmx),
319 make_tuple(3, 4, vp8_variance8x16_mmx),
320 make_tuple(4, 3, vp8_variance16x8_mmx),
321 make_tuple(4, 4, vp8_variance16x16_mmx)));
// x86 SSE2 implementations ("wmt" is VP8's historical name for SSE2).
325 INSTANTIATE_TEST_CASE_P(
326 SSE2, VP8VarianceTest,
327 ::testing::Values(make_tuple(2, 2, vp8_variance4x4_wmt),
328 make_tuple(3, 3, vp8_variance8x8_wmt),
329 make_tuple(3, 4, vp8_variance8x16_wmt),
330 make_tuple(4, 3, vp8_variance16x8_wmt),
331 make_tuple(4, 4, vp8_variance16x16_wmt)));
333 #endif  // CONFIG_VP8_ENCODER
337 // -----------------------------------------------------------------------------
// VP9 fixture typedefs and test registrations.  The three fixtures cover
// whole-pel variance, sub-pixel variance, and sub-pixel average variance.
342 #if CONFIG_VP9_ENCODER
343 typedef VarianceTest<vp9_variance_fn_t> VP9VarianceTest;
344 typedef SubpelVarianceTest<vp9_subpixvariance_fn_t> VP9SubpelVarianceTest;
345 typedef SubpelVarianceTest<vp9_subp_avg_variance_fn_t> VP9SubpelAvgVarianceTest;
347 TEST_P(VP9VarianceTest, Zero) { ZeroTest(); }
348 TEST_P(VP9VarianceTest, Ref) { RefTest(); }
349 TEST_P(VP9SubpelVarianceTest, Ref) { RefTest(); }
350 TEST_P(VP9SubpelAvgVarianceTest, Ref) { RefTest(); }
351 TEST_P(VP9VarianceTest, OneQuarter) { OneQuarterTest(); }
// C reference implementations for all 13 VP9 block sizes, 4x4 through 64x64.
// Tuples are (log2 width, log2 height, fn).
// NOTE(review): the first INSTANTIATE's name line (presumably
// "C, VP9VarianceTest,") is missing from this chunk — confirm in full file.
353 INSTANTIATE_TEST_CASE_P(
355 ::testing::Values(make_tuple(2, 2, vp9_variance4x4_c),
356 make_tuple(2, 3, vp9_variance4x8_c),
357 make_tuple(3, 2, vp9_variance8x4_c),
358 make_tuple(3, 3, vp9_variance8x8_c),
359 make_tuple(3, 4, vp9_variance8x16_c),
360 make_tuple(4, 3, vp9_variance16x8_c),
361 make_tuple(4, 4, vp9_variance16x16_c),
362 make_tuple(4, 5, vp9_variance16x32_c),
363 make_tuple(5, 4, vp9_variance32x16_c),
364 make_tuple(5, 5, vp9_variance32x32_c),
365 make_tuple(5, 6, vp9_variance32x64_c),
366 make_tuple(6, 5, vp9_variance64x32_c),
367 make_tuple(6, 6, vp9_variance64x64_c)));
// Sub-pixel variance, C reference.
369 INSTANTIATE_TEST_CASE_P(
370 C, VP9SubpelVarianceTest,
371 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_variance4x4_c),
372 make_tuple(2, 3, vp9_sub_pixel_variance4x8_c),
373 make_tuple(3, 2, vp9_sub_pixel_variance8x4_c),
374 make_tuple(3, 3, vp9_sub_pixel_variance8x8_c),
375 make_tuple(3, 4, vp9_sub_pixel_variance8x16_c),
376 make_tuple(4, 3, vp9_sub_pixel_variance16x8_c),
377 make_tuple(4, 4, vp9_sub_pixel_variance16x16_c),
378 make_tuple(4, 5, vp9_sub_pixel_variance16x32_c),
379 make_tuple(5, 4, vp9_sub_pixel_variance32x16_c),
380 make_tuple(5, 5, vp9_sub_pixel_variance32x32_c),
381 make_tuple(5, 6, vp9_sub_pixel_variance32x64_c),
382 make_tuple(6, 5, vp9_sub_pixel_variance64x32_c),
383 make_tuple(6, 6, vp9_sub_pixel_variance64x64_c)));
// Sub-pixel average variance, C reference.
385 INSTANTIATE_TEST_CASE_P(
386 C, VP9SubpelAvgVarianceTest,
387 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_avg_variance4x4_c),
388 make_tuple(2, 3, vp9_sub_pixel_avg_variance4x8_c),
389 make_tuple(3, 2, vp9_sub_pixel_avg_variance8x4_c),
390 make_tuple(3, 3, vp9_sub_pixel_avg_variance8x8_c),
391 make_tuple(3, 4, vp9_sub_pixel_avg_variance8x16_c),
392 make_tuple(4, 3, vp9_sub_pixel_avg_variance16x8_c),
393 make_tuple(4, 4, vp9_sub_pixel_avg_variance16x16_c),
394 make_tuple(4, 5, vp9_sub_pixel_avg_variance16x32_c),
395 make_tuple(5, 4, vp9_sub_pixel_avg_variance32x16_c),
396 make_tuple(5, 5, vp9_sub_pixel_avg_variance32x32_c),
397 make_tuple(5, 6, vp9_sub_pixel_avg_variance32x64_c),
398 make_tuple(6, 5, vp9_sub_pixel_avg_variance64x32_c),
399 make_tuple(6, 6, vp9_sub_pixel_avg_variance64x64_c)));
// x86 MMX implementations (only the five VP8-era block sizes exist).
// NOTE(review): the enclosing "#if HAVE_MMX" guard line appears to be
// missing from this chunk — confirm in full file.
402 INSTANTIATE_TEST_CASE_P(
403 MMX, VP9VarianceTest,
404 ::testing::Values(make_tuple(2, 2, vp9_variance4x4_mmx),
405 make_tuple(3, 3, vp9_variance8x8_mmx),
406 make_tuple(3, 4, vp9_variance8x16_mmx),
407 make_tuple(4, 3, vp9_variance16x8_mmx),
408 make_tuple(4, 4, vp9_variance16x16_mmx)));
// x86 SSE2 implementations for all block sizes.  Gated on CONFIG_USE_X86INC
// because these come from x86inc.asm-style assembly.
// NOTE(review): the enclosing "#if HAVE_SSE2" guard line appears to be
// missing from this chunk — confirm in full file.
412 #if CONFIG_USE_X86INC
413 INSTANTIATE_TEST_CASE_P(
414 SSE2, VP9VarianceTest,
415 ::testing::Values(make_tuple(2, 2, vp9_variance4x4_sse2),
416 make_tuple(2, 3, vp9_variance4x8_sse2),
417 make_tuple(3, 2, vp9_variance8x4_sse2),
418 make_tuple(3, 3, vp9_variance8x8_sse2),
419 make_tuple(3, 4, vp9_variance8x16_sse2),
420 make_tuple(4, 3, vp9_variance16x8_sse2),
421 make_tuple(4, 4, vp9_variance16x16_sse2),
422 make_tuple(4, 5, vp9_variance16x32_sse2),
423 make_tuple(5, 4, vp9_variance32x16_sse2),
424 make_tuple(5, 5, vp9_variance32x32_sse2),
425 make_tuple(5, 6, vp9_variance32x64_sse2),
426 make_tuple(6, 5, vp9_variance64x32_sse2),
427 make_tuple(6, 6, vp9_variance64x64_sse2)));
// Sub-pixel variants; the 4-wide kernels use plain SSE ("_sse" suffix).
429 INSTANTIATE_TEST_CASE_P(
430 SSE2, VP9SubpelVarianceTest,
431 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_variance4x4_sse),
432 make_tuple(2, 3, vp9_sub_pixel_variance4x8_sse),
433 make_tuple(3, 2, vp9_sub_pixel_variance8x4_sse2),
434 make_tuple(3, 3, vp9_sub_pixel_variance8x8_sse2),
435 make_tuple(3, 4, vp9_sub_pixel_variance8x16_sse2),
436 make_tuple(4, 3, vp9_sub_pixel_variance16x8_sse2),
437 make_tuple(4, 4, vp9_sub_pixel_variance16x16_sse2),
438 make_tuple(4, 5, vp9_sub_pixel_variance16x32_sse2),
439 make_tuple(5, 4, vp9_sub_pixel_variance32x16_sse2),
440 make_tuple(5, 5, vp9_sub_pixel_variance32x32_sse2),
441 make_tuple(5, 6, vp9_sub_pixel_variance32x64_sse2),
442 make_tuple(6, 5, vp9_sub_pixel_variance64x32_sse2),
443 make_tuple(6, 6, vp9_sub_pixel_variance64x64_sse2)));
445 INSTANTIATE_TEST_CASE_P(
446 SSE2, VP9SubpelAvgVarianceTest,
447 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_avg_variance4x4_sse),
448 make_tuple(2, 3, vp9_sub_pixel_avg_variance4x8_sse),
449 make_tuple(3, 2, vp9_sub_pixel_avg_variance8x4_sse2),
450 make_tuple(3, 3, vp9_sub_pixel_avg_variance8x8_sse2),
451 make_tuple(3, 4, vp9_sub_pixel_avg_variance8x16_sse2),
452 make_tuple(4, 3, vp9_sub_pixel_avg_variance16x8_sse2),
453 make_tuple(4, 4, vp9_sub_pixel_avg_variance16x16_sse2),
454 make_tuple(4, 5, vp9_sub_pixel_avg_variance16x32_sse2),
455 make_tuple(5, 4, vp9_sub_pixel_avg_variance32x16_sse2),
456 make_tuple(5, 5, vp9_sub_pixel_avg_variance32x32_sse2),
457 make_tuple(5, 6, vp9_sub_pixel_avg_variance32x64_sse2),
458 make_tuple(6, 5, vp9_sub_pixel_avg_variance64x32_sse2),
459 make_tuple(6, 6, vp9_sub_pixel_avg_variance64x64_sse2)));
// x86 SSSE3 sub-pixel implementations for all block sizes.
// NOTE(review): the enclosing "#if HAVE_SSSE3" guard and the matching
// "#endif" lines for both x86inc blocks appear to be missing from this
// chunk — confirm in full file.
464 #if CONFIG_USE_X86INC
466 INSTANTIATE_TEST_CASE_P(
467 SSSE3, VP9SubpelVarianceTest,
468 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_variance4x4_ssse3),
469 make_tuple(2, 3, vp9_sub_pixel_variance4x8_ssse3),
470 make_tuple(3, 2, vp9_sub_pixel_variance8x4_ssse3),
471 make_tuple(3, 3, vp9_sub_pixel_variance8x8_ssse3),
472 make_tuple(3, 4, vp9_sub_pixel_variance8x16_ssse3),
473 make_tuple(4, 3, vp9_sub_pixel_variance16x8_ssse3),
474 make_tuple(4, 4, vp9_sub_pixel_variance16x16_ssse3),
475 make_tuple(4, 5, vp9_sub_pixel_variance16x32_ssse3),
476 make_tuple(5, 4, vp9_sub_pixel_variance32x16_ssse3),
477 make_tuple(5, 5, vp9_sub_pixel_variance32x32_ssse3),
478 make_tuple(5, 6, vp9_sub_pixel_variance32x64_ssse3),
479 make_tuple(6, 5, vp9_sub_pixel_variance64x32_ssse3),
480 make_tuple(6, 6, vp9_sub_pixel_variance64x64_ssse3)));
482 INSTANTIATE_TEST_CASE_P(
483 SSSE3, VP9SubpelAvgVarianceTest,
484 ::testing::Values(make_tuple(2, 2, vp9_sub_pixel_avg_variance4x4_ssse3),
485 make_tuple(2, 3, vp9_sub_pixel_avg_variance4x8_ssse3),
486 make_tuple(3, 2, vp9_sub_pixel_avg_variance8x4_ssse3),
487 make_tuple(3, 3, vp9_sub_pixel_avg_variance8x8_ssse3),
488 make_tuple(3, 4, vp9_sub_pixel_avg_variance8x16_ssse3),
489 make_tuple(4, 3, vp9_sub_pixel_avg_variance16x8_ssse3),
490 make_tuple(4, 4, vp9_sub_pixel_avg_variance16x16_ssse3),
491 make_tuple(4, 5, vp9_sub_pixel_avg_variance16x32_ssse3),
492 make_tuple(5, 4, vp9_sub_pixel_avg_variance32x16_ssse3),
493 make_tuple(5, 5, vp9_sub_pixel_avg_variance32x32_ssse3),
494 make_tuple(5, 6, vp9_sub_pixel_avg_variance32x64_ssse3),
495 make_tuple(6, 5, vp9_sub_pixel_avg_variance64x32_ssse3),
496 make_tuple(6, 6, vp9_sub_pixel_avg_variance64x64_ssse3)));
499 #endif  // CONFIG_VP9_ENCODER