/*
 * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include <limits.h>

#include "vpx_ports/config.h"
#include "modecosts.h"
#include "encodeintra.h"
#include "entropymode.h"
#include "pickinter.h"
#include "findnearmv.h"
#include "reconinter.h"
#include "reconintra.h"
#include "reconintra4x4.h"
#include "vpx_mem/vpx_mem.h"
#if CONFIG_RUNTIME_CPU_DETECT
#define IF_RTCD(x) (x)
#else
#define IF_RTCD(x) NULL
#endif
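/* When runtime CPU detection is disabled the RTCD table argument is never
   dereferenced by the invoke macros, so IF_RTCD() can safely collapse to NULL. */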
extern int VP8_UVSSE(MACROBLOCK *x, const vp8_variance_rtcd_vtable_t *rtcd);

extern unsigned int cnt_pm;

extern const MV_REFERENCE_FRAME vp8_ref_frame_order[MAX_MODES];
extern const MB_PREDICTION_MODE vp8_mode_order[MAX_MODES];

extern unsigned int (*vp8_get16x16pred_error)(unsigned char *src_ptr, int src_stride, unsigned char *ref_ptr, int ref_stride);
extern unsigned int (*vp8_get4x4sse_cs)(unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr, int recon_stride);
extern int vp8_rd_pick_best_mbsegmentation(VP8_COMP *cpi, MACROBLOCK *x, MV *best_ref_mv, int best_rd, int *, int *, int *, int, int *mvcost[2], int, int fullpixel);
extern int vp8_cost_mv_ref(MB_PREDICTION_MODE m, const int near_mv_ref_ct[4]);
extern void vp8_set_mbmode_and_mvs(MACROBLOCK *x, MB_PREDICTION_MODE mb, MV *mv);
int vp8_skip_fractional_mv_step(MACROBLOCK *mb, BLOCK *b, BLOCKD *d, MV *bestmv, MV *ref_mv, int error_per_bit, vp8_subpixvariance_fn_t svf, vp8_variance_fn_t vf, int *mvcost[2])
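/* Measures the prediction error of the currently selected inter predictor
   over the macroblock's luma area, using the supplied sub-pixel variance
   function when the motion vector has a fractional (1/8th-pel) offset and
   the plain variance function otherwise. */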
static int get_inter_mbpred_error(MACROBLOCK *mb, vp8_subpixvariance_fn_t svf, vp8_variance_fn_t vf, unsigned int *sse)
{
    BLOCK *b = &mb->block[0];
    BLOCKD *d = &mb->e_mbd.block[0];
    unsigned char *what = (*(b->base_src) + b->src);
    int what_stride = b->src_stride;
    unsigned char *in_what = *(d->base_pre) + d->pre;
    int in_what_stride = d->pre_stride;
    int xoffset = d->bmi.mv.as_mv.col & 7;
    int yoffset = d->bmi.mv.as_mv.row & 7;

    in_what += (d->bmi.mv.as_mv.row >> 3) * d->pre_stride + (d->bmi.mv.as_mv.col >> 3);

    if (xoffset | yoffset)
    {
        return svf(in_what, in_what_stride, xoffset, yoffset, what, what_stride, sse);
    }
    else
    {
        return vf(what, what_stride, in_what, in_what_stride, sse);
    }
}
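/* C reference version of the 16x16 prediction error: sum of squared
   differences between source and predictor with the DC offset removed
   (pred_error - sum * sum / 256). */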
unsigned int vp8_get16x16pred_error_c(unsigned char *src_ptr, int src_stride,
                                      unsigned char *ref_ptr, int ref_stride)
{
    unsigned pred_error = 0;
    int i, j, sum = 0;

    for (i = 0; i < 16; i++)
    {
        for (j = 0; j < 16; j++)
        {
            int diff = src_ptr[j] - ref_ptr[j];
            sum += diff;
            pred_error += diff * diff;
        }
        src_ptr += src_stride;
        ref_ptr += ref_stride;
    }

    pred_error -= sum * sum / 256;
    return pred_error;
}
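/* C reference version of the 4x4 SSE used during intra 4x4 mode selection. */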
unsigned int vp8_get4x4sse_cs_c(unsigned char *src_ptr, int source_stride,
                                unsigned char *ref_ptr, int recon_stride)
{
    int distortion = 0;
    int r, c;
    for (r = 0; r < 4; r++)
    {
        for (c = 0; c < 4; c++)
        {
            int diff = src_ptr[c] - ref_ptr[c];
            distortion += diff * diff;
        }
        src_ptr += source_stride;
        ref_ptr += recon_stride;
    }

    return distortion;
}
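/* Distortion for one 4x4 intra prediction: SSE between the source block and
   the predictor buffer, which is laid out with a fixed stride of 16. */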
static int get_prediction_error(BLOCK *be, BLOCKD *b, const vp8_variance_rtcd_vtable_t *rtcd)
{
    unsigned char *sptr = (*(be->base_src) + be->src);
    unsigned char *dptr = b->predictor;
    return VARIANCE_INVOKE(rtcd, get4x4sse_cs)(sptr, be->src_stride, dptr, 16, 0x7fffffff);
}
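/* Evaluates each 4x4 intra mode up to B_HE_PRED for one block, keeps the mode
   with the lowest rate-distortion estimate, then encodes the block with that
   mode so its reconstruction is available to neighbouring blocks. */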
static int pick_intra4x4block(
    const VP8_ENCODER_RTCD *rtcd,
    MACROBLOCK *x,
    BLOCK *be,
    BLOCKD *b,
    B_PREDICTION_MODE *best_mode,
    B_PREDICTION_MODE above,
    B_PREDICTION_MODE left,
    ENTROPY_CONTEXT *a,
    ENTROPY_CONTEXT *l,
    int *bestrate,
    int *bestdistortion)
{
    B_PREDICTION_MODE mode;
    int best_rd = INT_MAX; // 1<<30
    int rate, distortion;
    unsigned int *mode_costs;

    if (x->e_mbd.frame_type == KEY_FRAME)
        mode_costs = x->bmode_costs[above][left];
    else
        mode_costs = x->inter_bmode_costs;

    for (mode = B_DC_PRED; mode <= B_HE_PRED /*B_HU_PRED*/; mode++)
    {
        int this_rd;
        rate = mode_costs[mode];
        vp8_predict_intra4x4(b, mode, b->predictor);
        distortion = get_prediction_error(be, b, &rtcd->variance);
        this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate, distortion);

        if (this_rd < best_rd)
        {
            *bestrate = rate;
            *bestdistortion = distortion;
            best_rd = this_rd;
            *best_mode = mode;
        }
    }

    b->bmi.mode = (B_PREDICTION_MODE)(*best_mode);
    vp8_encode_intra4x4block(rtcd, x, be, b, b->bmi.mode);
    return best_rd;
}
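/* Picks a 4x4 intra mode for each of the 16 luma subblocks, accumulating rate
   and distortion, and aborts early once the accumulated distortion exceeds
   the best-so-far value passed in through best_dist. */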
int vp8_pick_intra4x4mby_modes(const VP8_ENCODER_RTCD *rtcd, MACROBLOCK *mb, int *Rate, int *best_dist)
{
    MACROBLOCKD *const xd = &mb->e_mbd;
    int i;
    TEMP_CONTEXT t;
    int cost = mb->mbmode_cost[xd->frame_type][B_PRED];
    int error = RD_ESTIMATE(mb->rdmult, mb->rddiv, cost, 0); // Rd estimate for the cost of the block prediction mode
    int distortion = 0;

    vp8_intra_prediction_down_copy(xd);
    vp8_setup_temp_context(&t, xd->above_context[Y1CONTEXT], xd->left_context[Y1CONTEXT], 4);

    for (i = 0; i < 16; i++)
    {
        MODE_INFO *const mic = xd->mode_info_context;
        const int mis = xd->mode_info_stride;
        const B_PREDICTION_MODE A = vp8_above_bmi(mic, i, mis)->mode;
        const B_PREDICTION_MODE L = vp8_left_bmi(mic, i)->mode;
        B_PREDICTION_MODE UNINITIALIZED_IS_SAFE(best_mode);
        int UNINITIALIZED_IS_SAFE(r), UNINITIALIZED_IS_SAFE(d);

        error += pick_intra4x4block(rtcd,
                                    mb, mb->block + i, xd->block + i, &best_mode, A, L,
                                    t.a + vp8_block2above[i],
                                    t.l + vp8_block2left[i], &r, &d);

        cost += r;
        distortion += d;

        mic->bmi[i].mode = xd->block[i].bmi.mode = best_mode;

        // Break out where we have already exceeded the best-so-far value that was passed in
        if (distortion > *best_dist)
            break;
    }

    for (i = 0; i < 16; i++)
        xd->block[i].bmi.mv.as_int = 0;

    *Rate = cost;

    if (i == 16)
        *best_dist = distortion;
    else
        *best_dist = INT_MAX;

    return error;
}
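/* Chooses the chroma (UV) intra mode by measuring, for each position in the
   8x8 chroma planes, the squared error of the DC, V, H and TM predictors
   against the source and keeping the cheapest overall mode. */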
int vp8_pick_intra_mbuv_mode(MACROBLOCK *mb)
{
    MACROBLOCKD *x = &mb->e_mbd;
    unsigned char *uabove_row = x->dst.u_buffer - x->dst.uv_stride;
    unsigned char *vabove_row = x->dst.v_buffer - x->dst.uv_stride;
    unsigned char *usrc_ptr = (mb->block[16].src + *mb->block[16].base_src);
    unsigned char *vsrc_ptr = (mb->block[20].src + *mb->block[20].base_src);
    int uvsrc_stride = mb->block[16].src_stride;
    unsigned char uleft_col[8];
    unsigned char vleft_col[8];
    unsigned char utop_left = uabove_row[-1];
    unsigned char vtop_left = vabove_row[-1];
    int i, j, shift, diff;
    int expected_udc, expected_vdc;
    int Uaverage = 0, Vaverage = 0;
    int pred_error[4] = {0, 0, 0, 0}, best_error = INT_MAX;
    MB_PREDICTION_MODE UNINITIALIZED_IS_SAFE(best_mode);

    for (i = 0; i < 8; i++)
    {
        uleft_col[i] = x->dst.u_buffer[i * x->dst.uv_stride - 1];
        vleft_col[i] = x->dst.v_buffer[i * x->dst.uv_stride - 1];
    }

    if (!x->up_available && !x->left_available)
    {
        expected_udc = 128;
        expected_vdc = 128;
    }
    else
    {
        shift = 2;
        if (x->up_available)
        {
            for (i = 0; i < 8; i++)
            {
                Uaverage += uabove_row[i];
                Vaverage += vabove_row[i];
            }
            shift++;
        }
        if (x->left_available)
        {
            for (i = 0; i < 8; i++)
            {
                Uaverage += uleft_col[i];
                Vaverage += vleft_col[i];
            }
            shift++;
        }
        expected_udc = (Uaverage + (1 << (shift - 1))) >> shift;
        expected_vdc = (Vaverage + (1 << (shift - 1))) >> shift;
    }

    for (i = 0; i < 8; i++)
    {
        for (j = 0; j < 8; j++)
        {
            int predu = uleft_col[i] + uabove_row[j] - utop_left;
            int predv = vleft_col[i] + vabove_row[j] - vtop_left;
            int u_p = usrc_ptr[j];
            int v_p = vsrc_ptr[j];

            if (predu > 255) predu = 255;
            if (predu < 0)   predu = 0;
            if (predv > 255) predv = 255;
            if (predv < 0)   predv = 0;

            diff = u_p - expected_udc;
            pred_error[DC_PRED] += diff * diff;
            diff = v_p - expected_vdc;
            pred_error[DC_PRED] += diff * diff;

            diff = u_p - uabove_row[j];
            pred_error[V_PRED] += diff * diff;
            diff = v_p - vabove_row[j];
            pred_error[V_PRED] += diff * diff;

            diff = u_p - uleft_col[i];
            pred_error[H_PRED] += diff * diff;
            diff = v_p - vleft_col[i];
            pred_error[H_PRED] += diff * diff;

            diff = u_p - predu;
            pred_error[TM_PRED] += diff * diff;
            diff = v_p - predv;
            pred_error[TM_PRED] += diff * diff;
        }

        usrc_ptr += uvsrc_stride;
        vsrc_ptr += uvsrc_stride;
        if (i == 3)
        {
            usrc_ptr = (mb->block[18].src + *mb->block[18].base_src);
            vsrc_ptr = (mb->block[22].src + *mb->block[22].base_src);
        }
    }

    for (i = DC_PRED; i <= TM_PRED; i++)
    {
        if (best_error > pred_error[i])
        {
            best_error = pred_error[i];
            best_mode = (MB_PREDICTION_MODE)i;
        }
    }

    mb->e_mbd.mbmi.uv_mode = best_mode;
    return best_error;
}
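/* Real-time (non-RD) inter mode selection for one macroblock: walks the mode
   list in vp8_mode_order over the allowed reference frames, running a motion
   search for NEWMV and cheap prediction-error estimates for the other modes,
   and writes the winning mode, reference frame and motion vectors back into
   the macroblock structures. */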
int vp8_pick_inter_mode(VP8_COMP *cpi, MACROBLOCK *x, int recon_yoffset, int recon_uvoffset, int *returnrate, int *returndistortion, int *returnintra)
{
    BLOCK *b = &x->block[0];
    BLOCKD *d = &x->e_mbd.block[0];
    MACROBLOCKD *xd = &x->e_mbd;
    B_MODE_INFO best_bmodes[16];
    MB_MODE_INFO best_mbmode;
    MV best_ref_mv1;
    MV mode_mv[MB_MODE_COUNT];
    MB_PREDICTION_MODE this_mode;
    int num00;
    int i;
    int mdcounts[4];
    int best_rd = INT_MAX; // 1 << 30;
    int best_intra_rd = INT_MAX;
    int mode_index;
    int ref_frame_cost[MAX_REF_FRAMES];
    int rate, rate2;
    int distortion2;
    int bestsme;
    int thissme, step_param, further_steps, n;
    //int all_rds[MAX_MODES]; // Experimental debug code.
    int best_mode_index = 0;
    int sse = INT_MAX;

    MV nearest_mv[4];
    MV near_mv[4];
    MV best_ref_mv[4];
    int MDCounts[4][4];
    unsigned char *y_buffer[4];
    unsigned char *u_buffer[4];
    unsigned char *v_buffer[4];

    int skip_mode[4] = {0, 0, 0, 0};

    vpx_memset(mode_mv, 0, sizeof(mode_mv));
    vpx_memset(nearest_mv, 0, sizeof(nearest_mv));
    vpx_memset(near_mv, 0, sizeof(near_mv));
    vpx_memset(&best_mbmode, 0, sizeof(best_mbmode));
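    // For each reference frame the encoder may use, collect the nearest/near
    // MV candidates, the MV reference counts and the reconstruction buffers;
    // references that are not available are flagged in skip_mode[].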
    // Set up all the reference-frame dependent pointers.
    if (cpi->ref_frame_flags & VP8_LAST_FLAG)
    {
        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[LAST_FRAME], &near_mv[LAST_FRAME],
                          &best_ref_mv[LAST_FRAME], MDCounts[LAST_FRAME], LAST_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[LAST_FRAME] = cpi->common.last_frame.y_buffer + recon_yoffset;
        u_buffer[LAST_FRAME] = cpi->common.last_frame.u_buffer + recon_uvoffset;
        v_buffer[LAST_FRAME] = cpi->common.last_frame.v_buffer + recon_uvoffset;
    }
    else
        skip_mode[LAST_FRAME] = 1;

    if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
    {
        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[GOLDEN_FRAME], &near_mv[GOLDEN_FRAME],
                          &best_ref_mv[GOLDEN_FRAME], MDCounts[GOLDEN_FRAME], GOLDEN_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[GOLDEN_FRAME] = cpi->common.golden_frame.y_buffer + recon_yoffset;
        u_buffer[GOLDEN_FRAME] = cpi->common.golden_frame.u_buffer + recon_uvoffset;
        v_buffer[GOLDEN_FRAME] = cpi->common.golden_frame.v_buffer + recon_uvoffset;
    }
    else
        skip_mode[GOLDEN_FRAME] = 1;

    if (cpi->ref_frame_flags & VP8_ALT_FLAG && cpi->source_alt_ref_active)
    {
        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[ALTREF_FRAME], &near_mv[ALTREF_FRAME],
                          &best_ref_mv[ALTREF_FRAME], MDCounts[ALTREF_FRAME], ALTREF_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[ALTREF_FRAME] = cpi->common.alt_ref_frame.y_buffer + recon_yoffset;
        u_buffer[ALTREF_FRAME] = cpi->common.alt_ref_frame.u_buffer + recon_uvoffset;
        v_buffer[ALTREF_FRAME] = cpi->common.alt_ref_frame.v_buffer + recon_uvoffset;
    }
    else
        skip_mode[ALTREF_FRAME] = 1;
    cpi->mbs_tested_so_far++; // Count of the number of MBs tested so far this frame

    *returnintra = best_intra_rd;

    ref_frame_cost[INTRA_FRAME]  = vp8_cost_zero(cpi->prob_intra_coded);

    // Special case treatment when GF and ARF are not sensible options for reference
    if (cpi->ref_frame_flags == VP8_LAST_FLAG)
    {
        ref_frame_cost[LAST_FRAME]   = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_zero(255);
        ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(255)
                                       + vp8_cost_zero(128);
        ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(255)
                                       + vp8_cost_one(128);
    }
    else
    {
        ref_frame_cost[LAST_FRAME]   = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_zero(cpi->prob_last_coded);
        ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(cpi->prob_last_coded)
                                       + vp8_cost_zero(cpi->prob_gf_coded);
        ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(cpi->prob_last_coded)
                                       + vp8_cost_one(cpi->prob_gf_coded);
    }

    x->e_mbd.mbmi.ref_frame = INTRA_FRAME;
    // if we encode a new mv this is important
    // find the best new motion vector
    for (mode_index = 0; mode_index < MAX_MODES; mode_index++)
    {
        int frame_cost;
        int this_rd = INT_MAX;

        if (best_rd <= cpi->rd_threshes[mode_index])
            continue;

        x->e_mbd.mbmi.ref_frame = vp8_ref_frame_order[mode_index];

        if (skip_mode[x->e_mbd.mbmi.ref_frame])
            continue;

        // Check to see if the testing frequency for this mode is at its max.
        // If so then prevent it from being tested and increase the threshold for its testing.
        if (cpi->mode_test_hit_counts[mode_index] && (cpi->mode_check_freq[mode_index] > 1))
        {
            //if ( (cpi->mbs_tested_so_far / cpi->mode_test_hit_counts[mode_index]) <= cpi->mode_check_freq[mode_index] )
            if (cpi->mbs_tested_so_far <= (cpi->mode_check_freq[mode_index] * cpi->mode_test_hit_counts[mode_index]))
            {
                // Increase the threshold for coding this mode to make it less likely to be chosen.
                cpi->rd_thresh_mult[mode_index] += 4;

                if (cpi->rd_thresh_mult[mode_index] > MAX_THRESHMULT)
                    cpi->rd_thresh_mult[mode_index] = MAX_THRESHMULT;

                cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];

                continue;
            }
        }

        // We are now going to test this mode, so increment the counter for the number of times it has been tested.
        cpi->mode_test_hit_counts[mode_index] ++;
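        // Set up the rate/distortion accumulators and the prediction buffers
        // for the candidate mode about to be evaluated.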
        rate2 = 0;
        distortion2 = 0;

        this_mode = vp8_mode_order[mode_index];

        // Experimental debug code.
        //all_rds[mode_index] = -1;

        x->e_mbd.mbmi.mode = this_mode;
        x->e_mbd.mbmi.uv_mode = DC_PRED;

        // Work out the cost associated with selecting the reference frame
        frame_cost = ref_frame_cost[x->e_mbd.mbmi.ref_frame];

        // everything but intra
        if (x->e_mbd.mbmi.ref_frame)
        {
            x->e_mbd.pre.y_buffer = y_buffer[x->e_mbd.mbmi.ref_frame];
            x->e_mbd.pre.u_buffer = u_buffer[x->e_mbd.mbmi.ref_frame];
            x->e_mbd.pre.v_buffer = v_buffer[x->e_mbd.mbmi.ref_frame];
            mode_mv[NEARESTMV] = nearest_mv[x->e_mbd.mbmi.ref_frame];
            mode_mv[NEARMV] = near_mv[x->e_mbd.mbmi.ref_frame];
            best_ref_mv1 = best_ref_mv[x->e_mbd.mbmi.ref_frame];
            memcpy(mdcounts, MDCounts[x->e_mbd.mbmi.ref_frame], sizeof(mdcounts));
        }

        // Only consider ZEROMV/ALTREF_FRAME for alt ref frame.
        if (cpi->is_src_frame_alt_ref)
        {
            if (this_mode != ZEROMV || x->e_mbd.mbmi.ref_frame != ALTREF_FRAME)
                continue;
        }
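        // Intra candidates (B_PRED and the 16x16 modes) measure distortion
        // directly against the source; inter candidates are handled further
        // down, with a motion search for NEWMV.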
        distortion2 = *returndistortion; // Best so far passed in as breakout value to vp8_pick_intra4x4mby_modes
        vp8_pick_intra4x4mby_modes(IF_RTCD(&cpi->rtcd), x, &rate, &distortion2);

        distortion2 = VARIANCE_INVOKE(&cpi->rtcd.variance, get16x16prederror)(x->src.y_buffer, x->src.y_stride, x->e_mbd.predictor, 16, 0x7fffffff);

        if (distortion2 == INT_MAX)
        {
            this_rd = INT_MAX;
        }
        else
        {
            this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

            if (this_rd < best_intra_rd)
            {
                best_intra_rd = this_rd;
                *returnintra = best_intra_rd;
            }
        }

        // Split MV modes currently not supported when RD is not enabled.

        vp8_build_intra_predictors_mby_ptr(&x->e_mbd);
        distortion2 = VARIANCE_INVOKE(&cpi->rtcd.variance, get16x16prederror)(x->src.y_buffer, x->src.y_stride, x->e_mbd.predictor, 16, 0x7fffffff);
        rate2 += x->mbmode_cost[x->e_mbd.frame_type][x->e_mbd.mbmi.mode];
        this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

        if (this_rd < best_intra_rd)
        {
            best_intra_rd = this_rd;
            *returnintra = best_intra_rd;
        }
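        // NEWMV: integer-pel diamond (or hex) search followed by the
        // configured fractional-pel refinement.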
        int sadpb = x->sadperbit16;

        // Further step/diamond searches as necessary
        step_param = cpi->sf.first_step + ((cpi->Speed > 5) ? 1 : 0);
        further_steps = (cpi->sf.max_step_search_steps - 1) - step_param;
        step_param = cpi->sf.first_step + 2;

        n = 0;

        // Initial step Search
        bestsme = vp8_diamond_search_sad(x, b, d, &best_ref_mv1, &d->bmi.mv.as_mv, step_param, x->errorperbit, &num00, &cpi->fn_ptr, cpi->mb.mvsadcost, cpi->mb.mvcost);
        mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
        mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;

        // Further step searches
        while (n < further_steps)
        {
            n++;
            thissme = vp8_diamond_search_sad(x, b, d, &best_ref_mv1, &d->bmi.mv.as_mv, step_param + n, x->errorperbit, &num00, &cpi->fn_ptr, cpi->mb.mvsadcost, x->mvcost);

            if (thissme < bestsme)
            {
                bestsme = thissme;
                mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
                mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
            }
            else
            {
                d->bmi.mv.as_mv.row = mode_mv[NEWMV].row;
                d->bmi.mv.as_mv.col = mode_mv[NEWMV].col;
            }
        }
        if (cpi->sf.search_method == HEX)
        {
            bestsme = vp8_hex_search(x, b, d, &best_ref_mv1, &d->bmi.mv.as_mv, step_param, sadpb/*x->errorperbit*/, &num00, cpi->fn_ptr.vf, cpi->fn_ptr.sdf, x->mvsadcost, x->mvcost);
            mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
            mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
        }
        else
        {
            bestsme = cpi->diamond_search_sad(x, b, d, &best_ref_mv1, &d->bmi.mv.as_mv, step_param, sadpb / 2/*x->errorperbit*/, &num00, &cpi->fn_ptr, x->mvsadcost, x->mvcost); //sadpb < 9
            mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
            mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;

            // Further step/diamond searches as necessary
            //further_steps = (cpi->sf.max_step_search_steps - 1) - step_param;

            while (n < further_steps)
            {
                n++;
                thissme = cpi->diamond_search_sad(x, b, d, &best_ref_mv1, &d->bmi.mv.as_mv, step_param + n, sadpb / 4/*x->errorperbit*/, &num00, &cpi->fn_ptr, x->mvsadcost, x->mvcost); //sadpb = 9

                if (thissme < bestsme)
                {
                    bestsme = thissme;
                    mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
                    mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
                }
                else
                {
                    d->bmi.mv.as_mv.row = mode_mv[NEWMV].row;
                    d->bmi.mv.as_mv.col = mode_mv[NEWMV].col;
                }
            }
        }

        if (bestsme < INT_MAX)
            cpi->find_fractional_mv_step(x, b, d, &d->bmi.mv.as_mv, &best_ref_mv1, x->errorperbit, cpi->fn_ptr.svf, cpi->fn_ptr.vf, cpi->mb.mvcost);

        mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
        mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
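        // Account for the bits needed to signal the refined motion vector.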
        rate2 += vp8_mv_bit_cost(&mode_mv[NEWMV], &best_ref_mv1, cpi->mb.mvcost, 128);

        if (mode_mv[this_mode].row == 0 && mode_mv[this_mode].col == 0)
            continue;

        // Trap vectors that reach beyond the UMV borders.
        // Note that ALL NEWMV, NEARESTMV, NEARMV and ZEROMV code drops through to this point
        // because of the lack of break statements in the previous cases.
        if (((mode_mv[this_mode].row >> 3) < x->mv_row_min) || ((mode_mv[this_mode].row >> 3) > x->mv_row_max) ||
            ((mode_mv[this_mode].col >> 3) < x->mv_col_min) || ((mode_mv[this_mode].col >> 3) > x->mv_col_max))
            continue;

        rate2 += vp8_cost_mv_ref(this_mode, mdcounts);
        x->e_mbd.mbmi.mode = this_mode;
        x->e_mbd.mbmi.mv.as_mv = mode_mv[this_mode];
        x->e_mbd.block[0].bmi.mode = this_mode;
        x->e_mbd.block[0].bmi.mv.as_int = x->e_mbd.mbmi.mv.as_int;

        distortion2 = get_inter_mbpred_error(x, cpi->fn_ptr.svf, cpi->fn_ptr.vf, (unsigned int *)(&sse));

        this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

        if (cpi->active_map_enabled && x->active_ptr[0] == 0)
        {
            x->skip = 1;
        }
        else if (sse < x->encode_breakout)
        {
            // Check u and v to make sure skip is ok
            int sse2 = 0;

            sse2 = VP8_UVSSE(x, IF_RTCD(&cpi->rtcd.variance));

            if (sse2 * 2 < x->encode_breakout)
                x->skip = 1;
        }
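        // If this mode beats the best so far (or the skip breakout fired),
        // record it and relax its RD testing threshold; otherwise make the
        // mode a little more expensive to test next time.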
        // Experimental debug code.
        //all_rds[mode_index] = this_rd;

        if (this_rd < best_rd || x->skip)
        {
            // Note index of best mode
            best_mode_index = mode_index;

            *returnrate = rate2;
            *returndistortion = distortion2;
            best_rd = this_rd;
            vpx_memcpy(&best_mbmode, &x->e_mbd.mbmi, sizeof(MB_MODE_INFO));

            if (this_mode == B_PRED || this_mode == SPLITMV)
                for (i = 0; i < 16; i++)
                {
                    vpx_memcpy(&best_bmodes[i], &x->e_mbd.block[i].bmi, sizeof(B_MODE_INFO));
                }
            else
                best_bmodes[0].mv = x->e_mbd.block[0].bmi.mv;

            // Testing this mode gave rise to an improvement in best error score. Lower the threshold a bit for next time.
            cpi->rd_thresh_mult[mode_index] = (cpi->rd_thresh_mult[mode_index] >= (MIN_THRESHMULT + 2)) ? cpi->rd_thresh_mult[mode_index] - 2 : MIN_THRESHMULT;
            cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];
        }
        // If the mode did not help improve the best error case then raise the threshold for testing that mode next time around.
        else
        {
            cpi->rd_thresh_mult[mode_index] += 4;

            if (cpi->rd_thresh_mult[mode_index] > MAX_THRESHMULT)
                cpi->rd_thresh_mult[mode_index] = MAX_THRESHMULT;

            cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];
        }
    } // end of mode loop
    // Reduce the activation RD thresholds for the best choice mode
    if ((cpi->rd_baseline_thresh[best_mode_index] > 0) && (cpi->rd_baseline_thresh[best_mode_index] < (INT_MAX >> 2)))
    {
        int best_adjustment = (cpi->rd_thresh_mult[best_mode_index] >> 3);

        cpi->rd_thresh_mult[best_mode_index] = (cpi->rd_thresh_mult[best_mode_index] >= (MIN_THRESHMULT + best_adjustment)) ? cpi->rd_thresh_mult[best_mode_index] - best_adjustment : MIN_THRESHMULT;
        cpi->rd_threshes[best_mode_index] = (cpi->rd_baseline_thresh[best_mode_index] >> 7) * cpi->rd_thresh_mult[best_mode_index];
    }

    // Keep a record of best mode index for use in next loop
    cpi->last_best_mode_index = best_mode_index;

    if (best_mbmode.mode <= B_PRED)
    {
        x->e_mbd.mbmi.ref_frame = INTRA_FRAME;
        vp8_pick_intra_mbuv_mode(x);
        best_mbmode.uv_mode = x->e_mbd.mbmi.uv_mode;
    }

    {
        int this_rdbin = (*returndistortion >> 7);

        if (this_rdbin >= 1024)
            this_rdbin = 1023;

        cpi->error_bins[this_rdbin] ++;
    }
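    // When this source frame is the one the alt-ref was constructed from,
    // force ZEROMV on ALTREF_FRAME so the macroblock is effectively coded as
    // a direct copy of the alt-ref buffer.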
    if (cpi->is_src_frame_alt_ref && (best_mbmode.mode != ZEROMV || best_mbmode.ref_frame != ALTREF_FRAME))
    {
        best_mbmode.mode = ZEROMV;
        best_mbmode.ref_frame = ALTREF_FRAME;
        best_mbmode.mv.as_int = 0;
        best_mbmode.uv_mode = 0;
        best_mbmode.mb_skip_coeff = (cpi->common.mb_no_coeff_skip) ? 1 : 0;
        best_mbmode.partitioning = 0;
        best_mbmode.dc_diff = 0;

        vpx_memcpy(&x->e_mbd.mbmi, &best_mbmode, sizeof(MB_MODE_INFO));

        for (i = 0; i < 16; i++)
        {
            vpx_memset(&x->e_mbd.block[i].bmi, 0, sizeof(B_MODE_INFO));
        }

        x->e_mbd.mbmi.mv.as_int = 0;
    }
    else
    {
        // Copy the chosen macroblock mode info back into the macroblock.
        vpx_memcpy(&x->e_mbd.mbmi, &best_mbmode, sizeof(MB_MODE_INFO));

        if (x->e_mbd.mbmi.mode == B_PRED || x->e_mbd.mbmi.mode == SPLITMV)
            for (i = 0; i < 16; i++)
            {
                vpx_memcpy(&x->e_mbd.block[i].bmi, &best_bmodes[i], sizeof(B_MODE_INFO));
            }
        else
        {
            vp8_set_mbmode_and_mvs(x, x->e_mbd.mbmi.mode, &best_bmodes[0].mv.as_mv);
        }

        x->e_mbd.mbmi.mv.as_mv = x->e_mbd.block[15].bmi.mv.as_mv;
    }