2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
12 #include "vpx_config.h"
13 #include "./vpx_scale_rtcd.h"
14 #include "vp8/common/onyxc_int.h"
15 #include "vp8/common/blockd.h"
17 #include "vp8/common/systemdependent.h"
19 #include "vp8/common/alloccommon.h"
21 #include "firstpass.h"
22 #include "vpx/internal/vpx_psnr.h"
23 #include "vpx_scale/vpx_scale.h"
24 #include "vp8/common/extend.h"
26 #include "vp8/common/quant_common.h"
27 #include "segmentation.h"
29 #include "vp8/common/postproc.h"
31 #include "vpx_mem/vpx_mem.h"
32 #include "vp8/common/swapyv12buffer.h"
33 #include "vp8/common/threading.h"
34 #include "vpx_ports/vpx_timer.h"
36 #include "vpx_ports/arm.h"
38 #if CONFIG_MULTI_RES_ENCODING
39 #include "mr_dissim.h"
41 #include "encodeframe.h"
47 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
48 extern int vp8_update_coef_context(VP8_COMP *cpi);
49 extern void vp8_update_coef_probs(VP8_COMP *cpi);
52 extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
53 extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
54 extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
56 extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
57 extern void print_parms(VP8_CONFIG *ocf, char *filenam);
58 extern unsigned int vp8_get_processor_freq();
59 extern void print_tree_update_probs();
60 extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
61 extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
63 int vp8_estimate_entropy_savings(VP8_COMP *cpi);
65 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
67 extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
69 static void set_default_lf_deltas(VP8_COMP *cpi);
71 extern const int vp8_gf_interval_table[101];
73 #if CONFIG_INTERNAL_STATS
76 extern double vp8_calc_ssim
78 YV12_BUFFER_CONFIG *source,
79 YV12_BUFFER_CONFIG *dest,
85 extern double vp8_calc_ssimg
87 YV12_BUFFER_CONFIG *source,
88 YV12_BUFFER_CONFIG *dest,
109 extern int skip_true_count;
110 extern int skip_false_count;
114 #ifdef VP8_ENTROPY_STATS
115 extern int intra_mode_stats[10][10][10];
119 unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
120 unsigned int tot_pm = 0;
121 unsigned int cnt_pm = 0;
122 unsigned int tot_ef = 0;
123 unsigned int cnt_ef = 0;
127 extern unsigned __int64 Sectionbits[50];
128 extern int y_modes[5] ;
129 extern int uv_modes[4] ;
130 extern int b_modes[10] ;
132 extern int inter_y_modes[10] ;
133 extern int inter_uv_modes[4] ;
134 extern unsigned int inter_b_modes[15];
137 extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
139 extern const int qrounding_factors[129];
140 extern const int qzbin_factors[129];
141 extern void vp8cx_init_quantizer(VP8_COMP *cpi);
142 extern const int vp8cx_base_skip_false_prob[128];
144 /* Tables relating active max Q to active min Q */
145 static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
147 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
148 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
149 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
150 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
151 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
152 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
153 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
154 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
156 static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
158 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
159 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
160 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
161 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
162 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
163 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
164 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
165 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
167 static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
169 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
170 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
171 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
172 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
173 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
174 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
175 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
176 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
178 static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
180 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
181 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
182 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
183 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
184 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
185 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
186 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
187 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
189 static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
191 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
192 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
193 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
194 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
195 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
196 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
197 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
198 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
200 static const unsigned char inter_minq[QINDEX_RANGE] =
202 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
203 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
204 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
205 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
206 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
207 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
208 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
209 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
212 #ifdef PACKET_TESTING
213 extern FILE *vpxlogc;
216 static void save_layer_context(VP8_COMP *cpi)
218 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];
220 /* Save layer dependent coding state */
221 lc->target_bandwidth = cpi->target_bandwidth;
222 lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
223 lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
224 lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
225 lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
226 lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
227 lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
228 lc->buffer_level = cpi->buffer_level;
229 lc->bits_off_target = cpi->bits_off_target;
230 lc->total_actual_bits = cpi->total_actual_bits;
231 lc->worst_quality = cpi->worst_quality;
232 lc->active_worst_quality = cpi->active_worst_quality;
233 lc->best_quality = cpi->best_quality;
234 lc->active_best_quality = cpi->active_best_quality;
235 lc->ni_av_qi = cpi->ni_av_qi;
236 lc->ni_tot_qi = cpi->ni_tot_qi;
237 lc->ni_frames = cpi->ni_frames;
238 lc->avg_frame_qindex = cpi->avg_frame_qindex;
239 lc->rate_correction_factor = cpi->rate_correction_factor;
240 lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
241 lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
242 lc->zbin_over_quant = cpi->mb.zbin_over_quant;
243 lc->inter_frame_target = cpi->inter_frame_target;
244 lc->total_byte_count = cpi->total_byte_count;
245 lc->filter_level = cpi->common.filter_level;
247 lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
249 memcpy (lc->count_mb_ref_frame_usage,
250 cpi->mb.count_mb_ref_frame_usage,
251 sizeof(cpi->mb.count_mb_ref_frame_usage));
254 static void restore_layer_context(VP8_COMP *cpi, const int layer)
256 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
258 /* Restore layer dependent coding state */
259 cpi->current_layer = layer;
260 cpi->target_bandwidth = lc->target_bandwidth;
261 cpi->oxcf.target_bandwidth = lc->target_bandwidth;
262 cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
263 cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
264 cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
265 cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
266 cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
267 cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
268 cpi->buffer_level = lc->buffer_level;
269 cpi->bits_off_target = lc->bits_off_target;
270 cpi->total_actual_bits = lc->total_actual_bits;
271 cpi->active_worst_quality = lc->active_worst_quality;
272 cpi->active_best_quality = lc->active_best_quality;
273 cpi->ni_av_qi = lc->ni_av_qi;
274 cpi->ni_tot_qi = lc->ni_tot_qi;
275 cpi->ni_frames = lc->ni_frames;
276 cpi->avg_frame_qindex = lc->avg_frame_qindex;
277 cpi->rate_correction_factor = lc->rate_correction_factor;
278 cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
279 cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
280 cpi->mb.zbin_over_quant = lc->zbin_over_quant;
281 cpi->inter_frame_target = lc->inter_frame_target;
282 cpi->total_byte_count = lc->total_byte_count;
283 cpi->common.filter_level = lc->filter_level;
285 cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
287 memcpy (cpi->mb.count_mb_ref_frame_usage,
288 lc->count_mb_ref_frame_usage,
289 sizeof(cpi->mb.count_mb_ref_frame_usage));
/* Scale |val| by the ratio num/denom, widening to 64 bits first so the
 * intermediate product cannot overflow for any pair of int inputs.
 * Caller is responsible for ensuring denom != 0.
 */
static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}
301 static void init_temporal_layer_context(VP8_COMP *cpi,
304 double prev_layer_framerate)
306 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
308 lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
309 lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
311 lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
312 lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
313 lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
315 lc->starting_buffer_level =
316 rescale((int)(oxcf->starting_buffer_level),
317 lc->target_bandwidth, 1000);
319 if (oxcf->optimal_buffer_level == 0)
320 lc->optimal_buffer_level = lc->target_bandwidth / 8;
322 lc->optimal_buffer_level =
323 rescale((int)(oxcf->optimal_buffer_level),
324 lc->target_bandwidth, 1000);
326 if (oxcf->maximum_buffer_size == 0)
327 lc->maximum_buffer_size = lc->target_bandwidth / 8;
329 lc->maximum_buffer_size =
330 rescale((int)(oxcf->maximum_buffer_size),
331 lc->target_bandwidth, 1000);
333 /* Work out the average size of a frame within this layer */
335 lc->avg_frame_size_for_layer =
336 (int)((cpi->oxcf.target_bitrate[layer] -
337 cpi->oxcf.target_bitrate[layer-1]) * 1000 /
338 (lc->framerate - prev_layer_framerate));
340 lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
341 lc->active_best_quality = cpi->oxcf.best_allowed_q;
342 lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
344 lc->buffer_level = lc->starting_buffer_level;
345 lc->bits_off_target = lc->starting_buffer_level;
347 lc->total_actual_bits = 0;
351 lc->rate_correction_factor = 1.0;
352 lc->key_frame_rate_correction_factor = 1.0;
353 lc->gf_rate_correction_factor = 1.0;
354 lc->inter_frame_target = 0;
357 // Upon a run-time change in temporal layers, reset the layer context parameters
358 // for any "new" layers. For "existing" layers, let them inherit the parameters
359 // from the previous layer state (at the same layer #). In future we may want
360 // to better map the previous layer state(s) to the "new" ones.
361 static void reset_temporal_layer_change(VP8_COMP *cpi,
363 const int prev_num_layers)
366 double prev_layer_framerate = 0;
367 const int curr_num_layers = cpi->oxcf.number_of_layers;
368 // If the previous state was 1 layer, get current layer context from cpi.
369 // We need this to set the layer context for the new layers below.
370 if (prev_num_layers == 1)
372 cpi->current_layer = 0;
373 save_layer_context(cpi);
375 for (i = 0; i < curr_num_layers; i++)
377 LAYER_CONTEXT *lc = &cpi->layer_context[i];
378 if (i >= prev_num_layers)
380 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
382 // The initial buffer levels are set based on their starting levels.
383 // We could set the buffer levels based on the previous state (normalized
384 // properly by the layer bandwidths) but we would need to keep track of
385 // the previous set of layer bandwidths (i.e., target_bitrate[i])
386 // before the layer change. For now, reset to the starting levels.
387 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
388 cpi->oxcf.target_bitrate[i];
389 lc->bits_off_target = lc->buffer_level;
390 // TDOD(marpan): Should we set the rate_correction_factor and
391 // active_worst/best_quality to values derived from the previous layer
392 // state (to smooth-out quality dips/rate fluctuation at transition)?
394 // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
395 // is not set for 1 layer, and the restore_layer_context/save_context()
396 // are not called in the encoding loop, so we need to call it here to
397 // pass the layer context state to |cpi|.
398 if (curr_num_layers == 1)
400 lc->target_bandwidth = cpi->oxcf.target_bandwidth;
401 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
402 lc->target_bandwidth / 1000;
403 lc->bits_off_target = lc->buffer_level;
404 restore_layer_context(cpi, 0);
406 prev_layer_framerate = cpi->output_framerate /
407 cpi->oxcf.rate_decimator[i];
411 static void setup_features(VP8_COMP *cpi)
413 // If segmentation enabled set the update flags
414 if ( cpi->mb.e_mbd.segmentation_enabled )
416 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
417 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
421 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
422 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
425 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
426 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
427 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
428 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
429 vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
430 vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
432 set_default_lf_deltas(cpi);
437 static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
440 static void dealloc_compressor_data(VP8_COMP *cpi)
442 vpx_free(cpi->tplist);
445 /* Delete last frame MV storage buffers */
449 vpx_free(cpi->lf_ref_frame_sign_bias);
450 cpi->lf_ref_frame_sign_bias = 0;
452 vpx_free(cpi->lf_ref_frame);
453 cpi->lf_ref_frame = 0;
455 /* Delete sementation map */
456 vpx_free(cpi->segmentation_map);
457 cpi->segmentation_map = 0;
459 vpx_free(cpi->active_map);
462 vp8_de_alloc_frame_buffers(&cpi->common);
464 vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
465 vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
466 dealloc_raw_frame_buffers(cpi);
471 /* Structure used to monitor GF usage */
472 vpx_free(cpi->gf_active_flags);
473 cpi->gf_active_flags = 0;
475 /* Activity mask based per mb zbin adjustments */
476 vpx_free(cpi->mb_activity_map);
477 cpi->mb_activity_map = 0;
479 vpx_free(cpi->mb.pip);
482 #if CONFIG_MULTITHREAD
483 vpx_free(cpi->mt_current_mb_col);
484 cpi->mt_current_mb_col = NULL;
488 static void enable_segmentation(VP8_COMP *cpi)
490 /* Set the appropriate feature bit */
491 cpi->mb.e_mbd.segmentation_enabled = 1;
492 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
493 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
495 static void disable_segmentation(VP8_COMP *cpi)
497 /* Clear the appropriate feature bit */
498 cpi->mb.e_mbd.segmentation_enabled = 0;
501 /* Valid values for a segment are 0 to 3
502 * Segmentation map is arrange as [Rows][Columns]
504 static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
506 /* Copy in the new segmentation map */
507 vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
509 /* Signal that the map should be updated. */
510 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
511 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
514 /* The values given for each segment can be either deltas (from the default
515 * value chosen for the frame) or absolute values.
517 * Valid range for abs values is:
518 * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
519 * Valid range for delta values are:
520 * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
522 * abs_delta = SEGMENT_DELTADATA (deltas)
523 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
526 static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
528 cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
529 vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
533 static void segmentation_test_function(VP8_COMP *cpi)
535 unsigned char *seg_map;
536 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
538 // Create a temporary map for segmentation data.
539 CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
541 // Set the segmentation Map
542 set_segmentation_map(cpi, seg_map);
544 // Activate segmentation.
545 enable_segmentation(cpi);
547 // Set up the quant segment data
548 feature_data[MB_LVL_ALT_Q][0] = 0;
549 feature_data[MB_LVL_ALT_Q][1] = 4;
550 feature_data[MB_LVL_ALT_Q][2] = 0;
551 feature_data[MB_LVL_ALT_Q][3] = 0;
552 // Set up the loop segment data
553 feature_data[MB_LVL_ALT_LF][0] = 0;
554 feature_data[MB_LVL_ALT_LF][1] = 0;
555 feature_data[MB_LVL_ALT_LF][2] = 0;
556 feature_data[MB_LVL_ALT_LF][3] = 0;
558 // Initialise the feature data structure
559 // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
560 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
562 // Delete sementation map
568 /* A simple function to cyclically refresh the background at a lower Q */
569 static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
571 unsigned char *seg_map = cpi->segmentation_map;
572 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
574 int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
575 int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
577 cpi->cyclic_refresh_q = Q / 2;
579 // Set every macroblock to be eligible for update.
580 // For key frame this will reset seg map to 0.
581 vpx_memset(cpi->segmentation_map, 0, mbs_in_frame);
583 if (cpi->common.frame_type != KEY_FRAME)
585 /* Cycle through the macro_block rows */
586 /* MB loop to set local segmentation map */
587 i = cpi->cyclic_refresh_mode_index;
588 assert(i < mbs_in_frame);
591 /* If the MB is as a candidate for clean up then mark it for
592 * possible boost/refresh (segment 1) The segment id may get
593 * reset to 0 later if the MB gets coded anything other than
594 * last frame 0,0 as only (last frame 0,0) MBs are eligable for
595 * refresh : that is to say Mbs likely to be background blocks.
597 if (cpi->cyclic_refresh_map[i] == 0)
602 else if (cpi->cyclic_refresh_map[i] < 0)
603 cpi->cyclic_refresh_map[i]++;
606 if (i == mbs_in_frame)
610 while(block_count && i != cpi->cyclic_refresh_mode_index);
612 cpi->cyclic_refresh_mode_index = i;
615 /* Activate segmentation. */
616 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
617 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
618 enable_segmentation(cpi);
620 /* Set up the quant segment data */
621 feature_data[MB_LVL_ALT_Q][0] = 0;
622 feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
623 feature_data[MB_LVL_ALT_Q][2] = 0;
624 feature_data[MB_LVL_ALT_Q][3] = 0;
626 /* Set up the loop segment data */
627 feature_data[MB_LVL_ALT_LF][0] = 0;
628 feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
629 feature_data[MB_LVL_ALT_LF][2] = 0;
630 feature_data[MB_LVL_ALT_LF][3] = 0;
632 /* Initialise the feature data structure */
633 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
637 static void set_default_lf_deltas(VP8_COMP *cpi)
639 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
640 cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
642 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
643 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
645 /* Test of ref frame deltas */
646 cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
647 cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
648 cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
649 cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
651 cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
653 if(cpi->oxcf.Mode == MODE_REALTIME)
654 cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
656 cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
658 cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
659 cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
/* Convenience macros for mapping speed and mode into a continuous
 * range: GOOD() offsets good-quality speeds, RT() offsets realtime
 * speeds, so one table covers both.
 */
#define GOOD(x) (x+1)
#define RT(x) (x+7)

/* Walk a table of alternating (value, breakpoint) entries: return the
 * last value whose following breakpoint exceeds |speed|. Tables are
 * terminated with an INT_MAX breakpoint so the walk always stops.
 */
static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while (speed >= *map++);
    return res;
}
679 static const int thresh_mult_map_znn[] = {
680 /* map common to zero, nearest, and near */
681 0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
684 static const int thresh_mult_map_vhpred[] = {
685 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
686 RT(7), INT_MAX, INT_MAX
689 static const int thresh_mult_map_bpred[] = {
690 2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
691 RT(6), INT_MAX, INT_MAX
694 static const int thresh_mult_map_tm[] = {
695 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
696 RT(7), INT_MAX, INT_MAX
699 static const int thresh_mult_map_new1[] = {
700 1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
703 static const int thresh_mult_map_new2[] = {
704 1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
708 static const int thresh_mult_map_split1[] = {
709 2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
710 RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
713 static const int thresh_mult_map_split2[] = {
714 5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
715 RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
718 static const int mode_check_freq_map_zn2[] = {
719 /* {zero,nearest}{2,3} */
720 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
723 static const int mode_check_freq_map_vhbpred[] = {
724 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
727 static const int mode_check_freq_map_near2[] = {
728 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
732 static const int mode_check_freq_map_new1[] = {
733 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
736 static const int mode_check_freq_map_new2[] = {
737 0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
741 static const int mode_check_freq_map_split1[] = {
742 0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
745 static const int mode_check_freq_map_split2[] = {
746 0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
749 void vp8_set_speed_features(VP8_COMP *cpi)
751 SPEED_FEATURES *sf = &cpi->sf;
752 int Mode = cpi->compressor_speed;
753 int Speed = cpi->Speed;
755 VP8_COMMON *cm = &cpi->common;
756 int last_improved_quant = sf->improved_quant;
759 /* Initialise default mode frequency sampling variables */
760 for (i = 0; i < MAX_MODES; i ++)
762 cpi->mode_check_freq[i] = 0;
765 cpi->mb.mbs_tested_so_far = 0;
767 /* best quality defaults */
769 sf->search_method = NSTEP;
770 sf->improved_quant = 1;
771 sf->improved_dct = 1;
774 sf->quarter_pixel_search = 1;
775 sf->half_pixel_search = 1;
776 sf->iterative_sub_pixel = 1;
777 sf->optimize_coefficients = 1;
778 sf->use_fastquant_for_pick = 0;
779 sf->no_skip_block4x4_search = 1;
782 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
783 sf->improved_mv_pred = 1;
785 /* default thresholds to 0 */
786 for (i = 0; i < MAX_MODES; i++)
787 sf->thresh_mult[i] = 0;
789 /* Count enabled references */
791 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
793 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
795 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
798 /* Convert speed to continuous range, with clamping */
810 sf->thresh_mult[THR_ZERO1] =
811 sf->thresh_mult[THR_NEAREST1] =
812 sf->thresh_mult[THR_NEAR1] =
813 sf->thresh_mult[THR_DC] = 0; /* always */
815 sf->thresh_mult[THR_ZERO2] =
816 sf->thresh_mult[THR_ZERO3] =
817 sf->thresh_mult[THR_NEAREST2] =
818 sf->thresh_mult[THR_NEAREST3] =
819 sf->thresh_mult[THR_NEAR2] =
820 sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);
822 sf->thresh_mult[THR_V_PRED] =
823 sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
824 sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
825 sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
826 sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
827 sf->thresh_mult[THR_NEW2] =
828 sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
829 sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
830 sf->thresh_mult[THR_SPLIT2] =
831 sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);
833 cpi->mode_check_freq[THR_ZERO1] =
834 cpi->mode_check_freq[THR_NEAREST1] =
835 cpi->mode_check_freq[THR_NEAR1] =
836 cpi->mode_check_freq[THR_TM] =
837 cpi->mode_check_freq[THR_DC] = 0; /* always */
839 cpi->mode_check_freq[THR_ZERO2] =
840 cpi->mode_check_freq[THR_ZERO3] =
841 cpi->mode_check_freq[THR_NEAREST2] =
842 cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
843 mode_check_freq_map_zn2);
845 cpi->mode_check_freq[THR_NEAR2] =
846 cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
847 mode_check_freq_map_near2);
849 cpi->mode_check_freq[THR_V_PRED] =
850 cpi->mode_check_freq[THR_H_PRED] =
851 cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
852 mode_check_freq_map_vhbpred);
853 cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
854 mode_check_freq_map_new1);
855 cpi->mode_check_freq[THR_NEW2] =
856 cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
857 mode_check_freq_map_new2);
858 cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
859 mode_check_freq_map_split1);
860 cpi->mode_check_freq[THR_SPLIT2] =
861 cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
862 mode_check_freq_map_split2);
866 #if !(CONFIG_REALTIME_ONLY)
867 case 0: /* best quality mode */
869 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
875 /* Disable coefficient optimization above speed 0 */
876 sf->optimize_coefficients = 0;
877 sf->use_fastquant_for_pick = 1;
878 sf->no_skip_block4x4_search = 0;
885 sf->improved_quant = 0;
886 sf->improved_dct = 0;
888 /* Only do recode loop on key frames, golden frames and
898 sf->recode_loop = 0; /* recode loop off */
899 sf->RD = 0; /* Turn rd off */
905 sf->auto_filter = 0; /* Faster selection of loop filter */
911 sf->optimize_coefficients = 0;
914 sf->iterative_sub_pixel = 1;
915 sf->search_method = NSTEP;
919 sf->improved_quant = 0;
920 sf->improved_dct = 0;
922 sf->use_fastquant_for_pick = 1;
923 sf->no_skip_block4x4_search = 0;
928 sf->auto_filter = 0; /* Faster selection of loop filter */
938 sf->auto_filter = 0; /* Faster selection of loop filter */
939 sf->search_method = HEX;
940 sf->iterative_sub_pixel = 0;
945 unsigned int sum = 0;
946 unsigned int total_mbs = cm->MBs;
948 unsigned int total_skip;
952 if (cpi->oxcf.encode_breakout > 2000)
953 min = cpi->oxcf.encode_breakout;
957 for (i = 0; i < min; i++)
959 sum += cpi->mb.error_bins[i];
965 /* i starts from 2 to make sure thresh started from 2048 */
966 for (; i < 1024; i++)
968 sum += cpi->mb.error_bins[i];
970 if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
982 sf->thresh_mult[THR_NEW1 ] = thresh;
983 sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
984 sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
989 sf->thresh_mult[THR_NEW2] = thresh << 1;
990 sf->thresh_mult[THR_NEAREST2 ] = thresh;
991 sf->thresh_mult[THR_NEAR2 ] = thresh;
996 sf->thresh_mult[THR_NEW3] = thresh << 1;
997 sf->thresh_mult[THR_NEAREST3 ] = thresh;
998 sf->thresh_mult[THR_NEAR3 ] = thresh;
1001 sf->improved_mv_pred = 0;
1005 sf->quarter_pixel_search = 0;
1007 if(cm->version == 0)
1009 cm->filter_type = NORMAL_LOOPFILTER;
1012 cm->filter_type = SIMPLE_LOOPFILTER;
1016 cm->filter_type = SIMPLE_LOOPFILTER;
1019 /* This has a big hit on quality. Last resort */
1021 sf->half_pixel_search = 0;
1023 vpx_memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
1027 /* Slow quant, dct and trellis not worthwhile for first pass
1028 * so make sure they are always turned off.
1030 if ( cpi->pass == 1 )
1032 sf->improved_quant = 0;
1033 sf->optimize_coefficients = 0;
1034 sf->improved_dct = 0;
1037 if (cpi->sf.search_method == NSTEP)
1039 vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1041 else if (cpi->sf.search_method == DIAMOND)
1043 vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1046 if (cpi->sf.improved_dct)
1048 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1049 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1053 /* No fast FDCT defined for any platform at this time. */
1054 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1055 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1058 cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
1060 if (cpi->sf.improved_quant)
1062 cpi->mb.quantize_b = vp8_regular_quantize_b;
1063 cpi->mb.quantize_b_pair = vp8_regular_quantize_b_pair;
1067 cpi->mb.quantize_b = vp8_fast_quantize_b;
1068 cpi->mb.quantize_b_pair = vp8_fast_quantize_b_pair;
1070 if (cpi->sf.improved_quant != last_improved_quant)
1071 vp8cx_init_quantizer(cpi);
1073 if (cpi->sf.iterative_sub_pixel == 1)
1075 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
1077 else if (cpi->sf.quarter_pixel_search)
1079 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
1081 else if (cpi->sf.half_pixel_search)
1083 cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
1087 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1090 if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
1091 cpi->mb.optimize = 1;
1093 cpi->mb.optimize = 0;
1095 if (cpi->common.full_pixel)
1096 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1099 frames_at_speed[cpi->Speed]++;
1105 static void alloc_raw_frame_buffers(VP8_COMP *cpi)
1107 #if VP8_TEMPORAL_ALT_REF
1108 int width = (cpi->oxcf.Width + 15) & ~15;
1109 int height = (cpi->oxcf.Height + 15) & ~15;
1112 cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
1113 cpi->oxcf.lag_in_frames);
1115 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1116 "Failed to allocate lag buffers");
1118 #if VP8_TEMPORAL_ALT_REF
1120 if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
1121 width, height, VP8BORDERINPIXELS))
1122 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1123 "Failed to allocate altref buffer");
1129 static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
1131 #if VP8_TEMPORAL_ALT_REF
1132 vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
1134 vp8_lookahead_destroy(cpi->lookahead);
1138 static int vp8_alloc_partition_data(VP8_COMP *cpi)
1140 vpx_free(cpi->mb.pip);
1142 cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
1143 (cpi->common.mb_rows + 1),
1144 sizeof(PARTITION_INFO));
1148 cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
/* Allocate (or reallocate, after a size change) all per-frame-size encoder
 * state: reference/working frame buffers, the partition-info array, the
 * token pool, GF-usage monitors, MV-prediction history, segmentation and
 * active maps, and (multithreaded builds) per-row sync state.
 * All failures abort via vpx_internal_error()/CHECK_MEM_ERROR.
 * NOTE(review): sampled excerpt -- brace and #else/#endif lines are missing;
 * the embedded numbers are the original file's line numbers.
 */
1153 void vp8_alloc_compressor_data(VP8_COMP *cpi)
1155 VP8_COMMON *cm = & cpi->common;
1157 int width = cm->Width;
1158 int height = cm->Height;
1160 if (vp8_alloc_frame_buffers(cm, width, height))
1161 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1162 "Failed to allocate frame buffers");
1164 if (vp8_alloc_partition_data(cpi))
1165 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1166 "Failed to allocate partition data");
/* Round width/height up to a multiple of 16 for the scratch buffers. */
1169 if ((width & 0xf) != 0)
1170 width += 16 - (width & 0xf);
1172 if ((height & 0xf) != 0)
1173 height += 16 - (height & 0xf);
/* Scratch frame used while picking the loop-filter level. */
1176 if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
1177 width, height, VP8BORDERINPIXELS))
1178 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1179 "Failed to allocate last frame buffer");
1181 if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
1182 width, height, VP8BORDERINPIXELS))
1183 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1184 "Failed to allocate scaled source buffer");
/* Token pool: 24 tokens * 16 coefficients per MB; the on-the-fly
 * bitpacking build only buffers one MB per thread (8 assumed). */
1189 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
1190 unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
1192 unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
1194 CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
1197 /* Data used for real time vc mode to see if gf needs refreshing */
1198 cpi->zeromv_count = 0;
1201 /* Structures used to monitor GF usage */
1202 vpx_free(cpi->gf_active_flags);
1203 CHECK_MEM_ERROR(cpi->gf_active_flags,
1204 vpx_calloc(sizeof(*cpi->gf_active_flags),
1205 cm->mb_rows * cm->mb_cols));
1206 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
1208 vpx_free(cpi->mb_activity_map);
1209 CHECK_MEM_ERROR(cpi->mb_activity_map,
1210 vpx_calloc(sizeof(*cpi->mb_activity_map),
1211 cm->mb_rows * cm->mb_cols));
1213 /* allocate memory for storing last frame's MVs for MV prediction. */
/* (+2 on each dimension: one-MB border around the frame.) */
1214 vpx_free(cpi->lfmv);
1215 CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1216 sizeof(*cpi->lfmv)));
1217 vpx_free(cpi->lf_ref_frame_sign_bias);
1218 CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
1219 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1220 sizeof(*cpi->lf_ref_frame_sign_bias)));
1221 vpx_free(cpi->lf_ref_frame);
1222 CHECK_MEM_ERROR(cpi->lf_ref_frame,
1223 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1224 sizeof(*cpi->lf_ref_frame)));
1226 /* Create the encoder segmentation map and set all entries to 0 */
1227 vpx_free(cpi->segmentation_map);
1228 CHECK_MEM_ERROR(cpi->segmentation_map,
1229 vpx_calloc(cm->mb_rows * cm->mb_cols,
1230 sizeof(*cpi->segmentation_map)));
1231 cpi->cyclic_refresh_mode_index = 0;
/* Active map defaults to "all macroblocks active" (every entry = 1). */
1232 vpx_free(cpi->active_map);
1233 CHECK_MEM_ERROR(cpi->active_map,
1234 vpx_calloc(cm->mb_rows * cm->mb_cols,
1235 sizeof(*cpi->active_map)));
1236 vpx_memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));
/* Row-sync granularity for threaded encoding scales with frame width;
 * the branch for the narrowest width is sampled out of this excerpt. */
1238 #if CONFIG_MULTITHREAD
1240 cpi->mt_sync_range = 1;
1241 else if (width <= 1280)
1242 cpi->mt_sync_range = 4;
1243 else if (width <= 2560)
1244 cpi->mt_sync_range = 8;
1246 cpi->mt_sync_range = 16;
1248 if (cpi->oxcf.multi_threaded > 1)
1250 vpx_free(cpi->mt_current_mb_col);
1251 CHECK_MEM_ERROR(cpi->mt_current_mb_col,
1252 vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
/* Per-row token lists used when packing partitions. */
1257 vpx_free(cpi->tplist);
1258 CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
/* Maps the user-visible 0..63 quantizer scale onto VP8's internal
 * 0..127 quantizer index.  The mapping is non-linear: unit steps at the
 * low end, widening to 3-index steps at the high end.
 */
static const int q_trans[] =
{
    0,   1,   2,   3,   4,   5,   7,   8,
    9,   10,  12,  13,  15,  17,  18,  19,
    20,  21,  23,  24,  25,  26,  27,  28,
    29,  30,  31,  33,  35,  37,  39,  41,
    43,  45,  47,  49,  51,  53,  55,  57,
    59,  61,  64,  67,  70,  73,  76,  79,
    82,  85,  88,  91,  94,  97,  100, 103,
    106, 109, 112, 115, 118, 121, 124, 127,
};
1275 int vp8_reverse_trans(int x)
1279 for (i = 0; i < 64; i++)
1280 if (q_trans[i] >= x)
/* Install a new frame rate and recompute the rate-control values derived
 * from it: per-frame bandwidth targets and the maximum golden/alt-ref
 * frame interval.
 * NOTE(review): sampled excerpt -- the opening brace and what appears to be
 * a sanity guard on very small framerate values (original lines 1287-1288)
 * are not visible here.
 */
1285 void vp8_new_framerate(VP8_COMP *cpi, double framerate)
1290 cpi->framerate = framerate;
1291 cpi->output_framerate = framerate;
/* Average bits available per frame at the configured target bandwidth. */
1292 cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
1293 cpi->output_framerate);
1294 cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
/* Floor on per-frame bits, as a percentage of the average (VBR minimum). */
1295 cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
1296 cpi->oxcf.two_pass_vbrmin_section / 100);
1298 /* Set Maximum gf/arf interval */
1299 cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
1301 if(cpi->max_gf_interval < 12)
1302 cpi->max_gf_interval = 12;
1304 /* Extended interval for genuinely static scenes */
1305 cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
1307 /* Special conditions when altr ref frame enabled in lagged compress mode */
/* Both intervals must fit within the lookahead (lag) window. */
1308 if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
1310 if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1311 cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1313 if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1314 cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
/* The normal interval never exceeds the static-scene interval. */
1317 if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
1318 cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
/* One-time encoder configuration at creation: seeds the bitstream version,
 * an initial framerate guess, rate-control starting values, temporal-layer
 * contexts, and the fixed-point divide lookup table.  Called once from
 * vp8_create_compressor(); subsequent reconfiguration goes through
 * vp8_change_config().
 * NOTE(review): sampled excerpt -- brace lines and some declarations
 * (e.g. the loop index 'i') are not visible here.
 */
1322 static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1324 VP8_COMMON *cm = &cpi->common;
1329 cpi->auto_adjust_gold_quantizer = 1;
1331 cm->version = oxcf->Version;
1332 vp8_setup_version(cm);
1334 /* frame rate is not available on the first frame, as it's derived from
1335 * the observed timestamps. The actual value used here doesn't matter
1336 * too much, as it will adapt quickly. If the reciprocal of the timebase
1337 * seems like a reasonable framerate, then use that as a guess, otherwise
1340 cpi->framerate = (double)(oxcf->timebase.den) /
1341 (double)(oxcf->timebase.num);
/* ...otherwise fall back to 30 fps. */
1343 if (cpi->framerate > 180)
1344 cpi->framerate = 30;
1346 cpi->ref_framerate = cpi->framerate;
1348 /* change includes all joint functionality */
1349 vp8_change_config(cpi, oxcf);
1351 /* Initialize active best and worst q and average q values. */
1352 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1353 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1354 cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
1356 /* Initialise the starting buffer levels */
1357 cpi->buffer_level = cpi->oxcf.starting_buffer_level;
1358 cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
/* Rolling rate-control averages start at the nominal per-frame budget. */
1360 cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
1361 cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
1362 cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
1363 cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
1365 cpi->total_actual_bits = 0;
1366 cpi->total_target_vs_actual = 0;
1368 /* Temporal scalabilty */
/* Each layer context inherits the framerate of the layer below it. */
1369 if (cpi->oxcf.number_of_layers > 1)
1372 double prev_layer_framerate=0;
1374 for (i=0; i<cpi->oxcf.number_of_layers; i++)
1376 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
1377 prev_layer_framerate = cpi->output_framerate /
1378 cpi->oxcf.rate_decimator[i];
1382 #if VP8_TEMPORAL_ALT_REF
/* Precompute 0x80000/i so the temporal filter can divide by multiply. */
1386 cpi->fixed_divide[0] = 0;
1388 for (i = 1; i < 512; i++)
1389 cpi->fixed_divide[i] = 0x80000 / i;
/* Refresh the per-temporal-layer context snapshots after a configuration
 * change: recompute each layer's framerate, target bandwidth, buffer
 * levels (converted from milliseconds), and average frame size.
 * NOTE(review): sampled excerpt -- brace lines, the loop index declaration,
 * and the lc->framerate assignment's left-hand line are not visible here.
 */
1394 static void update_layer_contexts (VP8_COMP *cpi)
1396 VP8_CONFIG *oxcf = &cpi->oxcf;
1398 /* Update snapshots of the layer contexts to reflect new parameters */
1399 if (oxcf->number_of_layers > 1)
1402 double prev_layer_framerate=0;
1404 for (i=0; i<oxcf->number_of_layers; i++)
1406 LAYER_CONTEXT *lc = &cpi->layer_context[i];
/* Layer framerate = source framerate / per-layer decimation factor. */
1409 cpi->ref_framerate / oxcf->rate_decimator[i];
/* target_bitrate[] is in kbit/s; store bandwidth in bit/s. */
1410 lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
/* Buffer levels are configured in ms; rescale() converts them to bits
 * at this layer's bandwidth. */
1412 lc->starting_buffer_level = rescale(
1413 (int)oxcf->starting_buffer_level_in_ms,
1414 lc->target_bandwidth, 1000);
1416 if (oxcf->optimal_buffer_level == 0)
1417 lc->optimal_buffer_level = lc->target_bandwidth / 8;
1419 lc->optimal_buffer_level = rescale(
1420 (int)oxcf->optimal_buffer_level_in_ms,
1421 lc->target_bandwidth, 1000);
1423 if (oxcf->maximum_buffer_size == 0)
1424 lc->maximum_buffer_size = lc->target_bandwidth / 8;
1426 lc->maximum_buffer_size = rescale(
1427 (int)oxcf->maximum_buffer_size_in_ms,
1428 lc->target_bandwidth, 1000);
1430 /* Work out the average size of a frame within this layer */
/* Layer i's own bits are its cumulative target minus layer i-1's;
 * divide by the framerate delta between the two layers. */
1432 lc->avg_frame_size_for_layer =
1433 (int)((oxcf->target_bitrate[i] -
1434 oxcf->target_bitrate[i-1]) * 1000 /
1435 (lc->framerate - prev_layer_framerate));
1437 prev_layer_framerate = lc->framerate;
/* Apply a (possibly mid-stream) configuration change to a live encoder:
 * translate the public VP8_CONFIG into internal oxcf state, clamp and
 * rescale quantizer/buffer/bandwidth settings, reset temporal-layer state
 * when the layer count changes, and reallocate frame-size-dependent
 * buffers when the coded dimensions change.
 * NOTE(review): sampled excerpt -- brace lines, 'break;' statements in the
 * switch, 'else' lines, and several guard lines are not visible here; the
 * embedded numbers are the original file's line numbers.
 */
1442 void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1444 VP8_COMMON *cm = &cpi->common;
1445 int last_w, last_h, prev_number_of_layers;
1453 #if CONFIG_MULTITHREAD
1454 /* wait for the last picture loopfilter thread done */
1455 if (cpi->b_lpf_running)
1457 sem_wait(&cpi->h_event_end_lpf);
1458 cpi->b_lpf_running = 0;
1462 if (cm->version != oxcf->Version)
1464 cm->version = oxcf->Version;
1465 vp8_setup_version(cm);
/* Remember previous geometry/layering so we can detect changes below. */
1468 last_w = cpi->oxcf.Width;
1469 last_h = cpi->oxcf.Height;
1470 prev_number_of_layers = cpi->oxcf.number_of_layers;
/* Map encode mode to a compressor speed class and clamp cpu_used to the
 * range that mode supports (realtime: +/-16, good/two-pass: +/-5). */
1474 switch (cpi->oxcf.Mode)
1479 cpi->compressor_speed = 2;
1481 if (cpi->oxcf.cpu_used < -16)
1483 cpi->oxcf.cpu_used = -16;
1486 if (cpi->oxcf.cpu_used > 16)
1487 cpi->oxcf.cpu_used = 16;
1491 case MODE_GOODQUALITY:
1493 cpi->compressor_speed = 1;
1495 if (cpi->oxcf.cpu_used < -5)
1497 cpi->oxcf.cpu_used = -5;
1500 if (cpi->oxcf.cpu_used > 5)
1501 cpi->oxcf.cpu_used = 5;
1505 case MODE_BESTQUALITY:
1507 cpi->compressor_speed = 0;
1510 case MODE_FIRSTPASS:
1512 cpi->compressor_speed = 1;
1514 case MODE_SECONDPASS:
1516 cpi->compressor_speed = 1;
1518 if (cpi->oxcf.cpu_used < -5)
1520 cpi->oxcf.cpu_used = -5;
1523 if (cpi->oxcf.cpu_used > 5)
1524 cpi->oxcf.cpu_used = 5;
1527 case MODE_SECONDPASS_BEST:
1529 cpi->compressor_speed = 0;
1534 cpi->auto_worst_q = 1;
/* Translate user-scale (0..63) quantizer limits to internal indices. */
1536 cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
1537 cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
1538 cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
/* Fixed-Q mode: derive the fixed and per-frame-type quantizers, with
 * negative user values falling back to q_trans[0]. */
1540 if (oxcf->fixed_q >= 0)
1542 if (oxcf->worst_allowed_q < 0)
1543 cpi->oxcf.fixed_q = q_trans[0];
1545 cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
1547 if (oxcf->alt_q < 0)
1548 cpi->oxcf.alt_q = q_trans[0];
1550 cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
1552 if (oxcf->key_q < 0)
1553 cpi->oxcf.key_q = q_trans[0];
1555 cpi->oxcf.key_q = q_trans[oxcf->key_q];
1557 if (oxcf->gold_q < 0)
1558 cpi->oxcf.gold_q = q_trans[0];
1560 cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
1564 cpi->baseline_gf_interval =
1565 cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
/* All three reference frames are usable by default. */
1567 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
1569 cm->refresh_golden_frame = 0;
1570 cm->refresh_last_frame = 1;
1571 cm->refresh_entropy_probs = 1;
/* On-the-fly bitpacking builds force the maximum partition count. */
1573 #if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
1574 cpi->oxcf.token_partitions = 3;
1577 if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
1578 cm->multi_token_partition =
1579 (TOKEN_PARTITION) cpi->oxcf.token_partitions;
1581 setup_features(cpi);
/* Same encode-breakout threshold for every segment initially. */
1586 for (i = 0; i < MAX_MB_SEGMENTS; i++)
1587 cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1590 /* At the moment the first order values may not be > MAXQ */
1591 if (cpi->oxcf.fixed_q > MAXQ)
1592 cpi->oxcf.fixed_q = MAXQ;
1594 /* local file playback mode == really big buffer */
1595 if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
1597 cpi->oxcf.starting_buffer_level = 60000;
1598 cpi->oxcf.optimal_buffer_level = 60000;
1599 cpi->oxcf.maximum_buffer_size = 240000;
1600 cpi->oxcf.starting_buffer_level_in_ms = 60000;
1601 cpi->oxcf.optimal_buffer_level_in_ms = 60000;
1602 cpi->oxcf.maximum_buffer_size_in_ms = 240000;
1605 /* Convert target bandwidth from Kbit/s to Bit/s */
1606 cpi->oxcf.target_bandwidth *= 1000;
/* Buffer levels were specified in ms; rescale() converts to bits. */
1608 cpi->oxcf.starting_buffer_level =
1609 rescale((int)cpi->oxcf.starting_buffer_level,
1610 cpi->oxcf.target_bandwidth, 1000);
1612 /* Set or reset optimal and maximum buffer levels. */
1613 if (cpi->oxcf.optimal_buffer_level == 0)
1614 cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
1616 cpi->oxcf.optimal_buffer_level =
1617 rescale((int)cpi->oxcf.optimal_buffer_level,
1618 cpi->oxcf.target_bandwidth, 1000);
1620 if (cpi->oxcf.maximum_buffer_size == 0)
1621 cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
1623 cpi->oxcf.maximum_buffer_size =
1624 rescale((int)cpi->oxcf.maximum_buffer_size,
1625 cpi->oxcf.target_bandwidth, 1000);
1626 // Under a configuration change, where maximum_buffer_size may change,
1627 // keep buffer level clipped to the maximum allowed buffer size.
1628 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
1629 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
1630 cpi->buffer_level = cpi->bits_off_target;
1633 /* Set up frame rate and related parameters rate control values. */
1634 vp8_new_framerate(cpi, cpi->framerate);
1636 /* Set absolute upper and lower quality limits */
1637 cpi->worst_quality = cpi->oxcf.worst_allowed_q;
1638 cpi->best_quality = cpi->oxcf.best_allowed_q;
1640 /* active values should only be modified if out of new range */
1641 if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
1643 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1646 else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
1648 cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
1650 if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
1652 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1655 else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
1657 cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
1660 cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
1662 cpi->cq_target_quality = cpi->oxcf.cq_level;
1664 /* Only allow dropped frames in buffered mode */
1665 cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
1667 cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
1669 // Check if the number of temporal layers has changed, and if so reset the
1670 // pattern counter and set/initialize the temporal layer context for the
1671 // new layer configuration.
1672 if (cpi->oxcf.number_of_layers != prev_number_of_layers)
1674 // If the number of temporal layers are changed we must start at the
1675 // base of the pattern cycle, so reset temporal_pattern_counter.
1676 cpi->temporal_pattern_counter = 0;
1677 reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
1680 cm->Width = cpi->oxcf.Width;
1681 cm->Height = cpi->oxcf.Height;
1683 /* TODO(jkoleszar): if an internal spatial resampling is active,
1684 * and we downsize the input image, maybe we should clear the
1685 * internal scale immediately rather than waiting for it to
1689 /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
1690 if (cpi->oxcf.Sharpness > 7)
1691 cpi->oxcf.Sharpness = 7;
1693 cm->sharpness_level = cpi->oxcf.Sharpness;
/* Apply internal spatial scaling, rounding up to a whole pixel. */
1695 if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
1697 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
1698 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
1700 Scale2Ratio(cm->horiz_scale, &hr, &hs);
1701 Scale2Ratio(cm->vert_scale, &vr, &vs);
1703 /* always go to the next whole number */
1704 cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
1705 cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
1708 if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
1709 cpi->force_next_frame_intra = 1;
/* Reallocate when the MB-aligned size no longer matches the current
 * reconstruction buffers (or no buffers exist yet). */
1711 if (((cm->Width + 15) & 0xfffffff0) !=
1712 cm->yv12_fb[cm->lst_fb_idx].y_width ||
1713 ((cm->Height + 15) & 0xfffffff0) !=
1714 cm->yv12_fb[cm->lst_fb_idx].y_height ||
1715 cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
1717 dealloc_raw_frame_buffers(cpi);
1718 alloc_raw_frame_buffers(cpi);
1719 vp8_alloc_compressor_data(cpi);
1722 if (cpi->oxcf.fixed_q >= 0)
1724 cpi->last_q[0] = cpi->oxcf.fixed_q;
1725 cpi->last_q[1] = cpi->oxcf.fixed_q;
1728 cpi->Speed = cpi->oxcf.cpu_used;
1730 /* force to allowlag to 0 if lag_in_frames is 0; */
1731 if (cpi->oxcf.lag_in_frames == 0)
1733 cpi->oxcf.allow_lag = 0;
1735 /* Limit on lag buffers as these are not currently dynamically allocated */
1736 else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
1737 cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
1740 cpi->alt_ref_source = NULL;
1741 cpi->is_src_frame_alt_ref = 0;
/* Lazily allocate the denoiser working frame on first enable. */
1743 #if CONFIG_TEMPORAL_DENOISING
1744 if (cpi->oxcf.noise_sensitivity)
1746 if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
1748 int width = (cpi->oxcf.Width + 15) & ~15;
1749 int height = (cpi->oxcf.Height + 15) & ~15;
1750 vp8_denoiser_allocate(&cpi->denoiser, width, height);
1756 /* Experimental RD Code */
1757 cpi->frame_distortion = 0;
1758 cpi->last_frame_distortion = 0;
1763 #define M_LOG2_E 0.693147180559945309417
1764 #define log2f(x) (log (x) / (float) M_LOG2_E)
1765 static void cal_mvsadcosts(int *mvsadcost[2])
1769 mvsadcost [0] [0] = 300;
1770 mvsadcost [1] [0] = 300;
1774 double z = 256 * (2 * (log2f(8 * i) + .6));
1775 mvsadcost [0][i] = (int) z;
1776 mvsadcost [1][i] = (int) z;
1777 mvsadcost [0][-i] = (int) z;
1778 mvsadcost [1][-i] = (int) z;
1780 while (++i <= mvfp_max);
/* Allocate and fully initialize a VP8 encoder instance.  Sets up the
 * error-handling longjmp target, common state, configuration, rate-control
 * defaults, two-pass stats input, SAD/variance function pointers, RD cost
 * tables, and (optionally) encoder worker threads.  Returns the new
 * instance, or NULL via the cleanup path on failure.
 * NOTE(review): sampled excerpt -- brace lines, local declarations
 * (cpi, cm, i), NULL checks, and 'return' statements are not visible here;
 * the embedded numbers are the original file's line numbers.
 */
1783 struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
1790 cpi = vpx_memalign(32, sizeof(VP8_COMP));
1791 /* Check that the CPI instance is valid */
1797 vpx_memset(cpi, 0, sizeof(VP8_COMP));
/* Any vpx_internal_error() below this point longjmps here: tear the
 * partially built encoder down and bail out. */
1799 if (setjmp(cm->error.jmp))
1801 cpi->common.error.setjmp = 0;
1802 vp8_remove_compressor(&cpi);
1806 cpi->common.error.setjmp = 1;
1808 CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
1810 vp8_create_common(&cpi->common);
1812 init_config(cpi, oxcf);
1814 memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
1815 cpi->common.current_video_frame = 0;
1816 cpi->temporal_pattern_counter = 0;
1817 cpi->kf_overspend_bits = 0;
1818 cpi->kf_bitrate_adjustment = 0;
1819 cpi->frames_till_gf_update_due = 0;
1820 cpi->gf_overspend_bits = 0;
1821 cpi->non_gf_bitrate_adjustment = 0;
/* Probabilities are 8-bit: 128 = even odds, 63 biases toward inter. */
1822 cpi->prob_last_coded = 128;
1823 cpi->prob_gf_coded = 128;
1824 cpi->prob_intra_coded = 63;
1826 /* Prime the recent reference frame usage counters.
1827 * Hereafter they will be maintained as a sort of moving average
1829 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
1830 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
1831 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
1832 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
1834 /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
1835 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
1837 cpi->twopass.gf_decay_rate = 0;
1838 cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
1840 cpi->gold_is_last = 0 ;
1841 cpi->alt_is_last = 0 ;
1842 cpi->gold_is_alt = 0 ;
1844 cpi->active_map_enabled = 0;
1847 /* Experimental code for lagged and one pass */
1848 /* Initialise one_pass GF frames stats */
1849 /* Update stats used for GF selection */
1852 cpi->one_pass_frame_index = 0;
1854 for (i = 0; i < MAX_LAG_BUFFERS; i++)
1856 cpi->one_pass_frame_stats[i].frames_so_far = 0;
1857 cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
1858 cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
1859 cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
1860 cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
1861 cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
1862 cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
1863 cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
1864 cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
1869 /* Should we use the cyclic refresh method.
1870 * Currently this is tied to error resilliant mode
1872 cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
/* Refresh at most 1/5 of the macroblocks per frame. */
1873 cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
1874 cpi->cyclic_refresh_mode_index = 0;
1875 cpi->cyclic_refresh_q = 32;
1877 if (cpi->cyclic_refresh_mode_enabled)
1879 CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
1882 cpi->cyclic_refresh_map = (signed char *) NULL;
1884 #ifdef VP8_ENTROPY_STATS
1885 init_context_counters();
1888 /*Initialize the feed-forward activity masking.*/
1889 cpi->activity_avg = 90<<12;
1891 /* Give a sensible default for the first frame. */
1892 cpi->frames_since_key = 8;
1893 cpi->key_frame_frequency = cpi->oxcf.key_freq;
1894 cpi->this_key_frame_forced = 0;
1895 cpi->next_key_frame_forced = 0;
1897 cpi->source_alt_ref_pending = 0;
1898 cpi->source_alt_ref_active = 0;
1899 cpi->common.refresh_alt_ref_frame = 0;
1901 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
1902 #if CONFIG_INTERNAL_STATS
1903 cpi->b_calculate_ssimg = 0;
1908 if (cpi->b_calculate_psnr)
1910 cpi->total_sq_error = 0.0;
1911 cpi->total_sq_error2 = 0.0;
1916 cpi->totalp_y = 0.0;
1917 cpi->totalp_u = 0.0;
1918 cpi->totalp_v = 0.0;
1920 cpi->tot_recode_hits = 0;
1921 cpi->summed_quality = 0;
1922 cpi->summed_weights = 0;
1925 if (cpi->b_calculate_ssimg)
1927 cpi->total_ssimg_y = 0;
1928 cpi->total_ssimg_u = 0;
1929 cpi->total_ssimg_v = 0;
1930 cpi->total_ssimg_all = 0;
/* Sentinel: any real timestamp will be smaller than this. */
1935 cpi->first_time_stamp_ever = 0x7FFFFFFF;
1937 cpi->frames_till_gf_update_due = 0;
1938 cpi->key_frame_count = 1;
1940 cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
1943 cpi->total_byte_count = 0;
1945 cpi->drop_frame = 0;
1947 cpi->rate_correction_factor = 1.0;
1948 cpi->key_frame_rate_correction_factor = 1.0;
1949 cpi->gf_rate_correction_factor = 1.0;
1950 cpi->twopass.est_max_qcorrection_factor = 1.0;
1952 for (i = 0; i < KEY_FRAME_CONTEXT; i++)
1954 cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
/* Debug-only output files (compile-time gated). */
1957 #ifdef OUTPUT_YUV_SRC
1958 yuv_file = fopen("bd.yuv", "ab");
1962 framepsnr = fopen("framepsnr.stt", "a");
1963 kf_list = fopen("kf_list.stt", "w");
1966 cpi->output_pkt_list = oxcf->output_pkt_list;
/* Two-pass setup: pass 1 collects stats; pass 2 consumes the stats
 * packets supplied by the application. */
1968 #if !(CONFIG_REALTIME_ONLY)
1972 vp8_init_first_pass(cpi);
1974 else if (cpi->pass == 2)
1976 size_t packet_sz = sizeof(FIRSTPASS_STATS);
1977 int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
1979 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
1980 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
1981 cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
1982 + (packets - 1) * packet_sz);
1983 vp8_init_second_pass(cpi);
1988 if (cpi->compressor_speed == 2)
1990 cpi->avg_encode_time = 0;
1991 cpi->avg_pick_mode_time = 0;
1994 vp8_set_speed_features(cpi);
1996 /* Set starting values of RD threshold multipliers (128 = *1) */
1997 for (i = 0; i < MAX_MODES; i++)
1999 cpi->mb.rd_thresh_mult[i] = 128;
2002 #ifdef VP8_ENTROPY_STATS
2003 init_mv_ref_counts();
2006 #if CONFIG_MULTITHREAD
2007 if(vp8cx_create_encoder_threads(cpi))
2009 vp8_remove_compressor(&cpi);
/* SAD / variance / sub-pixel function pointers per block size; the
 * RTCD layer resolves these to the best CPU-specific versions. */
2014 cpi->fn_ptr[BLOCK_16X16].sdf = vp8_sad16x16;
2015 cpi->fn_ptr[BLOCK_16X16].vf = vp8_variance16x16;
2016 cpi->fn_ptr[BLOCK_16X16].svf = vp8_sub_pixel_variance16x16;
2017 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vp8_variance_halfpixvar16x16_h;
2018 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vp8_variance_halfpixvar16x16_v;
2019 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv;
2020 cpi->fn_ptr[BLOCK_16X16].sdx3f = vp8_sad16x16x3;
2021 cpi->fn_ptr[BLOCK_16X16].sdx8f = vp8_sad16x16x8;
2022 cpi->fn_ptr[BLOCK_16X16].sdx4df = vp8_sad16x16x4d;
2024 cpi->fn_ptr[BLOCK_16X8].sdf = vp8_sad16x8;
2025 cpi->fn_ptr[BLOCK_16X8].vf = vp8_variance16x8;
2026 cpi->fn_ptr[BLOCK_16X8].svf = vp8_sub_pixel_variance16x8;
2027 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
2028 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
2029 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
2030 cpi->fn_ptr[BLOCK_16X8].sdx3f = vp8_sad16x8x3;
2031 cpi->fn_ptr[BLOCK_16X8].sdx8f = vp8_sad16x8x8;
2032 cpi->fn_ptr[BLOCK_16X8].sdx4df = vp8_sad16x8x4d;
2034 cpi->fn_ptr[BLOCK_8X16].sdf = vp8_sad8x16;
2035 cpi->fn_ptr[BLOCK_8X16].vf = vp8_variance8x16;
2036 cpi->fn_ptr[BLOCK_8X16].svf = vp8_sub_pixel_variance8x16;
2037 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
2038 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
2039 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
2040 cpi->fn_ptr[BLOCK_8X16].sdx3f = vp8_sad8x16x3;
2041 cpi->fn_ptr[BLOCK_8X16].sdx8f = vp8_sad8x16x8;
2042 cpi->fn_ptr[BLOCK_8X16].sdx4df = vp8_sad8x16x4d;
2044 cpi->fn_ptr[BLOCK_8X8].sdf = vp8_sad8x8;
2045 cpi->fn_ptr[BLOCK_8X8].vf = vp8_variance8x8;
2046 cpi->fn_ptr[BLOCK_8X8].svf = vp8_sub_pixel_variance8x8;
2047 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
2048 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
2049 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
2050 cpi->fn_ptr[BLOCK_8X8].sdx3f = vp8_sad8x8x3;
2051 cpi->fn_ptr[BLOCK_8X8].sdx8f = vp8_sad8x8x8;
2052 cpi->fn_ptr[BLOCK_8X8].sdx4df = vp8_sad8x8x4d;
2054 cpi->fn_ptr[BLOCK_4X4].sdf = vp8_sad4x4;
2055 cpi->fn_ptr[BLOCK_4X4].vf = vp8_variance4x4;
2056 cpi->fn_ptr[BLOCK_4X4].svf = vp8_sub_pixel_variance4x4;
2057 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
2058 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
2059 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
2060 cpi->fn_ptr[BLOCK_4X4].sdx3f = vp8_sad4x4x3;
2061 cpi->fn_ptr[BLOCK_4X4].sdx8f = vp8_sad4x4x8;
2062 cpi->fn_ptr[BLOCK_4X4].sdx4df = vp8_sad4x4x4d;
2064 #if ARCH_X86 || ARCH_X86_64
2065 cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
2066 cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
2067 cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
2068 cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
2069 cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
2072 cpi->full_search_sad = vp8_full_search_sad;
2073 cpi->diamond_search_sad = vp8_diamond_search_sad;
2074 cpi->refining_search_sad = vp8_refining_search_sad;
2076 /* make sure frame 1 is okay */
2077 cpi->mb.error_bins[0] = cpi->common.MBs;
2079 /* vp8cx_init_quantizer() is first called here. Add check in
2080 * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
2081 * called later when needed. This will avoid unnecessary calls of
2082 * vp8cx_init_quantizer() for every frame.
2084 vp8cx_init_quantizer(cpi);
2086 vp8_loop_filter_init(cm);
2088 cpi->common.error.setjmp = 0;
2090 #if CONFIG_MULTI_RES_ENCODING
2092 /* Calculate # of MBs in a row in lower-resolution level image. */
2093 if (cpi->oxcf.mr_encoder_id > 0)
2094 vp8_cal_low_res_mb_cols(cpi);
2098 /* setup RD costs to MACROBLOCK struct */
/* MV cost tables are indexed -max..+max, so point at the center. */
2100 cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
2101 cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
2102 cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
2103 cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];
2105 cal_mvsadcosts(cpi->mb.mvsadcost);
2107 cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
2108 cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
2109 cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
2110 cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
2111 cpi->mb.token_costs = cpi->rd_costs.token_costs;
2113 /* setup block ptrs & offsets */
2114 vp8_setup_block_ptrs(&cpi->mb);
2115 vp8_setup_block_dptrs(&cpi->mb.e_mbd);
/* Destroy an encoder instance: finish two-pass bookkeeping, dump optional
 * compile-time statistics (PSNR/SSIM summaries, mode counts, entropy
 * tables, timing), shut down worker threads, and free everything.
 * *ptr is the caller's handle; also used as the error-path cleanup for
 * vp8_create_compressor().
 * NOTE(review): sampled excerpt -- brace lines, several fclose()/declaration
 * lines, and '*ptr = 0;'-style teardown lines are not visible here; the
 * embedded numbers are the original file's line numbers.
 */
2121 void vp8_remove_compressor(VP8_COMP **ptr)
2123 VP8_COMP *cpi = *ptr;
/* Statistics below are only meaningful if at least one frame was coded. */
2128 if (cpi && (cpi->common.current_video_frame > 0))
2130 #if !(CONFIG_REALTIME_ONLY)
2134 vp8_end_second_pass(cpi);
2139 #ifdef VP8_ENTROPY_STATS
2140 print_context_counters();
2141 print_tree_update_probs();
2142 print_mode_context();
2145 #if CONFIG_INTERNAL_STATS
2149 FILE *f = fopen("opsnr.stt", "a");
/* Timestamps are in 100 ns units; times in microseconds. */
2150 double time_encoded = (cpi->last_end_time_stamp_seen
2151 - cpi->first_time_stamp_ever) / 10000000.000;
2152 double total_encode_time = (cpi->time_receive_data +
2153 cpi->time_compress_data) / 1000.000;
2154 double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;
2156 if (cpi->b_calculate_psnr)
2158 YV12_BUFFER_CONFIG *lst_yv12 =
2159 &cpi->common.yv12_fb[cpi->common.lst_fb_idx];
/* Per-layer PSNR/SSIM summary for temporally scalable streams... */
2161 if (cpi->oxcf.number_of_layers > 1)
2165 fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2166 "GLPsnrP\tVPXSSIM\t\n");
2167 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2169 double dr = (double)cpi->bytes_in_layer[i] *
2170 8.0 / 1000.0 / time_encoded;
/* 3/2 samples per pixel: Y plane plus quarter-size U and V. */
2171 double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
2172 lst_yv12->y_width * lst_yv12->y_height;
2174 vpx_sse_to_psnr(samples, 255.0,
2175 cpi->total_error2[i]);
2176 double total_psnr2 =
2177 vpx_sse_to_psnr(samples, 255.0,
2178 cpi->total_error2_p[i]);
2179 double total_ssim = 100 * pow(cpi->sum_ssim[i] /
2180 cpi->sum_weights[i], 8.0);
2182 fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2185 cpi->sum_psnr[i] / cpi->frames_in_layer[i],
2187 cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
2188 total_psnr2, total_ssim);
/* ...single-stream summary otherwise. */
2193 double samples = 3.0 / 2 * cpi->count *
2194 lst_yv12->y_width * lst_yv12->y_height;
2195 double total_psnr = vpx_sse_to_psnr(samples, 255.0,
2196 cpi->total_sq_error);
2197 double total_psnr2 = vpx_sse_to_psnr(samples, 255.0,
2198 cpi->total_sq_error2);
2199 double total_ssim = 100 * pow(cpi->summed_quality /
2200 cpi->summed_weights, 8.0);
2202 fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2203 "GLPsnrP\tVPXSSIM\t Time(us)\n");
2204 fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2206 dr, cpi->total / cpi->count, total_psnr,
2207 cpi->totalp / cpi->count, total_psnr2,
2208 total_ssim, total_encode_time);
2212 if (cpi->b_calculate_ssimg)
2214 if (cpi->oxcf.number_of_layers > 1)
2218 fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2220 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2222 double dr = (double)cpi->bytes_in_layer[i] *
2223 8.0 / 1000.0 / time_encoded;
2224 fprintf(f, "%5d\t%7.3f\t%6.4f\t"
2225 "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
2227 cpi->total_ssimg_y_in_layer[i] /
2228 cpi->frames_in_layer[i],
2229 cpi->total_ssimg_u_in_layer[i] /
2230 cpi->frames_in_layer[i],
2231 cpi->total_ssimg_v_in_layer[i] /
2232 cpi->frames_in_layer[i],
2233 cpi->total_ssimg_all_in_layer[i] /
2234 cpi->frames_in_layer[i],
2240 fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2242 fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
2243 cpi->total_ssimg_y / cpi->count,
2244 cpi->total_ssimg_u / cpi->count,
2245 cpi->total_ssimg_v / cpi->count,
2246 cpi->total_ssimg_all / cpi->count, total_encode_time);
/* Skip-prediction statistics (compile-time gated section). */
2252 f = fopen("qskip.stt", "a");
2253 fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
/* Realtime-mode speed histogram (compile-time gated section). */
2264 if (cpi->compressor_speed == 2)
2267 FILE *f = fopen("cxspeed.stt", "a");
2268 cnt_pm /= cpi->common.MBs;
2270 for (i = 0; i < 16; i++)
2271 fprintf(f, "%5d", frames_at_speed[i]);
/* Mode-usage statistics (compile-time gated section). */
2282 extern int count_mb_seg[4];
2283 FILE *f = fopen("modes.stt", "a");
2284 double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
2285 fprintf(f, "intra_mode in Intra Frames:\n");
2286 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
2287 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
2292 for (i = 0; i < 10; i++)
2293 fprintf(f, "%8d, ", b_modes[i]);
2299 fprintf(f, "Modes in Inter Frames:\n");
2300 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
2301 inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
2302 inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
2303 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
2308 for (i = 0; i < 15; i++)
2309 fprintf(f, "%8d, ", inter_b_modes[i]);
2314 fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
2315 fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
/* Emit measured key-frame B-mode counts as a C source file that can
 * replace the default table in entropymode (zero counts become 1). */
2323 #ifdef VP8_ENTROPY_STATS
2326 FILE *fmode = fopen("modecontext.c", "w");
2328 fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
2329 fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
2330 fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
2332 for (i = 0; i < 10; i++)
2335 fprintf(fmode, " { /* Above Mode : %d */\n", i);
2337 for (j = 0; j < 10; j++)
2340 fprintf(fmode, " {");
2342 for (k = 0; k < 10; k++)
2344 if (!intra_mode_stats[i][j][k])
2345 fprintf(fmode, " %5d, ", 1);
2347 fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
2350 fprintf(fmode, "}, /* left_mode %d */\n", j);
2354 fprintf(fmode, " },\n");
2358 fprintf(fmode, "};\n");
2364 #if defined(SECTIONBITS_OUTPUT)
2369 FILE *f = fopen("tokenbits.stt", "a");
2371 for (i = 0; i < 28; i++)
2372 fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
/* Timing summary printed to stdout (compile-time gated section). */
2382 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2383 printf("\n_frames recive_data encod_mb_row compress_frame Total\n");
2384 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
/* Teardown proper: threads first, then allocations, then common state. */
2390 #if CONFIG_MULTITHREAD
2391 vp8cx_remove_encoder_threads(cpi);
2394 #if CONFIG_TEMPORAL_DENOISING
2395 vp8_denoiser_free(&cpi->denoiser);
2397 dealloc_compressor_data(cpi);
2398 vpx_free(cpi->mb.ss);
2400 vpx_free(cpi->cyclic_refresh_map);
2402 vp8_remove_common(&cpi->common);
2406 #ifdef OUTPUT_YUV_SRC
/* Sum of squared differences between one plane of |orig| and |recon|.
 * The interior is covered with 16x16 tiles handled by the optimised
 * vp8_mse16x16 kernel; any right-hand or bottom remainder narrower than
 * 16 pixels is accumulated pixel by pixel. Returns the total SSE.
 */
static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
                                 unsigned char *recon, int recon_stride,
                                 unsigned int cols, unsigned int rows)
{
    uint64_t total_sse = 0;
    unsigned int row, col;
    int diff;

    for (row = 0; row + 16 <= rows; row += 16)
    {
        for (col = 0; col + 16 <= cols; col += 16)
        {
            unsigned int sse;

            vp8_mse16x16(orig + col, orig_stride,
                         recon + col, recon_stride,
                         &sse);
            total_sse += sse;
        }

        /* Handle odd-sized width: right-hand strip narrower than 16 */
        if (col < cols)
        {
            unsigned int   edge_row, edge_col;
            unsigned char *edge_orig = orig;
            unsigned char *edge_recon = recon;

            for (edge_row = 0; edge_row < 16; edge_row++)
            {
                for (edge_col = col; edge_col < cols; edge_col++)
                {
                    diff = edge_orig[edge_col] - edge_recon[edge_col];
                    total_sse += diff * diff;
                }

                edge_orig += orig_stride;
                edge_recon += recon_stride;
            }
        }

        orig += orig_stride * 16;
        recon += recon_stride * 16;
    }

    /* Handle odd-sized height: bottom strip shorter than 16 rows */
    for (; row < rows; row++)
    {
        for (col = 0; col < cols; col++)
        {
            diff = orig[col] - recon[col];
            total_sse += diff * diff;
        }

        orig += orig_stride;
        recon += recon_stride;
    }

    /* Clear MMX/SIMD state before callers do floating point work */
    vp8_clear_system_state();

    return total_sse;
}
2488 static void generate_psnr_packet(VP8_COMP *cpi)
2490 YV12_BUFFER_CONFIG *orig = cpi->Source;
2491 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
2492 struct vpx_codec_cx_pkt pkt;
2495 unsigned int width = cpi->common.Width;
2496 unsigned int height = cpi->common.Height;
2498 pkt.kind = VPX_CODEC_PSNR_PKT;
2499 sse = calc_plane_error(orig->y_buffer, orig->y_stride,
2500 recon->y_buffer, recon->y_stride,
2502 pkt.data.psnr.sse[0] = sse;
2503 pkt.data.psnr.sse[1] = sse;
2504 pkt.data.psnr.samples[0] = width * height;
2505 pkt.data.psnr.samples[1] = width * height;
2507 width = (width + 1) / 2;
2508 height = (height + 1) / 2;
2510 sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
2511 recon->u_buffer, recon->uv_stride,
2513 pkt.data.psnr.sse[0] += sse;
2514 pkt.data.psnr.sse[2] = sse;
2515 pkt.data.psnr.samples[0] += width * height;
2516 pkt.data.psnr.samples[2] = width * height;
2518 sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
2519 recon->v_buffer, recon->uv_stride,
2521 pkt.data.psnr.sse[0] += sse;
2522 pkt.data.psnr.sse[3] = sse;
2523 pkt.data.psnr.samples[0] += width * height;
2524 pkt.data.psnr.samples[3] = width * height;
2526 for (i = 0; i < 4; i++)
2527 pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
2528 (double)(pkt.data.psnr.sse[i]));
2530 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
2534 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
2536 if (ref_frame_flags > 7)
2539 cpi->ref_frame_flags = ref_frame_flags;
2542 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
2544 if (ref_frame_flags > 7)
2547 cpi->common.refresh_golden_frame = 0;
2548 cpi->common.refresh_alt_ref_frame = 0;
2549 cpi->common.refresh_last_frame = 0;
2551 if (ref_frame_flags & VP8_LAST_FRAME)
2552 cpi->common.refresh_last_frame = 1;
2554 if (ref_frame_flags & VP8_GOLD_FRAME)
2555 cpi->common.refresh_golden_frame = 1;
2557 if (ref_frame_flags & VP8_ALTR_FRAME)
2558 cpi->common.refresh_alt_ref_frame = 1;
2563 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2565 VP8_COMMON *cm = &cpi->common;
2568 if (ref_frame_flag == VP8_LAST_FRAME)
2569 ref_fb_idx = cm->lst_fb_idx;
2570 else if (ref_frame_flag == VP8_GOLD_FRAME)
2571 ref_fb_idx = cm->gld_fb_idx;
2572 else if (ref_frame_flag == VP8_ALTR_FRAME)
2573 ref_fb_idx = cm->alt_fb_idx;
2577 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
2581 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2583 VP8_COMMON *cm = &cpi->common;
2587 if (ref_frame_flag == VP8_LAST_FRAME)
2588 ref_fb_idx = cm->lst_fb_idx;
2589 else if (ref_frame_flag == VP8_GOLD_FRAME)
2590 ref_fb_idx = cm->gld_fb_idx;
2591 else if (ref_frame_flag == VP8_ALTR_FRAME)
2592 ref_fb_idx = cm->alt_fb_idx;
2596 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
2600 int vp8_update_entropy(VP8_COMP *cpi, int update)
2602 VP8_COMMON *cm = &cpi->common;
2603 cm->refresh_entropy_probs = update;
2610 void vp8_write_yuv_frame(const char *name, YV12_BUFFER_CONFIG *s)
2612 FILE *yuv_file = fopen(name, "ab");
2613 unsigned char *src = s->y_buffer;
2614 int h = s->y_height;
2618 fwrite(src, s->y_width, 1, yuv_file);
2628 fwrite(src, s->uv_width, 1, yuv_file);
2629 src += s->uv_stride;
2638 fwrite(src, s->uv_width, 1, yuv_file);
2639 src += s->uv_stride;
/* Prepare the source frame for encoding.  When spatial resampling is
 * active (non-zero horiz/vert scale) the raw input |sd| is scaled into
 * cpi->scaled_source, borders are extended for prediction, and
 * cpi->Source is pointed at the scaled copy.
 * NOTE(review): this extracted listing omits the brace/else lines; the
 * non-resampling path (cpi->Source = sd) is not visible here.
 */
2648 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
2650 VP8_COMMON *cm = &cpi->common;
2652 /* are we resizing the image */
2653 if (cm->horiz_scale != 0 || cm->vert_scale != 0)
2655 #if CONFIG_SPATIAL_RESAMPLING
2656 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2657 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
/* One specific vertical scale mode needs a different intermediate
 * scaler buffer height than the others (tmp_height below). */
2660 if (cm->vert_scale == 3)
/* Convert the scale enums into ratio numerator/denominator pairs */
2665 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2666 Scale2Ratio(cm->vert_scale, &vr, &vs);
/* Scale into cpi->scaled_source using temp_scale_frame as scratch */
2668 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
2669 tmp_height, hs, hr, vs, vr, 0);
2671 vp8_yv12_extend_frame_borders(&cpi->scaled_source);
2672 cpi->Source = &cpi->scaled_source;
/* One-pass CBR spatial resampling decision, evaluated at key frames.
 * Depending on how full the rate-control buffer is, steps the horizontal
 * and vertical scale factors up or down one notch, and if the resulting
 * frame size differs from the current one, reallocates encoder buffers
 * and rescales the source.  Returns non-zero when the size changed
 * (return statements are not visible in this extracted listing).
 */
2680 static int resize_key_frame(VP8_COMP *cpi)
2682 #if CONFIG_SPATIAL_RESAMPLING
2683 VP8_COMMON *cm = &cpi->common;
2685 /* Do we need to apply resampling for one pass cbr.
2686 * In one pass this is more limited than in two pass cbr.
2687 * The test and any change is only made once per key frame sequence.
2689 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
2691 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2692 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2693 int new_width, new_height;
2695 /* If we are below the resample DOWN watermark then scale down a
2698 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
/* Step one notch towards the maximum reduction (ONETWO), clamped */
2700 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
2701 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
2703 /* Should we now start scaling back up */
2704 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
/* Step one notch back towards full size (NORMAL), clamped */
2706 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
2707 cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
2710 /* Get the new height and width */
2711 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2712 Scale2Ratio(cm->vert_scale, &vr, &vs);
/* Round up so the scaled dimensions never truncate a partial pixel */
2713 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
2714 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
2716 /* If the image size has changed we need to reallocate the buffers
2717 * and resample the source image
2719 if ((cm->Width != new_width) || (cm->Height != new_height))
2721 cm->Width = new_width;
2722 cm->Height = new_height;
2723 vp8_alloc_compressor_data(cpi);
2724 scale_and_extend_source(cpi->un_scaled_source, cpi);
/* Book-keeping after coding an alt-ref (ARF) frame: schedules the next
 * GF/ARF interval, records the ARF bit overspend to be recovered by
 * later inter frames, resets GF usage tracking, and marks the ARF as
 * active rather than pending.
 */
2734 static void update_alt_ref_frame_stats(VP8_COMP *cpi)
2736 VP8_COMMON *cm = &cpi->common;
2738 /* Select an interval before next GF or altref */
2739 if (!cpi->auto_gold)
2740 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2742 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
2744 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2746 /* Set the bits per frame that we should try and recover in
2747 * subsequent inter frames to account for the extra GF spend...
2748 * note that this does not apply for GF updates that occur
2749 * coincident with a key frame as the extra cost of key frames is
2750 * dealt with elsewhere.
2752 cpi->gf_overspend_bits += cpi->projected_frame_size;
2753 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2756 /* Update data structure that monitors level of reference to last GF */
2757 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2758 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2760 /* this frame refreshes means next frames don't unless specified by user */
2761 cpi->frames_since_golden = 0;
2763 /* Clear the alternate reference update pending flag. */
2764 cpi->source_alt_ref_pending = 0;
2766 /* Set the alternate reference frame active flag */
2767 cpi->source_alt_ref_active = 1;
/* Book-keeping after coding a frame, for golden-frame (GF) state:
 * when the frame refreshed the GF, schedule the next GF interval,
 * account for GF bit overspend, and reset GF usage tracking; otherwise
 * advance the GF/ARF countdowns and accumulate per-reference usage
 * counts for rate-control heuristics.
 */
2771 static void update_golden_frame_stats(VP8_COMP *cpi)
2773 VP8_COMMON *cm = &cpi->common;
2775 /* Update the Golden frame usage counts. */
2776 if (cm->refresh_golden_frame)
2778 /* Select an interval before next GF */
2779 if (!cpi->auto_gold)
2780 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2782 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
2784 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2786 /* Set the bits per frame that we should try and recover in
2787 * subsequent inter frames to account for the extra GF spend...
2788 * note that this does not apply for GF updates that occur
2789 * coincident with a key frame as the extra cost of key frames
2790 * is dealt with elsewhere.
2792 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
2794 /* Calculate GF bits to be recovered
2795 * Projected size - av frame bits available for inter
2796 * frames for clip as a whole
2798 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
2801 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2805 /* Update data structure that monitors level of reference to last GF */
2806 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2807 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2809 /* this frame refreshes means next frames don't unless specified by
2812 cm->refresh_golden_frame = 0;
2813 cpi->frames_since_golden = 0;
/* Restart the recent-usage accumulators from a neutral baseline */
2815 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
2816 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
2817 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
2818 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
2820 /* ******** Fixed Q test code only ************ */
2821 /* If we are going to use the ALT reference for the next group of
2822 * frames set a flag to say so.
2824 if (cpi->oxcf.fixed_q >= 0 &&
2825 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
2827 cpi->source_alt_ref_pending = 1;
2828 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
2831 if (!cpi->source_alt_ref_pending)
2832 cpi->source_alt_ref_active = 0;
2834 /* Decrement count down till next gf */
2835 if (cpi->frames_till_gf_update_due > 0)
2836 cpi->frames_till_gf_update_due--;
/* Non-GF, non-ARF frame: just advance the countdowns and usage stats */
2839 else if (!cpi->common.refresh_alt_ref_frame)
2841 /* Decrement count down till next gf */
2842 if (cpi->frames_till_gf_update_due > 0)
2843 cpi->frames_till_gf_update_due--;
2845 if (cpi->frames_till_alt_ref_frame)
2846 cpi->frames_till_alt_ref_frame --;
2848 cpi->frames_since_golden ++;
2850 if (cpi->frames_since_golden > 1)
2852 cpi->recent_ref_frame_usage[INTRA_FRAME] +=
2853 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
2854 cpi->recent_ref_frame_usage[LAST_FRAME] +=
2855 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
2856 cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
2857 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
2858 cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
2859 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
2864 /* This function updates the reference frame probability estimates
2865 * (prob_intra_coded / prob_last_coded / prob_gf_coded) that will be
 * used during mode selection.  Key frames and all-skipped frames get
 * fixed defaults; otherwise single-layer encodes bias the costs using
 * hard-coded heuristics keyed off the GF/ARF refresh state.
2867 static void update_rd_ref_frame_probs(VP8_COMP *cpi)
2869 VP8_COMMON *cm = &cpi->common;
/* Per-macroblock reference usage counts from the frame just coded */
2871 const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
2872 const int rf_intra = rfct[INTRA_FRAME];
2873 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
2875 if (cm->frame_type == KEY_FRAME)
2877 cpi->prob_intra_coded = 255;
2878 cpi->prob_last_coded = 128;
2879 cpi->prob_gf_coded = 128;
/* No usage data at all: fall back to neutral defaults */
2881 else if (!(rf_intra + rf_inter))
2883 cpi->prob_intra_coded = 63;
2884 cpi->prob_last_coded = 128;
2885 cpi->prob_gf_coded = 128;
2888 /* update reference frame costs since we can do better than what we got
/* Heuristic biases below apply only to single-layer encoding */
2891 if (cpi->oxcf.number_of_layers == 1)
2893 if (cpi->common.refresh_alt_ref_frame)
2895 cpi->prob_intra_coded += 40;
2896 if (cpi->prob_intra_coded > 255)
2897 cpi->prob_intra_coded = 255;
2898 cpi->prob_last_coded = 200;
2899 cpi->prob_gf_coded = 1;
2901 else if (cpi->frames_since_golden == 0)
2903 cpi->prob_last_coded = 214;
2905 else if (cpi->frames_since_golden == 1)
2907 cpi->prob_last_coded = 192;
2908 cpi->prob_gf_coded = 220;
2910 else if (cpi->source_alt_ref_active)
2912 cpi->prob_gf_coded -= 20;
2914 if (cpi->prob_gf_coded < 10)
2915 cpi->prob_gf_coded = 10;
/* Without an active ARF, golden is effectively never the best pick */
2917 if (!cpi->source_alt_ref_active)
2918 cpi->prob_gf_coded = 255;
2923 /* 1 = key, 0 = inter */
/* Heuristic one-pass key frame decision based on how much of the frame
 * chose intra modes relative to recent history, plus (at the fastest
 * real-time speeds) the trend of intra vs prediction error.
 */
2924 static int decide_key_frame(VP8_COMP *cpi)
2926 VP8_COMMON *cm = &cpi->common;
2928 int code_key_frame = 0;
/* At the very highest speed settings skip the test entirely
 * (the early-return line is not visible in this extracted listing) */
2932 if (cpi->Speed > 11)
2935 /* Clear down mmx registers */
2936 vp8_clear_system_state();
/* Fast real-time path: compare error-trend ratios instead of counts */
2938 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
2940 double change = 1.0 * abs((int)(cpi->mb.intra_error -
2941 cpi->last_intra_error)) / (1 + cpi->last_intra_error);
2942 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
2943 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
2944 double minerror = cm->MBs * 256;
2946 cpi->last_intra_error = cpi->mb.intra_error;
2947 cpi->last_prediction_error = cpi->mb.prediction_error;
/* Key frame when intra cost is close to inter cost, the frame is not
 * trivially easy, and either error measure moved sharply */
2949 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
2950 && cpi->mb.prediction_error > minerror
2951 && (change > .25 || change2 > .25))
2953 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
2961 /* If the following are true we might as well code a key frame */
2962 if (((cpi->this_frame_percent_intra == 100) &&
2963 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
2964 ((cpi->this_frame_percent_intra > 95) &&
2965 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
2969 /* in addition if the following are true and this is not a golden frame
2970 * then code a key frame Note that on golden frames there often seems
2971 * to be a pop in intra usage anyway hence this restriction is
2972 * designed to prevent spurious key frames. The Intra pop needs to be
2975 else if (((cpi->this_frame_percent_intra > 60) &&
2976 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
2977 ((cpi->this_frame_percent_intra > 75) &&
2978 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
2979 ((cpi->this_frame_percent_intra > 90) &&
2980 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
2982 if (!cm->refresh_golden_frame)
2986 return code_key_frame;
2990 #if !(CONFIG_REALTIME_ONLY)
2991 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
2996 vp8_set_quantizer(cpi, 26);
2998 vp8_first_pass(cpi);
3003 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
3006 /* write the frame */
3011 sprintf(filename, "cx\\y%04d.raw", this_frame);
3012 yframe = fopen(filename, "wb");
3014 for (i = 0; i < frame->y_height; i++)
3015 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
3018 sprintf(filename, "cx\\u%04d.raw", this_frame);
3019 yframe = fopen(filename, "wb");
3021 for (i = 0; i < frame->uv_height; i++)
3022 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3025 sprintf(filename, "cx\\v%04d.raw", this_frame);
3026 yframe = fopen(filename, "wb");
3028 for (i = 0; i < frame->uv_height; i++)
3029 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3034 /* return of 0 means drop frame */
3036 /* Function to test for conditions that indicate we should loop
3037 * back and recode a frame.  Returns non-zero to force a recode.
3039 static int recode_loop_test( VP8_COMP *cpi,
3040 int high_limit, int low_limit,
3041 int q, int maxq, int minq )
3043 int force_recode = 0;
3044 VP8_COMMON *cm = &cpi->common;
3046 /* Is frame recode allowed at all
3047 * Yes if either recode mode 1 is selected or mode two is selected
3048 * and the frame is a key frame, golden frame or alt_ref_frame
3050 if ( (cpi->sf.recode_loop == 1) ||
3051 ( (cpi->sf.recode_loop == 2) &&
3052 ( (cm->frame_type == KEY_FRAME) ||
3053 cm->refresh_golden_frame ||
3054 cm->refresh_alt_ref_frame ) ) )
3056 /* General over and under shoot tests */
3057 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
3058 ((cpi->projected_frame_size < low_limit) && (q > minq)) )
3062 /* Special Constrained quality tests */
3063 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3065 /* Undershoot and below auto cq level */
3066 if ( (q > cpi->cq_target_quality) &&
3067 (cpi->projected_frame_size <
3068 ((cpi->this_frame_target * 7) >> 3)))
3072 /* Severe undershoot and between auto and user cq level */
3073 else if ( (q > cpi->oxcf.cq_level) &&
3074 (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
3075 (cpi->active_best_quality > cpi->oxcf.cq_level))
/* Relax the best-quality floor to the user cq level before recoding */
3078 cpi->active_best_quality = cpi->oxcf.cq_level;
3083 return force_recode;
/* After a frame is encoded, apply the signalled reference buffer
 * refreshes and copies: key frames become all three references;
 * otherwise the ARF/GF/last slots are refreshed from the new frame or
 * copied between each other per copy_buffer_to_arf/gf (1 = from last,
 * 2 = from the other of GF/ARF).  The per-buffer VP8_*_FRAME flag bits
 * track which physical yv12_fb buffer backs each logical reference.
 * With temporal denoising enabled, the denoiser running averages are
 * kept in step with the same refresh decisions.
 */
3086 static void update_reference_frames(VP8_COMP *cpi)
3088 VP8_COMMON *cm = &cpi->common;
3089 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
3091 /* At this point the new frame has been encoded.
3092 * If any buffer copy / swapping is signaled it should be done here.
3095 if (cm->frame_type == KEY_FRAME)
/* Key frame refreshes golden and altref as well as last */
3097 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
3099 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3100 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3102 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
3104 #if CONFIG_MULTI_RES_ENCODING
3105 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3106 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3109 else /* For non key frames */
3111 if (cm->refresh_alt_ref_frame)
3113 assert(!cm->copy_buffer_to_arf);
3115 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
3116 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3117 cm->alt_fb_idx = cm->new_fb_idx;
3119 #if CONFIG_MULTI_RES_ENCODING
3120 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3123 else if (cm->copy_buffer_to_arf)
3125 assert(!(cm->copy_buffer_to_arf & ~0x3));
/* copy_buffer_to_arf == 1: point ARF at the last-frame buffer */
3127 if (cm->copy_buffer_to_arf == 1)
3129 if(cm->alt_fb_idx != cm->lst_fb_idx)
3131 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
3132 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3133 cm->alt_fb_idx = cm->lst_fb_idx;
3135 #if CONFIG_MULTI_RES_ENCODING
3136 cpi->current_ref_frames[ALTREF_FRAME] =
3137 cpi->current_ref_frames[LAST_FRAME];
3141 else /* if (cm->copy_buffer_to_arf == 2) */
3143 if(cm->alt_fb_idx != cm->gld_fb_idx)
3145 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
3146 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3147 cm->alt_fb_idx = cm->gld_fb_idx;
3149 #if CONFIG_MULTI_RES_ENCODING
3150 cpi->current_ref_frames[ALTREF_FRAME] =
3151 cpi->current_ref_frames[GOLDEN_FRAME];
3157 if (cm->refresh_golden_frame)
3159 assert(!cm->copy_buffer_to_gf);
3161 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
3162 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3163 cm->gld_fb_idx = cm->new_fb_idx;
3165 #if CONFIG_MULTI_RES_ENCODING
3166 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3169 else if (cm->copy_buffer_to_gf)
/* NOTE(review): this asserts copy_buffer_to_arf inside the
 * copy_buffer_to_gf branch - looks like a copy/paste slip; it
 * presumably should check copy_buffer_to_gf. Confirm upstream. */
3171 assert(!(cm->copy_buffer_to_arf & ~0x3));
/* copy_buffer_to_gf == 1: point GF at the last-frame buffer */
3173 if (cm->copy_buffer_to_gf == 1)
3175 if(cm->gld_fb_idx != cm->lst_fb_idx)
3177 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
3178 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3179 cm->gld_fb_idx = cm->lst_fb_idx;
3181 #if CONFIG_MULTI_RES_ENCODING
3182 cpi->current_ref_frames[GOLDEN_FRAME] =
3183 cpi->current_ref_frames[LAST_FRAME];
3187 else /* if (cm->copy_buffer_to_gf == 2) */
3189 if(cm->alt_fb_idx != cm->gld_fb_idx)
3191 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
3192 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3193 cm->gld_fb_idx = cm->alt_fb_idx;
3195 #if CONFIG_MULTI_RES_ENCODING
3196 cpi->current_ref_frames[GOLDEN_FRAME] =
3197 cpi->current_ref_frames[ALTREF_FRAME];
3204 if (cm->refresh_last_frame)
3206 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
3207 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
3208 cm->lst_fb_idx = cm->new_fb_idx;
3210 #if CONFIG_MULTI_RES_ENCODING
3211 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
3215 #if CONFIG_TEMPORAL_DENOISING
3216 if (cpi->oxcf.noise_sensitivity)
3218 /* we shouldn't have to keep multiple copies as we know in advance which
3219 * buffer we should start - for now to get something up and running
3220 * I've chosen to copy the buffers
3222 if (cm->frame_type == KEY_FRAME)
/* Key frame: seed every running average from the LAST average */
3225 vp8_yv12_copy_frame(
3227 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3229 vp8_yv12_extend_frame_borders(
3230 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3232 for (i = 2; i < MAX_REF_FRAMES - 1; i++)
3233 vp8_yv12_copy_frame(
3234 &cpi->denoiser.yv12_running_avg[LAST_FRAME],
3235 &cpi->denoiser.yv12_running_avg[i]);
3237 else /* For non key frames */
3239 vp8_yv12_extend_frame_borders(
3240 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
/* Mirror each reference refresh into the denoiser averages */
3242 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
3244 vp8_yv12_copy_frame(
3245 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3246 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
3248 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
3250 vp8_yv12_copy_frame(
3251 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3252 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
3254 if(cm->refresh_last_frame)
3256 vp8_yv12_copy_frame(
3257 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3258 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
/* Pick a loop filter strength for the frame just coded (fast or full
 * search depending on sf.auto_filter), apply the loop filter when the
 * chosen level is non-zero, and extend the reconstructed frame borders
 * ready for use as a prediction reference.  Timing of the level search
 * is accumulated in cpi->time_pick_lpf.
 */
3267 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
3269 const FRAME_TYPE frame_type = cm->frame_type;
/* Filtering disabled for this stream: force level 0 */
3273 cm->filter_level = 0;
3277 struct vpx_usec_timer timer;
3279 vp8_clear_system_state();
3281 vpx_usec_timer_start(&timer);
3282 if (cpi->sf.auto_filter == 0)
3283 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
3286 vp8cx_pick_filter_level(cpi->Source, cpi);
3288 if (cm->filter_level > 0)
3290 vp8cx_set_alt_lf_level(cpi, cm->filter_level);
3293 vpx_usec_timer_mark(&timer);
3294 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
3297 #if CONFIG_MULTITHREAD
3298 if (cpi->b_multi_threaded)
3299 sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
3302 if (cm->filter_level > 0)
3304 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
3307 vp8_yv12_extend_frame_borders(cm->frame_to_show);
3311 static void encode_frame_to_data_rate
3314 unsigned long *size,
3315 unsigned char *dest,
3316 unsigned char* dest_end,
3317 unsigned int *frame_flags
3321 int frame_over_shoot_limit;
3322 int frame_under_shoot_limit;
3327 VP8_COMMON *cm = &cpi->common;
3328 int active_worst_qchanged = 0;
3330 #if !(CONFIG_REALTIME_ONLY)
3334 int zbin_oq_low = 0;
3337 int overshoot_seen = 0;
3338 int undershoot_seen = 0;
3341 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
3342 cpi->oxcf.optimal_buffer_level / 100);
3343 int drop_mark75 = drop_mark * 2 / 3;
3344 int drop_mark50 = drop_mark / 4;
3345 int drop_mark25 = drop_mark / 8;
3348 /* Clear down mmx registers to allow floating point in what follows */
3349 vp8_clear_system_state();
3351 #if CONFIG_MULTITHREAD
3352 /* wait for the last picture loopfilter thread done */
3353 if (cpi->b_lpf_running)
3355 sem_wait(&cpi->h_event_end_lpf);
3356 cpi->b_lpf_running = 0;
3360 if(cpi->force_next_frame_intra)
3362 cm->frame_type = KEY_FRAME; /* delayed intra frame */
3363 cpi->force_next_frame_intra = 0;
3366 /* For an alt ref frame in 2 pass we skip the call to the second pass
3367 * function that sets the target bandwidth
3369 #if !(CONFIG_REALTIME_ONLY)
3373 if (cpi->common.refresh_alt_ref_frame)
3375 /* Per frame bit target for the alt ref frame */
3376 cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
3377 /* per second target bitrate */
3378 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
3379 cpi->output_framerate);
3384 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
3386 /* Default turn off buffer to buffer copying */
3387 cm->copy_buffer_to_gf = 0;
3388 cm->copy_buffer_to_arf = 0;
3390 /* Clear zbin over-quant value and mode boost values. */
3391 cpi->mb.zbin_over_quant = 0;
3392 cpi->mb.zbin_mode_boost = 0;
3394 /* Enable or disable mode based tweaking of the zbin
3395 * For 2 Pass Only used where GF/ARF prediction quality
3396 * is above a threshold
3398 cpi->mb.zbin_mode_boost_enabled = 1;
3401 if ( cpi->gfu_boost <= 400 )
3403 cpi->mb.zbin_mode_boost_enabled = 0;
3407 /* Current default encoder behaviour for the altref sign bias */
3408 if (cpi->source_alt_ref_active)
3409 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
3411 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
3413 /* Check to see if a key frame is signaled
3414 * For two pass with auto key frame enabled cm->frame_type may already
3415 * be set, but not for one pass.
3417 if ((cm->current_video_frame == 0) ||
3418 (cm->frame_flags & FRAMEFLAGS_KEY) ||
3419 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
3421 /* Key frame from VFW/auto-keyframe/first frame */
3422 cm->frame_type = KEY_FRAME;
3425 #if CONFIG_MULTI_RES_ENCODING
3426 /* In multi-resolution encoding, frame_type is decided by lowest-resolution
3427 * encoder. Same frame_type is adopted while encoding at other resolution.
3429 if (cpi->oxcf.mr_encoder_id)
3431 LOWER_RES_FRAME_INFO* low_res_frame_info
3432 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
3434 cm->frame_type = low_res_frame_info->frame_type;
3436 if(cm->frame_type != KEY_FRAME)
3438 cpi->mr_low_res_mv_avail = 1;
3439 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
3441 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
3442 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
3443 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
3445 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
3446 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
3447 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
3449 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
3450 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
3451 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
3456 /* Set various flags etc to special state if it is a key frame */
3457 if (cm->frame_type == KEY_FRAME)
3461 // Set the loop filter deltas and segmentation map update
3462 setup_features(cpi);
3464 /* The alternate reference frame cannot be active for a key frame */
3465 cpi->source_alt_ref_active = 0;
3467 /* Reset the RD threshold multipliers to default of * 1 (128) */
3468 for (i = 0; i < MAX_MODES; i++)
3470 cpi->mb.rd_thresh_mult[i] = 128;
3475 /* Experimental code for lagged compress and one pass
3476 * Initialise one_pass GF frames stats
3477 * Update stats used for GF selection
3480 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
3482 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
3483 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
3484 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
3485 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
3486 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
3487 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
3488 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
3489 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
3490 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
3494 update_rd_ref_frame_probs(cpi);
3496 if (cpi->drop_frames_allowed)
3498 /* The reset to decimation 0 is only done here for one pass.
3499 * Once it is set two pass leaves decimation on till the next kf.
3501 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
3502 cpi->decimation_factor --;
3504 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
3505 cpi->decimation_factor = 1;
3507 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
3509 cpi->decimation_factor = 3;
3511 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
3513 cpi->decimation_factor = 2;
3515 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
3517 cpi->decimation_factor = 1;
3521 /* The following decimates the frame rate according to a regular
3522 * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
3523 * prevent buffer under-run in CBR mode. Alternatively it might be
3524 * desirable in some situations to drop frame rate but throw more bits
3527 * Note that dropping a key frame can be problematic if spatial
3528 * resampling is also active
3530 if (cpi->decimation_factor > 0)
3532 switch (cpi->decimation_factor)
3535 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
3538 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3541 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3545 /* Note that we should not throw out a key frame (especially when
3546 * spatial resampling is enabled).
3548 if (cm->frame_type == KEY_FRAME)
3550 cpi->decimation_count = cpi->decimation_factor;
3552 else if (cpi->decimation_count > 0)
3554 cpi->decimation_count --;
3556 cpi->bits_off_target += cpi->av_per_frame_bandwidth;
3557 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
3558 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
3560 #if CONFIG_MULTI_RES_ENCODING
3561 vp8_store_drop_frame_info(cpi);
3564 cm->current_video_frame++;
3565 cpi->frames_since_key++;
3566 // We advance the temporal pattern for dropped frames.
3567 cpi->temporal_pattern_counter++;
3569 #if CONFIG_INTERNAL_STATS
3573 cpi->buffer_level = cpi->bits_off_target;
3575 if (cpi->oxcf.number_of_layers > 1)
3579 /* Propagate bits saved by dropping the frame to higher
3582 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
3584 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3585 lc->bits_off_target += (int)(lc->target_bandwidth /
3587 if (lc->bits_off_target > lc->maximum_buffer_size)
3588 lc->bits_off_target = lc->maximum_buffer_size;
3589 lc->buffer_level = lc->bits_off_target;
3596 cpi->decimation_count = cpi->decimation_factor;
3599 cpi->decimation_count = 0;
3601 /* Decide how big to make the frame */
3602 if (!vp8_pick_frame_size(cpi))
3604 /*TODO: 2 drop_frame and return code could be put together. */
3605 #if CONFIG_MULTI_RES_ENCODING
3606 vp8_store_drop_frame_info(cpi);
3608 cm->current_video_frame++;
3609 cpi->frames_since_key++;
3610 // We advance the temporal pattern for dropped frames.
3611 cpi->temporal_pattern_counter++;
3615 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
3616 * This has a knock on effect on active best quality as well.
3617 * For CBR if the buffer reaches its maximum level then we can no longer
3618 * save up bits for later frames so we might as well use them up
3619 * on the current frame.
3621 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
3622 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
3624 /* Max adjustment is 1/4 */
3625 int Adjustment = cpi->active_worst_quality / 4;
3631 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
3633 buff_lvl_step = (int)
3634 ((cpi->oxcf.maximum_buffer_size -
3635 cpi->oxcf.optimal_buffer_level) /
3640 ((cpi->buffer_level -
3641 cpi->oxcf.optimal_buffer_level) /
3647 cpi->active_worst_quality -= Adjustment;
3649 if(cpi->active_worst_quality < cpi->active_best_quality)
3650 cpi->active_worst_quality = cpi->active_best_quality;
3654 /* Set an active best quality and if necessary active worst quality
3655 * There is some odd behavior for one pass here that needs attention.
3657 if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
3659 vp8_clear_system_state();
3661 Q = cpi->active_worst_quality;
3663 if ( cm->frame_type == KEY_FRAME )
3665 if ( cpi->pass == 2 )
3667 if (cpi->gfu_boost > 600)
3668 cpi->active_best_quality = kf_low_motion_minq[Q];
3670 cpi->active_best_quality = kf_high_motion_minq[Q];
3672 /* Special case for key frames forced because we have reached
3673 * the maximum key frame interval. Here force the Q to a range
3674 * based on the ambient Q to reduce the risk of popping
3676 if ( cpi->this_key_frame_forced )
3678 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
3679 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
3680 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
3681 cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
3684 /* One pass more conservative */
3686 cpi->active_best_quality = kf_high_motion_minq[Q];
3689 else if (cpi->oxcf.number_of_layers==1 &&
3690 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
3692 /* Use the lower of cpi->active_worst_quality and recent
3693 * average Q as basis for GF/ARF Q limit unless last frame was
3696 if ( (cpi->frames_since_key > 1) &&
3697 (cpi->avg_frame_qindex < cpi->active_worst_quality) )
3699 Q = cpi->avg_frame_qindex;
3702 /* For constrained quality dont allow Q less than the cq level */
3703 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3704 (Q < cpi->cq_target_quality) )
3706 Q = cpi->cq_target_quality;
3709 if ( cpi->pass == 2 )
3711 if ( cpi->gfu_boost > 1000 )
3712 cpi->active_best_quality = gf_low_motion_minq[Q];
3713 else if ( cpi->gfu_boost < 400 )
3714 cpi->active_best_quality = gf_high_motion_minq[Q];
3716 cpi->active_best_quality = gf_mid_motion_minq[Q];
3718 /* Constrained quality use slightly lower active best. */
3719 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
3721 cpi->active_best_quality =
3722 cpi->active_best_quality * 15/16;
3725 /* One pass more conservative */
3727 cpi->active_best_quality = gf_high_motion_minq[Q];
3731 cpi->active_best_quality = inter_minq[Q];
3733 /* For the constant/constrained quality mode we dont want
3734 * q to fall below the cq level.
3736 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3737 (cpi->active_best_quality < cpi->cq_target_quality) )
3739 /* If we are strongly undershooting the target rate in the last
3740 * frames then use the user passed in cq value not the auto
3743 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
3744 cpi->active_best_quality = cpi->oxcf.cq_level;
3746 cpi->active_best_quality = cpi->cq_target_quality;
3750 /* If CBR and the buffer is as full then it is reasonable to allow
3751 * higher quality on the frames to prevent bits just going to waste.
3753 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
3755 /* Note that the use of >= here eliminates the risk of a divide
3756 * by 0 error in the else if clause
3758 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
3759 cpi->active_best_quality = cpi->best_quality;
3761 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
3763 int Fraction = (int)
3764 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
3765 / (cpi->oxcf.maximum_buffer_size -
3766 cpi->oxcf.optimal_buffer_level));
3767 int min_qadjustment = ((cpi->active_best_quality -
3768 cpi->best_quality) * Fraction) / 128;
3770 cpi->active_best_quality -= min_qadjustment;
3774 /* Make sure constrained quality mode limits are adhered to for the first
3775 * few frames of one pass encodes
3777 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3779 if ( (cm->frame_type == KEY_FRAME) ||
3780 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
3782 cpi->active_best_quality = cpi->best_quality;
3784 else if (cpi->active_best_quality < cpi->cq_target_quality)
3786 cpi->active_best_quality = cpi->cq_target_quality;
3790 /* Clip the active best and worst quality values to limits */
3791 if (cpi->active_worst_quality > cpi->worst_quality)
3792 cpi->active_worst_quality = cpi->worst_quality;
3794 if (cpi->active_best_quality < cpi->best_quality)
3795 cpi->active_best_quality = cpi->best_quality;
3797 if ( cpi->active_worst_quality < cpi->active_best_quality )
3798 cpi->active_worst_quality = cpi->active_best_quality;
3800 /* Determine initial Q to try */
3801 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3803 #if !(CONFIG_REALTIME_ONLY)
3805 /* Set highest allowed value for Zbin over quant */
3806 if (cm->frame_type == KEY_FRAME)
3808 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
3809 (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
3814 zbin_oq_high = ZBIN_OQ_MAX;
3817 /* Setup background Q adjustment for error resilient mode.
3818 * For multi-layer encodes only enable this for the base layer.
3820 if (cpi->cyclic_refresh_mode_enabled)
3822 if (cpi->current_layer==0)
3823 cyclic_background_refresh(cpi, Q, 0);
3825 disable_segmentation(cpi);
3828 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
3830 #if !(CONFIG_REALTIME_ONLY)
3831 /* Limit Q range for the adaptive loop. */
3832 bottom_index = cpi->active_best_quality;
3833 top_index = cpi->active_worst_quality;
3834 q_low = cpi->active_best_quality;
3835 q_high = cpi->active_worst_quality;
3838 vp8_save_coding_context(cpi);
3842 scale_and_extend_source(cpi->un_scaled_source, cpi);
3844 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
3846 if (cpi->oxcf.noise_sensitivity > 0)
3851 switch (cpi->oxcf.noise_sensitivity)
3874 if (cm->frame_type == KEY_FRAME)
3876 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3880 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3882 src = cpi->Source->y_buffer;
3884 if (cpi->Source->y_stride < 0)
3886 src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
3893 #ifdef OUTPUT_YUV_SRC
3894 vp8_write_yuv_frame(cpi->Source);
3899 vp8_clear_system_state();
3901 vp8_set_quantizer(cpi, Q);
3903 /* setup skip prob for costing in mode/mv decision */
3904 if (cpi->common.mb_no_coeff_skip)
3906 cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
3908 if (cm->frame_type != KEY_FRAME)
3910 if (cpi->common.refresh_alt_ref_frame)
3912 if (cpi->last_skip_false_probs[2] != 0)
3913 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3916 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
3917 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3918 else if (cpi->last_skip_false_probs[2]!=0)
3919 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
3922 else if (cpi->common.refresh_golden_frame)
3924 if (cpi->last_skip_false_probs[1] != 0)
3925 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3928 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
3929 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3930 else if (cpi->last_skip_false_probs[1]!=0)
3931 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
3936 if (cpi->last_skip_false_probs[0] != 0)
3937 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3940 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
3941 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3942 else if(cpi->last_skip_false_probs[0]!=0)
3943 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
3947 /* as this is for cost estimate, let's make sure it does not
3948 * go extreme either way
3950 if (cpi->prob_skip_false < 5)
3951 cpi->prob_skip_false = 5;
3953 if (cpi->prob_skip_false > 250)
3954 cpi->prob_skip_false = 250;
3956 if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
3957 cpi->prob_skip_false = 1;
3964 FILE *f = fopen("skip.stt", "a");
3965 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
3973 if (cm->frame_type == KEY_FRAME)
3975 if(resize_key_frame(cpi))
3977 /* If the frame size has changed, need to reset Q, quantizer,
3978 * and background refresh.
3980 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3981 if (cpi->cyclic_refresh_mode_enabled)
3983 if (cpi->current_layer==0)
3984 cyclic_background_refresh(cpi, Q, 0);
3986 disable_segmentation(cpi);
3988 vp8_set_quantizer(cpi, Q);
3991 vp8_setup_key_frame(cpi);
3996 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
3998 if(cpi->oxcf.error_resilient_mode)
3999 cm->refresh_entropy_probs = 0;
4001 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
4003 if (cm->frame_type == KEY_FRAME)
4004 cm->refresh_entropy_probs = 1;
4007 if (cm->refresh_entropy_probs == 0)
4009 /* save a copy for later refresh */
4010 vpx_memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
4013 vp8_update_coef_context(cpi);
4015 vp8_update_coef_probs(cpi);
4017 /* transform / motion compensation build reconstruction frame
4018 * +pack coef partitions
4020 vp8_encode_frame(cpi);
4022 /* cpi->projected_frame_size is not needed for RT mode */
4025 /* transform / motion compensation build reconstruction frame */
4026 vp8_encode_frame(cpi);
4028 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
4029 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
4031 vp8_clear_system_state();
4033 /* Test to see if the stats generated for this frame indicate that
4034 * we should have coded a key frame (assuming that we didn't)!
4037 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
4038 && cpi->compressor_speed != 2)
4040 #if !(CONFIG_REALTIME_ONLY)
4041 if (decide_key_frame(cpi))
4043 /* Reset all our sizing numbers and recode */
4044 cm->frame_type = KEY_FRAME;
4046 vp8_pick_frame_size(cpi);
4048 /* Clear the Alt reference frame active flag when we have
4051 cpi->source_alt_ref_active = 0;
4053 // Set the loop filter deltas and segmentation map update
4054 setup_features(cpi);
4056 vp8_restore_coding_context(cpi);
4058 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4060 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4062 /* Limit Q range for the adaptive loop. */
4063 bottom_index = cpi->active_best_quality;
4064 top_index = cpi->active_worst_quality;
4065 q_low = cpi->active_best_quality;
4066 q_high = cpi->active_worst_quality;
4076 vp8_clear_system_state();
4078 if (frame_over_shoot_limit == 0)
4079 frame_over_shoot_limit = 1;
4081 /* Are we are overshooting and up against the limit of active max Q. */
4082 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
4083 (Q == cpi->active_worst_quality) &&
4084 (cpi->active_worst_quality < cpi->worst_quality) &&
4085 (cpi->projected_frame_size > frame_over_shoot_limit))
4087 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
4089 /* If so is there any scope for relaxing it */
4090 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
4092 cpi->active_worst_quality++;
4093 /* Assume 1 qstep = about 4% on frame size. */
4094 over_size_percent = (int)(over_size_percent * 0.96);
4096 #if !(CONFIG_REALTIME_ONLY)
4097 top_index = cpi->active_worst_quality;
4099 /* If we have updated the active max Q do not call
4100 * vp8_update_rate_correction_factors() this loop.
4102 active_worst_qchanged = 1;
4105 active_worst_qchanged = 0;
4107 #if !(CONFIG_REALTIME_ONLY)
4108 /* Special case handling for forced key frames */
4109 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
4112 int kf_err = vp8_calc_ss_err(cpi->Source,
4113 &cm->yv12_fb[cm->new_fb_idx]);
4115 /* The key frame is not good enough */
4116 if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
4119 q_high = (Q > q_low) ? (Q - 1) : q_low;
4122 Q = (q_high + q_low) >> 1;
4124 /* The key frame is much better than the previous frame */
4125 else if ( kf_err < (cpi->ambient_err >> 1) )
4128 q_low = (Q < q_high) ? (Q + 1) : q_high;
4131 Q = (q_high + q_low + 1) >> 1;
4134 /* Clamp Q to upper and lower limits: */
4143 /* Is the projected frame size out of range and are we allowed
4144 * to attempt to recode.
4146 else if ( recode_loop_test( cpi,
4147 frame_over_shoot_limit, frame_under_shoot_limit,
4148 Q, top_index, bottom_index ) )
4153 /* Frame size out of permitted range. Update correction factor
4154 * & compute new Q to try...
4157 /* Frame is too large */
4158 if (cpi->projected_frame_size > cpi->this_frame_target)
4160 /* Raise Qlow as to at least the current value */
4161 q_low = (Q < q_high) ? (Q + 1) : q_high;
4163 /* If we are using over quant do the same for zbin_oq_low */
4164 if (cpi->mb.zbin_over_quant > 0)
4165 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4166 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4168 if (undershoot_seen)
4170 /* Update rate_correction_factor unless
4171 * cpi->active_worst_quality has changed.
4173 if (!active_worst_qchanged)
4174 vp8_update_rate_correction_factors(cpi, 1);
4176 Q = (q_high + q_low + 1) / 2;
4178 /* Adjust cpi->zbin_over_quant (only allowed when Q
4182 cpi->mb.zbin_over_quant = 0;
4185 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4186 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4187 cpi->mb.zbin_over_quant =
4188 (zbin_oq_high + zbin_oq_low) / 2;
4193 /* Update rate_correction_factor unless
4194 * cpi->active_worst_quality has changed.
4196 if (!active_worst_qchanged)
4197 vp8_update_rate_correction_factors(cpi, 0);
4199 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4201 while (((Q < q_low) ||
4202 (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
4205 vp8_update_rate_correction_factors(cpi, 0);
4206 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4213 /* Frame is too small */
4216 if (cpi->mb.zbin_over_quant == 0)
4217 /* Lower q_high if not using over quant */
4218 q_high = (Q > q_low) ? (Q - 1) : q_low;
4220 /* else lower zbin_oq_high */
4221 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
4222 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
4226 /* Update rate_correction_factor unless
4227 * cpi->active_worst_quality has changed.
4229 if (!active_worst_qchanged)
4230 vp8_update_rate_correction_factors(cpi, 1);
4232 Q = (q_high + q_low) / 2;
4234 /* Adjust cpi->zbin_over_quant (only allowed when Q
4238 cpi->mb.zbin_over_quant = 0;
4240 cpi->mb.zbin_over_quant =
4241 (zbin_oq_high + zbin_oq_low) / 2;
4245 /* Update rate_correction_factor unless
4246 * cpi->active_worst_quality has changed.
4248 if (!active_worst_qchanged)
4249 vp8_update_rate_correction_factors(cpi, 0);
4251 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4253 /* Special case reset for qlow for constrained quality.
4254 * This should only trigger where there is very substantial
4255 * undershoot on a frame and the auto cq level is above
4256 * the user passed in value.
4258 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4264 while (((Q > q_high) ||
4265 (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
4268 vp8_update_rate_correction_factors(cpi, 0);
4269 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4274 undershoot_seen = 1;
4277 /* Clamp Q to upper and lower limits: */
4283 /* Clamp cpi->zbin_over_quant */
4284 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
4285 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
4286 zbin_oq_high : cpi->mb.zbin_over_quant;
4294 if (cpi->is_src_frame_alt_ref)
4299 vp8_restore_coding_context(cpi);
4301 #if CONFIG_INTERNAL_STATS
4302 cpi->tot_recode_hits++;
4309 /* Experimental code for lagged and one pass
4310 * Update stats used for one pass GF selection
4313 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
4314 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
4315 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
4319 /* Special case code to reduce pulsing when key frames are forced at a
4320 * fixed interval. Note the reconstruction error if it is the frame before
4321 * the force key frame
4323 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
4325 cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
4326 &cm->yv12_fb[cm->new_fb_idx]);
4329 /* This frame's MVs are saved and will be used in next frame's MV predictor.
4330 * Last frame has one more line(add to bottom) and one more column(add to
4331 * right) than cm->mip. The edge elements are initialized to 0.
4333 #if CONFIG_MULTI_RES_ENCODING
4334 if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
4336 if(cm->show_frame) /* do not save for altref frame */
4341 /* Point to beginning of allocated MODE_INFO arrays. */
4342 MODE_INFO *tmp = cm->mip;
4344 if(cm->frame_type != KEY_FRAME)
4346 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
4348 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
4350 if(tmp->mbmi.ref_frame != INTRA_FRAME)
4351 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
4353 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
4354 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
4361 /* Count last ref frame 0,0 usage on current encoded frame. */
4365 /* Point to beginning of MODE_INFO arrays. */
4366 MODE_INFO *tmp = cm->mi;
4368 cpi->zeromv_count = 0;
4370 if(cm->frame_type != KEY_FRAME)
4372 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
4374 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
4376 if(tmp->mbmi.mode == ZEROMV)
4377 cpi->zeromv_count++;
4385 #if CONFIG_MULTI_RES_ENCODING
4386 vp8_cal_dissimilarity(cpi);
4389 /* Update the GF useage maps.
4390 * This is done after completing the compression of a frame when all
4391 * modes etc. are finalized but before loop filter
4393 if (cpi->oxcf.number_of_layers == 1)
4394 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
4396 if (cm->frame_type == KEY_FRAME)
4397 cm->refresh_last_frame = 1;
4401 FILE *f = fopen("gfactive.stt", "a");
4402 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
4407 /* For inter frames the current default behavior is that when
4408 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
4409 * This is purely an encoder decision at present.
4411 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
4412 cm->copy_buffer_to_arf = 2;
4414 cm->copy_buffer_to_arf = 0;
4416 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
4418 #if CONFIG_MULTITHREAD
4419 if (cpi->b_multi_threaded)
4421 /* start loopfilter in separate thread */
4422 sem_post(&cpi->h_event_start_lpf);
4423 cpi->b_lpf_running = 1;
4428 vp8_loopfilter_frame(cpi, cm);
4431 update_reference_frames(cpi);
4433 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
4434 if (cpi->oxcf.error_resilient_mode)
4436 cm->refresh_entropy_probs = 0;
4440 #if CONFIG_MULTITHREAD
4441 /* wait that filter_level is picked so that we can continue with stream packing */
4442 if (cpi->b_multi_threaded)
4443 sem_wait(&cpi->h_event_end_lpf);
4446 /* build the bitstream */
4447 vp8_pack_bitstream(cpi, dest, dest_end, size);
4449 #if CONFIG_MULTITHREAD
4450 /* if PSNR packets are generated we have to wait for the lpf */
4451 if (cpi->b_lpf_running && cpi->b_calculate_psnr)
4453 sem_wait(&cpi->h_event_end_lpf);
4454 cpi->b_lpf_running = 0;
4458 /* Move storing frame_type out of the above loop since it is also
4459 * needed in motion search besides loopfilter */
4460 cm->last_frame_type = cm->frame_type;
4462 /* Update rate control heuristics */
4463 cpi->total_byte_count += (*size);
4464 cpi->projected_frame_size = (*size) << 3;
4466 if (cpi->oxcf.number_of_layers > 1)
4469 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4470 cpi->layer_context[i].total_byte_count += (*size);
4473 if (!active_worst_qchanged)
4474 vp8_update_rate_correction_factors(cpi, 2);
4476 cpi->last_q[cm->frame_type] = cm->base_qindex;
4478 if (cm->frame_type == KEY_FRAME)
4480 vp8_adjust_key_frame_context(cpi);
4483 /* Keep a record of ambient average Q. */
4484 if (cm->frame_type != KEY_FRAME)
4485 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
4487 /* Keep a record from which we can calculate the average Q excluding
4488 * GF updates and key frames
4490 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
4491 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
4495 /* Calculate the average Q for normal inter frames (not key or GFU
4498 if ( cpi->pass == 2 )
4500 cpi->ni_tot_qi += Q;
4501 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4505 /* Damp value for first few frames */
4506 if (cpi->ni_frames > 150 )
4508 cpi->ni_tot_qi += Q;
4509 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4511 /* For one pass, early in the clip ... average the current frame Q
4512 * value with the worstq entered by the user as a dampening measure
4516 cpi->ni_tot_qi += Q;
4517 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
4520 /* If the average Q is higher than what was used in the last
4521 * frame (after going through the recode loop to keep the frame
4522 * size within range) then use the last frame value - 1. The -1
4523 * is designed to stop Q and hence the data rate, from
4524 * progressively falling away during difficult sections, but at
4525 * the same time reduce the number of iterations around the
4528 if (Q > cpi->ni_av_qi)
4529 cpi->ni_av_qi = Q - 1;
4533 /* Update the buffer level variable. */
4534 /* Non-viewable frames are a special case and are treated as pure overhead. */
4535 if ( !cm->show_frame )
4536 cpi->bits_off_target -= cpi->projected_frame_size;
4538 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
4540 /* Clip the buffer level to the maximum specified buffer size */
4541 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
4542 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
4544 /* Rolling monitors of whether we are over or underspending used to
4545 * help regulate min and Max Q in two pass.
4547 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
4548 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
4549 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
4550 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
4552 /* Actual bits spent */
4553 cpi->total_actual_bits += cpi->projected_frame_size;
4556 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
4558 cpi->buffer_level = cpi->bits_off_target;
4560 /* Propagate values to higher temporal layers */
4561 if (cpi->oxcf.number_of_layers > 1)
4565 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4567 LAYER_CONTEXT *lc = &cpi->layer_context[i];
4568 int bits_off_for_this_layer =
4569 (int)(lc->target_bandwidth / lc->framerate -
4570 cpi->projected_frame_size);
4572 lc->bits_off_target += bits_off_for_this_layer;
4574 /* Clip buffer level to maximum buffer size for the layer */
4575 if (lc->bits_off_target > lc->maximum_buffer_size)
4576 lc->bits_off_target = lc->maximum_buffer_size;
4578 lc->total_actual_bits += cpi->projected_frame_size;
4579 lc->total_target_vs_actual += bits_off_for_this_layer;
4580 lc->buffer_level = lc->bits_off_target;
4584 /* Update bits left to the kf and gf groups to account for overshoot
4585 * or undershoot on these frames
4587 if (cm->frame_type == KEY_FRAME)
4589 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4591 if (cpi->twopass.kf_group_bits < 0)
4592 cpi->twopass.kf_group_bits = 0 ;
4594 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
4596 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4598 if (cpi->twopass.gf_group_bits < 0)
4599 cpi->twopass.gf_group_bits = 0 ;
4602 if (cm->frame_type != KEY_FRAME)
4604 if (cpi->common.refresh_alt_ref_frame)
4606 cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
4607 cpi->last_skip_probs_q[2] = cm->base_qindex;
4609 else if (cpi->common.refresh_golden_frame)
4611 cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
4612 cpi->last_skip_probs_q[1] = cm->base_qindex;
4616 cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
4617 cpi->last_skip_probs_q[0] = cm->base_qindex;
4619 /* update the baseline */
4620 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
4625 #if 0 && CONFIG_INTERNAL_STATS
4627 FILE *f = fopen("tmp.stt", "a");
4629 vp8_clear_system_state();
4631 if (cpi->twopass.total_left_stats.coded_error != 0.0)
4632 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4633 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4634 "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
4635 cpi->common.current_video_frame, cpi->this_frame_target,
4636 cpi->projected_frame_size,
4637 (cpi->projected_frame_size - cpi->this_frame_target),
4638 cpi->total_target_vs_actual,
4640 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4641 cpi->total_actual_bits, cm->base_qindex,
4642 cpi->active_best_quality, cpi->active_worst_quality,
4643 cpi->ni_av_qi, cpi->cq_target_quality,
4644 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4645 cm->frame_type, cpi->gfu_boost,
4646 cpi->twopass.est_max_qcorrection_factor,
4647 cpi->twopass.bits_left,
4648 cpi->twopass.total_left_stats.coded_error,
4649 (double)cpi->twopass.bits_left /
4650 cpi->twopass.total_left_stats.coded_error,
4651 cpi->tot_recode_hits);
4653 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4654 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4655 "%8.2lf %"PRId64" %10.3lf %8d\n",
4656 cpi->common.current_video_frame, cpi->this_frame_target,
4657 cpi->projected_frame_size,
4658 (cpi->projected_frame_size - cpi->this_frame_target),
4659 cpi->total_target_vs_actual,
4661 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4662 cpi->total_actual_bits, cm->base_qindex,
4663 cpi->active_best_quality, cpi->active_worst_quality,
4664 cpi->ni_av_qi, cpi->cq_target_quality,
4665 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4666 cm->frame_type, cpi->gfu_boost,
4667 cpi->twopass.est_max_qcorrection_factor,
4668 cpi->twopass.bits_left,
4669 cpi->twopass.total_left_stats.coded_error,
4670 cpi->tot_recode_hits);
4675 FILE *fmodes = fopen("Modes.stt", "a");
4677 fprintf(fmodes, "%6d:%1d:%1d:%1d ",
4678 cpi->common.current_video_frame,
4679 cm->frame_type, cm->refresh_golden_frame,
4680 cm->refresh_alt_ref_frame);
4682 fprintf(fmodes, "\n");
4690 if (cm->refresh_golden_frame == 1)
4691 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
4693 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
4695 if (cm->refresh_alt_ref_frame == 1)
4696 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
4698 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
4701 if (cm->refresh_last_frame & cm->refresh_golden_frame)
4702 /* both refreshed */
4703 cpi->gold_is_last = 1;
4704 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
4705 /* 1 refreshed but not the other */
4706 cpi->gold_is_last = 0;
4708 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
4709 /* both refreshed */
4710 cpi->alt_is_last = 1;
4711 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
4712 /* 1 refreshed but not the other */
4713 cpi->alt_is_last = 0;
4715 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
4716 /* both refreshed */
4717 cpi->gold_is_alt = 1;
4718 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
4719 /* 1 refreshed but not the other */
4720 cpi->gold_is_alt = 0;
4722 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
4724 if (cpi->gold_is_last)
4725 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
4727 if (cpi->alt_is_last)
4728 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4730 if (cpi->gold_is_alt)
4731 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4734 if (!cpi->oxcf.error_resilient_mode)
4736 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
4737 /* Update the alternate reference frame stats as appropriate. */
4738 update_alt_ref_frame_stats(cpi);
4740 /* Update the Golden frame stats as appropriate. */
4741 update_golden_frame_stats(cpi);
4744 if (cm->frame_type == KEY_FRAME)
4746 /* Tell the caller that the frame was coded as a key frame */
4747 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
4749 /* As this frame is a key frame the next defaults to an inter frame. */
4750 cm->frame_type = INTER_FRAME;
4752 cpi->last_frame_percent_intra = 100;
4756 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
4758 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
4761 /* Clear the one shot update flags for segmentation map and mode/ref
4762 * loop filter deltas.
4764 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
4765 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
4766 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
4769 /* Dont increment frame counters if this was an altref buffer update
4774 cm->current_video_frame++;
4775 cpi->frames_since_key++;
4776 cpi->temporal_pattern_counter++;
4779 /* reset to normal state now that we are done. */
4787 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
4788 recon_file = fopen(filename, "wb");
4789 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
4790 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
4796 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
4800 #if !(CONFIG_REALTIME_ONLY)
4801 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
/* Second-pass (pass 2) per-frame encode wrapper.
 * Gathers second-pass rate-control statistics for this frame via
 * vp8_second_pass() — skipped for alt-ref frames, which do not consume a
 * first-pass stats entry — then encodes the frame and updates the
 * remaining two-pass bit budget (cpi->twopass.bits_left).
 * NOTE: surrounding braces are elided in this extraction; structure
 * reconstructed from the visible statements — confirm against full source.
 */
4804 if (!cpi->common.refresh_alt_ref_frame)
4805 vp8_second_pass(cpi);
4807 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
/* *size is in bytes; bits_left is tracked in bits, hence the * 8. */
4808 cpi->twopass.bits_left -= 8 * *size;
4810 if (!cpi->common.refresh_alt_ref_frame)
/* two_pass_vbrmin_section is a percentage of target_bandwidth (bits/s);
 * dividing by framerate converts the guaranteed minimum section rate into
 * a per-frame bit allowance credited back to the budget. */
4812 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
4813 *cpi->oxcf.two_pass_vbrmin_section / 100);
4814 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
4819 /* For ARM NEON, d8-d15 are callee-saved registers, and need to be saved. */
4821 extern void vp8_push_neon(int64_t *store);
4822 extern void vp8_pop_neon(int64_t *store);
/* Accept a raw source frame from the application and push it into the
 * encoder's lookahead queue, timing how long the copy takes.
 * Reallocates the lookahead/raw-frame buffers if the incoming frame's
 * dimensions differ from the configured Width/Height (only legal when
 * lag_in_frames < 2, per the assert below).
 * NOTE: braces and matching #endif lines are elided in this extraction;
 * confirm control structure against the full source.
 */
4826 int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
4829 int64_t store_reg[8];
4830 #if CONFIG_RUNTIME_CPU_DETECT
4831 VP8_COMMON *cm = &cpi->common;
4834 struct vpx_usec_timer timer;
4838 #if CONFIG_RUNTIME_CPU_DETECT
4839 if (cm->cpu_caps & HAS_NEON)
/* d8-d15 are callee-saved on ARM NEON; save them before NEON code runs
 * (see the vp8_push_neon/vp8_pop_neon declarations above). */
4842 vp8_push_neon(store_reg);
4846 vpx_usec_timer_start(&timer);
4848 /* Reinit the lookahead buffer if the frame size changes */
4849 if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
4851 assert(cpi->oxcf.lag_in_frames < 2);
4852 dealloc_raw_frame_buffers(cpi);
4853 alloc_raw_frame_buffers(cpi);
/* NOTE(review): a nonzero return from vp8_lookahead_push indicates the
 * push failed; the handling branch is not visible here — confirm against
 * the full source. */
4856 if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
4857 frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))
4859 vpx_usec_timer_mark(&timer);
/* Accumulate time spent receiving/copying source data for stats. */
4860 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
4863 #if CONFIG_RUNTIME_CPU_DETECT
4864 if (cm->cpu_caps & HAS_NEON)
/* Restore the callee-saved NEON registers pushed above. */
4867 vp8_pop_neon(store_reg);
/* Return non-zero if the current frame alters decoder state that later
 * frames depend on: it is a key frame, refreshes/copies any reference
 * buffer, updates the persistent entropy context, or changes loop-filter
 * deltas or the segmentation map/data.  Frames for which this is false are
 * droppable (see cpi->droppable in vp8_get_compressed_data). */
4875 static int frame_is_reference(const VP8_COMP *cpi)
4877     const VP8_COMMON *cm = &cpi->common;
4878     const MACROBLOCKD *xd = &cpi->mb.e_mbd;
4880     return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
4881            || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
4882            || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
4883            || cm->refresh_entropy_probs
4884            || xd->mode_ref_lf_delta_update
4885            || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
/* Top-level per-frame encode entry point.  Pops the next source frame from
 * the lookahead queue, decides alt-ref coding, adjusts the reference frame
 * rate from the incoming timestamps, dispatches to the correct pass
 * (Pass1Encode / Pass2Encode / encode_frame_to_data_rate), then saves
 * entropy contexts, updates timing/PSNR/SSIM statistics and resets the
 * per-frame refresh state.  Protected by setjmp so internal errors return
 * VPX_CODEC_CORRUPT_FRAME instead of aborting.
 * NOTE(review): this extract is sampled — many original lines (braces,
 * returns, #endif, some declarations) are not visible; the code below is
 * kept byte-identical to the visible lines. */
4889 int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
4892     int64_t store_reg[8];
4895     struct vpx_usec_timer  tsctimer;
4896     struct vpx_usec_timer  ticktimer;
4897     struct vpx_usec_timer  cmptimer;
4898     YV12_BUFFER_CONFIG    *force_src_buffer = NULL;
/* Error trampoline: any vpx_internal_error() inside the encode longjmps here. */
4905     if (setjmp(cpi->common.error.jmp))
4907         cpi->common.error.setjmp = 0;
4908         return VPX_CODEC_CORRUPT_FRAME;
4911     cpi->common.error.setjmp = 1;
4914 #if CONFIG_RUNTIME_CPU_DETECT
4915     if (cm->cpu_caps & HAS_NEON)
/* Save NEON d8-d15 (callee-saved); restored before every return path. */
4918         vp8_push_neon(store_reg);
4922     vpx_usec_timer_start(&cmptimer);
4926 #if !(CONFIG_REALTIME_ONLY)
4927     /* Should we code an alternate reference frame */
4928     if (cpi->oxcf.error_resilient_mode == 0 &&
4929         cpi->oxcf.play_alternate &&
4930         cpi->source_alt_ref_pending)
4932         if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
4933                                               cpi->frames_till_gf_update_due,
/* Optionally build a temporally filtered alt-ref source (ARNR). */
4936             cpi->alt_ref_source = cpi->source;
4937             if (cpi->oxcf.arnr_max_frames > 0)
4939                 vp8_temporal_filter_prepare_c(cpi,
4940                                               cpi->frames_till_gf_update_due);
4941                 force_src_buffer = &cpi->alt_ref_buffer;
4943             cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
4944             cm->refresh_alt_ref_frame = 1;
4945             cm->refresh_golden_frame = 0;
4946             cm->refresh_last_frame = 0;
4948             /* Clear Pending alt Ref flag. */
4949             cpi->source_alt_ref_pending = 0;
4950             cpi->is_src_frame_alt_ref = 0;
4957     /* Read last frame source if we are encoding first pass. */
4958     if (cpi->pass == 1 && cm->current_video_frame > 0)
4960         if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
4961                                                   PEEK_BACKWARD)) == NULL)
/* Pop the next frame to encode (or flush the queue at end of stream). */
4966     if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
4970         cpi->is_src_frame_alt_ref = cpi->alt_ref_source
4971                                     && (cpi->source == cpi->alt_ref_source);
4973         if(cpi->is_src_frame_alt_ref)
4974             cpi->alt_ref_source = NULL;
/* Use the filtered alt-ref buffer as the source when one was prepared. */
4980         cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
4981         cpi->un_scaled_source = cpi->Source;
4982         *time_stamp = cpi->source->ts_start;
4983         *time_end = cpi->source->ts_end;
4984         *frame_flags = cpi->source->flags;
4986         if (cpi->pass == 1 && cm->current_video_frame > 0)
4988             cpi->last_frame_unscaled_source = &cpi->last_source->img;
4994 #if !(CONFIG_REALTIME_ONLY)
/* No more frames: emit the final first-pass stats packet exactly once. */
4996         if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
4998             vp8_end_first_pass(cpi);                         /* get last stats packet */
4999             cpi->twopass.first_pass_done = 1;
5005 #if CONFIG_RUNTIME_CPU_DETECT
5006         if (cm->cpu_caps & HAS_NEON)
5009             vp8_pop_neon(store_reg);
5015     if (cpi->source->ts_start < cpi->first_time_stamp_ever)
5017         cpi->first_time_stamp_ever = cpi->source->ts_start;
5018         cpi->last_end_time_stamp_seen = cpi->source->ts_start;
5021     /* adjust frame rates based on timestamps given */
/* Timestamps are in units of 1/10,000,000 second (see 10000000.0 below). */
5024         int64_t this_duration;
5027         if (cpi->source->ts_start == cpi->first_time_stamp_ever)
5029             this_duration = cpi->source->ts_end - cpi->source->ts_start;
5034             int64_t last_duration;
5036             this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
5037             last_duration = cpi->last_end_time_stamp_seen
5038                             - cpi->last_time_stamp_seen;
5039             /* do a step update if the duration changes by 10% */
5041             step = (int)(((this_duration - last_duration) *
5042                           10 / last_duration));
/* Step change: snap ref_framerate directly to the new duration. */
5048                 cpi->ref_framerate = 10000000.0 / this_duration;
5051                 double avg_duration, interval;
5053                 /* Average this frame's rate into the last second's average
5054                  * frame rate.  If we haven't seen 1 second yet, then average
5055                  * over the whole interval seen.
5057                 interval = (double)(cpi->source->ts_end -
5058                                     cpi->first_time_stamp_ever);
5059                 if(interval > 10000000.0)
5060                     interval = 10000000;
5062                 avg_duration = 10000000.0 / cpi->ref_framerate;
5063                 avg_duration *= (interval - avg_duration + this_duration);
5064                 avg_duration /= interval;
5066                 cpi->ref_framerate = 10000000.0 / avg_duration;
5069         if (cpi->oxcf.number_of_layers > 1)
5073             /* Update frame rates for each layer */
5074             for (i=0; i<cpi->oxcf.number_of_layers; i++)
5076                 LAYER_CONTEXT *lc = &cpi->layer_context[i];
5077                 lc->framerate = cpi->ref_framerate /
5078                                 cpi->oxcf.rate_decimator[i];
5082             vp8_new_framerate(cpi, cpi->ref_framerate);
5085         cpi->last_time_stamp_seen = cpi->source->ts_start;
5086         cpi->last_end_time_stamp_seen = cpi->source->ts_end;
/* Temporal scalability: pick this frame's layer from the pattern counter
 * and swap in that layer's rate-control context. */
5089     if (cpi->oxcf.number_of_layers > 1)
5093         update_layer_contexts (cpi);
5095         /* Restore layer specific context & set frame rate */
5096         layer = cpi->oxcf.layer_id[
5097                 cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
5098         restore_layer_context (cpi, layer);
5099         vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
5102     if (cpi->compressor_speed == 2)
5104         vpx_usec_timer_start(&tsctimer);
5105         vpx_usec_timer_start(&ticktimer);
5108     cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;
5110 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
/* On-the-fly bitpacking: carve dest into 1/10 control partition plus
 * equal token-partition slices sized at 9/10 of the buffer. */
5113         const int num_part = (1 << cm->multi_token_partition);
5114         /* the available bytes in dest */
5115         const unsigned long dest_size = dest_end - dest;
5116         const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);
5118         unsigned char *dp = dest;
5120         cpi->partition_d[0] = dp;
5121         dp += dest_size/10;         /* reserve 1/10 for control partition */
5122         cpi->partition_d_end[0] = dp;
5124         for(i = 0; i < num_part; i++)
5126             cpi->partition_d[i + 1] = dp;
5127             dp += tok_part_buff_size;
5128             cpi->partition_d_end[i + 1] = dp;
5133     /* start with a 0 size frame */
5136     /* Clear down mmx registers */
5137     vp8_clear_system_state();
5139     cm->frame_type = INTER_FRAME;
5140     cm->frame_flags = *frame_flags;
5144         if (cm->refresh_alt_ref_frame)
5146             cm->refresh_golden_frame = 0;
5147             cm->refresh_last_frame = 0;
5151             cm->refresh_golden_frame = 0;
5152             cm->refresh_last_frame = 1;
5156     /* find a free buffer for the new frame */
5159     for(; i < NUM_YV12_BUFFERS; i++)
5161         if(!cm->yv12_fb[i].flags)
/* All NUM_YV12_BUFFERS in use would be a logic error upstream. */
5168     assert(i < NUM_YV12_BUFFERS );
5170 #if !(CONFIG_REALTIME_ONLY)
/* Dispatch on encoding pass: 1 = stats gathering, 2 = final two-pass,
 * otherwise single-pass/realtime. */
5174         Pass1Encode(cpi, size, dest, frame_flags);
5176     else if (cpi->pass == 2)
5178         Pass2Encode(cpi, size, dest, dest_end, frame_flags);
5182         encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
5184     if (cpi->compressor_speed == 2)
5186         unsigned int duration, duration2;
5187         vpx_usec_timer_mark(&tsctimer);
5188         vpx_usec_timer_mark(&ticktimer);
5190         duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
5191         duration2 = (unsigned int)((double)duration / 2);
5193         if (cm->frame_type != KEY_FRAME)
/* Exponential moving average with weight 7/8 on the history. */
5195             if (cpi->avg_encode_time == 0)
5196                 cpi->avg_encode_time = duration;
5198                 cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
5205             if (cpi->avg_pick_mode_time == 0)
5206                 cpi->avg_pick_mode_time = duration2;
5208                 cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
/* If this frame did not persist its entropy probs, roll the frame context
 * back to the saved copy so the next frame starts from the right state. */
5214     if (cm->refresh_entropy_probs == 0)
5216         vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
5219     /* Save the contexts separately for alt ref, gold and last. */
5220     /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
5221     if(cm->refresh_alt_ref_frame)
5222         vpx_memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));
5224     if(cm->refresh_golden_frame)
5225         vpx_memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));
5227     if(cm->refresh_last_frame)
5228         vpx_memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));
5230     /* if its a dropped frame honor the requests on subsequent frames */
5233         cpi->droppable = !frame_is_reference(cpi);
5235         /* return to normal state */
5236         cm->refresh_entropy_probs = 1;
5237         cm->refresh_alt_ref_frame = 0;
5238         cm->refresh_golden_frame = 0;
5239         cm->refresh_last_frame = 1;
5240         cm->frame_type = INTER_FRAME;
5244     /* Save layer specific state */
5245     if (cpi->oxcf.number_of_layers > 1)
5246         save_layer_context (cpi);
5248     vpx_usec_timer_mark(&cmptimer);
5249     cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
5251     if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
5253         generate_psnr_packet(cpi);
5256 #if CONFIG_INTERNAL_STATS
/* Internal statistics: accumulate PSNR (raw and post-deblock) and SSIM,
 * globally and per temporal layer. */
5260         cpi->bytes += *size;
5264             cpi->common.show_frame_mi = cpi->common.mi;
5267             if (cpi->b_calculate_psnr)
5271                 YV12_BUFFER_CONFIG      *orig = cpi->Source;
5272                 YV12_BUFFER_CONFIG      *recon = cpi->common.frame_to_show;
5273                 int y_samples = orig->y_height * orig->y_width ;
5274                 int uv_samples = orig->uv_height * orig->uv_width ;
5275                 int t_samples = y_samples + 2 * uv_samples;
5276                 double sq_error, sq_error2;
5278                 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5279                   recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height);
5281                 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5282                   recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5284                 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5285                   recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5287                 sq_error = (double)(ye + ue + ve);
5289                 frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);
5291                 cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
5292                 cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
5293                 cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
5294                 cpi->total_sq_error += sq_error;
5295                 cpi->total  += frame_psnr;
/* Second PSNR pass: measured against a deblocked copy of the recon. */
5298                     YV12_BUFFER_CONFIG      *pp = &cm->post_proc_buffer;
5299                     double frame_psnr2, frame_ssim2 = 0;
5302                     vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
5303                     vp8_clear_system_state();
5305                     ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5306                       pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height);
5308                     ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5309                       pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5311                     ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5312                       pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5314                     sq_error2 = (double)(ye + ue + ve);
5316                     frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);
5318                     cpi->totalp_y += vpx_sse_to_psnr(y_samples,
5320                     cpi->totalp_u += vpx_sse_to_psnr(uv_samples,
5322                     cpi->totalp_v += vpx_sse_to_psnr(uv_samples,
5324                     cpi->total_sq_error2 += sq_error2;
5325                     cpi->totalp  += frame_psnr2;
5327                     frame_ssim2 = vp8_calc_ssim(cpi->Source,
5328                       &cm->post_proc_buffer, 1, &weight);
5330                     cpi->summed_quality += frame_ssim2 * weight;
5331                     cpi->summed_weights += weight;
/* A frame in layer L contributes to the stats of layers L..top. */
5333                     if (cpi->oxcf.number_of_layers > 1)
5337                         for (i=cpi->current_layer;
5338                              i<cpi->oxcf.number_of_layers; i++)
5340                             cpi->frames_in_layer[i]++;
5342                             cpi->bytes_in_layer[i] += *size;
5343                             cpi->sum_psnr[i]       += frame_psnr;
5344                             cpi->sum_psnr_p[i]     += frame_psnr2;
5345                             cpi->total_error2[i]   += sq_error;
5346                             cpi->total_error2_p[i] += sq_error2;
5347                             cpi->sum_ssim[i]       += frame_ssim2 * weight;
5348                             cpi->sum_weights[i]    += weight;
5355             if (cpi->b_calculate_ssimg)
5357                 double y, u, v, frame_all;
5358                 frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,
5361                 if (cpi->oxcf.number_of_layers > 1)
5365                     for (i=cpi->current_layer;
5366                          i<cpi->oxcf.number_of_layers; i++)
/* Avoid double-counting frames already tallied in the PSNR branch. */
5368                         if (!cpi->b_calculate_psnr)
5369                             cpi->frames_in_layer[i]++;
5371                         cpi->total_ssimg_y_in_layer[i] += y;
5372                         cpi->total_ssimg_u_in_layer[i] += u;
5373                         cpi->total_ssimg_v_in_layer[i] += v;
5374                         cpi->total_ssimg_all_in_layer[i] += frame_all;
5379                     cpi->total_ssimg_y += y;
5380                     cpi->total_ssimg_u += u;
5381                     cpi->total_ssimg_v += v;
5382                     cpi->total_ssimg_all += frame_all;
/* Debug-only skip statistics, appended to "skip.stt" (compiled-out block
 * in normal builds; guard lines not visible in this extract). */
5391     if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
5393         skiptruecount += cpi->skip_true_count;
5394         skipfalsecount += cpi->skip_false_count;
5402             FILE *f = fopen("skip.stt", "a");
5403             fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
5405             if (cpi->is_src_frame_alt_ref == 1)
5406                 fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
5415 #if CONFIG_RUNTIME_CPU_DETECT
5416     if (cm->cpu_caps & HAS_NEON)
5419         vp8_pop_neon(store_reg);
/* Leaving the protected region: disarm the setjmp error handler. */
5423     cpi->common.error.setjmp = 0;
/* Return the most recently encoded (reconstructed) frame for preview.
 * With postproc enabled, waits for any in-flight loop-filter thread, then
 * runs vp8_post_proc_frame into *dest; otherwise shallow-copies the
 * frame_to_show buffer descriptor and overwrites its dimensions with the
 * display size.  Fails (non-zero path not visible here) while an alt-ref
 * frame is being coded, since that frame is not for display.
 * NOTE(review): #if CONFIG_POSTPROC / return lines are missing from this
 * extract; visible code is kept byte-identical. */
5428 int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
5430     if (cpi->common.refresh_alt_ref_frame)
5436 #if CONFIG_MULTITHREAD
/* Ensure the threaded loop filter has finished writing frame_to_show. */
5437         if(cpi->b_lpf_running)
5439             sem_wait(&cpi->h_event_end_lpf);
5440             cpi->b_lpf_running = 0;
5445         cpi->common.show_frame_mi = cpi->common.mi;
5446         ret = vp8_post_proc_frame(&cpi->common, dest, flags);
5449         if (cpi->common.frame_to_show)
/* Struct copy only: dest aliases the encoder's buffer, no pixel copy. */
5451             *dest = *cpi->common.frame_to_show;
5452             dest->y_width = cpi->common.Width;
5453             dest->y_height = cpi->common.Height;
5454             dest->uv_height = cpi->common.Height / 2;
5463         vp8_clear_system_state();
/* Configure region-of-interest coding from a per-macroblock segment map.
 * Validates dimensions and the four per-segment delta-Q / delta-loop-filter
 * values (range +/-63), converts external Q deltas to internal quantizer
 * indices via q_trans[], then programs the segmentation feature data and
 * per-segment encode-breakout thresholds.  Incompatible with cyclic
 * refresh.  NOTE(review): the early "return -1/return 0" lines and braces
 * are missing from this extract; visible code is kept byte-identical. */
5468 int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
5470     signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
5471     int internal_delta_q[MAX_MB_SEGMENTS];
/* Externally visible delta range is +/-63 for both Q and loop filter. */
5472     const int range = 63;
5475     // This method is currently incompatible with the cyclic refresh method
5476     if ( cpi->cyclic_refresh_mode_enabled )
5479     // Check number of rows and columns match
5480     if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
5483     // Range check the delta Q values and convert the external Q range values
5484     // to internal ones.
5485     if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
5486          (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )
5489     // Range check the delta lf values
5490     if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
5491          (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )
/* NULL/empty map path: turn segmentation off entirely. */
5496         disable_segmentation(cpi);
5500     // Translate the external delta q values to internal values.
5501     for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
5502         internal_delta_q[i] =
5503             ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
5505     /* Set the segmentation Map */
5506     set_segmentation_map(cpi, map);
5508     /* Activate segmentation. */
5509     enable_segmentation(cpi);
5511     /* Set up the quant segment data */
5512     feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
5513     feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
5514     feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
5515     feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
5517     /* Set up the loop segment data s */
5518     feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
5519     feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
5520     feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
5521     feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
/* Per-segment encode-breakout: SSE threshold below which a MB is skipped. */
5523     cpi->segment_encode_breakout[0] = threshold[0];
5524     cpi->segment_encode_breakout[1] = threshold[1];
5525     cpi->segment_encode_breakout[2] = threshold[2];
5526     cpi->segment_encode_breakout[3] = threshold[3];
5528     /* Initialise the feature data structure */
5529     set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
/* Install a per-macroblock active map (1 byte per MB; rows x cols must
 * match the coded frame size).  When a map is supplied its contents are
 * copied and active-map filtering is enabled; with no map the feature is
 * disabled.  NOTE(review): the "map == NULL" test and return lines are not
 * visible in this extract; visible code is kept byte-identical. */
5534 int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
5536     if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
5540             vpx_memcpy(cpi->active_map, map, rows * cols);
5541             cpi->active_map_enabled = 1;
5544             cpi->active_map_enabled = 0;
/* Set internal down-scaling for spatial resampling.  Each mode must be at
 * most ONETWO (the largest VPX_SCALING ratio accepted here); valid values
 * are stored into the common horiz/vert scale fields.
 * NOTE(review): the "else return -1" / "return 0" lines are not visible in
 * this extract; visible code is kept byte-identical. */
5554 int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
5556     if (horiz_mode <= ONETWO)
5557         cpi->common.horiz_scale = horiz_mode;
5561     if (vert_mode <= ONETWO)
5562         cpi->common.vert_scale = vert_mode;
/* Sum-of-squared-error between the Y planes of two frames, accumulated in
 * 16x16 blocks via vp8_mse16x16.  Only the luma plane is compared; both
 * buffers are walked with their own strides.
 * NOTE(review): local declarations (i, j, Total, sse) and the return are
 * not visible in this extract; visible code is kept byte-identical. */
5571 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
5576     unsigned char *src = source->y_buffer;
5577     unsigned char *dst = dest->y_buffer;
5579     /* Loop through the Y plane raw and reconstruction data summing
5580      * (square differences)
5582     for (i = 0; i < source->y_height; i += 16)
5584         for (j = 0; j < source->y_width; j += 16)
5587             Total += vp8_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride, &sse);
/* Advance both planes one 16-row strip. */
5590         src += 16 * source->y_stride;
5591         dst += 16 * dest->y_stride;
/* Accessor: return the base quantizer index (0..127) used for the most
 * recently coded frame. */
5598 int vp8_get_quantizer(VP8_COMP *cpi)
5600     return cpi->common.base_qindex;