2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
12 #include "vpx_config.h"
13 #include "./vpx_scale_rtcd.h"
14 #include "vp8/common/onyxc_int.h"
15 #include "vp8/common/blockd.h"
17 #include "vp8/common/systemdependent.h"
19 #include "vp8/common/alloccommon.h"
21 #include "firstpass.h"
22 #include "vpx/internal/vpx_psnr.h"
23 #include "vpx_scale/vpx_scale.h"
24 #include "vp8/common/extend.h"
26 #include "vp8/common/quant_common.h"
27 #include "segmentation.h"
29 #include "vp8/common/postproc.h"
31 #include "vpx_mem/vpx_mem.h"
32 #include "vp8/common/swapyv12buffer.h"
33 #include "vp8/common/threading.h"
34 #include "vpx_ports/vpx_timer.h"
36 #include "vpx_ports/arm.h"
38 #if CONFIG_MULTI_RES_ENCODING
39 #include "mr_dissim.h"
41 #include "encodeframe.h"
/* Forward/extern declarations for encoder internals defined in sibling
 * translation units (bitstream packing, loop-filter picking, threading,
 * quantizer init) plus debug/statistics globals.
 * NOTE(review): several #endif lines appear to be missing from this
 * extract (e.g. for CONFIG_REALTIME_ONLY, CONFIG_INTERNAL_STATS,
 * VP8_ENTROPY_STATS) — confirm against the full file.
 */
47 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
48 extern int vp8_update_coef_context(VP8_COMP *cpi);
49 extern void vp8_update_coef_probs(VP8_COMP *cpi);
52 extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
53 extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
54 extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
56 extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
57 extern void print_parms(VP8_CONFIG *ocf, char *filenam);
58 extern unsigned int vp8_get_processor_freq();
59 extern void print_tree_update_probs();
60 extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
61 extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
63 int vp8_estimate_entropy_savings(VP8_COMP *cpi);
65 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
67 extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
69 static void set_default_lf_deltas(VP8_COMP *cpi);
71 extern const int vp8_gf_interval_table[101];
73 #if CONFIG_INTERNAL_STATS
76 extern double vp8_calc_ssim
78 YV12_BUFFER_CONFIG *source,
79 YV12_BUFFER_CONFIG *dest,
85 extern double vp8_calc_ssimg
87 YV12_BUFFER_CONFIG *source,
88 YV12_BUFFER_CONFIG *dest,
/* Debug / statistics counters (only compiled in under their respective
 * configuration flags in the full file). */
109 extern int skip_true_count;
110 extern int skip_false_count;
114 #ifdef VP8_ENTROPY_STATS
115 extern int intra_mode_stats[10][10][10];
/* Per-speed frame counters and prediction/filter timing accumulators. */
119 unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
120 unsigned int tot_pm = 0;
121 unsigned int cnt_pm = 0;
122 unsigned int tot_ef = 0;
123 unsigned int cnt_ef = 0;
127 extern unsigned __int64 Sectionbits[50];
128 extern int y_modes[5] ;
129 extern int uv_modes[4] ;
130 extern int b_modes[10] ;
132 extern int inter_y_modes[10] ;
133 extern int inter_uv_modes[4] ;
134 extern unsigned int inter_b_modes[15];
137 extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
139 extern const int qrounding_factors[129];
140 extern const int qzbin_factors[129];
141 extern void vp8cx_init_quantizer(VP8_COMP *cpi);
142 extern const int vp8cx_base_skip_false_prob[128];
144 /* Tables relating active max Q to active min Q */
/* Minimum Q for key frames with low motion, indexed by active max Q.
 * NOTE(review): the opening/closing brace lines of each table appear to
 * be missing from this extract. */
145 static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
147 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
148 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
149 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
150 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
151 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
152 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
153 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
154 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
/* Minimum Q for key frames with high motion. */
156 static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
158 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
159 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
160 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
161 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
162 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
163 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
164 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
165 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
/* Minimum Q for golden frames, low motion. */
167 static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
169 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
170 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
171 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
172 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
173 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
174 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
175 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
176 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
/* Minimum Q for golden frames, medium motion. */
178 static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
180 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
181 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
182 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
183 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
184 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
185 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
186 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
187 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
/* Minimum Q for golden frames, high motion. */
189 static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
191 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
192 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
193 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
194 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
195 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
196 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
197 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
198 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
/* Minimum Q for ordinary inter frames. */
200 static const unsigned char inter_minq[QINDEX_RANGE] =
202 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
203 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
204 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
205 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
206 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
207 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
208 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
209 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
/* Debug packet-log stream (only under PACKET_TESTING). */
212 #ifdef PACKET_TESTING
213 extern FILE *vpxlogc;
/* Snapshot all layer-dependent rate-control / coding state from |cpi|
 * into the LAYER_CONTEXT slot for the currently active temporal layer,
 * so it can be restored later by restore_layer_context().
 * NOTE(review): the function's brace lines appear to be missing from
 * this extract.
 */
216 static void save_layer_context(VP8_COMP *cpi)
218 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];
220 /* Save layer dependent coding state */
221 lc->target_bandwidth = cpi->target_bandwidth;
222 lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
223 lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
224 lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
225 lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
226 lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
227 lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
228 lc->buffer_level = cpi->buffer_level;
229 lc->bits_off_target = cpi->bits_off_target;
230 lc->total_actual_bits = cpi->total_actual_bits;
231 lc->worst_quality = cpi->worst_quality;
232 lc->active_worst_quality = cpi->active_worst_quality;
233 lc->best_quality = cpi->best_quality;
234 lc->active_best_quality = cpi->active_best_quality;
235 lc->ni_av_qi = cpi->ni_av_qi;
236 lc->ni_tot_qi = cpi->ni_tot_qi;
237 lc->ni_frames = cpi->ni_frames;
238 lc->avg_frame_qindex = cpi->avg_frame_qindex;
239 lc->rate_correction_factor = cpi->rate_correction_factor;
240 lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
241 lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
242 lc->zbin_over_quant = cpi->mb.zbin_over_quant;
243 lc->inter_frame_target = cpi->inter_frame_target;
244 lc->total_byte_count = cpi->total_byte_count;
245 lc->filter_level = cpi->common.filter_level;
247 lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
/* Reference-frame usage counts are copied wholesale. */
249 memcpy (lc->count_mb_ref_frame_usage,
250 cpi->mb.count_mb_ref_frame_usage,
251 sizeof(cpi->mb.count_mb_ref_frame_usage));
/* Inverse of save_layer_context(): make |layer| the active layer and
 * copy its saved rate-control / coding state back into |cpi| (including
 * the oxcf buffer-level fields, so subsequent rate control uses the
 * layer's own buffering model).
 * NOTE(review): brace lines appear to be missing from this extract.
 */
254 static void restore_layer_context(VP8_COMP *cpi, const int layer)
256 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
258 /* Restore layer dependent coding state */
259 cpi->current_layer = layer;
260 cpi->target_bandwidth = lc->target_bandwidth;
261 cpi->oxcf.target_bandwidth = lc->target_bandwidth;
262 cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
263 cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
264 cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
265 cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
266 cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
267 cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
268 cpi->buffer_level = lc->buffer_level;
269 cpi->bits_off_target = lc->bits_off_target;
270 cpi->total_actual_bits = lc->total_actual_bits;
271 cpi->active_worst_quality = lc->active_worst_quality;
272 cpi->active_best_quality = lc->active_best_quality;
273 cpi->ni_av_qi = lc->ni_av_qi;
274 cpi->ni_tot_qi = lc->ni_tot_qi;
275 cpi->ni_frames = lc->ni_frames;
276 cpi->avg_frame_qindex = lc->avg_frame_qindex;
277 cpi->rate_correction_factor = lc->rate_correction_factor;
278 cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
279 cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
280 cpi->mb.zbin_over_quant = lc->zbin_over_quant;
281 cpi->inter_frame_target = lc->inter_frame_target;
282 cpi->total_byte_count = lc->total_byte_count;
283 cpi->common.filter_level = lc->filter_level;
285 cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
/* NOTE(review): worst_quality/best_quality are saved by
 * save_layer_context() but not restored here — presumably intentional
 * (they are configuration, not per-layer state); confirm upstream. */
287 memcpy (cpi->mb.count_mb_ref_frame_usage,
288 lc->count_mb_ref_frame_usage,
289 sizeof(cpi->mb.count_mb_ref_frame_usage));
/* Scale |val| by the ratio num/denom, widening to 64 bits for the
 * intermediate product so val*num cannot overflow a 32-bit int.
 * Returns (int)(val * num / denom). Caller must ensure denom != 0.
 * (Reconstructed: the declarations of llnum/llval were dropped from
 * this extract; the visible body referenced them undeclared.)
 */
static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}
/* Initialise the LAYER_CONTEXT for temporal layer |layer|: derive its
 * framerate and target bandwidth from the encoder config, convert the
 * millisecond buffer levels to bits via rescale(), and reset the
 * rate-control state to neutral starting values.
 * NOTE(review): brace and `else` lines (and the layer>0 guard around
 * the avg_frame_size_for_layer computation) appear to be missing from
 * this extract — the `if (... == 0)` branches presumably each pair
 * with an `else` on the rescale() path; confirm against the full file.
 */
301 static void init_temporal_layer_context(VP8_COMP *cpi,
304 double prev_layer_framerate)
306 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
/* Layer framerate = output rate divided by this layer's decimator. */
308 lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
309 lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
311 lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
312 lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
313 lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
/* Convert buffer levels from ms to bits at this layer's bandwidth. */
315 lc->starting_buffer_level =
316 rescale((int)(oxcf->starting_buffer_level),
317 lc->target_bandwidth, 1000);
/* 0 means "unset": default to 1/8 second worth of bits. */
319 if (oxcf->optimal_buffer_level == 0)
320 lc->optimal_buffer_level = lc->target_bandwidth / 8;
322 lc->optimal_buffer_level =
323 rescale((int)(oxcf->optimal_buffer_level),
324 lc->target_bandwidth, 1000);
326 if (oxcf->maximum_buffer_size == 0)
327 lc->maximum_buffer_size = lc->target_bandwidth / 8;
329 lc->maximum_buffer_size =
330 rescale((int)(oxcf->maximum_buffer_size),
331 lc->target_bandwidth, 1000);
333 /* Work out the average size of a frame within this layer */
335 lc->avg_frame_size_for_layer =
336 (int)((cpi->oxcf.target_bitrate[layer] -
337 cpi->oxcf.target_bitrate[layer-1]) * 1000 /
338 (lc->framerate - prev_layer_framerate));
/* Start rate control at the configured quality bounds. */
340 lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
341 lc->active_best_quality = cpi->oxcf.best_allowed_q;
342 lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
344 lc->buffer_level = lc->starting_buffer_level;
345 lc->bits_off_target = lc->starting_buffer_level;
347 lc->total_actual_bits = 0;
/* Neutral correction factors; no inter target yet. */
351 lc->rate_correction_factor = 1.0;
352 lc->key_frame_rate_correction_factor = 1.0;
353 lc->gf_rate_correction_factor = 1.0;
354 lc->inter_frame_target = 0;
357 // Upon a run-time change in temporal layers, reset the layer context parameters
358 // for any "new" layers. For "existing" layers, let them inherit the parameters
359 // from the previous layer state (at the same layer #). In future we may want
360 // to better map the previous layer state(s) to the "new" ones.
// NOTE(review): brace lines appear to be missing from this extract.
361 static void reset_temporal_layer_change(VP8_COMP *cpi,
363 const int prev_num_layers)
366 double prev_layer_framerate = 0;
367 const int curr_num_layers = cpi->oxcf.number_of_layers;
368 // If the previous state was 1 layer, get current layer context from cpi.
369 // We need this to set the layer context for the new layers below.
370 if (prev_num_layers == 1)
372 cpi->current_layer = 0;
373 save_layer_context(cpi);
// Walk every layer in the new configuration.
375 for (i = 0; i < curr_num_layers; i++)
377 LAYER_CONTEXT *lc = &cpi->layer_context[i];
// Layers beyond the old count are "new": fully (re)initialise them.
378 if (i >= prev_num_layers)
380 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
382 // The initial buffer levels are set based on their starting levels.
383 // We could set the buffer levels based on the previous state (normalized
384 // properly by the layer bandwidths) but we would need to keep track of
385 // the previous set of layer bandwidths (i.e., target_bitrate[i])
386 // before the layer change. For now, reset to the starting levels.
387 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
388 cpi->oxcf.target_bitrate[i];
389 lc->bits_off_target = lc->buffer_level;
390 // TDOD(marpan): Should we set the rate_correction_factor and
391 // active_worst/best_quality to values derived from the previous layer
392 // state (to smooth-out quality dips/rate fluctuation at transition)?
394 // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
395 // is not set for 1 layer, and the restore_layer_context/save_context()
396 // are not called in the encoding loop, so we need to call it here to
397 // pass the layer context state to |cpi|.
398 if (curr_num_layers == 1)
400 lc->target_bandwidth = cpi->oxcf.target_bandwidth;
401 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
402 lc->target_bandwidth / 1000;
403 lc->bits_off_target = lc->buffer_level;
404 restore_layer_context(cpi, 0);
// Remember this layer's framerate for the next layer's delta math.
406 prev_layer_framerate = cpi->output_framerate /
407 cpi->oxcf.rate_decimator[i];
/* Per-frame feature setup: raise the segmentation update flags when
 * segmentation is enabled (else clear them), zero all loop-filter
 * delta state, then install the default mode/ref LF deltas.
 * NOTE(review): brace and `else` lines appear to be missing from this
 * extract.
 */
411 static void setup_features(VP8_COMP *cpi)
413 // If segmentation enabled set the update flags
414 if ( cpi->mb.e_mbd.segmentation_enabled )
416 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
417 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
421 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
422 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
// Clear loop-filter delta state; note the last_* memsets intentionally
// size from the non-last arrays (same sizes).
425 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
426 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
427 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
428 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
429 vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
430 vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
432 set_default_lf_deltas(cpi);
437 static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
/* Free every buffer owned by the compressor instance: token lists,
 * MV-prediction storage, segmentation/active maps, frame buffers,
 * GF-usage and activity maps, partition info, and (multithreaded
 * builds) the per-row sync columns. Pointers are nulled after free
 * where the original did so.
 * NOTE(review): several vpx_free/null pairs (e.g. cpi->tok, cpi->lfmv,
 * cpi->cyclic_refresh_map) and brace lines appear to be missing from
 * this extract.
 */
440 static void dealloc_compressor_data(VP8_COMP *cpi)
442 vpx_free(cpi->tplist);
445 /* Delete last frame MV storage buffers */
449 vpx_free(cpi->lf_ref_frame_sign_bias);
450 cpi->lf_ref_frame_sign_bias = 0;
452 vpx_free(cpi->lf_ref_frame);
453 cpi->lf_ref_frame = 0;
455 /* Delete sementation map */
456 vpx_free(cpi->segmentation_map);
457 cpi->segmentation_map = 0;
459 vpx_free(cpi->active_map);
462 vp8_de_alloc_frame_buffers(&cpi->common);
464 vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
465 vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
466 dealloc_raw_frame_buffers(cpi);
471 /* Structure used to monitor GF usage */
472 vpx_free(cpi->gf_active_flags);
473 cpi->gf_active_flags = 0;
475 /* Activity mask based per mb zbin adjustments */
476 vpx_free(cpi->mb_activity_map);
477 cpi->mb_activity_map = 0;
/* Partition info array allocated by vp8_alloc_partition_data(). */
479 vpx_free(cpi->mb.pip);
482 #if CONFIG_MULTITHREAD
483 vpx_free(cpi->mt_current_mb_col);
484 cpi->mt_current_mb_col = NULL;
/* Turn segmentation on and flag both the map and the per-segment data
 * for transmission in the next frame header. */
488 static void enable_segmentation(VP8_COMP *cpi)
490 /* Set the appropriate feature bit */
491 cpi->mb.e_mbd.segmentation_enabled = 1;
492 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
493 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
/* Turn segmentation off; update flags are left untouched since they
 * are ignored while the feature bit is clear. */
495 static void disable_segmentation(VP8_COMP *cpi)
497 /* Clear the appropriate feature bit */
498 cpi->mb.e_mbd.segmentation_enabled = 0;
501 /* Valid values for a segment are 0 to 3
502 * Segmentation map is arrange as [Rows][Columns]
/* Install a caller-provided per-MB segment map (one byte per MB,
 * mb_rows * mb_cols entries) and mark it for transmission. The map is
 * copied, so the caller retains ownership of |segmentation_map|. */
504 static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
506 /* Copy in the new segmentation map */
507 vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
509 /* Signal that the map should be updated. */
510 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
511 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
514 /* The values given for each segment can be either deltas (from the default
515 * value chosen for the frame) or absolute values.
517 * Valid range for abs values is:
518 * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
519 * Valid range for delta values are:
520 * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
522 * abs_delta = SEGMENT_DELTADATA (deltas)
523 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
/* Copy the caller's [MB_LVL_MAX][MAX_MB_SEGMENTS] feature table into
 * the encoder and record whether it is delta or absolute data. */
526 static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
528 cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
529 vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
/* Debug/test helper: build an all-zero segment map, enable
 * segmentation, and install a trivial feature table (only segment 1
 * gets a +4 Q delta). Not part of the normal encode path.
 * NOTE(review): the vpx_free(seg_map) that should follow the final
 * comment appears to be missing from this extract — as shown, seg_map
 * would leak.
 */
533 static void segmentation_test_function(VP8_COMP *cpi)
535 unsigned char *seg_map;
536 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
538 // Create a temporary map for segmentation data.
539 CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
541 // Set the segmentation Map
542 set_segmentation_map(cpi, seg_map);
544 // Activate segmentation.
545 enable_segmentation(cpi);
547 // Set up the quant segment data
548 feature_data[MB_LVL_ALT_Q][0] = 0;
549 feature_data[MB_LVL_ALT_Q][1] = 4;
550 feature_data[MB_LVL_ALT_Q][2] = 0;
551 feature_data[MB_LVL_ALT_Q][3] = 0;
552 // Set up the loop segment data
553 feature_data[MB_LVL_ALT_LF][0] = 0;
554 feature_data[MB_LVL_ALT_LF][1] = 0;
555 feature_data[MB_LVL_ALT_LF][2] = 0;
556 feature_data[MB_LVL_ALT_LF][3] = 0;
558 // Initialise the feature data structure
559 // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
560 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
562 // Delete sementation map
568 /* A simple function to cyclically refresh the background at a lower Q */
/* Marks up to cyclic_refresh_mode_max_mbs_perframe macroblocks
 * (segment 1) for a quality boost at roughly half the frame Q, cycling
 * through the frame across successive frames via
 * cyclic_refresh_mode_index. |lf_adjustment| is the loop-filter delta
 * applied to the boosted segment.
 * NOTE(review): the do/while loop body (segment-map writes,
 * block_count decrement, index wrap) is partially missing from this
 * extract — only the loop's skeleton lines remain.
 */
569 static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
571 unsigned char *seg_map = cpi->segmentation_map;
572 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
574 int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
575 int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
/* Boosted segment is coded at roughly half the base Q. */
577 cpi->cyclic_refresh_q = Q / 2;
579 // Set every macroblock to be eligible for update.
580 // For key frame this will reset seg map to 0.
581 vpx_memset(cpi->segmentation_map, 0, mbs_in_frame);
583 if (cpi->common.frame_type != KEY_FRAME)
585 /* Cycle through the macro_block rows */
586 /* MB loop to set local segmentation map */
587 i = cpi->cyclic_refresh_mode_index;
588 assert(i < mbs_in_frame);
591 /* If the MB is as a candidate for clean up then mark it for
592 * possible boost/refresh (segment 1) The segment id may get
593 * reset to 0 later if the MB gets coded anything other than
594 * last frame 0,0 as only (last frame 0,0) MBs are eligable for
595 * refresh : that is to say Mbs likely to be background blocks.
597 if (cpi->cyclic_refresh_map[i] == 0)
602 else if (cpi->cyclic_refresh_map[i] < 0)
603 cpi->cyclic_refresh_map[i]++;
/* Wrap the scan index back to the start of the frame. */
606 if (i == mbs_in_frame)
610 while(block_count && i != cpi->cyclic_refresh_mode_index);
/* Resume from here next frame. */
612 cpi->cyclic_refresh_mode_index = i;
615 /* Activate segmentation. */
616 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
617 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
618 enable_segmentation(cpi);
620 /* Set up the quant segment data */
621 feature_data[MB_LVL_ALT_Q][0] = 0;
622 feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
623 feature_data[MB_LVL_ALT_Q][2] = 0;
624 feature_data[MB_LVL_ALT_Q][3] = 0;
626 /* Set up the loop segment data */
627 feature_data[MB_LVL_ALT_LF][0] = 0;
628 feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
629 feature_data[MB_LVL_ALT_LF][2] = 0;
630 feature_data[MB_LVL_ALT_LF][3] = 0;
632 /* Initialise the feature data structure */
633 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
/* Install the default loop-filter delta set: intra blocks filtered a
 * little harder, golden/altref a little softer, and per-mode deltas
 * that strongly relax filtering on zero-MV blocks in realtime mode.
 * NOTE(review): the `else` between lines 654 and 656 appears to be
 * missing from this extract (the two mode_lf_deltas[1] assignments are
 * the realtime / non-realtime branches of one if/else).
 */
637 static void set_default_lf_deltas(VP8_COMP *cpi)
639 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
640 cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
642 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
643 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
645 /* Test of ref frame deltas */
646 cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
647 cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
648 cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
649 cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
651 cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
653 if(cpi->oxcf.Mode == MODE_REALTIME)
654 cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
656 cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
658 cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
659 cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
/* Convenience macros for mapping speed and mode into a continuous
 * interleaved range: raw speeds, GOOD(x) = good-quality speed x, and
 * RT(x) = realtime speed x share one monotone axis so a single table
 * can cover all modes. (RT() was dropped from this extract; it is
 * required by the tables below and restored here.)
 */
#define GOOD(x) (x+1)
#define RT(x)   (x+101)

/* Walk an interleaved {value, limit, value, limit, ...} table and
 * return the last value whose following limit is still <= |speed|.
 * Tables must be terminated by an INT_MAX limit so the walk always
 * stops. (Body reconstructed: the local declaration and loop header
 * were dropped from this extract.)
 */
static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while(speed >= *map++);
    return res;
}
/* Speed-indexed tables consumed by speed_map(): each is an interleaved
 * {value, limit, ...} list over the GOOD()/RT() continuous speed axis.
 * thresh_mult_map_* feed sf->thresh_mult[]; mode_check_freq_map_* feed
 * cpi->mode_check_freq[].
 * NOTE(review): the closing `};` of each table appears to be missing
 * from this extract, and some tables' trailing entries (e.g. new2,
 * near2, new1) are truncated mid-list.
 */
679 static const int thresh_mult_map_znn[] = {
680 /* map common to zero, nearest, and near */
681 0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
684 static const int thresh_mult_map_vhpred[] = {
685 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
686 RT(7), INT_MAX, INT_MAX
689 static const int thresh_mult_map_bpred[] = {
690 2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
691 RT(6), INT_MAX, INT_MAX
694 static const int thresh_mult_map_tm[] = {
695 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
696 RT(7), INT_MAX, INT_MAX
699 static const int thresh_mult_map_new1[] = {
700 1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
703 static const int thresh_mult_map_new2[] = {
704 1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
708 static const int thresh_mult_map_split1[] = {
709 2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
710 RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
713 static const int thresh_mult_map_split2[] = {
714 5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
715 RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
718 static const int mode_check_freq_map_zn2[] = {
719 /* {zero,nearest}{2,3} */
720 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
723 static const int mode_check_freq_map_vhbpred[] = {
724 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
727 static const int mode_check_freq_map_near2[] = {
728 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
732 static const int mode_check_freq_map_new1[] = {
733 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
736 static const int mode_check_freq_map_new2[] = {
737 0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
741 static const int mode_check_freq_map_split1[] = {
742 0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
745 static const int mode_check_freq_map_split2[] = {
746 0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
/* Configure all speed/quality trade-off features for the current
 * compressor speed and mode: search method, sub-pixel refinement,
 * quant/DCT variants, per-mode RD threshold multipliers and mode-check
 * frequencies (via speed_map over the tables above), then bind the
 * matching function pointers on cpi->mb.
 * NOTE(review): this extract is heavily truncated — the switch(Mode)
 * statement, most case/break lines, braces, else branches, and several
 * local declarations (i, min, thresh, Total, etc.) are missing. The
 * code below is kept byte-identical; consult the full file before any
 * behavioral change.
 */
749 void vp8_set_speed_features(VP8_COMP *cpi)
751 SPEED_FEATURES *sf = &cpi->sf;
752 int Mode = cpi->compressor_speed;
753 int Speed = cpi->Speed;
755 VP8_COMMON *cm = &cpi->common;
/* Remembered so the quantizer is only re-initialised on change. */
756 int last_improved_quant = sf->improved_quant;
759 /* Initialise default mode frequency sampling variables */
760 for (i = 0; i < MAX_MODES; i ++)
762 cpi->mode_check_freq[i] = 0;
765 cpi->mb.mbs_tested_so_far = 0;
767 /* best quality defaults */
769 sf->search_method = NSTEP;
770 sf->improved_quant = 1;
771 sf->improved_dct = 1;
774 sf->quarter_pixel_search = 1;
775 sf->half_pixel_search = 1;
776 sf->iterative_sub_pixel = 1;
777 sf->optimize_coefficients = 1;
778 sf->use_fastquant_for_pick = 0;
779 sf->no_skip_block4x4_search = 1;
782 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
783 sf->improved_mv_pred = 1;
785 /* default thresholds to 0 */
786 for (i = 0; i < MAX_MODES; i++)
787 sf->thresh_mult[i] = 0;
789 /* Count enabled references */
791 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
793 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
795 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
798 /* Convert speed to continuous range, with clamping */
/* RD threshold multipliers per prediction mode, from the speed maps. */
810 sf->thresh_mult[THR_ZERO1] =
811 sf->thresh_mult[THR_NEAREST1] =
812 sf->thresh_mult[THR_NEAR1] =
813 sf->thresh_mult[THR_DC] = 0; /* always */
815 sf->thresh_mult[THR_ZERO2] =
816 sf->thresh_mult[THR_ZERO3] =
817 sf->thresh_mult[THR_NEAREST2] =
818 sf->thresh_mult[THR_NEAREST3] =
819 sf->thresh_mult[THR_NEAR2] =
820 sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);
822 sf->thresh_mult[THR_V_PRED] =
823 sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
824 sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
825 sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
826 sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
827 sf->thresh_mult[THR_NEW2] =
828 sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
829 sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
830 sf->thresh_mult[THR_SPLIT2] =
831 sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);
/* How often each mode is even considered, from the speed maps. */
833 cpi->mode_check_freq[THR_ZERO1] =
834 cpi->mode_check_freq[THR_NEAREST1] =
835 cpi->mode_check_freq[THR_NEAR1] =
836 cpi->mode_check_freq[THR_TM] =
837 cpi->mode_check_freq[THR_DC] = 0; /* always */
839 cpi->mode_check_freq[THR_ZERO2] =
840 cpi->mode_check_freq[THR_ZERO3] =
841 cpi->mode_check_freq[THR_NEAREST2] =
842 cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
843 mode_check_freq_map_zn2);
845 cpi->mode_check_freq[THR_NEAR2] =
846 cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
847 mode_check_freq_map_near2);
849 cpi->mode_check_freq[THR_V_PRED] =
850 cpi->mode_check_freq[THR_H_PRED] =
851 cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
852 mode_check_freq_map_vhbpred);
853 cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
854 mode_check_freq_map_new1);
855 cpi->mode_check_freq[THR_NEW2] =
856 cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
857 mode_check_freq_map_new2);
858 cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
859 mode_check_freq_map_split1);
860 cpi->mode_check_freq[THR_SPLIT2] =
861 cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
862 mode_check_freq_map_split2);
/* Mode-specific overrides (the enclosing switch(Mode) is missing
 * from this extract). */
866 #if !(CONFIG_REALTIME_ONLY)
867 case 0: /* best quality mode */
869 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
875 /* Disable coefficient optimization above speed 0 */
876 sf->optimize_coefficients = 0;
877 sf->use_fastquant_for_pick = 1;
878 sf->no_skip_block4x4_search = 0;
885 sf->improved_quant = 0;
886 sf->improved_dct = 0;
888 /* Only do recode loop on key frames, golden frames and
898 sf->recode_loop = 0; /* recode loop off */
899 sf->RD = 0; /* Turn rd off */
905 sf->auto_filter = 0; /* Faster selection of loop filter */
911 sf->optimize_coefficients = 0;
914 sf->iterative_sub_pixel = 1;
915 sf->search_method = NSTEP;
919 sf->improved_quant = 0;
920 sf->improved_dct = 0;
922 sf->use_fastquant_for_pick = 1;
923 sf->no_skip_block4x4_search = 0;
928 sf->auto_filter = 0; /* Faster selection of loop filter */
938 sf->auto_filter = 0; /* Faster selection of loop filter */
939 sf->search_method = HEX;
940 sf->iterative_sub_pixel = 0;
/* Realtime auto-speed-up path: inspect error-bin histogram to decide
 * how aggressively to raise thresholds / drop features. */
945 unsigned int sum = 0;
946 unsigned int total_mbs = cm->MBs;
948 unsigned int total_skip;
952 if (cpi->oxcf.encode_breakout > 2000)
953 min = cpi->oxcf.encode_breakout;
957 for (i = 0; i < min; i++)
959 sum += cpi->mb.error_bins[i];
965 /* i starts from 2 to make sure thresh started from 2048 */
966 for (; i < 1024; i++)
968 sum += cpi->mb.error_bins[i];
970 if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
/* Scale the NEW/NEAREST/NEAR thresholds per reference frame. */
982 sf->thresh_mult[THR_NEW1 ] = thresh;
983 sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
984 sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
989 sf->thresh_mult[THR_NEW2] = thresh << 1;
990 sf->thresh_mult[THR_NEAREST2 ] = thresh;
991 sf->thresh_mult[THR_NEAR2 ] = thresh;
996 sf->thresh_mult[THR_NEW3] = thresh << 1;
997 sf->thresh_mult[THR_NEAREST3 ] = thresh;
998 sf->thresh_mult[THR_NEAR3 ] = thresh;
1001 sf->improved_mv_pred = 0;
1005 sf->quarter_pixel_search = 0;
/* Bitstream version selects the loop-filter type. */
1007 if(cm->version == 0)
1009 cm->filter_type = NORMAL_LOOPFILTER;
1012 cm->filter_type = SIMPLE_LOOPFILTER;
1016 cm->filter_type = SIMPLE_LOOPFILTER;
1019 /* This has a big hit on quality. Last resort */
1021 sf->half_pixel_search = 0;
1023 vpx_memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
1027 /* Slow quant, dct and trellis not worthwhile for first pass
1028 * so make sure they are always turned off.
1030 if ( cpi->pass == 1 )
1032 sf->improved_quant = 0;
1033 sf->optimize_coefficients = 0;
1034 sf->improved_dct = 0;
/* Bind motion-search init for the chosen search method. */
1037 if (cpi->sf.search_method == NSTEP)
1039 vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1041 else if (cpi->sf.search_method == DIAMOND)
1043 vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
/* Bind the forward transforms (both branches currently identical). */
1046 if (cpi->sf.improved_dct)
1048 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1049 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1053 /* No fast FDCT defined for any platform at this time. */
1054 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1055 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1058 cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
/* Bind regular vs fast quantizer. */
1060 if (cpi->sf.improved_quant)
1062 cpi->mb.quantize_b = vp8_regular_quantize_b;
1063 cpi->mb.quantize_b_pair = vp8_regular_quantize_b_pair;
1067 cpi->mb.quantize_b = vp8_fast_quantize_b;
1068 cpi->mb.quantize_b_pair = vp8_fast_quantize_b_pair;
1070 if (cpi->sf.improved_quant != last_improved_quant)
1071 vp8cx_init_quantizer(cpi);
/* Pick the fractional-MV refinement routine by feature level. */
1073 if (cpi->sf.iterative_sub_pixel == 1)
1075 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
1077 else if (cpi->sf.quarter_pixel_search)
1079 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
1081 else if (cpi->sf.half_pixel_search)
1083 cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
1087 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1090 if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
1091 cpi->mb.optimize = 1;
1093 cpi->mb.optimize = 0;
/* full_pixel streams never refine below integer precision. */
1095 if (cpi->common.full_pixel)
1096 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1099 frames_at_speed[cpi->Speed]++;
/* Allocate the raw-frame lookahead queue and, when temporal filtering
 * is compiled in, the 16-aligned altref working buffer. Fatal
 * vpx_internal_error on allocation failure.
 * NOTE(review): the `if (!cpi->lookahead)` guard before the first
 * vpx_internal_error appears to be missing from this extract.
 */
1105 static void alloc_raw_frame_buffers(VP8_COMP *cpi)
1107 #if VP8_TEMPORAL_ALT_REF
/* Round dimensions up to whole macroblocks (multiples of 16). */
1108 int width = (cpi->oxcf.Width + 15) & ~15;
1109 int height = (cpi->oxcf.Height + 15) & ~15;
1112 cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
1113 cpi->oxcf.lag_in_frames);
1115 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1116 "Failed to allocate lag buffers");
1118 #if VP8_TEMPORAL_ALT_REF
1120 if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
1121 width, height, VP8BORDERINPIXELS))
1122 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1123 "Failed to allocate altref buffer");
/* Release the buffers allocated by alloc_raw_frame_buffers(): the
 * altref buffer (when compiled in) and the lookahead queue. */
1129 static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
1131 #if VP8_TEMPORAL_ALT_REF
1132 vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
1134 vp8_lookahead_destroy(cpi->lookahead);
/* (Re)allocate the per-MB partition-info array, one extra row and
 * column of padding, and point cpi->mb.pi at the first interior entry.
 * NOTE(review): the NULL-check / error-return after the calloc appears
 * to be missing from this extract; presumably returns non-zero on
 * failure and 0 on success — confirm against the full file.
 */
1138 static int vp8_alloc_partition_data(VP8_COMP *cpi)
1140 vpx_free(cpi->mb.pip);
1142 cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
1143 (cpi->common.mb_rows + 1),
1144 sizeof(PARTITION_INFO));
/* Skip the padding row/column so pi indexes real macroblocks. */
1148 cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
/* Allocate all per-frame-size encoder state: common frame buffers,
 * partition data, scratch YV12 buffers, token buffer, GF-usage maps,
 * MV-prediction history, segmentation/active maps, and (multithreaded
 * builds) per-row sync state. Any allocation failure is reported via
 * vpx_internal_error() / CHECK_MEM_ERROR.
 * NOTE(review): braces, #else/#endif and blank structural lines appear
 * elided from this chunk. */
1153 void vp8_alloc_compressor_data(VP8_COMP *cpi)
1155 VP8_COMMON *cm = & cpi->common;
1157 int width = cm->Width;
1158 int height = cm->Height;
1160 if (vp8_alloc_frame_buffers(cm, width, height))
1161 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1162 "Failed to allocate frame buffers");
1164 if (vp8_alloc_partition_data(cpi))
1165 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1166 "Failed to allocate partition data");
/* Round scratch-buffer dimensions up to a multiple of 16. */
1169 if ((width & 0xf) != 0)
1170 width += 16 - (width & 0xf);
1172 if ((height & 0xf) != 0)
1173 height += 16 - (height & 0xf);
1176 if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
1177 width, height, VP8BORDERINPIXELS))
1178 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1179 "Failed to allocate last frame buffer");
1181 if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
1182 width, height, VP8BORDERINPIXELS))
1183 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1184 "Failed to allocate scaled source buffer");
/* Token buffer: 24 tokens * 16 partitions per MB; the realtime/otf
 * bitpacking build only needs one MB's worth per thread. */
1189 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
1190 unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
1192 unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
1194 CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
1197 /* Data used for real time vc mode to see if gf needs refreshing */
1198 cpi->zeromv_count = 0;
1201 /* Structures used to monitor GF usage */
1202 vpx_free(cpi->gf_active_flags);
1203 CHECK_MEM_ERROR(cpi->gf_active_flags,
1204 vpx_calloc(sizeof(*cpi->gf_active_flags),
1205 cm->mb_rows * cm->mb_cols));
1206 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
1208 vpx_free(cpi->mb_activity_map);
1209 CHECK_MEM_ERROR(cpi->mb_activity_map,
1210 vpx_calloc(sizeof(*cpi->mb_activity_map),
1211 cm->mb_rows * cm->mb_cols));
1213 /* allocate memory for storing last frame's MVs for MV prediction. */
1214 vpx_free(cpi->lfmv);
1215 CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1216 sizeof(*cpi->lfmv)));
1217 vpx_free(cpi->lf_ref_frame_sign_bias);
1218 CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
1219 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1220 sizeof(*cpi->lf_ref_frame_sign_bias)));
1221 vpx_free(cpi->lf_ref_frame);
1222 CHECK_MEM_ERROR(cpi->lf_ref_frame,
1223 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1224 sizeof(*cpi->lf_ref_frame)));
1226 /* Create the encoder segmentation map and set all entries to 0 */
1227 vpx_free(cpi->segmentation_map);
1228 CHECK_MEM_ERROR(cpi->segmentation_map,
1229 vpx_calloc(cm->mb_rows * cm->mb_cols,
1230 sizeof(*cpi->segmentation_map)));
1231 cpi->cyclic_refresh_mode_index = 0;
1232 vpx_free(cpi->active_map);
1233 CHECK_MEM_ERROR(cpi->active_map,
1234 vpx_calloc(cm->mb_rows * cm->mb_cols,
1235 sizeof(*cpi->active_map)));
/* Default: every macroblock active (value 1). */
1236 vpx_memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));
/* Row-sync granularity for threaded encoding scales with frame width;
 * the narrowest tier's condition line appears elided in this chunk. */
1238 #if CONFIG_MULTITHREAD
1240 cpi->mt_sync_range = 1;
1241 else if (width <= 1280)
1242 cpi->mt_sync_range = 4;
1243 else if (width <= 2560)
1244 cpi->mt_sync_range = 8;
1246 cpi->mt_sync_range = 16;
1248 if (cpi->oxcf.multi_threaded > 1)
1250 vpx_free(cpi->mt_current_mb_col);
1251 CHECK_MEM_ERROR(cpi->mt_current_mb_col,
1252 vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
1257 vpx_free(cpi->tplist);
1258 CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
/* Maps the external 0..63 quantizer scale to the internal 0..127 q
 * index. Monotonically non-decreasing, so vp8_reverse_trans() can
 * invert it with a linear scan. */
1263 static const int q_trans[] =
1265 0, 1, 2, 3, 4, 5, 7, 8,
1266 9, 10, 12, 13, 15, 17, 18, 19,
1267 20, 21, 23, 24, 25, 26, 27, 28,
1268 29, 30, 31, 33, 35, 37, 39, 41,
1269 43, 45, 47, 49, 51, 53, 55, 57,
1270 59, 61, 64, 67, 70, 73, 76, 79,
1271 82, 85, 88, 91, 94, 97, 100, 103,
1272 106, 109, 112, 115, 118, 121, 124, 127,
/* Inverse of q_trans: scans for the first external q (0..63) whose
 * internal index is >= x.
 * NOTE(review): the loop-body `return i;` and the fallback return
 * appear elided from this chunk — confirm against canonical source. */
1275 int vp8_reverse_trans(int x)
1279 for (i = 0; i < 64; i++)
1280 if (q_trans[i] >= x)
/* Update all rate-control quantities derived from the frame rate:
 * per-frame bandwidth, the minimum frame bandwidth (two-pass VBR floor),
 * and the maximum golden/altref frame interval, including the extended
 * interval allowed for static scenes and the clamps applied when the
 * altref-in-lagged-compress mode limits how far ahead we may look. */
1285 void vp8_new_framerate(VP8_COMP *cpi, double framerate)
1290 cpi->framerate = framerate;
1291 cpi->output_framerate = framerate;
1292 cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
1293 cpi->output_framerate);
1294 cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
1295 cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
1296 cpi->oxcf.two_pass_vbrmin_section / 100);
1298 /* Set Maximum gf/arf interval */
1299 cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
/* Never let the GF interval drop below 12 frames. */
1301 if(cpi->max_gf_interval < 12)
1302 cpi->max_gf_interval = 12;
1304 /* Extended interval for genuinely static scenes */
1305 cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
1307 /* Special conditions when altr ref frame enabled in lagged compress mode */
1308 if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
/* Both intervals are capped by the lag depth minus one. */
1310 if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1311 cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1313 if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1314 cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1317 if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
1318 cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
/* One-time encoder configuration: seeds version/framerate defaults,
 * delegates the shared configuration work to vp8_change_config(), then
 * initializes rate-control state (q limits, buffer levels, rolling bit
 * counters), temporal-layer contexts, and (VP8_TEMPORAL_ALT_REF) the
 * fixed-point reciprocal table used by the temporal filter. */
1322 static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1324 VP8_COMMON *cm = &cpi->common;
1329 cpi->auto_adjust_gold_quantizer = 1;
1331 cm->version = oxcf->Version;
1332 vp8_setup_version(cm);
1334 /* frame rate is not available on the first frame, as it's derived from
1335 * the observed timestamps. The actual value used here doesn't matter
1336 * too much, as it will adapt quickly. If the reciprocal of the timebase
1337 * seems like a reasonable framerate, then use that as a guess, otherwise
1340 cpi->framerate = (double)(oxcf->timebase.den) /
1341 (double)(oxcf->timebase.num);
/* An implausibly high guess (> 180 fps) falls back to 30 fps. */
1343 if (cpi->framerate > 180)
1344 cpi->framerate = 30;
1346 cpi->ref_framerate = cpi->framerate;
1348 /* change includes all joint functionality */
1349 vp8_change_config(cpi, oxcf);
1351 /* Initialize active best and worst q and average q values. */
1352 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1353 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1354 cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
1356 /* Initialise the starting buffer levels */
1357 cpi->buffer_level = cpi->oxcf.starting_buffer_level;
1358 cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
1360 cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
1361 cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
1362 cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
1363 cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
1365 cpi->total_actual_bits = 0;
1366 cpi->total_target_vs_actual = 0;
1368 /* Temporal scalability */
1369 if (cpi->oxcf.number_of_layers > 1)
1372 double prev_layer_framerate=0;
1374 for (i=0; i<cpi->oxcf.number_of_layers; i++)
1376 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
1377 prev_layer_framerate = cpi->output_framerate /
1378 cpi->oxcf.rate_decimator[i];
/* Reciprocal table for the temporal filter: fixed_divide[i] ~= (1<<19)/i. */
1382 #if VP8_TEMPORAL_ALT_REF
1386 cpi->fixed_divide[0] = 0;
1388 for (i = 1; i < 512; i++)
1389 cpi->fixed_divide[i] = 0x80000 / i;
/* Refresh each temporal layer's snapshot (framerate, bandwidth, buffer
 * levels, average frame size) after a configuration change. Millisecond
 * buffer settings are rescaled to bits using each layer's bandwidth. */
1394 static void update_layer_contexts (VP8_COMP *cpi)
1396 VP8_CONFIG *oxcf = &cpi->oxcf;
1398 /* Update snapshots of the layer contexts to reflect new parameters */
1399 if (oxcf->number_of_layers > 1)
1402 double prev_layer_framerate=0;
1404 assert(oxcf->number_of_layers <= VPX_TS_MAX_LAYERS);
1405 for (i=0; i<oxcf->number_of_layers; i++)
1407 LAYER_CONTEXT *lc = &cpi->layer_context[i];
1410 cpi->ref_framerate / oxcf->rate_decimator[i];
/* target_bitrate[] is in kbit/s; convert to bit/s. */
1411 lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
1413 lc->starting_buffer_level = rescale(
1414 (int)oxcf->starting_buffer_level_in_ms,
1415 lc->target_bandwidth, 1000);
/* Zero means "no explicit level": default to bandwidth/8 (125 ms). */
1417 if (oxcf->optimal_buffer_level == 0)
1418 lc->optimal_buffer_level = lc->target_bandwidth / 8;
1420 lc->optimal_buffer_level = rescale(
1421 (int)oxcf->optimal_buffer_level_in_ms,
1422 lc->target_bandwidth, 1000);
1424 if (oxcf->maximum_buffer_size == 0)
1425 lc->maximum_buffer_size = lc->target_bandwidth / 8;
1427 lc->maximum_buffer_size = rescale(
1428 (int)oxcf->maximum_buffer_size_in_ms,
1429 lc->target_bandwidth, 1000);
1431 /* Work out the average size of a frame within this layer */
/* Layer bitrates are cumulative, hence the [i] - [i-1] delta;
 * presumably guarded by `if (i > 0)` — elided in this chunk. */
1433 lc->avg_frame_size_for_layer =
1434 (int)((oxcf->target_bitrate[i] -
1435 oxcf->target_bitrate[i-1]) * 1000 /
1436 (lc->framerate - prev_layer_framerate));
1438 prev_layer_framerate = lc->framerate;
/* Apply a (possibly changed) VP8_CONFIG to a live encoder instance.
 * Called both at creation (from init_config) and on runtime control
 * calls. Responsibilities visible here: sync with the loop-filter
 * thread, map the encoding Mode to compressor_speed with cpu_used
 * clamps, translate external q values through q_trans[], set token
 * partitioning and segment breakouts, convert/rescale buffer levels,
 * re-clamp active q to the new range, reset temporal layers when the
 * layer count changes, recompute scaled frame dimensions, and
 * reallocate frame buffers when the coded size changed.
 * NOTE(review): braces, `break;`/`default:` lines, #else/#endif and
 * some blank structural lines appear elided from this chunk. */
1443 void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1445 VP8_COMMON *cm = &cpi->common;
1446 int last_w, last_h, prev_number_of_layers;
1454 #if CONFIG_MULTITHREAD
1455 /* wait for the last picture loopfilter thread done */
1456 if (cpi->b_lpf_running)
1458 sem_wait(&cpi->h_event_end_lpf);
1459 cpi->b_lpf_running = 0;
1463 if (cm->version != oxcf->Version)
1465 cm->version = oxcf->Version;
1466 vp8_setup_version(cm);
/* Remember old geometry/layer count to detect changes below. */
1469 last_w = cpi->oxcf.Width;
1470 last_h = cpi->oxcf.Height;
1471 prev_number_of_layers = cpi->oxcf.number_of_layers;
/* Map encoding mode to compressor speed; clamp cpu_used per mode. */
1475 switch (cpi->oxcf.Mode)
1480 cpi->compressor_speed = 2;
1482 if (cpi->oxcf.cpu_used < -16)
1484 cpi->oxcf.cpu_used = -16;
1487 if (cpi->oxcf.cpu_used > 16)
1488 cpi->oxcf.cpu_used = 16;
1492 case MODE_GOODQUALITY:
1494 cpi->compressor_speed = 1;
1496 if (cpi->oxcf.cpu_used < -5)
1498 cpi->oxcf.cpu_used = -5;
1501 if (cpi->oxcf.cpu_used > 5)
1502 cpi->oxcf.cpu_used = 5;
1506 case MODE_BESTQUALITY:
1508 cpi->compressor_speed = 0;
1511 case MODE_FIRSTPASS:
1513 cpi->compressor_speed = 1;
1515 case MODE_SECONDPASS:
1517 cpi->compressor_speed = 1;
1519 if (cpi->oxcf.cpu_used < -5)
1521 cpi->oxcf.cpu_used = -5;
1524 if (cpi->oxcf.cpu_used > 5)
1525 cpi->oxcf.cpu_used = 5;
1528 case MODE_SECONDPASS_BEST:
1530 cpi->compressor_speed = 0;
1535 cpi->auto_worst_q = 1;
/* Translate external 0..63 q values to internal 0..127 indices. */
1537 cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
1538 cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
1539 cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
1541 if (oxcf->fixed_q >= 0)
1543 if (oxcf->worst_allowed_q < 0)
1544 cpi->oxcf.fixed_q = q_trans[0];
1546 cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
1548 if (oxcf->alt_q < 0)
1549 cpi->oxcf.alt_q = q_trans[0];
1551 cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
1553 if (oxcf->key_q < 0)
1554 cpi->oxcf.key_q = q_trans[0];
1556 cpi->oxcf.key_q = q_trans[oxcf->key_q];
1558 if (oxcf->gold_q < 0)
1559 cpi->oxcf.gold_q = q_trans[0];
1561 cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
1565 cpi->baseline_gf_interval =
1566 cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
1568 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
1570 cm->refresh_golden_frame = 0;
1571 cm->refresh_last_frame = 1;
1572 cm->refresh_entropy_probs = 1;
/* On-the-fly bitpacking builds force the maximum (8) token partitions. */
1574 #if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
1575 cpi->oxcf.token_partitions = 3;
1578 if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
1579 cm->multi_token_partition =
1580 (TOKEN_PARTITION) cpi->oxcf.token_partitions;
1582 setup_features(cpi);
1587 for (i = 0; i < MAX_MB_SEGMENTS; i++)
1588 cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1591 /* At the moment the first order values may not be > MAXQ */
1592 if (cpi->oxcf.fixed_q > MAXQ)
1593 cpi->oxcf.fixed_q = MAXQ;
1595 /* local file playback mode == really big buffer */
1596 if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
1598 cpi->oxcf.starting_buffer_level = 60000;
1599 cpi->oxcf.optimal_buffer_level = 60000;
1600 cpi->oxcf.maximum_buffer_size = 240000;
1601 cpi->oxcf.starting_buffer_level_in_ms = 60000;
1602 cpi->oxcf.optimal_buffer_level_in_ms = 60000;
1603 cpi->oxcf.maximum_buffer_size_in_ms = 240000;
1606 /* Convert target bandwidth from Kbit/s to Bit/s */
1607 cpi->oxcf.target_bandwidth *= 1000;
/* Buffer levels arrive as durations; rescale to bits at the target rate. */
1609 cpi->oxcf.starting_buffer_level =
1610 rescale((int)cpi->oxcf.starting_buffer_level,
1611 cpi->oxcf.target_bandwidth, 1000);
1613 /* Set or reset optimal and maximum buffer levels. */
1614 if (cpi->oxcf.optimal_buffer_level == 0)
1615 cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
1617 cpi->oxcf.optimal_buffer_level =
1618 rescale((int)cpi->oxcf.optimal_buffer_level,
1619 cpi->oxcf.target_bandwidth, 1000);
1621 if (cpi->oxcf.maximum_buffer_size == 0)
1622 cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
1624 cpi->oxcf.maximum_buffer_size =
1625 rescale((int)cpi->oxcf.maximum_buffer_size,
1626 cpi->oxcf.target_bandwidth, 1000);
1627 // Under a configuration change, where maximum_buffer_size may change,
1628 // keep buffer level clipped to the maximum allowed buffer size.
1629 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
1630 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
1631 cpi->buffer_level = cpi->bits_off_target;
1634 /* Set up frame rate and related parameters rate control values. */
1635 vp8_new_framerate(cpi, cpi->framerate);
1637 /* Set absolute upper and lower quality limits */
1638 cpi->worst_quality = cpi->oxcf.worst_allowed_q;
1639 cpi->best_quality = cpi->oxcf.best_allowed_q;
1641 /* active values should only be modified if out of new range */
1642 if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
1644 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1647 else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
1649 cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
1651 if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
1653 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1656 else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
1658 cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
1661 cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
1663 cpi->cq_target_quality = cpi->oxcf.cq_level;
1665 /* Only allow dropped frames in buffered mode */
1666 cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
1668 cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
1670 // Check if the number of temporal layers has changed, and if so reset the
1671 // pattern counter and set/initialize the temporal layer context for the
1672 // new layer configuration.
1673 if (cpi->oxcf.number_of_layers != prev_number_of_layers)
1675 // If the number of temporal layers are changed we must start at the
1676 // base of the pattern cycle, so reset temporal_pattern_counter.
1677 cpi->temporal_pattern_counter = 0;
1678 reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
1681 cm->Width = cpi->oxcf.Width;
1682 cm->Height = cpi->oxcf.Height;
1684 /* TODO(jkoleszar): if an internal spatial resampling is active,
1685 * and we downsize the input image, maybe we should clear the
1686 * internal scale immediately rather than waiting for it to
1690 /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
1691 if (cpi->oxcf.Sharpness > 7)
1692 cpi->oxcf.Sharpness = 7;
1694 cm->sharpness_level = cpi->oxcf.Sharpness;
1696 if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
1698 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
1699 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
1701 Scale2Ratio(cm->horiz_scale, &hr, &hs);
1702 Scale2Ratio(cm->vert_scale, &vr, &vs);
1704 /* always go to the next whole number */
1705 cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
1706 cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
1709 if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
1710 cpi->force_next_frame_intra = 1;
/* Reallocate if the MB-aligned size no longer matches the last-frame
 * buffer, or if no buffer was ever allocated (y_width == 0). */
1712 if (((cm->Width + 15) & 0xfffffff0) !=
1713 cm->yv12_fb[cm->lst_fb_idx].y_width ||
1714 ((cm->Height + 15) & 0xfffffff0) !=
1715 cm->yv12_fb[cm->lst_fb_idx].y_height ||
1716 cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
1718 dealloc_raw_frame_buffers(cpi);
1719 alloc_raw_frame_buffers(cpi);
1720 vp8_alloc_compressor_data(cpi);
1723 if (cpi->oxcf.fixed_q >= 0)
1725 cpi->last_q[0] = cpi->oxcf.fixed_q;
1726 cpi->last_q[1] = cpi->oxcf.fixed_q;
1729 cpi->Speed = cpi->oxcf.cpu_used;
1731 /* force to allowlag to 0 if lag_in_frames is 0; */
1732 if (cpi->oxcf.lag_in_frames == 0)
1734 cpi->oxcf.allow_lag = 0;
1736 /* Limit on lag buffers as these are not currently dynamically allocated */
1737 else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
1738 cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
1741 cpi->alt_ref_source = NULL;
1742 cpi->is_src_frame_alt_ref = 0;
/* Lazily allocate the denoiser buffer the first time noise
 * sensitivity is enabled. */
1744 #if CONFIG_TEMPORAL_DENOISING
1745 if (cpi->oxcf.noise_sensitivity)
1747 if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
1749 int width = (cpi->oxcf.Width + 15) & ~15;
1750 int height = (cpi->oxcf.Height + 15) & ~15;
1751 vp8_denoiser_allocate(&cpi->denoiser, width, height);
1757 /* Experimental RD Code */
1758 cpi->frame_distortion = 0;
1759 cpi->last_frame_distortion = 0;
1765 #define M_LOG2_E 0.693147180559945309417
1767 #define log2f(x) (log (x) / (float) M_LOG2_E)
/* Fill the symmetric SAD-based motion-vector cost tables: cost 300 at
 * zero, then 256*(2*(log2f(8*i)+.6)) for +/-i out to mvfp_max.
 * NOTE(review): the `do {` opener and the i declaration/initialization
 * appear elided from this chunk. */
1769 static void cal_mvsadcosts(int *mvsadcost[2])
1773 mvsadcost [0] [0] = 300;
1774 mvsadcost [1] [0] = 300;
1778 double z = 256 * (2 * (log2f(8 * i) + .6));
/* Tables are indexed by signed MV component; mirror cost for -i. */
1779 mvsadcost [0][i] = (int) z;
1780 mvsadcost [1][i] = (int) z;
1781 mvsadcost [0][-i] = (int) z;
1782 mvsadcost [1][-i] = (int) z;
1784 while (++i <= mvfp_max);
/* Allocate and fully initialize a VP8 encoder instance from oxcf.
 * Sets up error handling via setjmp (any vpx_internal_error during init
 * unwinds to remove the partially-built compressor), then initializes
 * configuration, rate-control state, per-pass state, variance/SAD
 * function pointers, quantizer and loop filter, encoder threads, and
 * the RD cost tables wired into the MACROBLOCK struct.
 * NOTE(review): braces, NULL-check lines, several #else/#endif and the
 * final `return cpi;` appear elided from this chunk. */
1787 struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
1794 cpi = vpx_memalign(32, sizeof(VP8_COMP));
1795 /* Check that the CPI instance is valid */
1801 vpx_memset(cpi, 0, sizeof(VP8_COMP));
/* Any vpx_internal_error() during init longjmps back here. */
1803 if (setjmp(cm->error.jmp))
1805 cpi->common.error.setjmp = 0;
1806 vp8_remove_compressor(&cpi);
1810 cpi->common.error.setjmp = 1;
1812 CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
1814 vp8_create_common(&cpi->common);
1816 init_config(cpi, oxcf);
1818 memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
1819 cpi->common.current_video_frame = 0;
1820 cpi->temporal_pattern_counter = 0;
1821 cpi->kf_overspend_bits = 0;
1822 cpi->kf_bitrate_adjustment = 0;
1823 cpi->frames_till_gf_update_due = 0;
1824 cpi->gf_overspend_bits = 0;
1825 cpi->non_gf_bitrate_adjustment = 0;
1826 cpi->prob_last_coded = 128;
1827 cpi->prob_gf_coded = 128;
1828 cpi->prob_intra_coded = 63;
1830 /* Prime the recent reference frame usage counters.
1831 * Hereafter they will be maintained as a sort of moving average
1833 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
1834 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
1835 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
1836 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
1838 /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
1839 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
1841 cpi->twopass.gf_decay_rate = 0;
1842 cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
1844 cpi->gold_is_last = 0 ;
1845 cpi->alt_is_last = 0 ;
1846 cpi->gold_is_alt = 0 ;
1848 cpi->active_map_enabled = 0;
1851 /* Experimental code for lagged and one pass */
1852 /* Initialise one_pass GF frames stats */
1853 /* Update stats used for GF selection */
1856 cpi->one_pass_frame_index = 0;
1858 for (i = 0; i < MAX_LAG_BUFFERS; i++)
1860 cpi->one_pass_frame_stats[i].frames_so_far = 0;
1861 cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
1862 cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
1863 cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
1864 cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
1865 cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
1866 cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
1867 cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
1868 cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
1873 /* Should we use the cyclic refresh method.
1874 * Currently this is tied to error resilient mode
1876 cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
1877 cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
1878 cpi->cyclic_refresh_mode_index = 0;
1879 cpi->cyclic_refresh_q = 32;
1881 if (cpi->cyclic_refresh_mode_enabled)
1883 CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
1886 cpi->cyclic_refresh_map = (signed char *) NULL;
1888 #ifdef VP8_ENTROPY_STATS
1889 init_context_counters();
1892 /*Initialize the feed-forward activity masking.*/
1893 cpi->activity_avg = 90<<12;
1895 /* Give a sensible default for the first frame. */
1896 cpi->frames_since_key = 8;
1897 cpi->key_frame_frequency = cpi->oxcf.key_freq;
1898 cpi->this_key_frame_forced = 0;
1899 cpi->next_key_frame_forced = 0;
1901 cpi->source_alt_ref_pending = 0;
1902 cpi->source_alt_ref_active = 0;
1903 cpi->common.refresh_alt_ref_frame = 0;
1905 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
1906 #if CONFIG_INTERNAL_STATS
1907 cpi->b_calculate_ssimg = 0;
1912 if (cpi->b_calculate_psnr)
1914 cpi->total_sq_error = 0.0;
1915 cpi->total_sq_error2 = 0.0;
1920 cpi->totalp_y = 0.0;
1921 cpi->totalp_u = 0.0;
1922 cpi->totalp_v = 0.0;
1924 cpi->tot_recode_hits = 0;
1925 cpi->summed_quality = 0;
1926 cpi->summed_weights = 0;
1929 if (cpi->b_calculate_ssimg)
1931 cpi->total_ssimg_y = 0;
1932 cpi->total_ssimg_u = 0;
1933 cpi->total_ssimg_v = 0;
1934 cpi->total_ssimg_all = 0;
1939 cpi->first_time_stamp_ever = 0x7FFFFFFF;
1941 cpi->frames_till_gf_update_due = 0;
1942 cpi->key_frame_count = 1;
1944 cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
1947 cpi->total_byte_count = 0;
1949 cpi->drop_frame = 0;
1951 cpi->rate_correction_factor = 1.0;
1952 cpi->key_frame_rate_correction_factor = 1.0;
1953 cpi->gf_rate_correction_factor = 1.0;
1954 cpi->twopass.est_max_qcorrection_factor = 1.0;
1956 for (i = 0; i < KEY_FRAME_CONTEXT; i++)
1958 cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
/* Optional debug outputs — only in instrumented builds. */
1961 #ifdef OUTPUT_YUV_SRC
1962 yuv_file = fopen("bd.yuv", "ab");
1966 framepsnr = fopen("framepsnr.stt", "a");
1967 kf_list = fopen("kf_list.stt", "w");
1970 cpi->output_pkt_list = oxcf->output_pkt_list;
/* Per-pass initialization (two-pass stats wiring). */
1972 #if !(CONFIG_REALTIME_ONLY)
1976 vp8_init_first_pass(cpi);
1978 else if (cpi->pass == 2)
1980 size_t packet_sz = sizeof(FIRSTPASS_STATS);
1981 int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
1983 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
1984 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
1985 cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
1986 + (packets - 1) * packet_sz);
1987 vp8_init_second_pass(cpi);
1992 if (cpi->compressor_speed == 2)
1994 cpi->avg_encode_time = 0;
1995 cpi->avg_pick_mode_time = 0;
1998 vp8_set_speed_features(cpi);
2000 /* Set starting values of RD threshold multipliers (128 = *1) */
2001 for (i = 0; i < MAX_MODES; i++)
2003 cpi->mb.rd_thresh_mult[i] = 128;
2006 #ifdef VP8_ENTROPY_STATS
2007 init_mv_ref_counts();
2010 #if CONFIG_MULTITHREAD
2011 if(vp8cx_create_encoder_threads(cpi))
2013 vp8_remove_compressor(&cpi);
/* Variance / SAD function pointer tables per block size. */
2018 cpi->fn_ptr[BLOCK_16X16].sdf = vp8_sad16x16;
2019 cpi->fn_ptr[BLOCK_16X16].vf = vp8_variance16x16;
2020 cpi->fn_ptr[BLOCK_16X16].svf = vp8_sub_pixel_variance16x16;
2021 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vp8_variance_halfpixvar16x16_h;
2022 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vp8_variance_halfpixvar16x16_v;
2023 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv;
2024 cpi->fn_ptr[BLOCK_16X16].sdx3f = vp8_sad16x16x3;
2025 cpi->fn_ptr[BLOCK_16X16].sdx8f = vp8_sad16x16x8;
2026 cpi->fn_ptr[BLOCK_16X16].sdx4df = vp8_sad16x16x4d;
2028 cpi->fn_ptr[BLOCK_16X8].sdf = vp8_sad16x8;
2029 cpi->fn_ptr[BLOCK_16X8].vf = vp8_variance16x8;
2030 cpi->fn_ptr[BLOCK_16X8].svf = vp8_sub_pixel_variance16x8;
2031 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
2032 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
2033 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
2034 cpi->fn_ptr[BLOCK_16X8].sdx3f = vp8_sad16x8x3;
2035 cpi->fn_ptr[BLOCK_16X8].sdx8f = vp8_sad16x8x8;
2036 cpi->fn_ptr[BLOCK_16X8].sdx4df = vp8_sad16x8x4d;
2038 cpi->fn_ptr[BLOCK_8X16].sdf = vp8_sad8x16;
2039 cpi->fn_ptr[BLOCK_8X16].vf = vp8_variance8x16;
2040 cpi->fn_ptr[BLOCK_8X16].svf = vp8_sub_pixel_variance8x16;
2041 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
2042 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
2043 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
2044 cpi->fn_ptr[BLOCK_8X16].sdx3f = vp8_sad8x16x3;
2045 cpi->fn_ptr[BLOCK_8X16].sdx8f = vp8_sad8x16x8;
2046 cpi->fn_ptr[BLOCK_8X16].sdx4df = vp8_sad8x16x4d;
2048 cpi->fn_ptr[BLOCK_8X8].sdf = vp8_sad8x8;
2049 cpi->fn_ptr[BLOCK_8X8].vf = vp8_variance8x8;
2050 cpi->fn_ptr[BLOCK_8X8].svf = vp8_sub_pixel_variance8x8;
2051 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
2052 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
2053 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
2054 cpi->fn_ptr[BLOCK_8X8].sdx3f = vp8_sad8x8x3;
2055 cpi->fn_ptr[BLOCK_8X8].sdx8f = vp8_sad8x8x8;
2056 cpi->fn_ptr[BLOCK_8X8].sdx4df = vp8_sad8x8x4d;
2058 cpi->fn_ptr[BLOCK_4X4].sdf = vp8_sad4x4;
2059 cpi->fn_ptr[BLOCK_4X4].vf = vp8_variance4x4;
2060 cpi->fn_ptr[BLOCK_4X4].svf = vp8_sub_pixel_variance4x4;
2061 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
2062 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
2063 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
2064 cpi->fn_ptr[BLOCK_4X4].sdx3f = vp8_sad4x4x3;
2065 cpi->fn_ptr[BLOCK_4X4].sdx8f = vp8_sad4x4x8;
2066 cpi->fn_ptr[BLOCK_4X4].sdx4df = vp8_sad4x4x4d;
2068 #if ARCH_X86 || ARCH_X86_64
2069 cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
2070 cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
2071 cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
2072 cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
2073 cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
2076 cpi->full_search_sad = vp8_full_search_sad;
2077 cpi->diamond_search_sad = vp8_diamond_search_sad;
2078 cpi->refining_search_sad = vp8_refining_search_sad;
2080 /* make sure frame 1 is okay */
2081 cpi->mb.error_bins[0] = cpi->common.MBs;
2083 /* vp8cx_init_quantizer() is first called here. Add check in
2084 * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
2085 * called later when needed. This will avoid unnecessary calls of
2086 * vp8cx_init_quantizer() for every frame.
2088 vp8cx_init_quantizer(cpi);
2090 vp8_loop_filter_init(cm);
2092 cpi->common.error.setjmp = 0;
2094 #if CONFIG_MULTI_RES_ENCODING
2096 /* Calculate # of MBs in a row in lower-resolution level image. */
2097 if (cpi->oxcf.mr_encoder_id > 0)
2098 vp8_cal_low_res_mb_cols(cpi);
2102 /* setup RD costs to MACROBLOCK struct */
/* Cost tables are indexed by signed MV component; point at the center. */
2104 cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
2105 cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
2106 cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
2107 cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];
2109 cal_mvsadcosts(cpi->mb.mvsadcost);
2111 cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
2112 cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
2113 cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
2114 cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
2115 cpi->mb.token_costs = cpi->rd_costs.token_costs;
2117 /* setup block ptrs & offsets */
2118 vp8_setup_block_ptrs(&cpi->mb);
2119 vp8_setup_block_dptrs(&cpi->mb.e_mbd);
/* Tear down an encoder instance created by vp8_create_compressor().
 * If at least one frame was encoded, first flushes optional statistics
 * (two-pass finalization, internal PSNR/SSIM summaries, entropy/mode
 * counters, timing printouts — most behind compile-time flags), then
 * frees threads, the denoiser, compressor data, and common state.
 * NOTE(review): braces, several #endif lines and the trailing frees /
 * *ptr = NULL appear elided from this chunk. */
2125 void vp8_remove_compressor(VP8_COMP **ptr)
2127 VP8_COMP *cpi = *ptr;
2132 if (cpi && (cpi->common.current_video_frame > 0))
2134 #if !(CONFIG_REALTIME_ONLY)
/* presumably guarded by `if (cpi->pass == 2)` — elided in this chunk */
2138 vp8_end_second_pass(cpi);
2143 #ifdef VP8_ENTROPY_STATS
2144 print_context_counters();
2145 print_tree_update_probs();
2146 print_mode_context();
/* Internal-stats builds append summary PSNR/SSIM lines to opsnr.stt. */
2149 #if CONFIG_INTERNAL_STATS
2153 FILE *f = fopen("opsnr.stt", "a");
2154 double time_encoded = (cpi->last_end_time_stamp_seen
2155 - cpi->first_time_stamp_ever) / 10000000.000;
2156 double total_encode_time = (cpi->time_receive_data +
2157 cpi->time_compress_data) / 1000.000;
2158 double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;
2160 if (cpi->b_calculate_psnr)
2162 YV12_BUFFER_CONFIG *lst_yv12 =
2163 &cpi->common.yv12_fb[cpi->common.lst_fb_idx];
2165 if (cpi->oxcf.number_of_layers > 1)
2169 fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2170 "GLPsnrP\tVPXSSIM\t\n");
2171 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2173 double dr = (double)cpi->bytes_in_layer[i] *
2174 8.0 / 1000.0 / time_encoded;
2175 double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
2176 lst_yv12->y_width * lst_yv12->y_height;
2178 vpx_sse_to_psnr(samples, 255.0,
2179 cpi->total_error2[i]);
2180 double total_psnr2 =
2181 vpx_sse_to_psnr(samples, 255.0,
2182 cpi->total_error2_p[i]);
2183 double total_ssim = 100 * pow(cpi->sum_ssim[i] /
2184 cpi->sum_weights[i], 8.0);
2186 fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2189 cpi->sum_psnr[i] / cpi->frames_in_layer[i],
2191 cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
2192 total_psnr2, total_ssim);
2197 double samples = 3.0 / 2 * cpi->count *
2198 lst_yv12->y_width * lst_yv12->y_height;
2199 double total_psnr = vpx_sse_to_psnr(samples, 255.0,
2200 cpi->total_sq_error);
2201 double total_psnr2 = vpx_sse_to_psnr(samples, 255.0,
2202 cpi->total_sq_error2);
2203 double total_ssim = 100 * pow(cpi->summed_quality /
2204 cpi->summed_weights, 8.0);
2206 fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2207 "GLPsnrP\tVPXSSIM\t Time(us)\n");
2208 fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2210 dr, cpi->total / cpi->count, total_psnr,
2211 cpi->totalp / cpi->count, total_psnr2,
2212 total_ssim, total_encode_time);
2216 if (cpi->b_calculate_ssimg)
2218 if (cpi->oxcf.number_of_layers > 1)
2222 fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2224 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2226 double dr = (double)cpi->bytes_in_layer[i] *
2227 8.0 / 1000.0 / time_encoded;
2228 fprintf(f, "%5d\t%7.3f\t%6.4f\t"
2229 "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
2231 cpi->total_ssimg_y_in_layer[i] /
2232 cpi->frames_in_layer[i],
2233 cpi->total_ssimg_u_in_layer[i] /
2234 cpi->frames_in_layer[i],
2235 cpi->total_ssimg_v_in_layer[i] /
2236 cpi->frames_in_layer[i],
2237 cpi->total_ssimg_all_in_layer[i] /
2238 cpi->frames_in_layer[i],
2244 fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2246 fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
2247 cpi->total_ssimg_y / cpi->count,
2248 cpi->total_ssimg_u / cpi->count,
2249 cpi->total_ssimg_v / cpi->count,
2250 cpi->total_ssimg_all / cpi->count, total_encode_time);
2256 f = fopen("qskip.stt", "a");
2257 fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
2268 if (cpi->compressor_speed == 2)
2271 FILE *f = fopen("cxspeed.stt", "a");
2272 cnt_pm /= cpi->common.MBs;
2274 for (i = 0; i < 16; i++)
2275 fprintf(f, "%5d", frames_at_speed[i]);
/* Mode-usage statistics dump (debug builds). */
2286 extern int count_mb_seg[4];
2287 FILE *f = fopen("modes.stt", "a");
2288 double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
2289 fprintf(f, "intra_mode in Intra Frames:\n");
2290 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
2291 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
2296 for (i = 0; i < 10; i++)
2297 fprintf(f, "%8d, ", b_modes[i]);
2303 fprintf(f, "Modes in Inter Frames:\n");
2304 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
2305 inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
2306 inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
2307 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
2312 for (i = 0; i < 15; i++)
2313 fprintf(f, "%8d, ", inter_b_modes[i]);
2318 fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
2319 fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
/* Entropy-stats builds regenerate modecontext.c from observed counts. */
2327 #ifdef VP8_ENTROPY_STATS
2330 FILE *fmode = fopen("modecontext.c", "w");
2332 fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
2333 fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
2334 fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
2336 for (i = 0; i < 10; i++)
2339 fprintf(fmode, " { /* Above Mode :  %d */\n", i);
2341 for (j = 0; j < 10; j++)
2344 fprintf(fmode, " {");
2346 for (k = 0; k < 10; k++)
/* Zero counts are written as 1 so no probability becomes 0. */
2348 if (!intra_mode_stats[i][j][k])
2349 fprintf(fmode, " %5d, ", 1);
2351 fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
2354 fprintf(fmode, "}, /* left_mode %d */\n", j);
2358 fprintf(fmode, " },\n");
2362 fprintf(fmode, "};\n");
2368 #if defined(SECTIONBITS_OUTPUT)
2373 FILE *f = fopen("tokenbits.stt", "a");
2375 for (i = 0; i < 28; i++)
2376 fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
2386 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2387 printf("\n_frames recive_data encod_mb_row compress_frame  Total\n");
2388 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
/* Free all encoder resources in dependency order. */
2394 #if CONFIG_MULTITHREAD
2395 vp8cx_remove_encoder_threads(cpi);
2398 #if CONFIG_TEMPORAL_DENOISING
2399 vp8_denoiser_free(&cpi->denoiser);
2401 dealloc_compressor_data(cpi);
2402 vpx_free(cpi->mb.ss);
2404 vpx_free(cpi->cyclic_refresh_map);
2406 vp8_remove_common(&cpi->common);
2410 #ifdef OUTPUT_YUV_SRC
/* Sum of squared differences between one plane of `orig` and `recon`.
 * The bulk of the plane is measured 16x16 blocks at a time with the
 * optimized vp8_mse16x16(); any right/bottom remainder that does not fill
 * a whole 16x16 block is accumulated with a scalar loop.
 */
static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
                                 unsigned char *recon, int recon_stride,
                                 unsigned int cols, unsigned int rows)
{
    unsigned int row, col;
    uint64_t total_sse = 0;
    int diff;

    for (row = 0; row + 16 <= rows; row += 16)
    {
        for (col = 0; col + 16 <= cols; col += 16)
        {
            unsigned int sse;

            vp8_mse16x16(orig + col, orig_stride,
                         recon + col, recon_stride,
                         &sse);
            total_sse += sse;
        }

        /* Handle odd-sized width */
        if (col < cols)
        {
            unsigned int   border_row, border_col;
            unsigned char *border_orig = orig;
            unsigned char *border_recon = recon;

            for (border_row = 0; border_row < 16; border_row++)
            {
                for (border_col = col; border_col < cols; border_col++)
                {
                    diff = border_orig[border_col] - border_recon[border_col];
                    total_sse += diff * diff;
                }

                border_orig += orig_stride;
                border_recon += recon_stride;
            }
        }

        orig += orig_stride * 16;
        recon += recon_stride * 16;
    }

    /* Handle odd-sized height */
    for (; row < rows; row++)
    {
        for (col = 0; col < cols; col++)
        {
            diff = orig[col] - recon[col];
            total_sse += diff * diff;
        }

        orig += orig_stride;
        recon += recon_stride;
    }

    /* vp8_mse16x16 may use MMX/SSE; restore FPU state before returning. */
    vp8_clear_system_state();
    return total_sse;
}
2492 static void generate_psnr_packet(VP8_COMP *cpi)
2494 YV12_BUFFER_CONFIG *orig = cpi->Source;
2495 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
2496 struct vpx_codec_cx_pkt pkt;
2499 unsigned int width = cpi->common.Width;
2500 unsigned int height = cpi->common.Height;
2502 pkt.kind = VPX_CODEC_PSNR_PKT;
2503 sse = calc_plane_error(orig->y_buffer, orig->y_stride,
2504 recon->y_buffer, recon->y_stride,
2506 pkt.data.psnr.sse[0] = sse;
2507 pkt.data.psnr.sse[1] = sse;
2508 pkt.data.psnr.samples[0] = width * height;
2509 pkt.data.psnr.samples[1] = width * height;
2511 width = (width + 1) / 2;
2512 height = (height + 1) / 2;
2514 sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
2515 recon->u_buffer, recon->uv_stride,
2517 pkt.data.psnr.sse[0] += sse;
2518 pkt.data.psnr.sse[2] = sse;
2519 pkt.data.psnr.samples[0] += width * height;
2520 pkt.data.psnr.samples[2] = width * height;
2522 sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
2523 recon->v_buffer, recon->uv_stride,
2525 pkt.data.psnr.sse[0] += sse;
2526 pkt.data.psnr.sse[3] = sse;
2527 pkt.data.psnr.samples[0] += width * height;
2528 pkt.data.psnr.samples[3] = width * height;
2530 for (i = 0; i < 4; i++)
2531 pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
2532 (double)(pkt.data.psnr.sse[i]));
2534 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
2538 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
2540 if (ref_frame_flags > 7)
2543 cpi->ref_frame_flags = ref_frame_flags;
2546 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
2548 if (ref_frame_flags > 7)
2551 cpi->common.refresh_golden_frame = 0;
2552 cpi->common.refresh_alt_ref_frame = 0;
2553 cpi->common.refresh_last_frame = 0;
2555 if (ref_frame_flags & VP8_LAST_FRAME)
2556 cpi->common.refresh_last_frame = 1;
2558 if (ref_frame_flags & VP8_GOLD_FRAME)
2559 cpi->common.refresh_golden_frame = 1;
2561 if (ref_frame_flags & VP8_ALTR_FRAME)
2562 cpi->common.refresh_alt_ref_frame = 1;
2567 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2569 VP8_COMMON *cm = &cpi->common;
2572 if (ref_frame_flag == VP8_LAST_FRAME)
2573 ref_fb_idx = cm->lst_fb_idx;
2574 else if (ref_frame_flag == VP8_GOLD_FRAME)
2575 ref_fb_idx = cm->gld_fb_idx;
2576 else if (ref_frame_flag == VP8_ALTR_FRAME)
2577 ref_fb_idx = cm->alt_fb_idx;
2581 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
2585 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2587 VP8_COMMON *cm = &cpi->common;
2591 if (ref_frame_flag == VP8_LAST_FRAME)
2592 ref_fb_idx = cm->lst_fb_idx;
2593 else if (ref_frame_flag == VP8_GOLD_FRAME)
2594 ref_fb_idx = cm->gld_fb_idx;
2595 else if (ref_frame_flag == VP8_ALTR_FRAME)
2596 ref_fb_idx = cm->alt_fb_idx;
2600 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
2604 int vp8_update_entropy(VP8_COMP *cpi, int update)
2606 VP8_COMMON *cm = &cpi->common;
2607 cm->refresh_entropy_probs = update;
2614 void vp8_write_yuv_frame(const char *name, YV12_BUFFER_CONFIG *s)
2616 FILE *yuv_file = fopen(name, "ab");
2617 unsigned char *src = s->y_buffer;
2618 int h = s->y_height;
2622 fwrite(src, s->y_width, 1, yuv_file);
2632 fwrite(src, s->uv_width, 1, yuv_file);
2633 src += s->uv_stride;
2642 fwrite(src, s->uv_width, 1, yuv_file);
2643 src += s->uv_stride;
2652 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
2654 VP8_COMMON *cm = &cpi->common;
2656 /* are we resizing the image */
2657 if (cm->horiz_scale != 0 || cm->vert_scale != 0)
2659 #if CONFIG_SPATIAL_RESAMPLING
2660 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2661 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2664 if (cm->vert_scale == 3)
2669 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2670 Scale2Ratio(cm->vert_scale, &vr, &vs);
2672 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
2673 tmp_height, hs, hr, vs, vr, 0);
2675 vp8_yv12_extend_frame_borders(&cpi->scaled_source);
2676 cpi->Source = &cpi->scaled_source;
2684 static int resize_key_frame(VP8_COMP *cpi)
2686 #if CONFIG_SPATIAL_RESAMPLING
2687 VP8_COMMON *cm = &cpi->common;
2689 /* Do we need to apply resampling for one pass cbr.
2690 * In one pass this is more limited than in two pass cbr.
2691 * The test and any change is only made once per key frame sequence.
2693 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
2695 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2696 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2697 int new_width, new_height;
2699 /* If we are below the resample DOWN watermark then scale down a
2702 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2704 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
2705 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
2707 /* Should we now start scaling back up */
2708 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2710 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
2711 cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
2714 /* Get the new height and width */
2715 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2716 Scale2Ratio(cm->vert_scale, &vr, &vs);
2717 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
2718 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
2720 /* If the image size has changed we need to reallocate the buffers
2721 * and resample the source image
2723 if ((cm->Width != new_width) || (cm->Height != new_height))
2725 cm->Width = new_width;
2726 cm->Height = new_height;
2727 vp8_alloc_compressor_data(cpi);
2728 scale_and_extend_source(cpi->un_scaled_source, cpi);
2738 static void update_alt_ref_frame_stats(VP8_COMP *cpi)
2740 VP8_COMMON *cm = &cpi->common;
2742 /* Select an interval before next GF or altref */
2743 if (!cpi->auto_gold)
2744 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2746 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
2748 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2750 /* Set the bits per frame that we should try and recover in
2751 * subsequent inter frames to account for the extra GF spend...
2752 * note that his does not apply for GF updates that occur
2753 * coincident with a key frame as the extra cost of key frames is
2754 * dealt with elsewhere.
2756 cpi->gf_overspend_bits += cpi->projected_frame_size;
2757 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2760 /* Update data structure that monitors level of reference to last GF */
2761 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2762 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2764 /* this frame refreshes means next frames don't unless specified by user */
2765 cpi->frames_since_golden = 0;
2767 /* Clear the alternate reference update pending flag. */
2768 cpi->source_alt_ref_pending = 0;
2770 /* Set the alternate reference frame active flag */
2771 cpi->source_alt_ref_active = 1;
2775 static void update_golden_frame_stats(VP8_COMP *cpi)
2777 VP8_COMMON *cm = &cpi->common;
2779 /* Update the Golden frame usage counts. */
2780 if (cm->refresh_golden_frame)
2782 /* Select an interval before next GF */
2783 if (!cpi->auto_gold)
2784 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2786 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
2788 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2790 /* Set the bits per frame that we should try and recover in
2791 * subsequent inter frames to account for the extra GF spend...
2792 * note that his does not apply for GF updates that occur
2793 * coincident with a key frame as the extra cost of key frames
2794 * is dealt with elsewhere.
2796 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
2798 /* Calcluate GF bits to be recovered
2799 * Projected size - av frame bits available for inter
2800 * frames for clip as a whole
2802 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
2805 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2809 /* Update data structure that monitors level of reference to last GF */
2810 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2811 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2813 /* this frame refreshes means next frames don't unless specified by
2816 cm->refresh_golden_frame = 0;
2817 cpi->frames_since_golden = 0;
2819 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
2820 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
2821 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
2822 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
2824 /* ******** Fixed Q test code only ************ */
2825 /* If we are going to use the ALT reference for the next group of
2826 * frames set a flag to say so.
2828 if (cpi->oxcf.fixed_q >= 0 &&
2829 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
2831 cpi->source_alt_ref_pending = 1;
2832 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
2835 if (!cpi->source_alt_ref_pending)
2836 cpi->source_alt_ref_active = 0;
2838 /* Decrement count down till next gf */
2839 if (cpi->frames_till_gf_update_due > 0)
2840 cpi->frames_till_gf_update_due--;
2843 else if (!cpi->common.refresh_alt_ref_frame)
2845 /* Decrement count down till next gf */
2846 if (cpi->frames_till_gf_update_due > 0)
2847 cpi->frames_till_gf_update_due--;
2849 if (cpi->frames_till_alt_ref_frame)
2850 cpi->frames_till_alt_ref_frame --;
2852 cpi->frames_since_golden ++;
2854 if (cpi->frames_since_golden > 1)
2856 cpi->recent_ref_frame_usage[INTRA_FRAME] +=
2857 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
2858 cpi->recent_ref_frame_usage[LAST_FRAME] +=
2859 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
2860 cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
2861 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
2862 cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
2863 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
2868 /* This function updates the reference frame probability estimates that
2869 * will be used during mode selection
2871 static void update_rd_ref_frame_probs(VP8_COMP *cpi)
2873 VP8_COMMON *cm = &cpi->common;
2875 const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
2876 const int rf_intra = rfct[INTRA_FRAME];
2877 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
2879 if (cm->frame_type == KEY_FRAME)
2881 cpi->prob_intra_coded = 255;
2882 cpi->prob_last_coded = 128;
2883 cpi->prob_gf_coded = 128;
2885 else if (!(rf_intra + rf_inter))
2887 cpi->prob_intra_coded = 63;
2888 cpi->prob_last_coded = 128;
2889 cpi->prob_gf_coded = 128;
2892 /* update reference frame costs since we can do better than what we got
2895 if (cpi->oxcf.number_of_layers == 1)
2897 if (cpi->common.refresh_alt_ref_frame)
2899 cpi->prob_intra_coded += 40;
2900 if (cpi->prob_intra_coded > 255)
2901 cpi->prob_intra_coded = 255;
2902 cpi->prob_last_coded = 200;
2903 cpi->prob_gf_coded = 1;
2905 else if (cpi->frames_since_golden == 0)
2907 cpi->prob_last_coded = 214;
2909 else if (cpi->frames_since_golden == 1)
2911 cpi->prob_last_coded = 192;
2912 cpi->prob_gf_coded = 220;
2914 else if (cpi->source_alt_ref_active)
2916 cpi->prob_gf_coded -= 20;
2918 if (cpi->prob_gf_coded < 10)
2919 cpi->prob_gf_coded = 10;
2921 if (!cpi->source_alt_ref_active)
2922 cpi->prob_gf_coded = 255;
2927 /* 1 = key, 0 = inter */
2928 static int decide_key_frame(VP8_COMP *cpi)
2930 VP8_COMMON *cm = &cpi->common;
2932 int code_key_frame = 0;
2936 if (cpi->Speed > 11)
2939 /* Clear down mmx registers */
2940 vp8_clear_system_state();
2942 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
2944 double change = 1.0 * abs((int)(cpi->mb.intra_error -
2945 cpi->last_intra_error)) / (1 + cpi->last_intra_error);
2946 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
2947 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
2948 double minerror = cm->MBs * 256;
2950 cpi->last_intra_error = cpi->mb.intra_error;
2951 cpi->last_prediction_error = cpi->mb.prediction_error;
2953 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
2954 && cpi->mb.prediction_error > minerror
2955 && (change > .25 || change2 > .25))
2957 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
2965 /* If the following are true we might as well code a key frame */
2966 if (((cpi->this_frame_percent_intra == 100) &&
2967 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
2968 ((cpi->this_frame_percent_intra > 95) &&
2969 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
2973 /* in addition if the following are true and this is not a golden frame
2974 * then code a key frame Note that on golden frames there often seems
2975 * to be a pop in intra useage anyway hence this restriction is
2976 * designed to prevent spurious key frames. The Intra pop needs to be
2979 else if (((cpi->this_frame_percent_intra > 60) &&
2980 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
2981 ((cpi->this_frame_percent_intra > 75) &&
2982 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
2983 ((cpi->this_frame_percent_intra > 90) &&
2984 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
2986 if (!cm->refresh_golden_frame)
2990 return code_key_frame;
2994 #if !(CONFIG_REALTIME_ONLY)
2995 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
3000 vp8_set_quantizer(cpi, 26);
3002 vp8_first_pass(cpi);
3007 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
3010 /* write the frame */
3015 sprintf(filename, "cx\\y%04d.raw", this_frame);
3016 yframe = fopen(filename, "wb");
3018 for (i = 0; i < frame->y_height; i++)
3019 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
3022 sprintf(filename, "cx\\u%04d.raw", this_frame);
3023 yframe = fopen(filename, "wb");
3025 for (i = 0; i < frame->uv_height; i++)
3026 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3029 sprintf(filename, "cx\\v%04d.raw", this_frame);
3030 yframe = fopen(filename, "wb");
3032 for (i = 0; i < frame->uv_height; i++)
3033 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3038 /* return of 0 means drop frame */
3040 /* Function to test for conditions that indicate we should loop
3041 * back and recode a frame.
3043 static int recode_loop_test( VP8_COMP *cpi,
3044 int high_limit, int low_limit,
3045 int q, int maxq, int minq )
3047 int force_recode = 0;
3048 VP8_COMMON *cm = &cpi->common;
3050 /* Is frame recode allowed at all
3051 * Yes if either recode mode 1 is selected or mode two is selcted
3052 * and the frame is a key frame. golden frame or alt_ref_frame
3054 if ( (cpi->sf.recode_loop == 1) ||
3055 ( (cpi->sf.recode_loop == 2) &&
3056 ( (cm->frame_type == KEY_FRAME) ||
3057 cm->refresh_golden_frame ||
3058 cm->refresh_alt_ref_frame ) ) )
3060 /* General over and under shoot tests */
3061 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
3062 ((cpi->projected_frame_size < low_limit) && (q > minq)) )
3066 /* Special Constrained quality tests */
3067 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3069 /* Undershoot and below auto cq level */
3070 if ( (q > cpi->cq_target_quality) &&
3071 (cpi->projected_frame_size <
3072 ((cpi->this_frame_target * 7) >> 3)))
3076 /* Severe undershoot and between auto and user cq level */
3077 else if ( (q > cpi->oxcf.cq_level) &&
3078 (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
3079 (cpi->active_best_quality > cpi->oxcf.cq_level))
3082 cpi->active_best_quality = cpi->oxcf.cq_level;
3087 return force_recode;
3090 static void update_reference_frames(VP8_COMP *cpi)
3092 VP8_COMMON *cm = &cpi->common;
3093 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
3095 /* At this point the new frame has been encoded.
3096 * If any buffer copy / swapping is signaled it should be done here.
3099 if (cm->frame_type == KEY_FRAME)
3101 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
3103 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3104 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3106 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
3108 #if CONFIG_MULTI_RES_ENCODING
3109 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3110 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3113 else /* For non key frames */
3115 if (cm->refresh_alt_ref_frame)
3117 assert(!cm->copy_buffer_to_arf);
3119 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
3120 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3121 cm->alt_fb_idx = cm->new_fb_idx;
3123 #if CONFIG_MULTI_RES_ENCODING
3124 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3127 else if (cm->copy_buffer_to_arf)
3129 assert(!(cm->copy_buffer_to_arf & ~0x3));
3131 if (cm->copy_buffer_to_arf == 1)
3133 if(cm->alt_fb_idx != cm->lst_fb_idx)
3135 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
3136 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3137 cm->alt_fb_idx = cm->lst_fb_idx;
3139 #if CONFIG_MULTI_RES_ENCODING
3140 cpi->current_ref_frames[ALTREF_FRAME] =
3141 cpi->current_ref_frames[LAST_FRAME];
3145 else /* if (cm->copy_buffer_to_arf == 2) */
3147 if(cm->alt_fb_idx != cm->gld_fb_idx)
3149 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
3150 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3151 cm->alt_fb_idx = cm->gld_fb_idx;
3153 #if CONFIG_MULTI_RES_ENCODING
3154 cpi->current_ref_frames[ALTREF_FRAME] =
3155 cpi->current_ref_frames[GOLDEN_FRAME];
3161 if (cm->refresh_golden_frame)
3163 assert(!cm->copy_buffer_to_gf);
3165 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
3166 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3167 cm->gld_fb_idx = cm->new_fb_idx;
3169 #if CONFIG_MULTI_RES_ENCODING
3170 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3173 else if (cm->copy_buffer_to_gf)
3175 assert(!(cm->copy_buffer_to_arf & ~0x3));
3177 if (cm->copy_buffer_to_gf == 1)
3179 if(cm->gld_fb_idx != cm->lst_fb_idx)
3181 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
3182 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3183 cm->gld_fb_idx = cm->lst_fb_idx;
3185 #if CONFIG_MULTI_RES_ENCODING
3186 cpi->current_ref_frames[GOLDEN_FRAME] =
3187 cpi->current_ref_frames[LAST_FRAME];
3191 else /* if (cm->copy_buffer_to_gf == 2) */
3193 if(cm->alt_fb_idx != cm->gld_fb_idx)
3195 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
3196 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3197 cm->gld_fb_idx = cm->alt_fb_idx;
3199 #if CONFIG_MULTI_RES_ENCODING
3200 cpi->current_ref_frames[GOLDEN_FRAME] =
3201 cpi->current_ref_frames[ALTREF_FRAME];
3208 if (cm->refresh_last_frame)
3210 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
3211 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
3212 cm->lst_fb_idx = cm->new_fb_idx;
3214 #if CONFIG_MULTI_RES_ENCODING
3215 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
3219 #if CONFIG_TEMPORAL_DENOISING
3220 if (cpi->oxcf.noise_sensitivity)
3222 /* we shouldn't have to keep multiple copies as we know in advance which
3223 * buffer we should start - for now to get something up and running
3224 * I've chosen to copy the buffers
3226 if (cm->frame_type == KEY_FRAME)
3229 vp8_yv12_copy_frame(
3231 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3233 vp8_yv12_extend_frame_borders(
3234 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3236 for (i = 2; i < MAX_REF_FRAMES - 1; i++)
3237 vp8_yv12_copy_frame(
3238 &cpi->denoiser.yv12_running_avg[LAST_FRAME],
3239 &cpi->denoiser.yv12_running_avg[i]);
3241 else /* For non key frames */
3243 vp8_yv12_extend_frame_borders(
3244 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
3246 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
3248 vp8_yv12_copy_frame(
3249 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3250 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
3252 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
3254 vp8_yv12_copy_frame(
3255 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3256 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
3258 if(cm->refresh_last_frame)
3260 vp8_yv12_copy_frame(
3261 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3262 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
3271 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
3273 const FRAME_TYPE frame_type = cm->frame_type;
3277 cm->filter_level = 0;
3281 struct vpx_usec_timer timer;
3283 vp8_clear_system_state();
3285 vpx_usec_timer_start(&timer);
3286 if (cpi->sf.auto_filter == 0)
3287 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
3290 vp8cx_pick_filter_level(cpi->Source, cpi);
3292 if (cm->filter_level > 0)
3294 vp8cx_set_alt_lf_level(cpi, cm->filter_level);
3297 vpx_usec_timer_mark(&timer);
3298 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
3301 #if CONFIG_MULTITHREAD
3302 if (cpi->b_multi_threaded)
3303 sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
3306 if (cm->filter_level > 0)
3308 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
3311 vp8_yv12_extend_frame_borders(cm->frame_to_show);
3315 static void encode_frame_to_data_rate
3318 unsigned long *size,
3319 unsigned char *dest,
3320 unsigned char* dest_end,
3321 unsigned int *frame_flags
3325 int frame_over_shoot_limit;
3326 int frame_under_shoot_limit;
3331 VP8_COMMON *cm = &cpi->common;
3332 int active_worst_qchanged = 0;
3334 #if !(CONFIG_REALTIME_ONLY)
3338 int zbin_oq_low = 0;
3341 int overshoot_seen = 0;
3342 int undershoot_seen = 0;
3345 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
3346 cpi->oxcf.optimal_buffer_level / 100);
3347 int drop_mark75 = drop_mark * 2 / 3;
3348 int drop_mark50 = drop_mark / 4;
3349 int drop_mark25 = drop_mark / 8;
3352 /* Clear down mmx registers to allow floating point in what follows */
3353 vp8_clear_system_state();
3355 #if CONFIG_MULTITHREAD
3356 /* wait for the last picture loopfilter thread done */
3357 if (cpi->b_lpf_running)
3359 sem_wait(&cpi->h_event_end_lpf);
3360 cpi->b_lpf_running = 0;
3364 if(cpi->force_next_frame_intra)
3366 cm->frame_type = KEY_FRAME; /* delayed intra frame */
3367 cpi->force_next_frame_intra = 0;
3370 /* For an alt ref frame in 2 pass we skip the call to the second pass
3371 * function that sets the target bandwidth
3373 #if !(CONFIG_REALTIME_ONLY)
3377 if (cpi->common.refresh_alt_ref_frame)
3379 /* Per frame bit target for the alt ref frame */
3380 cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
3381 /* per second target bitrate */
3382 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
3383 cpi->output_framerate);
3388 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
3390 /* Default turn off buffer to buffer copying */
3391 cm->copy_buffer_to_gf = 0;
3392 cm->copy_buffer_to_arf = 0;
3394 /* Clear zbin over-quant value and mode boost values. */
3395 cpi->mb.zbin_over_quant = 0;
3396 cpi->mb.zbin_mode_boost = 0;
3398 /* Enable or disable mode based tweaking of the zbin
3399 * For 2 Pass Only used where GF/ARF prediction quality
3400 * is above a threshold
3402 cpi->mb.zbin_mode_boost_enabled = 1;
3405 if ( cpi->gfu_boost <= 400 )
3407 cpi->mb.zbin_mode_boost_enabled = 0;
3411 /* Current default encoder behaviour for the altref sign bias */
3412 if (cpi->source_alt_ref_active)
3413 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
3415 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
3417 /* Check to see if a key frame is signaled
3418 * For two pass with auto key frame enabled cm->frame_type may already
3419 * be set, but not for one pass.
3421 if ((cm->current_video_frame == 0) ||
3422 (cm->frame_flags & FRAMEFLAGS_KEY) ||
3423 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
3425 /* Key frame from VFW/auto-keyframe/first frame */
3426 cm->frame_type = KEY_FRAME;
3429 #if CONFIG_MULTI_RES_ENCODING
3430 /* In multi-resolution encoding, frame_type is decided by lowest-resolution
3431 * encoder. Same frame_type is adopted while encoding at other resolution.
3433 if (cpi->oxcf.mr_encoder_id)
3435 LOWER_RES_FRAME_INFO* low_res_frame_info
3436 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
3438 cm->frame_type = low_res_frame_info->frame_type;
3440 if(cm->frame_type != KEY_FRAME)
3442 cpi->mr_low_res_mv_avail = 1;
3443 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
3445 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
3446 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
3447 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
3449 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
3450 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
3451 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
3453 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
3454 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
3455 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
3460 /* Set various flags etc to special state if it is a key frame */
3461 if (cm->frame_type == KEY_FRAME)
3465 // Set the loop filter deltas and segmentation map update
3466 setup_features(cpi);
3468 /* The alternate reference frame cannot be active for a key frame */
3469 cpi->source_alt_ref_active = 0;
3471 /* Reset the RD threshold multipliers to default of * 1 (128) */
3472 for (i = 0; i < MAX_MODES; i++)
3474 cpi->mb.rd_thresh_mult[i] = 128;
3479 /* Experimental code for lagged compress and one pass
3480 * Initialise one_pass GF frames stats
3481 * Update stats used for GF selection
3484 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
3486 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
3487 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
3488 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
3489 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
3490 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
3491 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
3492 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
3493 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
3494 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
3498 update_rd_ref_frame_probs(cpi);
3500 if (cpi->drop_frames_allowed)
3502 /* The reset to decimation 0 is only done here for one pass.
3503 * Once it is set two pass leaves decimation on till the next kf.
3505 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
3506 cpi->decimation_factor --;
3508 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
3509 cpi->decimation_factor = 1;
3511 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
3513 cpi->decimation_factor = 3;
3515 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
3517 cpi->decimation_factor = 2;
3519 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
3521 cpi->decimation_factor = 1;
3525 /* The following decimates the frame rate according to a regular
3526 * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
3527 * prevent buffer under-run in CBR mode. Alternatively it might be
3528 * desirable in some situations to drop frame rate but throw more bits
3531 * Note that dropping a key frame can be problematic if spatial
3532 * resampling is also active
3534 if (cpi->decimation_factor > 0)
3536 switch (cpi->decimation_factor)
3539 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
3542 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3545 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3549 /* Note that we should not throw out a key frame (especially when
3550 * spatial resampling is enabled).
3552 if (cm->frame_type == KEY_FRAME)
3554 cpi->decimation_count = cpi->decimation_factor;
3556 else if (cpi->decimation_count > 0)
3558 cpi->decimation_count --;
3560 cpi->bits_off_target += cpi->av_per_frame_bandwidth;
3561 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
3562 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
3564 #if CONFIG_MULTI_RES_ENCODING
3565 vp8_store_drop_frame_info(cpi);
3568 cm->current_video_frame++;
3569 cpi->frames_since_key++;
3570 // We advance the temporal pattern for dropped frames.
3571 cpi->temporal_pattern_counter++;
3573 #if CONFIG_INTERNAL_STATS
3577 cpi->buffer_level = cpi->bits_off_target;
3579 if (cpi->oxcf.number_of_layers > 1)
3583 /* Propagate bits saved by dropping the frame to higher
3586 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
3588 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3589 lc->bits_off_target += (int)(lc->target_bandwidth /
3591 if (lc->bits_off_target > lc->maximum_buffer_size)
3592 lc->bits_off_target = lc->maximum_buffer_size;
3593 lc->buffer_level = lc->bits_off_target;
3600 cpi->decimation_count = cpi->decimation_factor;
3603 cpi->decimation_count = 0;
3605 /* Decide how big to make the frame */
3606 if (!vp8_pick_frame_size(cpi))
3608 /*TODO: 2 drop_frame and return code could be put together. */
3609 #if CONFIG_MULTI_RES_ENCODING
3610 vp8_store_drop_frame_info(cpi);
3612 cm->current_video_frame++;
3613 cpi->frames_since_key++;
3614 // We advance the temporal pattern for dropped frames.
3615 cpi->temporal_pattern_counter++;
3619 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
3620 * This has a knock on effect on active best quality as well.
3621 * For CBR if the buffer reaches its maximum level then we can no longer
3622 * save up bits for later frames so we might as well use them up
3623 * on the current frame.
3625 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
3626 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
3628 /* Max adjustment is 1/4 */
3629 int Adjustment = cpi->active_worst_quality / 4;
3635 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
3637 buff_lvl_step = (int)
3638 ((cpi->oxcf.maximum_buffer_size -
3639 cpi->oxcf.optimal_buffer_level) /
3644 ((cpi->buffer_level -
3645 cpi->oxcf.optimal_buffer_level) /
3651 cpi->active_worst_quality -= Adjustment;
3653 if(cpi->active_worst_quality < cpi->active_best_quality)
3654 cpi->active_worst_quality = cpi->active_best_quality;
3658 /* Set an active best quality and if necessary active worst quality
3659 * There is some odd behavior for one pass here that needs attention.
3661 if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
3663 vp8_clear_system_state();
3665 Q = cpi->active_worst_quality;
3667 if ( cm->frame_type == KEY_FRAME )
3669 if ( cpi->pass == 2 )
3671 if (cpi->gfu_boost > 600)
3672 cpi->active_best_quality = kf_low_motion_minq[Q];
3674 cpi->active_best_quality = kf_high_motion_minq[Q];
3676 /* Special case for key frames forced because we have reached
3677 * the maximum key frame interval. Here force the Q to a range
3678 * based on the ambient Q to reduce the risk of popping
3680 if ( cpi->this_key_frame_forced )
3682 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
3683 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
3684 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
3685 cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
3688 /* One pass more conservative */
3690 cpi->active_best_quality = kf_high_motion_minq[Q];
3693 else if (cpi->oxcf.number_of_layers==1 &&
3694 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
3696 /* Use the lower of cpi->active_worst_quality and recent
3697 * average Q as basis for GF/ARF Q limit unless last frame was
3700 if ( (cpi->frames_since_key > 1) &&
3701 (cpi->avg_frame_qindex < cpi->active_worst_quality) )
3703 Q = cpi->avg_frame_qindex;
3706 /* For constrained quality dont allow Q less than the cq level */
3707 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3708 (Q < cpi->cq_target_quality) )
3710 Q = cpi->cq_target_quality;
3713 if ( cpi->pass == 2 )
3715 if ( cpi->gfu_boost > 1000 )
3716 cpi->active_best_quality = gf_low_motion_minq[Q];
3717 else if ( cpi->gfu_boost < 400 )
3718 cpi->active_best_quality = gf_high_motion_minq[Q];
3720 cpi->active_best_quality = gf_mid_motion_minq[Q];
3722 /* Constrained quality use slightly lower active best. */
3723 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
3725 cpi->active_best_quality =
3726 cpi->active_best_quality * 15/16;
3729 /* One pass more conservative */
3731 cpi->active_best_quality = gf_high_motion_minq[Q];
3735 cpi->active_best_quality = inter_minq[Q];
3737 /* For the constant/constrained quality mode we dont want
3738 * q to fall below the cq level.
3740 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3741 (cpi->active_best_quality < cpi->cq_target_quality) )
3743 /* If we are strongly undershooting the target rate in the last
3744 * frames then use the user passed in cq value not the auto
3747 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
3748 cpi->active_best_quality = cpi->oxcf.cq_level;
3750 cpi->active_best_quality = cpi->cq_target_quality;
3754 /* If CBR and the buffer is as full then it is reasonable to allow
3755 * higher quality on the frames to prevent bits just going to waste.
3757 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
3759 /* Note that the use of >= here elliminates the risk of a devide
3760 * by 0 error in the else if clause
3762 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
3763 cpi->active_best_quality = cpi->best_quality;
3765 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
3767 int Fraction = (int)
3768 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
3769 / (cpi->oxcf.maximum_buffer_size -
3770 cpi->oxcf.optimal_buffer_level));
3771 int min_qadjustment = ((cpi->active_best_quality -
3772 cpi->best_quality) * Fraction) / 128;
3774 cpi->active_best_quality -= min_qadjustment;
3778 /* Make sure constrained quality mode limits are adhered to for the first
3779 * few frames of one pass encodes
3781 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3783 if ( (cm->frame_type == KEY_FRAME) ||
3784 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
3786 cpi->active_best_quality = cpi->best_quality;
3788 else if (cpi->active_best_quality < cpi->cq_target_quality)
3790 cpi->active_best_quality = cpi->cq_target_quality;
3794 /* Clip the active best and worst quality values to limits */
3795 if (cpi->active_worst_quality > cpi->worst_quality)
3796 cpi->active_worst_quality = cpi->worst_quality;
3798 if (cpi->active_best_quality < cpi->best_quality)
3799 cpi->active_best_quality = cpi->best_quality;
3801 if ( cpi->active_worst_quality < cpi->active_best_quality )
3802 cpi->active_worst_quality = cpi->active_best_quality;
3804 /* Determine initial Q to try */
3805 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3807 #if !(CONFIG_REALTIME_ONLY)
3809 /* Set highest allowed value for Zbin over quant */
3810 if (cm->frame_type == KEY_FRAME)
3812 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
3813 (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
3818 zbin_oq_high = ZBIN_OQ_MAX;
3821 /* Setup background Q adjustment for error resilient mode.
3822 * For multi-layer encodes only enable this for the base layer.
3824 if (cpi->cyclic_refresh_mode_enabled)
3826 if (cpi->current_layer==0)
3827 cyclic_background_refresh(cpi, Q, 0);
3829 disable_segmentation(cpi);
3832 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
3834 #if !(CONFIG_REALTIME_ONLY)
3835 /* Limit Q range for the adaptive loop. */
3836 bottom_index = cpi->active_best_quality;
3837 top_index = cpi->active_worst_quality;
3838 q_low = cpi->active_best_quality;
3839 q_high = cpi->active_worst_quality;
3842 vp8_save_coding_context(cpi);
3846 scale_and_extend_source(cpi->un_scaled_source, cpi);
3848 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
3850 if (cpi->oxcf.noise_sensitivity > 0)
3855 switch (cpi->oxcf.noise_sensitivity)
3878 if (cm->frame_type == KEY_FRAME)
3880 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3884 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3886 src = cpi->Source->y_buffer;
3888 if (cpi->Source->y_stride < 0)
3890 src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
3897 #ifdef OUTPUT_YUV_SRC
3898 vp8_write_yuv_frame(cpi->Source);
3903 vp8_clear_system_state();
3905 vp8_set_quantizer(cpi, Q);
3907 /* setup skip prob for costing in mode/mv decision */
3908 if (cpi->common.mb_no_coeff_skip)
3910 cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
3912 if (cm->frame_type != KEY_FRAME)
3914 if (cpi->common.refresh_alt_ref_frame)
3916 if (cpi->last_skip_false_probs[2] != 0)
3917 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3920 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
3921 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3922 else if (cpi->last_skip_false_probs[2]!=0)
3923 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
3926 else if (cpi->common.refresh_golden_frame)
3928 if (cpi->last_skip_false_probs[1] != 0)
3929 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3932 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
3933 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3934 else if (cpi->last_skip_false_probs[1]!=0)
3935 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
3940 if (cpi->last_skip_false_probs[0] != 0)
3941 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3944 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
3945 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3946 else if(cpi->last_skip_false_probs[0]!=0)
3947 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
3951 /* as this is for cost estimate, let's make sure it does not
3952 * go extreme eitehr way
3954 if (cpi->prob_skip_false < 5)
3955 cpi->prob_skip_false = 5;
3957 if (cpi->prob_skip_false > 250)
3958 cpi->prob_skip_false = 250;
3960 if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
3961 cpi->prob_skip_false = 1;
3968 FILE *f = fopen("skip.stt", "a");
3969 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
3977 if (cm->frame_type == KEY_FRAME)
3979 if(resize_key_frame(cpi))
3981 /* If the frame size has changed, need to reset Q, quantizer,
3982 * and background refresh.
3984 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3985 if (cpi->cyclic_refresh_mode_enabled)
3987 if (cpi->current_layer==0)
3988 cyclic_background_refresh(cpi, Q, 0);
3990 disable_segmentation(cpi);
3992 vp8_set_quantizer(cpi, Q);
3995 vp8_setup_key_frame(cpi);
4000 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
4002 if(cpi->oxcf.error_resilient_mode)
4003 cm->refresh_entropy_probs = 0;
4005 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
4007 if (cm->frame_type == KEY_FRAME)
4008 cm->refresh_entropy_probs = 1;
4011 if (cm->refresh_entropy_probs == 0)
4013 /* save a copy for later refresh */
4014 vpx_memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
4017 vp8_update_coef_context(cpi);
4019 vp8_update_coef_probs(cpi);
4021 /* transform / motion compensation build reconstruction frame
4022 * +pack coef partitions
4024 vp8_encode_frame(cpi);
4026 /* cpi->projected_frame_size is not needed for RT mode */
4029 /* transform / motion compensation build reconstruction frame */
4030 vp8_encode_frame(cpi);
4032 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
4033 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
4035 vp8_clear_system_state();
4037 /* Test to see if the stats generated for this frame indicate that
4038 * we should have coded a key frame (assuming that we didn't)!
4041 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
4042 && cpi->compressor_speed != 2)
4044 #if !(CONFIG_REALTIME_ONLY)
4045 if (decide_key_frame(cpi))
4047 /* Reset all our sizing numbers and recode */
4048 cm->frame_type = KEY_FRAME;
4050 vp8_pick_frame_size(cpi);
4052 /* Clear the Alt reference frame active flag when we have
4055 cpi->source_alt_ref_active = 0;
4057 // Set the loop filter deltas and segmentation map update
4058 setup_features(cpi);
4060 vp8_restore_coding_context(cpi);
4062 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4064 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4066 /* Limit Q range for the adaptive loop. */
4067 bottom_index = cpi->active_best_quality;
4068 top_index = cpi->active_worst_quality;
4069 q_low = cpi->active_best_quality;
4070 q_high = cpi->active_worst_quality;
4080 vp8_clear_system_state();
4082 if (frame_over_shoot_limit == 0)
4083 frame_over_shoot_limit = 1;
4085 /* Are we are overshooting and up against the limit of active max Q. */
4086 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
4087 (Q == cpi->active_worst_quality) &&
4088 (cpi->active_worst_quality < cpi->worst_quality) &&
4089 (cpi->projected_frame_size > frame_over_shoot_limit))
4091 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
4093 /* If so is there any scope for relaxing it */
4094 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
4096 cpi->active_worst_quality++;
4097 /* Assume 1 qstep = about 4% on frame size. */
4098 over_size_percent = (int)(over_size_percent * 0.96);
4100 #if !(CONFIG_REALTIME_ONLY)
4101 top_index = cpi->active_worst_quality;
4103 /* If we have updated the active max Q do not call
4104 * vp8_update_rate_correction_factors() this loop.
4106 active_worst_qchanged = 1;
4109 active_worst_qchanged = 0;
4111 #if !(CONFIG_REALTIME_ONLY)
4112 /* Special case handling for forced key frames */
4113 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
4116 int kf_err = vp8_calc_ss_err(cpi->Source,
4117 &cm->yv12_fb[cm->new_fb_idx]);
4119 /* The key frame is not good enough */
4120 if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
4123 q_high = (Q > q_low) ? (Q - 1) : q_low;
4126 Q = (q_high + q_low) >> 1;
4128 /* The key frame is much better than the previous frame */
4129 else if ( kf_err < (cpi->ambient_err >> 1) )
4132 q_low = (Q < q_high) ? (Q + 1) : q_high;
4135 Q = (q_high + q_low + 1) >> 1;
4138 /* Clamp Q to upper and lower limits: */
4147 /* Is the projected frame size out of range and are we allowed
4148 * to attempt to recode.
4150 else if ( recode_loop_test( cpi,
4151 frame_over_shoot_limit, frame_under_shoot_limit,
4152 Q, top_index, bottom_index ) )
4157 /* Frame size out of permitted range. Update correction factor
4158 * & compute new Q to try...
4161 /* Frame is too large */
4162 if (cpi->projected_frame_size > cpi->this_frame_target)
4164 /* Raise Qlow as to at least the current value */
4165 q_low = (Q < q_high) ? (Q + 1) : q_high;
4167 /* If we are using over quant do the same for zbin_oq_low */
4168 if (cpi->mb.zbin_over_quant > 0)
4169 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4170 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4172 if (undershoot_seen)
4174 /* Update rate_correction_factor unless
4175 * cpi->active_worst_quality has changed.
4177 if (!active_worst_qchanged)
4178 vp8_update_rate_correction_factors(cpi, 1);
4180 Q = (q_high + q_low + 1) / 2;
4182 /* Adjust cpi->zbin_over_quant (only allowed when Q
4186 cpi->mb.zbin_over_quant = 0;
4189 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4190 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4191 cpi->mb.zbin_over_quant =
4192 (zbin_oq_high + zbin_oq_low) / 2;
4197 /* Update rate_correction_factor unless
4198 * cpi->active_worst_quality has changed.
4200 if (!active_worst_qchanged)
4201 vp8_update_rate_correction_factors(cpi, 0);
4203 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4205 while (((Q < q_low) ||
4206 (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
4209 vp8_update_rate_correction_factors(cpi, 0);
4210 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4217 /* Frame is too small */
4220 if (cpi->mb.zbin_over_quant == 0)
4221 /* Lower q_high if not using over quant */
4222 q_high = (Q > q_low) ? (Q - 1) : q_low;
4224 /* else lower zbin_oq_high */
4225 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
4226 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
4230 /* Update rate_correction_factor unless
4231 * cpi->active_worst_quality has changed.
4233 if (!active_worst_qchanged)
4234 vp8_update_rate_correction_factors(cpi, 1);
4236 Q = (q_high + q_low) / 2;
4238 /* Adjust cpi->zbin_over_quant (only allowed when Q
4242 cpi->mb.zbin_over_quant = 0;
4244 cpi->mb.zbin_over_quant =
4245 (zbin_oq_high + zbin_oq_low) / 2;
4249 /* Update rate_correction_factor unless
4250 * cpi->active_worst_quality has changed.
4252 if (!active_worst_qchanged)
4253 vp8_update_rate_correction_factors(cpi, 0);
4255 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4257 /* Special case reset for qlow for constrained quality.
4258 * This should only trigger where there is very substantial
4259 * undershoot on a frame and the auto cq level is above
4260 * the user passsed in value.
4262 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4268 while (((Q > q_high) ||
4269 (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
4272 vp8_update_rate_correction_factors(cpi, 0);
4273 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4278 undershoot_seen = 1;
4281 /* Clamp Q to upper and lower limits: */
4287 /* Clamp cpi->zbin_over_quant */
4288 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
4289 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
4290 zbin_oq_high : cpi->mb.zbin_over_quant;
4298 if (cpi->is_src_frame_alt_ref)
4303 vp8_restore_coding_context(cpi);
4305 #if CONFIG_INTERNAL_STATS
4306 cpi->tot_recode_hits++;
4313 /* Experimental code for lagged and one pass
4314 * Update stats used for one pass GF selection
4317 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
4318 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
4319 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
4323 /* Special case code to reduce pulsing when key frames are forced at a
4324 * fixed interval. Note the reconstruction error if it is the frame before
4325 * the force key frame
4327 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
4329 cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
4330 &cm->yv12_fb[cm->new_fb_idx]);
4333 /* This frame's MVs are saved and will be used in next frame's MV predictor.
4334 * Last frame has one more line(add to bottom) and one more column(add to
4335 * right) than cm->mip. The edge elements are initialized to 0.
4337 #if CONFIG_MULTI_RES_ENCODING
4338 if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
4340 if(cm->show_frame) /* do not save for altref frame */
4345 /* Point to beginning of allocated MODE_INFO arrays. */
4346 MODE_INFO *tmp = cm->mip;
4348 if(cm->frame_type != KEY_FRAME)
4350 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
4352 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
4354 if(tmp->mbmi.ref_frame != INTRA_FRAME)
4355 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
4357 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
4358 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
4365 /* Count last ref frame 0,0 usage on current encoded frame. */
4369 /* Point to beginning of MODE_INFO arrays. */
4370 MODE_INFO *tmp = cm->mi;
4372 cpi->zeromv_count = 0;
4374 if(cm->frame_type != KEY_FRAME)
4376 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
4378 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
4380 if(tmp->mbmi.mode == ZEROMV)
4381 cpi->zeromv_count++;
4389 #if CONFIG_MULTI_RES_ENCODING
4390 vp8_cal_dissimilarity(cpi);
4393 /* Update the GF useage maps.
4394 * This is done after completing the compression of a frame when all
4395 * modes etc. are finalized but before loop filter
4397 if (cpi->oxcf.number_of_layers == 1)
4398 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
4400 if (cm->frame_type == KEY_FRAME)
4401 cm->refresh_last_frame = 1;
4405 FILE *f = fopen("gfactive.stt", "a");
4406 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
4411 /* For inter frames the current default behavior is that when
4412 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
4413 * This is purely an encoder decision at present.
4415 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
4416 cm->copy_buffer_to_arf = 2;
4418 cm->copy_buffer_to_arf = 0;
4420 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
4422 #if CONFIG_MULTITHREAD
4423 if (cpi->b_multi_threaded)
4425 /* start loopfilter in separate thread */
4426 sem_post(&cpi->h_event_start_lpf);
4427 cpi->b_lpf_running = 1;
4432 vp8_loopfilter_frame(cpi, cm);
4435 update_reference_frames(cpi);
4437 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
4438 if (cpi->oxcf.error_resilient_mode)
4440 cm->refresh_entropy_probs = 0;
4444 #if CONFIG_MULTITHREAD
4445 /* wait that filter_level is picked so that we can continue with stream packing */
4446 if (cpi->b_multi_threaded)
4447 sem_wait(&cpi->h_event_end_lpf);
4450 /* build the bitstream */
4451 vp8_pack_bitstream(cpi, dest, dest_end, size);
4453 #if CONFIG_MULTITHREAD
4454 /* if PSNR packets are generated we have to wait for the lpf */
4455 if (cpi->b_lpf_running && cpi->b_calculate_psnr)
4457 sem_wait(&cpi->h_event_end_lpf);
4458 cpi->b_lpf_running = 0;
4462 /* Move storing frame_type out of the above loop since it is also
4463 * needed in motion search besides loopfilter */
4464 cm->last_frame_type = cm->frame_type;
4466 /* Update rate control heuristics */
4467 cpi->total_byte_count += (*size);
4468 cpi->projected_frame_size = (*size) << 3;
4470 if (cpi->oxcf.number_of_layers > 1)
4473 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4474 cpi->layer_context[i].total_byte_count += (*size);
4477 if (!active_worst_qchanged)
4478 vp8_update_rate_correction_factors(cpi, 2);
4480 cpi->last_q[cm->frame_type] = cm->base_qindex;
4482 if (cm->frame_type == KEY_FRAME)
4484 vp8_adjust_key_frame_context(cpi);
4487 /* Keep a record of ambient average Q. */
4488 if (cm->frame_type != KEY_FRAME)
4489 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
4491 /* Keep a record from which we can calculate the average Q excluding
4492 * GF updates and key frames
4494 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
4495 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
4499 /* Calculate the average Q for normal inter frames (not key or GFU
4502 if ( cpi->pass == 2 )
4504 cpi->ni_tot_qi += Q;
4505 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4509 /* Damp value for first few frames */
4510 if (cpi->ni_frames > 150 )
4512 cpi->ni_tot_qi += Q;
4513 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4515 /* For one pass, early in the clip ... average the current frame Q
4516 * value with the worstq entered by the user as a dampening measure
4520 cpi->ni_tot_qi += Q;
4521 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
4524 /* If the average Q is higher than what was used in the last
4525 * frame (after going through the recode loop to keep the frame
4526 * size within range) then use the last frame value - 1. The -1
4527 * is designed to stop Q and hence the data rate, from
4528 * progressively falling away during difficult sections, but at
4529 * the same time reduce the number of itterations around the
4532 if (Q > cpi->ni_av_qi)
4533 cpi->ni_av_qi = Q - 1;
4537 /* Update the buffer level variable. */
4538 /* Non-viewable frames are a special case and are treated as pure overhead. */
4539 if ( !cm->show_frame )
4540 cpi->bits_off_target -= cpi->projected_frame_size;
4542 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
4544 /* Clip the buffer level to the maximum specified buffer size */
4545 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
4546 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
4548 /* Rolling monitors of whether we are over or underspending used to
4549 * help regulate min and Max Q in two pass.
4551 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
4552 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
4553 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
4554 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
4556 /* Actual bits spent */
4557 cpi->total_actual_bits += cpi->projected_frame_size;
4560 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
4562 cpi->buffer_level = cpi->bits_off_target;
4564 /* Propagate values to higher temporal layers */
4565 if (cpi->oxcf.number_of_layers > 1)
4569 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4571 LAYER_CONTEXT *lc = &cpi->layer_context[i];
4572 int bits_off_for_this_layer =
4573 (int)(lc->target_bandwidth / lc->framerate -
4574 cpi->projected_frame_size);
4576 lc->bits_off_target += bits_off_for_this_layer;
4578 /* Clip buffer level to maximum buffer size for the layer */
4579 if (lc->bits_off_target > lc->maximum_buffer_size)
4580 lc->bits_off_target = lc->maximum_buffer_size;
4582 lc->total_actual_bits += cpi->projected_frame_size;
4583 lc->total_target_vs_actual += bits_off_for_this_layer;
4584 lc->buffer_level = lc->bits_off_target;
4588 /* Update bits left to the kf and gf groups to account for overshoot
4589 * or undershoot on these frames
4591 if (cm->frame_type == KEY_FRAME)
4593 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4595 if (cpi->twopass.kf_group_bits < 0)
4596 cpi->twopass.kf_group_bits = 0 ;
4598 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
4600 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4602 if (cpi->twopass.gf_group_bits < 0)
4603 cpi->twopass.gf_group_bits = 0 ;
4606 if (cm->frame_type != KEY_FRAME)
4608 if (cpi->common.refresh_alt_ref_frame)
4610 cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
4611 cpi->last_skip_probs_q[2] = cm->base_qindex;
4613 else if (cpi->common.refresh_golden_frame)
4615 cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
4616 cpi->last_skip_probs_q[1] = cm->base_qindex;
4620 cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
4621 cpi->last_skip_probs_q[0] = cm->base_qindex;
4623 /* update the baseline */
4624 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
4629 #if 0 && CONFIG_INTERNAL_STATS
4631 FILE *f = fopen("tmp.stt", "a");
4633 vp8_clear_system_state();
4635 if (cpi->twopass.total_left_stats.coded_error != 0.0)
4636 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4637 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4638 "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
4639 cpi->common.current_video_frame, cpi->this_frame_target,
4640 cpi->projected_frame_size,
4641 (cpi->projected_frame_size - cpi->this_frame_target),
4642 cpi->total_target_vs_actual,
4644 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4645 cpi->total_actual_bits, cm->base_qindex,
4646 cpi->active_best_quality, cpi->active_worst_quality,
4647 cpi->ni_av_qi, cpi->cq_target_quality,
4648 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4649 cm->frame_type, cpi->gfu_boost,
4650 cpi->twopass.est_max_qcorrection_factor,
4651 cpi->twopass.bits_left,
4652 cpi->twopass.total_left_stats.coded_error,
4653 (double)cpi->twopass.bits_left /
4654 cpi->twopass.total_left_stats.coded_error,
4655 cpi->tot_recode_hits);
4657 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4658 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4659 "%8.2lf %"PRId64" %10.3lf %8d\n",
4660 cpi->common.current_video_frame, cpi->this_frame_target,
4661 cpi->projected_frame_size,
4662 (cpi->projected_frame_size - cpi->this_frame_target),
4663 cpi->total_target_vs_actual,
4665 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4666 cpi->total_actual_bits, cm->base_qindex,
4667 cpi->active_best_quality, cpi->active_worst_quality,
4668 cpi->ni_av_qi, cpi->cq_target_quality,
4669 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4670 cm->frame_type, cpi->gfu_boost,
4671 cpi->twopass.est_max_qcorrection_factor,
4672 cpi->twopass.bits_left,
4673 cpi->twopass.total_left_stats.coded_error,
4674 cpi->tot_recode_hits);
4679 FILE *fmodes = fopen("Modes.stt", "a");
4681 fprintf(fmodes, "%6d:%1d:%1d:%1d ",
4682 cpi->common.current_video_frame,
4683 cm->frame_type, cm->refresh_golden_frame,
4684 cm->refresh_alt_ref_frame);
4686 fprintf(fmodes, "\n");
4694 if (cm->refresh_golden_frame == 1)
4695 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
4697 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
4699 if (cm->refresh_alt_ref_frame == 1)
4700 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
4702 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
4705 if (cm->refresh_last_frame & cm->refresh_golden_frame)
4706 /* both refreshed */
4707 cpi->gold_is_last = 1;
4708 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
4709 /* 1 refreshed but not the other */
4710 cpi->gold_is_last = 0;
4712 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
4713 /* both refreshed */
4714 cpi->alt_is_last = 1;
4715 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
4716 /* 1 refreshed but not the other */
4717 cpi->alt_is_last = 0;
4719 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
4720 /* both refreshed */
4721 cpi->gold_is_alt = 1;
4722 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
4723 /* 1 refreshed but not the other */
4724 cpi->gold_is_alt = 0;
4726 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
4728 if (cpi->gold_is_last)
4729 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
4731 if (cpi->alt_is_last)
4732 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4734 if (cpi->gold_is_alt)
4735 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4738 if (!cpi->oxcf.error_resilient_mode)
4740 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
4741 /* Update the alternate reference frame stats as appropriate. */
4742 update_alt_ref_frame_stats(cpi);
4744 /* Update the Golden frame stats as appropriate. */
4745 update_golden_frame_stats(cpi);
4748 if (cm->frame_type == KEY_FRAME)
4750 /* Tell the caller that the frame was coded as a key frame */
4751 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
4753 /* As this frame is a key frame the next defaults to an inter frame. */
4754 cm->frame_type = INTER_FRAME;
4756 cpi->last_frame_percent_intra = 100;
4760 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
4762 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
4765 /* Clear the one shot update flags for segmentation map and mode/ref
4766 * loop filter deltas.
4768 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
4769 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
4770 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
4773 /* Dont increment frame counters if this was an altref buffer update
4778 cm->current_video_frame++;
4779 cpi->frames_since_key++;
4780 cpi->temporal_pattern_counter++;
4783 /* reset to normal state now that we are done. */
4791 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
4792 recon_file = fopen(filename, "wb");
4793 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
4794 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
4800 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
4804 #if !(CONFIG_REALTIME_ONLY)
4805 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
4808 if (!cpi->common.refresh_alt_ref_frame)
4809 vp8_second_pass(cpi);
4811 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
4812 cpi->twopass.bits_left -= 8 * *size;
4814 if (!cpi->common.refresh_alt_ref_frame)
4816 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
4817 *cpi->oxcf.two_pass_vbrmin_section / 100);
4818 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
4823 int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
4825 struct vpx_usec_timer timer;
4828 vpx_usec_timer_start(&timer);
4830 /* Reinit the lookahead buffer if the frame size changes */
4831 if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
4833 assert(cpi->oxcf.lag_in_frames < 2);
4834 dealloc_raw_frame_buffers(cpi);
4835 alloc_raw_frame_buffers(cpi);
4838 if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
4839 frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))
4841 vpx_usec_timer_mark(&timer);
4842 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
/*
 * frame_is_reference: return nonzero when the just-coded frame affects
 * decoding of later frames -- i.e. it is a key frame, it refreshes or copies
 * any reference buffer (last/golden/alt-ref), or it updates persistent
 * decoder state (entropy probs, loop-filter deltas, segmentation map/data).
 * Used below to decide whether the frame is droppable (see the assignment to
 * cpi->droppable in vp8_get_compressed_data).
 */
4848 static int frame_is_reference(const VP8_COMP *cpi)
4850 const VP8_COMMON *cm = &cpi->common;
4851 const MACROBLOCKD *xd = &cpi->mb.e_mbd;
4853 return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
4854 || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
4855 || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
4856 || cm->refresh_entropy_probs
4857 || xd->mode_ref_lf_delta_update
4858 || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
/*
 * vp8_get_compressed_data: main per-frame encode entry point.  Pops (or, for
 * alt-ref filtering, peeks) a source frame from the lookahead, adapts the
 * frame rate from timestamps, dispatches to first-pass / second-pass /
 * single-pass encoding, then saves entropy contexts, layer state and
 * (optionally) internal PSNR/SSIM statistics.
 *
 * NOTE(review): sampled extract -- braces, else branches and several interior
 * statements sit on elided lines; the comments below describe only what the
 * visible lines establish.
 */
4862 int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
4865 struct vpx_usec_timer tsctimer;
4866 struct vpx_usec_timer ticktimer;
4867 struct vpx_usec_timer cmptimer;
4868 YV12_BUFFER_CONFIG *force_src_buffer = NULL;
/* Non-local error recovery: internal errors longjmp back here and surface
 * as VPX_CODEC_CORRUPT_FRAME to the caller. */
4875 if (setjmp(cpi->common.error.jmp))
4877 cpi->common.error.setjmp = 0;
4878 return VPX_CODEC_CORRUPT_FRAME;
4881 cpi->common.error.setjmp = 1;
4883 vpx_usec_timer_start(&cmptimer);
4887 #if !(CONFIG_REALTIME_ONLY)
4888 /* Should we code an alternate reference frame */
4889 if (cpi->oxcf.error_resilient_mode == 0 &&
4890 cpi->oxcf.play_alternate &&
4891 cpi->source_alt_ref_pending)
/* Peek ahead to the frame the golden-frame update is due on; that frame
 * becomes the alt-ref source. */
4893 if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
4894 cpi->frames_till_gf_update_due,
4897 cpi->alt_ref_source = cpi->source;
4898 if (cpi->oxcf.arnr_max_frames > 0)
/* Temporal (ARNR) filtering writes into cpi->alt_ref_buffer, which then
 * replaces the raw source for this encode. */
4900 vp8_temporal_filter_prepare_c(cpi,
4901 cpi->frames_till_gf_update_due);
4902 force_src_buffer = &cpi->alt_ref_buffer;
4904 cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
/* An alt-ref encode updates only the alt-ref buffer. */
4905 cm->refresh_alt_ref_frame = 1;
4906 cm->refresh_golden_frame = 0;
4907 cm->refresh_last_frame = 0;
4909 /* Clear Pending alt Ref flag. */
4910 cpi->source_alt_ref_pending = 0;
4911 cpi->is_src_frame_alt_ref = 0;
4918 /* Read last frame source if we are encoding first pass. */
4919 if (cpi->pass == 1 && cm->current_video_frame > 0)
4921 if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
4922 PEEK_BACKWARD)) == NULL)
/* Normal path: pop the next queued source frame (flush drains the queue). */
4927 if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
/* Detect the frame that was previously coded as the alt-ref so it can be
 * treated specially (no second encode of the same content). */
4931 cpi->is_src_frame_alt_ref = cpi->alt_ref_source
4932 && (cpi->source == cpi->alt_ref_source);
4934 if(cpi->is_src_frame_alt_ref)
4935 cpi->alt_ref_source = NULL;
4941 cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
4942 cpi->un_scaled_source = cpi->Source;
/* Report the source frame's timestamps and flags back to the caller. */
4943 *time_stamp = cpi->source->ts_start;
4944 *time_end = cpi->source->ts_end;
4945 *frame_flags = cpi->source->flags;
4947 if (cpi->pass == 1 && cm->current_video_frame > 0)
4949 cpi->last_frame_unscaled_source = &cpi->last_source->img;
4955 #if !(CONFIG_REALTIME_ONLY)
/* On flush in pass 1, emit the final first-pass stats packet exactly once. */
4957 if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
4959 vp8_end_first_pass(cpi); /* get last stats packet */
4960 cpi->twopass.first_pass_done = 1;
4968 if (cpi->source->ts_start < cpi->first_time_stamp_ever)
4970 cpi->first_time_stamp_ever = cpi->source->ts_start;
4971 cpi->last_end_time_stamp_seen = cpi->source->ts_start;
4974 /* adjust frame rates based on timestamps given */
4977 int64_t this_duration;
4980 if (cpi->source->ts_start == cpi->first_time_stamp_ever)
4982 this_duration = cpi->source->ts_end - cpi->source->ts_start;
4987 int64_t last_duration;
4989 this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
4990 last_duration = cpi->last_end_time_stamp_seen
4991 - cpi->last_time_stamp_seen;
4992 /* do a step update if the duration changes by 10% */
4994 step = (int)(((this_duration - last_duration) *
4995 10 / last_duration));
/* Timestamps are in 1/10,000,000 second units throughout this section. */
5001 cpi->ref_framerate = 10000000.0 / this_duration;
5004 double avg_duration, interval;
5006 /* Average this frame's rate into the last second's average
5007 * frame rate. If we haven't seen 1 second yet, then average
5008 * over the whole interval seen.
5010 interval = (double)(cpi->source->ts_end -
5011 cpi->first_time_stamp_ever);
5012 if(interval > 10000000.0)
5013 interval = 10000000;
5015 avg_duration = 10000000.0 / cpi->ref_framerate;
5016 avg_duration *= (interval - avg_duration + this_duration);
5017 avg_duration /= interval;
5019 cpi->ref_framerate = 10000000.0 / avg_duration;
5022 if (cpi->oxcf.number_of_layers > 1)
5026 /* Update frame rates for each layer */
5027 assert(cpi->oxcf.number_of_layers <= VPX_TS_MAX_LAYERS);
5028 for (i=0; i<cpi->oxcf.number_of_layers; i++)
5030 LAYER_CONTEXT *lc = &cpi->layer_context[i];
5031 lc->framerate = cpi->ref_framerate /
5032 cpi->oxcf.rate_decimator[i];
5036 vp8_new_framerate(cpi, cpi->ref_framerate);
5039 cpi->last_time_stamp_seen = cpi->source->ts_start;
5040 cpi->last_end_time_stamp_seen = cpi->source->ts_end;
/* Temporal-layer encoding: pick this frame's layer from the configured
 * periodicity pattern and restore its rate-control context. */
5043 if (cpi->oxcf.number_of_layers > 1)
5047 update_layer_contexts (cpi);
5049 /* Restore layer specific context & set frame rate */
5050 layer = cpi->oxcf.layer_id[
5051 cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
5052 restore_layer_context (cpi, layer);
5053 vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
/* compressor_speed == 2 (real-time): time the encode for adaptive speed. */
5056 if (cpi->compressor_speed == 2)
5058 vpx_usec_timer_start(&tsctimer);
5059 vpx_usec_timer_start(&ticktimer);
5062 cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;
5064 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
/* On-the-fly bitpacking: carve dest into 1/10 for the control partition and
 * split 9/10 evenly across the token partitions. */
5067 const int num_part = (1 << cm->multi_token_partition);
5068 /* the available bytes in dest */
5069 const unsigned long dest_size = dest_end - dest;
5070 const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);
5072 unsigned char *dp = dest;
5074 cpi->partition_d[0] = dp;
5075 dp += dest_size/10; /* reserve 1/10 for control partition */
5076 cpi->partition_d_end[0] = dp;
5078 for(i = 0; i < num_part; i++)
5080 cpi->partition_d[i + 1] = dp;
5081 dp += tok_part_buff_size;
5082 cpi->partition_d_end[i + 1] = dp;
5087 /* start with a 0 size frame */
5090 /* Clear down mmx registers */
5091 vp8_clear_system_state();
5093 cm->frame_type = INTER_FRAME;
5094 cm->frame_flags = *frame_flags;
5098 if (cm->refresh_alt_ref_frame)
5100 cm->refresh_golden_frame = 0;
5101 cm->refresh_last_frame = 0;
5105 cm->refresh_golden_frame = 0;
5106 cm->refresh_last_frame = 1;
5110 /* find a free buffer for the new frame */
5113 for(; i < NUM_YV12_BUFFERS; i++)
5115 if(!cm->yv12_fb[i].flags)
/* A free buffer must always exist at this point. */
5122 assert(i < NUM_YV12_BUFFERS );
5124 #if !(CONFIG_REALTIME_ONLY)
/* Dispatch on encoding pass; single-pass falls through to the direct call. */
5128 Pass1Encode(cpi, size, dest, frame_flags);
5130 else if (cpi->pass == 2)
5132 Pass2Encode(cpi, size, dest, dest_end, frame_flags);
5136 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
5138 if (cpi->compressor_speed == 2)
5140 unsigned int duration, duration2;
5141 vpx_usec_timer_mark(&tsctimer);
5142 vpx_usec_timer_mark(&ticktimer);
5144 duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
5145 duration2 = (unsigned int)((double)duration / 2);
/* Exponential moving average (7/8 old + 1/8 new) of inter-frame encode time. */
5147 if (cm->frame_type != KEY_FRAME)
5149 if (cpi->avg_encode_time == 0)
5150 cpi->avg_encode_time = duration;
5152 cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
5159 if (cpi->avg_pick_mode_time == 0)
5160 cpi->avg_pick_mode_time = duration2;
5162 cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
/* If entropy probs were not refreshed, restore the saved frame context. */
5168 if (cm->refresh_entropy_probs == 0)
5170 vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
5173 /* Save the contexts separately for alt ref, gold and last. */
5174 /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
5175 if(cm->refresh_alt_ref_frame)
5176 vpx_memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));
5178 if(cm->refresh_golden_frame)
5179 vpx_memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));
5181 if(cm->refresh_last_frame)
5182 vpx_memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));
5184 /* if its a dropped frame honor the requests on subsequent frames */
/* A frame is droppable iff it touches no reference or persistent state. */
5187 cpi->droppable = !frame_is_reference(cpi);
5189 /* return to normal state */
5190 cm->refresh_entropy_probs = 1;
5191 cm->refresh_alt_ref_frame = 0;
5192 cm->refresh_golden_frame = 0;
5193 cm->refresh_last_frame = 1;
5194 cm->frame_type = INTER_FRAME;
5198 /* Save layer specific state */
5199 if (cpi->oxcf.number_of_layers > 1)
5200 save_layer_context (cpi);
5202 vpx_usec_timer_mark(&cmptimer);
5203 cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
/* PSNR packets are only produced for shown frames outside of pass 1. */
5205 if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
5207 generate_psnr_packet(cpi);
5210 #if CONFIG_INTERNAL_STATS
5214 cpi->bytes += *size;
5218 cpi->common.show_frame_mi = cpi->common.mi;
/* Internal-stats build: accumulate per-frame and per-layer PSNR totals for
 * the raw reconstruction and (below) the post-processed reconstruction. */
5221 if (cpi->b_calculate_psnr)
5225 YV12_BUFFER_CONFIG *orig = cpi->Source;
5226 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
5227 int y_samples = orig->y_height * orig->y_width ;
5228 int uv_samples = orig->uv_height * orig->uv_width ;
5229 int t_samples = y_samples + 2 * uv_samples;
5232 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5233 recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height);
5235 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5236 recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5238 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5239 recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);
5241 sq_error = (double)(ye + ue + ve);
5243 frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);
5245 cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
5246 cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
5247 cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
5248 cpi->total_sq_error += sq_error;
5249 cpi->total += frame_psnr;
5252 YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
5254 double frame_psnr2, frame_ssim2 = 0;
/* Deblock into the post-proc buffer and measure "post-processed" PSNR/SSIM. */
5257 vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
5258 vp8_clear_system_state();
5260 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5261 pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height);
5263 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5264 pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5266 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5267 pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);
5269 sq_error2 = (double)(ye + ue + ve);
5271 frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);
5273 cpi->totalp_y += vpx_sse_to_psnr(y_samples,
5275 cpi->totalp_u += vpx_sse_to_psnr(uv_samples,
5277 cpi->totalp_v += vpx_sse_to_psnr(uv_samples,
5279 cpi->total_sq_error2 += sq_error2;
5280 cpi->totalp += frame_psnr2;
5282 frame_ssim2 = vp8_calc_ssim(cpi->Source,
5283 &cm->post_proc_buffer, 1, &weight);
5285 cpi->summed_quality += frame_ssim2 * weight;
5286 cpi->summed_weights += weight;
/* Per-layer stats: a frame in layer L contributes to layers L..top. */
5288 if (cpi->oxcf.number_of_layers > 1)
5292 for (i=cpi->current_layer;
5293 i<cpi->oxcf.number_of_layers; i++)
5295 cpi->frames_in_layer[i]++;
5297 cpi->bytes_in_layer[i] += *size;
5298 cpi->sum_psnr[i] += frame_psnr;
5299 cpi->sum_psnr_p[i] += frame_psnr2;
5300 cpi->total_error2[i] += sq_error;
5301 cpi->total_error2_p[i] += sq_error2;
5302 cpi->sum_ssim[i] += frame_ssim2 * weight;
5303 cpi->sum_weights[i] += weight;
5310 if (cpi->b_calculate_ssimg)
5312 double y, u, v, frame_all;
5313 frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,
5316 if (cpi->oxcf.number_of_layers > 1)
5320 for (i=cpi->current_layer;
5321 i<cpi->oxcf.number_of_layers; i++)
/* Avoid double-counting frames_in_layer when PSNR stats already did it. */
5323 if (!cpi->b_calculate_psnr)
5324 cpi->frames_in_layer[i]++;
5326 cpi->total_ssimg_y_in_layer[i] += y;
5327 cpi->total_ssimg_u_in_layer[i] += u;
5328 cpi->total_ssimg_v_in_layer[i] += v;
5329 cpi->total_ssimg_all_in_layer[i] += frame_all;
5334 cpi->total_ssimg_y += y;
5335 cpi->total_ssimg_u += u;
5336 cpi->total_ssimg_v += v;
5337 cpi->total_ssimg_all += frame_all;
/* Debug-only skip accounting when coding at the worst allowed Q. */
5346 if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
5348 skiptruecount += cpi->skip_true_count;
5349 skipfalsecount += cpi->skip_false_count;
/* NOTE(review): fopen result is used without a visible NULL check and no
 * fclose is visible -- the guard/close may be on elided lines; confirm
 * against the full file. */
5357 FILE *f = fopen("skip.stt", "a");
5358 fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
5360 if (cpi->is_src_frame_alt_ref == 1)
5361 fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
/* Disarm the longjmp target before returning. */
5369 cpi->common.error.setjmp = 0;
/*
 * vp8_get_preview_raw_frame: give the application a view of the most recent
 * reconstructed (shown) frame, optionally post-processed.
 *
 * NOTE(review): sampled extract -- braces, the declaration of `ret`, the
 * non-postproc path selection and the return statements are on elided lines.
 */
5374 int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
/* No preview is available for an alt-ref encode (not a shown frame). */
5376 if (cpi->common.refresh_alt_ref_frame)
5382 #if CONFIG_MULTITHREAD
/* Wait for the loop-filter worker to finish before reading frame_to_show. */
5383 if(cpi->b_lpf_running)
5385 sem_wait(&cpi->h_event_end_lpf);
5386 cpi->b_lpf_running = 0;
5391 cpi->common.show_frame_mi = cpi->common.mi;
5392 ret = vp8_post_proc_frame(&cpi->common, dest, flags);
5395 if (cpi->common.frame_to_show)
/* Shallow copy of the buffer descriptor; override the display dimensions
 * with the configured frame size (chroma height is half of luma). */
5397 *dest = *cpi->common.frame_to_show;
5398 dest->y_width = cpi->common.Width;
5399 dest->y_height = cpi->common.Height;
5400 dest->uv_height = cpi->common.Height / 2;
5409 vp8_clear_system_state();
/*
 * vp8_set_roimap: configure region-of-interest segmentation from an external
 * map.  Each of the 4 segments gets a delta-Q, a loop-filter delta, and an
 * encode-breakout threshold.  Delta values are range-checked to +/-63.
 *
 * NOTE(review): sampled extract -- the early-return bodies and the final
 * return are on elided lines.  Also note `rows`/`cols` are unsigned while
 * mb_rows/mb_cols are compared directly (mixed signedness); confirm intent
 * against the full file.
 */
5414 int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
5416 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
5417 int internal_delta_q[MAX_MB_SEGMENTS];
5418 const int range = 63;
5421 // This method is currently incompatible with the cyclic refresh method
5422 if ( cpi->cyclic_refresh_mode_enabled )
5425 // Check number of rows and columns match
5426 if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
5429 // Range check the delta Q values and convert the external Q range values
5430 // to internal ones.
5431 if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
5432 (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )
5435 // Range check the delta lf values
5436 if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
5437 (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )
/* Presumably reached when the caller passes a NULL/disabling map -- the
 * surrounding condition is on an elided line; verify against the full file. */
5442 disable_segmentation(cpi);
5446 // Translate the external delta q values to internal values.
/* q_trans maps external 0..63 Q to the internal quantizer index; the sign of
 * the delta is preserved across the mapping. */
5447 for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
5448 internal_delta_q[i] =
5449 ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
5451 /* Set the segmentation Map */
5452 set_segmentation_map(cpi, map);
5454 /* Activate segmentation. */
5455 enable_segmentation(cpi);
5457 /* Set up the quant segment data */
5458 feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
5459 feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
5460 feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
5461 feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
5463 /* Set up the loop segment data s */
5464 feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
5465 feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
5466 feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
5467 feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
/* Per-segment encode-breakout thresholds (early termination control). */
5469 cpi->segment_encode_breakout[0] = threshold[0];
5470 cpi->segment_encode_breakout[1] = threshold[1];
5471 cpi->segment_encode_breakout[2] = threshold[2];
5472 cpi->segment_encode_breakout[3] = threshold[3];
5474 /* Initialise the feature data structure */
5475 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
/*
 * vp8_set_active_map: install a per-macroblock active map (rows x cols bytes)
 * when its dimensions match the coded frame; otherwise the map is disabled.
 *
 * NOTE(review): sampled extract -- the return statements are on elided lines.
 * `rows`/`cols` are unsigned and compared against (presumably signed)
 * mb_rows/mb_cols; confirm against the full file.
 */
5480 int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
5482 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
5486 vpx_memcpy(cpi->active_map, map, rows * cols);
5487 cpi->active_map_enabled = 1;
/* Dimension mismatch (or no map): disable active-map based skipping. */
5490 cpi->active_map_enabled = 0;
/*
 * vp8_set_internal_size: set the internal down-scaling modes.  Each mode is
 * accepted only if it is within the valid VPX_SCALING range (<= ONETWO);
 * the error/return handling is on elided lines in this extract.
 */
5500 int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
5502 if (horiz_mode <= ONETWO)
5503 cpi->common.horiz_scale = horiz_mode;
5507 if (vert_mode <= ONETWO)
5508 cpi->common.vert_scale = vert_mode;
/*
 * vp8_calc_ss_err: total sum-of-squared-error between the Y planes of
 * `source` and `dest`, accumulated over 16x16 blocks via vp8_mse16x16.
 * Only the luma plane is measured.
 *
 * NOTE(review): sampled extract -- the declarations of the loop counters,
 * the `Total`/`sse` accumulators and the return statement are on elided
 * lines.
 */
5517 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
5522 unsigned char *src = source->y_buffer;
5523 unsigned char *dst = dest->y_buffer;
5525 /* Loop through the Y plane raw and reconstruction data summing
5526 * (square differences)
5528 for (i = 0; i < source->y_height; i += 16)
5530 for (j = 0; j < source->y_width; j += 16)
5533 Total += vp8_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride, &sse);
/* Advance both planes by one 16-row block per outer iteration. */
5536 src += 16 * source->y_stride;
5537 dst += 16 * dest->y_stride;
5544 int vp8_get_quantizer(VP8_COMP *cpi)
5546 return cpi->common.base_qindex;