/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
12 #include "vpx_config.h"
13 #include "./vpx_scale_rtcd.h"
14 #include "vp8/common/onyxc_int.h"
15 #include "vp8/common/blockd.h"
17 #include "vp8/common/systemdependent.h"
19 #include "vp8/common/alloccommon.h"
21 #include "firstpass.h"
22 #include "vpx/internal/vpx_psnr.h"
23 #include "vpx_scale/vpx_scale.h"
24 #include "vp8/common/extend.h"
26 #include "vp8/common/quant_common.h"
27 #include "segmentation.h"
29 #include "vp8/common/postproc.h"
31 #include "vpx_mem/vpx_mem.h"
32 #include "vp8/common/swapyv12buffer.h"
33 #include "vp8/common/threading.h"
34 #include "vpx_ports/vpx_timer.h"
36 #include "vpx_ports/arm.h"
38 #if CONFIG_MULTI_RES_ENCODING
39 #include "mr_dissim.h"
41 #include "encodeframe.h"
47 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
48 extern int vp8_update_coef_context(VP8_COMP *cpi);
49 extern void vp8_update_coef_probs(VP8_COMP *cpi);
52 extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
53 extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
54 extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
56 extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
57 extern void print_parms(VP8_CONFIG *ocf, char *filenam);
58 extern unsigned int vp8_get_processor_freq();
59 extern void print_tree_update_probs();
60 extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
61 extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
63 int vp8_estimate_entropy_savings(VP8_COMP *cpi);
65 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
67 extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
69 static void set_default_lf_deltas(VP8_COMP *cpi);
71 extern const int vp8_gf_interval_table[101];
73 #if CONFIG_INTERNAL_STATS
76 extern double vp8_calc_ssim
78 YV12_BUFFER_CONFIG *source,
79 YV12_BUFFER_CONFIG *dest,
85 extern double vp8_calc_ssimg
87 YV12_BUFFER_CONFIG *source,
88 YV12_BUFFER_CONFIG *dest,
101 #ifdef OUTPUT_YUV_DENOISED
102 FILE *yuv_denoised_file;
112 extern int skip_true_count;
113 extern int skip_false_count;
117 #ifdef VP8_ENTROPY_STATS
118 extern int intra_mode_stats[10][10][10];
/* File-scope instrumentation counters.
 * frames_at_speed[] counts encoded frames per speed setting; it is
 * incremented at the end of vp8_set_speed_features().
 * NOTE(review): tot_pm/cnt_pm and tot_ef/cnt_ef look like total/count
 * pairs for computing averages of two timed stages -- confirm their
 * exact meaning against the code that updates them (not visible here).
 */
unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
unsigned int tot_pm = 0;
unsigned int cnt_pm = 0;
unsigned int tot_ef = 0;
unsigned int cnt_ef = 0;
130 extern unsigned __int64 Sectionbits[50];
131 extern int y_modes[5] ;
132 extern int uv_modes[4] ;
133 extern int b_modes[10] ;
135 extern int inter_y_modes[10] ;
136 extern int inter_uv_modes[4] ;
137 extern unsigned int inter_b_modes[15];
140 extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
142 extern const int qrounding_factors[129];
143 extern const int qzbin_factors[129];
144 extern void vp8cx_init_quantizer(VP8_COMP *cpi);
145 extern const int vp8cx_base_skip_false_prob[128];
147 /* Tables relating active max Q to active min Q */
148 static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
150 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
151 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
152 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
153 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
154 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
155 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
156 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
157 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
159 static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
161 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
162 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
163 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
164 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
165 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
166 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
167 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
168 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
170 static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
172 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
173 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
174 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
175 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
176 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
177 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
178 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
179 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
181 static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
183 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
184 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
185 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
186 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
187 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
188 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
189 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
190 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
192 static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
194 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
195 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
196 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
197 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
198 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
199 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
200 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
201 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
203 static const unsigned char inter_minq[QINDEX_RANGE] =
205 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
206 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
207 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
208 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
209 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
210 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
211 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
212 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
215 #ifdef PACKET_TESTING
216 extern FILE *vpxlogc;
219 static void save_layer_context(VP8_COMP *cpi)
221 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];
223 /* Save layer dependent coding state */
224 lc->target_bandwidth = cpi->target_bandwidth;
225 lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
226 lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
227 lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
228 lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
229 lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
230 lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
231 lc->buffer_level = cpi->buffer_level;
232 lc->bits_off_target = cpi->bits_off_target;
233 lc->total_actual_bits = cpi->total_actual_bits;
234 lc->worst_quality = cpi->worst_quality;
235 lc->active_worst_quality = cpi->active_worst_quality;
236 lc->best_quality = cpi->best_quality;
237 lc->active_best_quality = cpi->active_best_quality;
238 lc->ni_av_qi = cpi->ni_av_qi;
239 lc->ni_tot_qi = cpi->ni_tot_qi;
240 lc->ni_frames = cpi->ni_frames;
241 lc->avg_frame_qindex = cpi->avg_frame_qindex;
242 lc->rate_correction_factor = cpi->rate_correction_factor;
243 lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
244 lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
245 lc->zbin_over_quant = cpi->mb.zbin_over_quant;
246 lc->inter_frame_target = cpi->inter_frame_target;
247 lc->total_byte_count = cpi->total_byte_count;
248 lc->filter_level = cpi->common.filter_level;
250 lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
252 memcpy (lc->count_mb_ref_frame_usage,
253 cpi->mb.count_mb_ref_frame_usage,
254 sizeof(cpi->mb.count_mb_ref_frame_usage));
257 static void restore_layer_context(VP8_COMP *cpi, const int layer)
259 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
261 /* Restore layer dependent coding state */
262 cpi->current_layer = layer;
263 cpi->target_bandwidth = lc->target_bandwidth;
264 cpi->oxcf.target_bandwidth = lc->target_bandwidth;
265 cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
266 cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
267 cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
268 cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
269 cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
270 cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
271 cpi->buffer_level = lc->buffer_level;
272 cpi->bits_off_target = lc->bits_off_target;
273 cpi->total_actual_bits = lc->total_actual_bits;
274 cpi->active_worst_quality = lc->active_worst_quality;
275 cpi->active_best_quality = lc->active_best_quality;
276 cpi->ni_av_qi = lc->ni_av_qi;
277 cpi->ni_tot_qi = lc->ni_tot_qi;
278 cpi->ni_frames = lc->ni_frames;
279 cpi->avg_frame_qindex = lc->avg_frame_qindex;
280 cpi->rate_correction_factor = lc->rate_correction_factor;
281 cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
282 cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
283 cpi->mb.zbin_over_quant = lc->zbin_over_quant;
284 cpi->inter_frame_target = lc->inter_frame_target;
285 cpi->total_byte_count = lc->total_byte_count;
286 cpi->common.filter_level = lc->filter_level;
288 cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
290 memcpy (cpi->mb.count_mb_ref_frame_usage,
291 lc->count_mb_ref_frame_usage,
292 sizeof(cpi->mb.count_mb_ref_frame_usage));
/* Return val * num / denom, computed in 64-bit arithmetic so the
 * intermediate product cannot overflow int.  Used to scale buffer
 * levels (in ms) by a layer's target bandwidth.
 */
static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}
304 static void init_temporal_layer_context(VP8_COMP *cpi,
307 double prev_layer_framerate)
309 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
311 lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
312 lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
314 lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
315 lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
316 lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
318 lc->starting_buffer_level =
319 rescale((int)(oxcf->starting_buffer_level),
320 lc->target_bandwidth, 1000);
322 if (oxcf->optimal_buffer_level == 0)
323 lc->optimal_buffer_level = lc->target_bandwidth / 8;
325 lc->optimal_buffer_level =
326 rescale((int)(oxcf->optimal_buffer_level),
327 lc->target_bandwidth, 1000);
329 if (oxcf->maximum_buffer_size == 0)
330 lc->maximum_buffer_size = lc->target_bandwidth / 8;
332 lc->maximum_buffer_size =
333 rescale((int)(oxcf->maximum_buffer_size),
334 lc->target_bandwidth, 1000);
336 /* Work out the average size of a frame within this layer */
338 lc->avg_frame_size_for_layer =
339 (int)((cpi->oxcf.target_bitrate[layer] -
340 cpi->oxcf.target_bitrate[layer-1]) * 1000 /
341 (lc->framerate - prev_layer_framerate));
343 lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
344 lc->active_best_quality = cpi->oxcf.best_allowed_q;
345 lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
347 lc->buffer_level = lc->starting_buffer_level;
348 lc->bits_off_target = lc->starting_buffer_level;
350 lc->total_actual_bits = 0;
354 lc->rate_correction_factor = 1.0;
355 lc->key_frame_rate_correction_factor = 1.0;
356 lc->gf_rate_correction_factor = 1.0;
357 lc->inter_frame_target = 0;
360 // Upon a run-time change in temporal layers, reset the layer context parameters
361 // for any "new" layers. For "existing" layers, let them inherit the parameters
362 // from the previous layer state (at the same layer #). In future we may want
363 // to better map the previous layer state(s) to the "new" ones.
364 static void reset_temporal_layer_change(VP8_COMP *cpi,
366 const int prev_num_layers)
369 double prev_layer_framerate = 0;
370 const int curr_num_layers = cpi->oxcf.number_of_layers;
371 // If the previous state was 1 layer, get current layer context from cpi.
372 // We need this to set the layer context for the new layers below.
373 if (prev_num_layers == 1)
375 cpi->current_layer = 0;
376 save_layer_context(cpi);
378 for (i = 0; i < curr_num_layers; i++)
380 LAYER_CONTEXT *lc = &cpi->layer_context[i];
381 if (i >= prev_num_layers)
383 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
385 // The initial buffer levels are set based on their starting levels.
386 // We could set the buffer levels based on the previous state (normalized
387 // properly by the layer bandwidths) but we would need to keep track of
388 // the previous set of layer bandwidths (i.e., target_bitrate[i])
389 // before the layer change. For now, reset to the starting levels.
390 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
391 cpi->oxcf.target_bitrate[i];
392 lc->bits_off_target = lc->buffer_level;
393 // TDOD(marpan): Should we set the rate_correction_factor and
394 // active_worst/best_quality to values derived from the previous layer
395 // state (to smooth-out quality dips/rate fluctuation at transition)?
397 // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
398 // is not set for 1 layer, and the restore_layer_context/save_context()
399 // are not called in the encoding loop, so we need to call it here to
400 // pass the layer context state to |cpi|.
401 if (curr_num_layers == 1)
403 lc->target_bandwidth = cpi->oxcf.target_bandwidth;
404 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
405 lc->target_bandwidth / 1000;
406 lc->bits_off_target = lc->buffer_level;
407 restore_layer_context(cpi, 0);
409 prev_layer_framerate = cpi->output_framerate /
410 cpi->oxcf.rate_decimator[i];
414 static void setup_features(VP8_COMP *cpi)
416 // If segmentation enabled set the update flags
417 if ( cpi->mb.e_mbd.segmentation_enabled )
419 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
420 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
424 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
425 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
428 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
429 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
430 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
431 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
432 vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
433 vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
435 set_default_lf_deltas(cpi);
440 static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
443 static void dealloc_compressor_data(VP8_COMP *cpi)
445 vpx_free(cpi->tplist);
448 /* Delete last frame MV storage buffers */
452 vpx_free(cpi->lf_ref_frame_sign_bias);
453 cpi->lf_ref_frame_sign_bias = 0;
455 vpx_free(cpi->lf_ref_frame);
456 cpi->lf_ref_frame = 0;
458 /* Delete sementation map */
459 vpx_free(cpi->segmentation_map);
460 cpi->segmentation_map = 0;
462 vpx_free(cpi->active_map);
465 vp8_de_alloc_frame_buffers(&cpi->common);
467 vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
468 vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
469 dealloc_raw_frame_buffers(cpi);
474 /* Structure used to monitor GF usage */
475 vpx_free(cpi->gf_active_flags);
476 cpi->gf_active_flags = 0;
478 /* Activity mask based per mb zbin adjustments */
479 vpx_free(cpi->mb_activity_map);
480 cpi->mb_activity_map = 0;
482 vpx_free(cpi->mb.pip);
485 #if CONFIG_MULTITHREAD
486 vpx_free(cpi->mt_current_mb_col);
487 cpi->mt_current_mb_col = NULL;
491 static void enable_segmentation(VP8_COMP *cpi)
493 /* Set the appropriate feature bit */
494 cpi->mb.e_mbd.segmentation_enabled = 1;
495 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
496 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
498 static void disable_segmentation(VP8_COMP *cpi)
500 /* Clear the appropriate feature bit */
501 cpi->mb.e_mbd.segmentation_enabled = 0;
504 /* Valid values for a segment are 0 to 3
505 * Segmentation map is arrange as [Rows][Columns]
507 static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
509 /* Copy in the new segmentation map */
510 vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
512 /* Signal that the map should be updated. */
513 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
514 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
517 /* The values given for each segment can be either deltas (from the default
518 * value chosen for the frame) or absolute values.
520 * Valid range for abs values is:
521 * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
522 * Valid range for delta values are:
523 * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
525 * abs_delta = SEGMENT_DELTADATA (deltas)
526 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
529 static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
531 cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
532 vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
536 static void segmentation_test_function(VP8_COMP *cpi)
538 unsigned char *seg_map;
539 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
541 // Create a temporary map for segmentation data.
542 CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
544 // Set the segmentation Map
545 set_segmentation_map(cpi, seg_map);
547 // Activate segmentation.
548 enable_segmentation(cpi);
550 // Set up the quant segment data
551 feature_data[MB_LVL_ALT_Q][0] = 0;
552 feature_data[MB_LVL_ALT_Q][1] = 4;
553 feature_data[MB_LVL_ALT_Q][2] = 0;
554 feature_data[MB_LVL_ALT_Q][3] = 0;
555 // Set up the loop segment data
556 feature_data[MB_LVL_ALT_LF][0] = 0;
557 feature_data[MB_LVL_ALT_LF][1] = 0;
558 feature_data[MB_LVL_ALT_LF][2] = 0;
559 feature_data[MB_LVL_ALT_LF][3] = 0;
561 // Initialise the feature data structure
562 // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
563 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
565 // Delete sementation map
571 /* A simple function to cyclically refresh the background at a lower Q */
572 static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
574 unsigned char *seg_map = cpi->segmentation_map;
575 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
577 int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
578 int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
580 cpi->cyclic_refresh_q = Q / 2;
582 // Set every macroblock to be eligible for update.
583 // For key frame this will reset seg map to 0.
584 vpx_memset(cpi->segmentation_map, 0, mbs_in_frame);
586 if (cpi->common.frame_type != KEY_FRAME)
588 /* Cycle through the macro_block rows */
589 /* MB loop to set local segmentation map */
590 i = cpi->cyclic_refresh_mode_index;
591 assert(i < mbs_in_frame);
594 /* If the MB is as a candidate for clean up then mark it for
595 * possible boost/refresh (segment 1) The segment id may get
596 * reset to 0 later if the MB gets coded anything other than
597 * last frame 0,0 as only (last frame 0,0) MBs are eligable for
598 * refresh : that is to say Mbs likely to be background blocks.
600 if (cpi->cyclic_refresh_map[i] == 0)
605 else if (cpi->cyclic_refresh_map[i] < 0)
606 cpi->cyclic_refresh_map[i]++;
609 if (i == mbs_in_frame)
613 while(block_count && i != cpi->cyclic_refresh_mode_index);
615 cpi->cyclic_refresh_mode_index = i;
618 /* Activate segmentation. */
619 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
620 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
621 enable_segmentation(cpi);
623 /* Set up the quant segment data */
624 feature_data[MB_LVL_ALT_Q][0] = 0;
625 feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
626 feature_data[MB_LVL_ALT_Q][2] = 0;
627 feature_data[MB_LVL_ALT_Q][3] = 0;
629 /* Set up the loop segment data */
630 feature_data[MB_LVL_ALT_LF][0] = 0;
631 feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
632 feature_data[MB_LVL_ALT_LF][2] = 0;
633 feature_data[MB_LVL_ALT_LF][3] = 0;
635 /* Initialise the feature data structure */
636 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
640 static void set_default_lf_deltas(VP8_COMP *cpi)
642 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
643 cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
645 vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
646 vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
648 /* Test of ref frame deltas */
649 cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
650 cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
651 cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
652 cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
654 cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
656 if(cpi->oxcf.Mode == MODE_REALTIME)
657 cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
659 cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
661 cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
662 cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
/* Convenience macros for mapping speed and mode into a continuous
 * range of speed scores: good-quality speeds 0..5 map to 1..6 and
 * realtime speeds start at 7.  RT() was missing from the mangled
 * original but is used throughout the speed-map tables below.
 */
#define GOOD(x) (x+1)
#define RT(x)   (x+7)
/* Look up |speed| in a map of (value, threshold) pairs terminated by a
 * threshold of INT_MAX: returns the value of the first pair whose
 * threshold exceeds |speed|.  (Body restored around the surviving
 * do/while terminator.)
 */
static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while(speed >= *map++);
    return res;
}
682 static const int thresh_mult_map_znn[] = {
683 /* map common to zero, nearest, and near */
684 0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
687 static const int thresh_mult_map_vhpred[] = {
688 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
689 RT(7), INT_MAX, INT_MAX
692 static const int thresh_mult_map_bpred[] = {
693 2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
694 RT(6), INT_MAX, INT_MAX
697 static const int thresh_mult_map_tm[] = {
698 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
699 RT(7), INT_MAX, INT_MAX
702 static const int thresh_mult_map_new1[] = {
703 1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
706 static const int thresh_mult_map_new2[] = {
707 1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
711 static const int thresh_mult_map_split1[] = {
712 2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
713 RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
716 static const int thresh_mult_map_split2[] = {
717 5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
718 RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
721 static const int mode_check_freq_map_zn2[] = {
722 /* {zero,nearest}{2,3} */
723 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
726 static const int mode_check_freq_map_vhbpred[] = {
727 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
730 static const int mode_check_freq_map_near2[] = {
731 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
735 static const int mode_check_freq_map_new1[] = {
736 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
739 static const int mode_check_freq_map_new2[] = {
740 0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
744 static const int mode_check_freq_map_split1[] = {
745 0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
748 static const int mode_check_freq_map_split2[] = {
749 0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
/* Configure the encoder's speed/quality trade-off features from
 * cpi->compressor_speed (Mode) and cpi->Speed: resets defaults, maps
 * Speed through the speed_map tables above into per-mode thresholds and
 * check frequencies, then selects motion search / DCT / quantizer /
 * sub-pixel function pointers accordingly.
 *
 * NOTE(review): this extraction is missing structural lines throughout
 * this function -- the opening brace, local declarations (i, ref_frames,
 * Speed clamping, the switch (Mode) header and most case labels, braces,
 * else keywords and #endif lines are not visible.  The surviving
 * statements are preserved byte-for-byte below; do not treat the bare
 * sequence as compilable.  Reconcile against the full upstream source
 * before editing logic here.
 */
void vp8_set_speed_features(VP8_COMP *cpi)
SPEED_FEATURES *sf = &cpi->sf;
int Mode = cpi->compressor_speed;
int Speed = cpi->Speed;
VP8_COMMON *cm = &cpi->common;
int last_improved_quant = sf->improved_quant;
/* Initialise default mode frequency sampling variables */
for (i = 0; i < MAX_MODES; i ++)
cpi->mode_check_freq[i] = 0;
cpi->mb.mbs_tested_so_far = 0;
/* best quality defaults */
sf->search_method = NSTEP;
sf->improved_quant = 1;
sf->improved_dct = 1;
sf->quarter_pixel_search = 1;
sf->half_pixel_search = 1;
sf->iterative_sub_pixel = 1;
sf->optimize_coefficients = 1;
sf->use_fastquant_for_pick = 0;
sf->no_skip_block4x4_search = 1;
sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
sf->improved_mv_pred = 1;
/* default thresholds to 0 */
for (i = 0; i < MAX_MODES; i++)
sf->thresh_mult[i] = 0;
/* Count enabled references */
/* NOTE(review): the bodies of these three reference-count ifs
 * (presumably ref_frames++) are not visible in this extraction. */
if (cpi->ref_frame_flags & VP8_LAST_FRAME)
if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
/* Convert speed to continuous range, with clamping */
/* Map Speed through the static speed_map tables into per-mode
 * rate-distortion threshold multipliers. */
sf->thresh_mult[THR_ZERO1] =
sf->thresh_mult[THR_NEAREST1] =
sf->thresh_mult[THR_NEAR1] =
sf->thresh_mult[THR_DC] = 0; /* always */
sf->thresh_mult[THR_ZERO2] =
sf->thresh_mult[THR_ZERO3] =
sf->thresh_mult[THR_NEAREST2] =
sf->thresh_mult[THR_NEAREST3] =
sf->thresh_mult[THR_NEAR2] =
sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);
sf->thresh_mult[THR_V_PRED] =
sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
sf->thresh_mult[THR_NEW2] =
sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
sf->thresh_mult[THR_SPLIT2] =
sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);
/* How often each mode candidate is evaluated (0 = every time). */
cpi->mode_check_freq[THR_ZERO1] =
cpi->mode_check_freq[THR_NEAREST1] =
cpi->mode_check_freq[THR_NEAR1] =
cpi->mode_check_freq[THR_TM] =
cpi->mode_check_freq[THR_DC] = 0; /* always */
cpi->mode_check_freq[THR_ZERO2] =
cpi->mode_check_freq[THR_ZERO3] =
cpi->mode_check_freq[THR_NEAREST2] =
cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
mode_check_freq_map_zn2);
cpi->mode_check_freq[THR_NEAR2] =
cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
mode_check_freq_map_near2);
cpi->mode_check_freq[THR_V_PRED] =
cpi->mode_check_freq[THR_H_PRED] =
cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
mode_check_freq_map_vhbpred);
cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
mode_check_freq_map_new1);
cpi->mode_check_freq[THR_NEW2] =
cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
mode_check_freq_map_new2);
cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
mode_check_freq_map_split1);
cpi->mode_check_freq[THR_SPLIT2] =
cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
mode_check_freq_map_split2);
/* NOTE(review): a switch (Mode) header belongs here; only fragments of
 * its cases survive below. */
#if !(CONFIG_REALTIME_ONLY)
case 0: /* best quality mode */
sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
/* Disable coefficient optimization above speed 0 */
sf->optimize_coefficients = 0;
sf->use_fastquant_for_pick = 1;
sf->no_skip_block4x4_search = 0;
sf->improved_quant = 0;
sf->improved_dct = 0;
/* Only do recode loop on key frames, golden frames and
 * alt ref frames (comment truncated in extraction) */
sf->recode_loop = 0; /* recode loop off */
sf->RD = 0; /* Turn rd off */
sf->auto_filter = 0; /* Faster selection of loop filter */
sf->optimize_coefficients = 0;
sf->iterative_sub_pixel = 1;
sf->search_method = NSTEP;
sf->improved_quant = 0;
sf->improved_dct = 0;
sf->use_fastquant_for_pick = 1;
sf->no_skip_block4x4_search = 0;
sf->auto_filter = 0; /* Faster selection of loop filter */
sf->auto_filter = 0; /* Faster selection of loop filter */
sf->search_method = HEX;
sf->iterative_sub_pixel = 0;
/* Adaptive thresholding scope: sums recent per-MB error histogram to
 * decide how aggressively to raise thresholds at high realtime speeds.
 * NOTE(review): declarations of i, min, thresh and total_skip logic are
 * missing from this extraction. */
unsigned int sum = 0;
unsigned int total_mbs = cm->MBs;
unsigned int total_skip;
if (cpi->oxcf.encode_breakout > 2000)
min = cpi->oxcf.encode_breakout;
for (i = 0; i < min; i++)
sum += cpi->mb.error_bins[i];
/* i starts from 2 to make sure thresh started from 2048 */
for (; i < 1024; i++)
sum += cpi->mb.error_bins[i];
if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
sf->thresh_mult[THR_NEW1 ] = thresh;
sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
sf->thresh_mult[THR_NEW2] = thresh << 1;
sf->thresh_mult[THR_NEAREST2 ] = thresh;
sf->thresh_mult[THR_NEAR2 ] = thresh;
sf->thresh_mult[THR_NEW3] = thresh << 1;
sf->thresh_mult[THR_NEAREST3 ] = thresh;
sf->thresh_mult[THR_NEAR3 ] = thresh;
sf->improved_mv_pred = 0;
sf->quarter_pixel_search = 0;
/* Bitstream version selects the loop filter type. */
if(cm->version == 0)
cm->filter_type = NORMAL_LOOPFILTER;
cm->filter_type = SIMPLE_LOOPFILTER;
cm->filter_type = SIMPLE_LOOPFILTER;
/* This has a big hit on quality. Last resort */
sf->half_pixel_search = 0;
vpx_memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
/* Slow quant, dct and trellis not worthwhile for first pass
 * so make sure they are always turned off.
 */
if ( cpi->pass == 1 )
sf->improved_quant = 0;
sf->optimize_coefficients = 0;
sf->improved_dct = 0;
/* Select motion search initialisation for the chosen search method. */
if (cpi->sf.search_method == NSTEP)
vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
else if (cpi->sf.search_method == DIAMOND)
vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
/* Both branches currently install the same FDCT functions. */
if (cpi->sf.improved_dct)
cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
/* No fast FDCT defined for any platform at this time. */
cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
if (cpi->sf.improved_quant)
cpi->mb.quantize_b = vp8_regular_quantize_b;
cpi->mb.quantize_b_pair = vp8_regular_quantize_b_pair;
cpi->mb.quantize_b = vp8_fast_quantize_b;
cpi->mb.quantize_b_pair = vp8_fast_quantize_b_pair;
/* Re-derive quantizer tables if the quantizer choice changed. */
if (cpi->sf.improved_quant != last_improved_quant)
vp8cx_init_quantizer(cpi);
/* Pick the sub-pixel refinement function, best to cheapest. */
if (cpi->sf.iterative_sub_pixel == 1)
cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
else if (cpi->sf.quarter_pixel_search)
cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
else if (cpi->sf.half_pixel_search)
cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
cpi->mb.optimize = 1;
cpi->mb.optimize = 0;
/* Full-pixel-only streams never do sub-pixel refinement. */
if (cpi->common.full_pixel)
cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
frames_at_speed[cpi->Speed]++;
1108 static void alloc_raw_frame_buffers(VP8_COMP *cpi)
1110 #if VP8_TEMPORAL_ALT_REF
1111 int width = (cpi->oxcf.Width + 15) & ~15;
1112 int height = (cpi->oxcf.Height + 15) & ~15;
1115 cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
1116 cpi->oxcf.lag_in_frames);
1118 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1119 "Failed to allocate lag buffers");
1121 #if VP8_TEMPORAL_ALT_REF
1123 if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
1124 width, height, VP8BORDERINPIXELS))
1125 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1126 "Failed to allocate altref buffer");
1132 static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
1134 #if VP8_TEMPORAL_ALT_REF
1135 vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
1137 vp8_lookahead_destroy(cpi->lookahead);
1141 static int vp8_alloc_partition_data(VP8_COMP *cpi)
1143 vpx_free(cpi->mb.pip);
1145 cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
1146 (cpi->common.mb_rows + 1),
1147 sizeof(PARTITION_INFO));
1151 cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
/* Allocate (or reallocate, after a resolution change) all per-frame-size
 * compressor state: reference/scratch frame buffers, partition data, token
 * buffers, GF-usage monitors, MV-prediction history, segmentation and
 * active maps, multithread sync state, token lists and the denoiser.
 * Any failure aborts via vpx_internal_error() / CHECK_MEM_ERROR. */
void vp8_alloc_compressor_data(VP8_COMP *cpi)
    VP8_COMMON *cm = & cpi->common;

    int width = cm->Width;
    int height = cm->Height;

    if (vp8_alloc_frame_buffers(cm, width, height))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");

    if (vp8_alloc_partition_data(cpi))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate partition data");

    /* Round width/height up to a whole number of 16-pel macroblocks before
     * sizing the scratch frame buffers. */
    if ((width & 0xf) != 0)
        width += 16 - (width & 0xf);

    if ((height & 0xf) != 0)
        height += 16 - (height & 0xf);

    /* Scratch frame used while searching for the best loop-filter level. */
    if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
                                    width, height, VP8BORDERINPIXELS))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate last frame buffer");

    if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
                                    width, height, VP8BORDERINPIXELS))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate scaled source buffer");

/* Token buffer: with on-the-fly bitpacking only a small fixed pool is
 * needed; otherwise size for a worst case of 24 tokens * 16 coeffs per MB.
 * NOTE(review): the #else separating the two 'tokens' definitions is not
 * visible in this excerpt. */
#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
    unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
    unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
    CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));

    /* Data used for real time vc mode to see if gf needs refreshing */
    cpi->zeromv_count = 0;

    /* Structures used to monitor GF usage */
    vpx_free(cpi->gf_active_flags);
    CHECK_MEM_ERROR(cpi->gf_active_flags,
                    vpx_calloc(sizeof(*cpi->gf_active_flags),
                    cm->mb_rows * cm->mb_cols));
    cpi->gf_active_count = cm->mb_rows * cm->mb_cols;

    vpx_free(cpi->mb_activity_map);
    CHECK_MEM_ERROR(cpi->mb_activity_map,
                    vpx_calloc(sizeof(*cpi->mb_activity_map),
                    cm->mb_rows * cm->mb_cols));

    /* allocate memory for storing last frame's MVs for MV prediction. */
    /* +2 in each dimension gives a one-MB border around the frame. */
    vpx_free(cpi->lfmv);
    CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                    sizeof(*cpi->lfmv)));
    vpx_free(cpi->lf_ref_frame_sign_bias);
    CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
                    vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                    sizeof(*cpi->lf_ref_frame_sign_bias)));
    vpx_free(cpi->lf_ref_frame);
    CHECK_MEM_ERROR(cpi->lf_ref_frame,
                    vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                    sizeof(*cpi->lf_ref_frame)));

    /* Create the encoder segmentation map and set all entries to 0 */
    vpx_free(cpi->segmentation_map);
    CHECK_MEM_ERROR(cpi->segmentation_map,
                    vpx_calloc(cm->mb_rows * cm->mb_cols,
                    sizeof(*cpi->segmentation_map)));
    cpi->cyclic_refresh_mode_index = 0;
    vpx_free(cpi->active_map);
    CHECK_MEM_ERROR(cpi->active_map,
                    vpx_calloc(cm->mb_rows * cm->mb_cols,
                    sizeof(*cpi->active_map)));
    /* All macroblocks start out active (1). */
    vpx_memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));

#if CONFIG_MULTITHREAD
    /* Row-sync granularity scales with frame width.
     * NOTE(review): the initial 'if (width < ...)' line and final 'else'
     * are not visible in this excerpt. */
        cpi->mt_sync_range = 1;
    else if (width <= 1280)
        cpi->mt_sync_range = 4;
    else if (width <= 2560)
        cpi->mt_sync_range = 8;
        cpi->mt_sync_range = 16;

    if (cpi->oxcf.multi_threaded > 1)
        /* Per-row progress markers used for inter-thread row sync. */
        vpx_free(cpi->mt_current_mb_col);
        CHECK_MEM_ERROR(cpi->mt_current_mb_col,
                   vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));

    /* Per-row token lists. */
    vpx_free(cpi->tplist);
    CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));

#if CONFIG_TEMPORAL_DENOISING
    if (cpi->oxcf.noise_sensitivity > 0) {
      /* Re-create the denoiser at the (possibly new) frame size. */
      vp8_denoiser_free(&cpi->denoiser);
      vp8_denoiser_allocate(&cpi->denoiser, width, height,
                            cm->mb_rows, cm->mb_cols);
/* Monotonic map from the 0-63 user quality scale onto the codec's
 * 0-127 quantizer index scale. */
static const int q_trans[] =
{
    0,   1,   2,   3,   4,   5,   7,   8,
    9,   10,  12,  13,  15,  17,  18,  19,
    20,  21,  23,  24,  25,  26,  27,  28,
    29,  30,  31,  33,  35,  37,  39,  41,
    43,  45,  47,  49,  51,  53,  55,  57,
    59,  61,  64,  67,  70,  73,  76,  79,
    82,  85,  88,  91,  94,  97,  100, 103,
    106, 109, 112, 115, 118, 121, 124, 127,
};

/* Inverse of the q_trans mapping: return the smallest user-level index
 * whose mapped quantizer is >= x. Values of x above the table maximum
 * saturate to 63. */
int vp8_reverse_trans(int x)
{
    int level;

    for (level = 0; level < 64; level++)
    {
        if (q_trans[level] >= x)
            return level;
    }

    return 63;
}
/* Install a new target frame rate and recompute every rate-control quantity
 * derived from it: per-frame bandwidth targets and the maximum golden/alt-ref
 * frame interval (with special-case clamping when alt-ref lagging is on).
 * NOTE(review): a sanity guard on very small framerate values is presumably
 * present before the first assignment in the full source - confirm. */
void vp8_new_framerate(VP8_COMP *cpi, double framerate)
    cpi->framerate = framerate;
    cpi->output_framerate = framerate;
    /* Average bits available per frame at the configured bitrate. */
    cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
                                     cpi->output_framerate);
    cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
    /* Floor for any single frame, as a VBR percentage of the average. */
    cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
                                     cpi->oxcf.two_pass_vbrmin_section / 100);

    /* Set Maximum gf/arf interval */
    cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);

    if(cpi->max_gf_interval < 12)
        cpi->max_gf_interval = 12;

    /* Extended interval for genuinely static scenes */
    cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;

    /* Special conditions when altr ref frame enabled in lagged compress mode */
    if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
        /* The GF interval can never exceed the available lag window. */
        if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
            cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;

        if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
            cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;

    if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
        cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
/* One-time configuration at compressor creation: seed the version, guess an
 * initial framerate from the timebase, apply the full config via
 * vp8_change_config(), and initialize rate-control state, buffer levels,
 * temporal-layer contexts and the fixed-point divide table. */
static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
    VP8_COMMON *cm = &cpi->common;

    cpi->auto_adjust_gold_quantizer = 1;

    cm->version = oxcf->Version;
    vp8_setup_version(cm);

    /* frame rate is not available on the first frame, as it's derived from
     * the observed timestamps. The actual value used here doesn't matter
     * too much, as it will adapt quickly. If the reciprocal of the timebase
     * seems like a reasonable framerate, then use that as a guess, otherwise
     */
    cpi->framerate = (double)(oxcf->timebase.den) /
                     (double)(oxcf->timebase.num);

    /* Implausibly high reciprocal: fall back to a 30fps default. */
    if (cpi->framerate > 180)
        cpi->framerate = 30;

    cpi->ref_framerate = cpi->framerate;

    /* change includes all joint functionality */
    vp8_change_config(cpi, oxcf);

    /* Initialize active best and worst q and average q values. */
    cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
    cpi->active_best_quality = cpi->oxcf.best_allowed_q;
    cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;

    /* Initialise the starting buffer levels */
    cpi->buffer_level = cpi->oxcf.starting_buffer_level;
    cpi->bits_off_target = cpi->oxcf.starting_buffer_level;

    /* Rolling rate-control monitors all start at the per-frame average. */
    cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
    cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
    cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
    cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;

    cpi->total_actual_bits = 0;
    cpi->total_target_vs_actual = 0;

    /* Temporal scalabilty */
    if (cpi->oxcf.number_of_layers > 1)
        double prev_layer_framerate=0;

        for (i=0; i<cpi->oxcf.number_of_layers; i++)
            init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
            prev_layer_framerate = cpi->output_framerate /
                                   cpi->oxcf.rate_decimator[i];

#if VP8_TEMPORAL_ALT_REF
    /* Reciprocal lookup table (0x80000 / i) used by the temporal filter;
     * index 0 is unused and kept at 0. */
    cpi->fixed_divide[0] = 0;

    for (i = 1; i < 512; i++)
        cpi->fixed_divide[i] = 0x80000 / i;
/* Refresh each temporal layer's context snapshot (framerate, bandwidth and
 * buffer levels) after a configuration change. No-op unless more than one
 * layer is configured. */
static void update_layer_contexts (VP8_COMP *cpi)
    VP8_CONFIG *oxcf = &cpi->oxcf;

    /* Update snapshots of the layer contexts to reflect new parameters */
    if (oxcf->number_of_layers > 1)
        double prev_layer_framerate=0;

        assert(oxcf->number_of_layers <= VPX_TS_MAX_LAYERS);
        for (i = 0; i < oxcf->number_of_layers && i < VPX_TS_MAX_LAYERS; ++i)
            LAYER_CONTEXT *lc = &cpi->layer_context[i];

            /* NOTE(review): the line below is the right-hand side of an
             * assignment (presumably 'lc->framerate =') whose left-hand
             * side is not visible in this excerpt - confirm. */
                cpi->ref_framerate / oxcf->rate_decimator[i];
            /* Layer targets are configured in kbit/s. */
            lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;

            /* Buffer levels are given in ms; rescale to bits at the layer's
             * bandwidth. */
            lc->starting_buffer_level = rescale(
                          (int)oxcf->starting_buffer_level_in_ms,
                          lc->target_bandwidth, 1000);

            if (oxcf->optimal_buffer_level == 0)
                lc->optimal_buffer_level = lc->target_bandwidth / 8;
                lc->optimal_buffer_level = rescale(
                          (int)oxcf->optimal_buffer_level_in_ms,
                          lc->target_bandwidth, 1000);

            if (oxcf->maximum_buffer_size == 0)
                lc->maximum_buffer_size = lc->target_bandwidth / 8;
                lc->maximum_buffer_size = rescale(
                          (int)oxcf->maximum_buffer_size_in_ms,
                          lc->target_bandwidth, 1000);

            /* Work out the average size of a frame within this layer */
            /* Uses the bitrate and framerate *delta* from the layer below. */
                lc->avg_frame_size_for_layer =
                   (int)((oxcf->target_bitrate[i] -
                          oxcf->target_bitrate[i-1]) * 1000 /
                          (lc->framerate - prev_layer_framerate));

            prev_layer_framerate = lc->framerate;
/* Apply a (possibly mid-stream) configuration change. Maps user-facing
 * settings into internal state: encoder mode/speed, quantizer ranges via
 * q_trans, buffer levels, framerate-derived rate control, temporal layers,
 * frame dimensions and dependent buffer (re)allocation, and the denoiser.
 * Safe to call repeatedly; reallocates only when the coded size changes. */
void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
    VP8_COMMON *cm = &cpi->common;
    int last_w, last_h, prev_number_of_layers;

#if CONFIG_MULTITHREAD
    /* wait for the last picture loopfilter thread done */
    if (cpi->b_lpf_running)
        sem_wait(&cpi->h_event_end_lpf);
        cpi->b_lpf_running = 0;

    if (cm->version != oxcf->Version)
        cm->version = oxcf->Version;
        vp8_setup_version(cm);

    /* Remember previous geometry/layering so we can detect changes below. */
    last_w = cpi->oxcf.Width;
    last_h = cpi->oxcf.Height;
    prev_number_of_layers = cpi->oxcf.number_of_layers;

    /* Map encoding mode onto compressor speed and clamp cpu_used to the
     * range each mode supports.
     * NOTE(review): the case labels for the first (realtime) arm and the
     * break statements are not visible in this excerpt. */
    switch (cpi->oxcf.Mode)
        cpi->compressor_speed = 2;

        if (cpi->oxcf.cpu_used < -16)
            cpi->oxcf.cpu_used = -16;

        if (cpi->oxcf.cpu_used > 16)
            cpi->oxcf.cpu_used = 16;

    case MODE_GOODQUALITY:
        cpi->compressor_speed = 1;

        if (cpi->oxcf.cpu_used < -5)
            cpi->oxcf.cpu_used = -5;

        if (cpi->oxcf.cpu_used > 5)
            cpi->oxcf.cpu_used = 5;

    case MODE_BESTQUALITY:
        cpi->compressor_speed = 0;

    case MODE_FIRSTPASS:
        cpi->compressor_speed = 1;

    case MODE_SECONDPASS:
        cpi->compressor_speed = 1;

        if (cpi->oxcf.cpu_used < -5)
            cpi->oxcf.cpu_used = -5;

        if (cpi->oxcf.cpu_used > 5)
            cpi->oxcf.cpu_used = 5;

    case MODE_SECONDPASS_BEST:
        cpi->compressor_speed = 0;

    cpi->auto_worst_q = 1;

    /* Translate user 0-63 quality levels to internal 0-127 quantizers. */
    cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
    cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
    cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];

    if (oxcf->fixed_q >= 0)
        if (oxcf->worst_allowed_q < 0)
            cpi->oxcf.fixed_q = q_trans[0];
            cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];

        if (oxcf->alt_q < 0)
            cpi->oxcf.alt_q = q_trans[0];
            cpi->oxcf.alt_q = q_trans[oxcf->alt_q];

        if (oxcf->key_q < 0)
            cpi->oxcf.key_q = q_trans[0];
            cpi->oxcf.key_q = q_trans[oxcf->key_q];

        if (oxcf->gold_q < 0)
            cpi->oxcf.gold_q = q_trans[0];
            cpi->oxcf.gold_q = q_trans[oxcf->gold_q];

    cpi->baseline_gf_interval =
        cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;

    cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;

    cm->refresh_golden_frame = 0;
    cm->refresh_last_frame = 1;
    cm->refresh_entropy_probs = 1;

/* On-the-fly bitpacking requires the maximum number of partitions. */
#if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
    cpi->oxcf.token_partitions = 3;

    if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
        cm->multi_token_partition =
            (TOKEN_PARTITION) cpi->oxcf.token_partitions;

    setup_features(cpi);

    /* Same breakout threshold applied to every segment initially. */
        for (i = 0; i < MAX_MB_SEGMENTS; i++)
            cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;

    /* At the moment the first order values may not be > MAXQ */
    if (cpi->oxcf.fixed_q > MAXQ)
        cpi->oxcf.fixed_q = MAXQ;

    /* local file playback mode == really big buffer */
    if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
        cpi->oxcf.starting_buffer_level   = 60000;
        cpi->oxcf.optimal_buffer_level    = 60000;
        cpi->oxcf.maximum_buffer_size     = 240000;
        cpi->oxcf.starting_buffer_level_in_ms = 60000;
        cpi->oxcf.optimal_buffer_level_in_ms  = 60000;
        cpi->oxcf.maximum_buffer_size_in_ms   = 240000;

    /* Convert target bandwidth from Kbit/s to Bit/s */
    cpi->oxcf.target_bandwidth       *= 1000;

    /* Buffer levels arrive in ms; convert to bits at the target bandwidth. */
    cpi->oxcf.starting_buffer_level =
        rescale((int)cpi->oxcf.starting_buffer_level,
                cpi->oxcf.target_bandwidth, 1000);

    /* Set or reset optimal and maximum buffer levels. */
    if (cpi->oxcf.optimal_buffer_level == 0)
        cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
        cpi->oxcf.optimal_buffer_level =
            rescale((int)cpi->oxcf.optimal_buffer_level,
                    cpi->oxcf.target_bandwidth, 1000);

    if (cpi->oxcf.maximum_buffer_size == 0)
        cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
        cpi->oxcf.maximum_buffer_size =
            rescale((int)cpi->oxcf.maximum_buffer_size,
                    cpi->oxcf.target_bandwidth, 1000);
    // Under a configuration change, where maximum_buffer_size may change,
    // keep buffer level clipped to the maximum allowed buffer size.
    if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
      cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
      cpi->buffer_level = cpi->bits_off_target;

    /* Set up frame rate and related parameters rate control values. */
    vp8_new_framerate(cpi, cpi->framerate);

    /* Set absolute upper and lower quality limits */
    cpi->worst_quality               = cpi->oxcf.worst_allowed_q;
    cpi->best_quality                = cpi->oxcf.best_allowed_q;

    /* active values should only be modified if out of new range */
    if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
      cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
    else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
      cpi->active_worst_quality = cpi->oxcf.best_allowed_q;

    if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
      cpi->active_best_quality = cpi->oxcf.best_allowed_q;
    else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
      cpi->active_best_quality = cpi->oxcf.worst_allowed_q;

    cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;

    cpi->cq_target_quality = cpi->oxcf.cq_level;

    /* Only allow dropped frames in buffered mode */
    cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;

    cpi->target_bandwidth = cpi->oxcf.target_bandwidth;

    // Check if the number of temporal layers has changed, and if so reset the
    // pattern counter and set/initialize the temporal layer context for the
    // new layer configuration.
    if (cpi->oxcf.number_of_layers != prev_number_of_layers)
        // If the number of temporal layers are changed we must start at the
        // base of the pattern cycle, so reset temporal_pattern_counter.
        cpi->temporal_pattern_counter = 0;
        reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);

    cm->Width       = cpi->oxcf.Width;
    cm->Height      = cpi->oxcf.Height;

    /* TODO(jkoleszar): if an internal spatial resampling is active,
     * and we downsize the input image, maybe we should clear the
     * internal scale immediately rather than waiting for it to
     */

    /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
    if (cpi->oxcf.Sharpness > 7)
        cpi->oxcf.Sharpness = 7;

    cm->sharpness_level = cpi->oxcf.Sharpness;

    if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
        int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
        int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);

        Scale2Ratio(cm->horiz_scale, &hr, &hs);
        Scale2Ratio(cm->vert_scale, &vr, &vs);

        /* always go to the next whole number */
        cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
        cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;

    if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
        cpi->force_next_frame_intra = 1;

    /* Reallocate frame-size-dependent state if the padded coded size no
     * longer matches the current reference buffer (or none exists yet). */
    if (((cm->Width + 15) & 0xfffffff0) !=
          cm->yv12_fb[cm->lst_fb_idx].y_width ||
        ((cm->Height + 15) & 0xfffffff0) !=
          cm->yv12_fb[cm->lst_fb_idx].y_height ||
        cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
        dealloc_raw_frame_buffers(cpi);
        alloc_raw_frame_buffers(cpi);
        vp8_alloc_compressor_data(cpi);

    if (cpi->oxcf.fixed_q >= 0)
        cpi->last_q[0] = cpi->oxcf.fixed_q;
        cpi->last_q[1] = cpi->oxcf.fixed_q;

    cpi->Speed = cpi->oxcf.cpu_used;

    /* force to allowlag to 0 if lag_in_frames is 0; */
    if (cpi->oxcf.lag_in_frames == 0)
        cpi->oxcf.allow_lag = 0;
    /* Limit on lag buffers as these are not currently dynamically allocated */
    else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
        cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;

    cpi->alt_ref_source = NULL;
    cpi->is_src_frame_alt_ref = 0;

#if CONFIG_TEMPORAL_DENOISING
    if (cpi->oxcf.noise_sensitivity)
        /* Lazily allocate the denoiser the first time it is enabled. */
        if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
            int width = (cpi->oxcf.Width + 15) & ~15;
            int height = (cpi->oxcf.Height + 15) & ~15;
            vp8_denoiser_allocate(&cpi->denoiser, width, height,
                                  cpi->common.mb_rows, cpi->common.mb_cols);

    /* Experimental RD Code */
    cpi->frame_distortion = 0;
    cpi->last_frame_distortion = 0;
/* Natural logarithm of 2; used to synthesize log2f() on toolchains whose
 * math library lacks it: log2(x) = ln(x) / ln(2). */
#define M_LOG2_E 0.693147180559945309417
/* Macro argument parenthesized for hygiene so that expressions containing
 * lower-precedence operators (e.g. log2f(a + b)) expand correctly. */
#define log2f(x) (log ((x)) / (float) M_LOG2_E)
1781 static void cal_mvsadcosts(int *mvsadcost[2])
1785 mvsadcost [0] [0] = 300;
1786 mvsadcost [1] [0] = 300;
1790 double z = 256 * (2 * (log2f(8 * i) + .6));
1791 mvsadcost [0][i] = (int) z;
1792 mvsadcost [1][i] = (int) z;
1793 mvsadcost [0][-i] = (int) z;
1794 mvsadcost [1][-i] = (int) z;
1796 while (++i <= mvfp_max);
/* Allocate and fully initialize a VP8 compressor instance.
 * Sets up common state, configuration, rate-control defaults, two-pass
 * state, encoder threads, the variance/SAD function pointer table, the
 * quantizer and loop filter, and the RD cost tables.
 * Errors during setup longjmp back to the setjmp() below, which tears the
 * instance down via vp8_remove_compressor().
 * NOTE(review): local declarations, NULL checks and return statements are
 * partially elided in this excerpt. */
struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
    /* 32-byte alignment for SIMD access to embedded buffers. */
    cpi = vpx_memalign(32, sizeof(VP8_COMP));

    /* Check that the CPI instance is valid */

    vpx_memset(cpi, 0, sizeof(VP8_COMP));

    /* Error handler: any vpx_internal_error() during setup lands here and
     * destroys the half-built instance. */
    if (setjmp(cm->error.jmp))
        cpi->common.error.setjmp = 0;
        vp8_remove_compressor(&cpi);

    cpi->common.error.setjmp = 1;

    CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));

    vp8_create_common(&cpi->common);

    init_config(cpi, oxcf);

    memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
    cpi->common.current_video_frame   = 0;
    cpi->temporal_pattern_counter     = 0;
    cpi->kf_overspend_bits            = 0;
    cpi->kf_bitrate_adjustment        = 0;
    cpi->frames_till_gf_update_due      = 0;
    cpi->gf_overspend_bits            = 0;
    cpi->non_gf_bitrate_adjustment     = 0;
    /* Initial coding-probability estimates (out of 255). */
    cpi->prob_last_coded              = 128;
    cpi->prob_gf_coded                = 128;
    cpi->prob_intra_coded             = 63;

    /* Prime the recent reference frame usage counters.
     * Hereafter they will be maintained as a sort of moving average
     */
    cpi->recent_ref_frame_usage[INTRA_FRAME]  = 1;
    cpi->recent_ref_frame_usage[LAST_FRAME]   = 1;
    cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
    cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;

    /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
    cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;

    cpi->twopass.gf_decay_rate = 0;
    cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;

    cpi->gold_is_last = 0 ;
    cpi->alt_is_last  = 0 ;
    cpi->gold_is_alt  = 0 ;

    cpi->active_map_enabled = 0;

    /* Experimental code for lagged and one pass */
    /* Initialise one_pass GF frames stats */
    /* Update stats used for GF selection */
        cpi->one_pass_frame_index = 0;

        for (i = 0; i < MAX_LAG_BUFFERS; i++)
            cpi->one_pass_frame_stats[i].frames_so_far = 0;
            cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
            cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
            cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
            cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;

    /* Should we use the cyclic refresh method.
     * Currently this is tied to error resilliant mode
     */
    cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
    /* Refresh at most 1/5 of the macroblocks per frame. */
    cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
    cpi->cyclic_refresh_mode_index = 0;
    cpi->cyclic_refresh_q = 32;

    if (cpi->cyclic_refresh_mode_enabled)
        CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
        cpi->cyclic_refresh_map = (signed char *) NULL;

#ifdef VP8_ENTROPY_STATS
    init_context_counters();

    /*Initialize the feed-forward activity masking.*/
    cpi->activity_avg = 90<<12;

    /* Give a sensible default for the first frame. */
    cpi->frames_since_key = 8;
    cpi->key_frame_frequency = cpi->oxcf.key_freq;
    cpi->this_key_frame_forced = 0;
    cpi->next_key_frame_forced = 0;

    cpi->source_alt_ref_pending = 0;
    cpi->source_alt_ref_active = 0;
    cpi->common.refresh_alt_ref_frame = 0;

    cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
#if CONFIG_INTERNAL_STATS
    cpi->b_calculate_ssimg = 0;

    if (cpi->b_calculate_psnr)
        cpi->total_sq_error = 0.0;
        cpi->total_sq_error2 = 0.0;
        cpi->totalp_y = 0.0;
        cpi->totalp_u = 0.0;
        cpi->totalp_v = 0.0;
        cpi->tot_recode_hits = 0;
        cpi->summed_quality = 0;
        cpi->summed_weights = 0;

    if (cpi->b_calculate_ssimg)
        cpi->total_ssimg_y = 0;
        cpi->total_ssimg_u = 0;
        cpi->total_ssimg_v = 0;
        cpi->total_ssimg_all = 0;

    /* No frame seen yet: sentinel timestamp. */
    cpi->first_time_stamp_ever = 0x7FFFFFFF;

    cpi->frames_till_gf_update_due      = 0;
    cpi->key_frame_count              = 1;

    cpi->ni_av_qi                     = cpi->oxcf.worst_allowed_q;
    cpi->total_byte_count             = 0;

    cpi->drop_frame                  = 0;

    /* Rate-correction factors start at unity (no correction). */
    cpi->rate_correction_factor         = 1.0;
    cpi->key_frame_rate_correction_factor = 1.0;
    cpi->gf_rate_correction_factor  = 1.0;
    cpi->twopass.est_max_qcorrection_factor  = 1.0;

    for (i = 0; i < KEY_FRAME_CONTEXT; i++)
        cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;

#ifdef OUTPUT_YUV_SRC
    yuv_file = fopen("bd.yuv", "ab");
#ifdef OUTPUT_YUV_DENOISED
    yuv_denoised_file = fopen("denoised.yuv", "ab");

    framepsnr = fopen("framepsnr.stt", "a");
    kf_list = fopen("kf_list.stt", "w");

    cpi->output_pkt_list = oxcf->output_pkt_list;

#if !(CONFIG_REALTIME_ONLY)
    /* Two-pass setup: first pass collects stats; second pass consumes the
     * caller-provided stats packets. */
        vp8_init_first_pass(cpi);
    else if (cpi->pass == 2)
        size_t packet_sz = sizeof(FIRSTPASS_STATS);
        int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);

        cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
        cpi->twopass.stats_in = cpi->twopass.stats_in_start;
        cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
                            + (packets - 1) * packet_sz);
        vp8_init_second_pass(cpi);

    if (cpi->compressor_speed == 2)
        cpi->avg_encode_time      = 0;
        cpi->avg_pick_mode_time    = 0;

    vp8_set_speed_features(cpi);

    /* Set starting values of RD threshold multipliers (128 = *1) */
    for (i = 0; i < MAX_MODES; i++)
        cpi->mb.rd_thresh_mult[i] = 128;

#ifdef VP8_ENTROPY_STATS
    init_mv_ref_counts();

#if CONFIG_MULTITHREAD
    if(vp8cx_create_encoder_threads(cpi))
        vp8_remove_compressor(&cpi);

    /* Per-block-size SAD / variance / sub-pel variance function table.
     * Half-pel specializations exist only for 16x16. */
    cpi->fn_ptr[BLOCK_16X16].sdf            = vp8_sad16x16;
    cpi->fn_ptr[BLOCK_16X16].vf             = vp8_variance16x16;
    cpi->fn_ptr[BLOCK_16X16].svf            = vp8_sub_pixel_variance16x16;
    cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h  = vp8_variance_halfpixvar16x16_h;
    cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v  = vp8_variance_halfpixvar16x16_v;
    cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv;
    cpi->fn_ptr[BLOCK_16X16].sdx3f          = vp8_sad16x16x3;
    cpi->fn_ptr[BLOCK_16X16].sdx8f          = vp8_sad16x16x8;
    cpi->fn_ptr[BLOCK_16X16].sdx4df         = vp8_sad16x16x4d;

    cpi->fn_ptr[BLOCK_16X8].sdf            = vp8_sad16x8;
    cpi->fn_ptr[BLOCK_16X8].vf             = vp8_variance16x8;
    cpi->fn_ptr[BLOCK_16X8].svf            = vp8_sub_pixel_variance16x8;
    cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h  = NULL;
    cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v  = NULL;
    cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
    cpi->fn_ptr[BLOCK_16X8].sdx3f          = vp8_sad16x8x3;
    cpi->fn_ptr[BLOCK_16X8].sdx8f          = vp8_sad16x8x8;
    cpi->fn_ptr[BLOCK_16X8].sdx4df         = vp8_sad16x8x4d;

    cpi->fn_ptr[BLOCK_8X16].sdf            = vp8_sad8x16;
    cpi->fn_ptr[BLOCK_8X16].vf             = vp8_variance8x16;
    cpi->fn_ptr[BLOCK_8X16].svf            = vp8_sub_pixel_variance8x16;
    cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h  = NULL;
    cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v  = NULL;
    cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
    cpi->fn_ptr[BLOCK_8X16].sdx3f          = vp8_sad8x16x3;
    cpi->fn_ptr[BLOCK_8X16].sdx8f          = vp8_sad8x16x8;
    cpi->fn_ptr[BLOCK_8X16].sdx4df         = vp8_sad8x16x4d;

    cpi->fn_ptr[BLOCK_8X8].sdf            = vp8_sad8x8;
    cpi->fn_ptr[BLOCK_8X8].vf             = vp8_variance8x8;
    cpi->fn_ptr[BLOCK_8X8].svf            = vp8_sub_pixel_variance8x8;
    cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h  = NULL;
    cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v  = NULL;
    cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
    cpi->fn_ptr[BLOCK_8X8].sdx3f          = vp8_sad8x8x3;
    cpi->fn_ptr[BLOCK_8X8].sdx8f          = vp8_sad8x8x8;
    cpi->fn_ptr[BLOCK_8X8].sdx4df         = vp8_sad8x8x4d;

    cpi->fn_ptr[BLOCK_4X4].sdf            = vp8_sad4x4;
    cpi->fn_ptr[BLOCK_4X4].vf             = vp8_variance4x4;
    cpi->fn_ptr[BLOCK_4X4].svf            = vp8_sub_pixel_variance4x4;
    cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h  = NULL;
    cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v  = NULL;
    cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
    cpi->fn_ptr[BLOCK_4X4].sdx3f          = vp8_sad4x4x3;
    cpi->fn_ptr[BLOCK_4X4].sdx8f          = vp8_sad4x4x8;
    cpi->fn_ptr[BLOCK_4X4].sdx4df         = vp8_sad4x4x4d;

#if ARCH_X86 || ARCH_X86_64
    /* x86-only fast copy helper used by the SAD search. */
    cpi->fn_ptr[BLOCK_16X16].copymem      = vp8_copy32xn;
    cpi->fn_ptr[BLOCK_16X8].copymem       = vp8_copy32xn;
    cpi->fn_ptr[BLOCK_8X16].copymem       = vp8_copy32xn;
    cpi->fn_ptr[BLOCK_8X8].copymem        = vp8_copy32xn;
    cpi->fn_ptr[BLOCK_4X4].copymem        = vp8_copy32xn;

    cpi->full_search_sad = vp8_full_search_sad;
    cpi->diamond_search_sad = vp8_diamond_search_sad;
    cpi->refining_search_sad = vp8_refining_search_sad;

    /* make sure frame 1 is okay */
    cpi->mb.error_bins[0] = cpi->common.MBs;

    /* vp8cx_init_quantizer() is first called here. Add check in
     * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
     * called later when needed. This will avoid unnecessary calls of
     * vp8cx_init_quantizer() for every frame.
     */
    vp8cx_init_quantizer(cpi);

    vp8_loop_filter_init(cm);

    /* Setup complete: disable the longjmp error path. */
    cpi->common.error.setjmp = 0;

#if CONFIG_MULTI_RES_ENCODING

    /* Calculate # of MBs in a row in lower-resolution level image. */
    if (cpi->oxcf.mr_encoder_id > 0)
        vp8_cal_low_res_mb_cols(cpi);

    /* setup RD costs to MACROBLOCK struct */
    /* Tables are centered so negative MV components index below the base. */
    cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
    cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
    cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
    cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];

    cal_mvsadcosts(cpi->mb.mvsadcost);

    cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
    cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
    cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
    cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
    cpi->mb.token_costs = cpi->rd_costs.token_costs;

    /* setup block ptrs & offsets */
    vp8_setup_block_ptrs(&cpi->mb);
    vp8_setup_block_dptrs(&cpi->mb.e_mbd);
/* Tear down a compressor instance: finish the second pass if active, emit
 * any compiled-in statistics dumps (PSNR/SSIM, entropy counters, mode
 * histograms, timing), then free threads, denoiser, compressor data, common
 * state and the instance itself.
 * NOTE(review): several closing braces, fclose() calls, the final vpx_free
 * of cpi and the *ptr = NULL reset are not visible in this excerpt. */
void vp8_remove_compressor(VP8_COMP **ptr)
    VP8_COMP *cpi = *ptr;

    /* Stats dumps only make sense if at least one frame was encoded. */
    if (cpi && (cpi->common.current_video_frame > 0))
#if !(CONFIG_REALTIME_ONLY)
            vp8_end_second_pass(cpi);

#ifdef VP8_ENTROPY_STATS
        print_context_counters();
        print_tree_update_probs();
        print_mode_context();

#if CONFIG_INTERNAL_STATS
            FILE *f = fopen("opsnr.stt", "a");
            /* Timestamps are in 100ns units; convert to seconds. */
            double time_encoded = (cpi->last_end_time_stamp_seen
                                   - cpi->first_time_stamp_ever) / 10000000.000;
            double total_encode_time = (cpi->time_receive_data +
                                            cpi->time_compress_data)   / 1000.000;
            /* Achieved data rate in kbit/s. */
            double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;

            if (cpi->b_calculate_psnr)
                YV12_BUFFER_CONFIG *lst_yv12 =
                              &cpi->common.yv12_fb[cpi->common.lst_fb_idx];

                if (cpi->oxcf.number_of_layers > 1)
                    /* One stats row per temporal layer. */
                    fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
                               "GLPsnrP\tVPXSSIM\t\n");
                    for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
                        double dr = (double)cpi->bytes_in_layer[i] *
                                    8.0 / 1000.0  / time_encoded;
                        /* 3/2 accounts for 4:2:0 chroma samples. */
                        double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
                                         lst_yv12->y_width * lst_yv12->y_height;
                            vpx_sse_to_psnr(samples, 255.0,
                                            cpi->total_error2[i]);
                        double total_psnr2 =
                            vpx_sse_to_psnr(samples, 255.0,
                                            cpi->total_error2_p[i]);
                        double total_ssim = 100 * pow(cpi->sum_ssim[i] /
                                                      cpi->sum_weights[i], 8.0);

                        fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
                                cpi->sum_psnr[i] / cpi->frames_in_layer[i],
                                cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
                                total_psnr2, total_ssim);

                    /* Single-layer totals. */
                    double samples = 3.0 / 2 * cpi->count *
                                     lst_yv12->y_width * lst_yv12->y_height;
                    double total_psnr = vpx_sse_to_psnr(samples, 255.0,
                                                        cpi->total_sq_error);
                    double total_psnr2 = vpx_sse_to_psnr(samples, 255.0,
                                                         cpi->total_sq_error2);
                    double total_ssim = 100 * pow(cpi->summed_quality /
                                                      cpi->summed_weights, 8.0);

                    fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
                               "GLPsnrP\tVPXSSIM\t Time(us)\n");
                    fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
                            dr, cpi->total / cpi->count, total_psnr,
                            cpi->totalp / cpi->count, total_psnr2,
                            total_ssim, total_encode_time);

            if (cpi->b_calculate_ssimg)
                if (cpi->oxcf.number_of_layers > 1)
                    fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
                    for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
                        double dr = (double)cpi->bytes_in_layer[i] *
                                    8.0 / 1000.0 / time_encoded;
                        fprintf(f, "%5d\t%7.3f\t%6.4f\t"
                                "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
                                cpi->total_ssimg_y_in_layer[i] /
                                     cpi->frames_in_layer[i],
                                cpi->total_ssimg_u_in_layer[i] /
                                     cpi->frames_in_layer[i],
                                cpi->total_ssimg_v_in_layer[i] /
                                     cpi->frames_in_layer[i],
                                cpi->total_ssimg_all_in_layer[i] /
                                     cpi->frames_in_layer[i],
                    fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
                    fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
                            cpi->total_ssimg_y / cpi->count,
                            cpi->total_ssimg_u / cpi->count,
                            cpi->total_ssimg_v / cpi->count,
                            cpi->total_ssimg_all / cpi->count, total_encode_time);

            f = fopen("qskip.stt", "a");
            fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);

        if (cpi->compressor_speed == 2)
            FILE   *f = fopen("cxspeed.stt", "a");
            cnt_pm /= cpi->common.MBs;

            for (i = 0; i < 16; i++)
                fprintf(f, "%5d", frames_at_speed[i]);

            extern int count_mb_seg[4];
            FILE *f = fopen("modes.stt", "a");
            double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
            fprintf(f, "intra_mode in Intra Frames:\n");
            fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
            fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);

                for (i = 0; i < 10; i++)
                    fprintf(f, "%8d, ", b_modes[i]);

            fprintf(f, "Modes in Inter Frames:\n");
            fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
                    inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
                    inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
            fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);

                for (i = 0; i < 15; i++)
                    fprintf(f, "%8d, ", inter_b_modes[i]);

            fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
            fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);

#ifdef VP8_ENTROPY_STATS
            /* Emit the collected kf bmode counts as a compilable C table. */
            FILE *fmode = fopen("modecontext.c", "w");

            fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
            fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
            fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");

            for (i = 0; i < 10; i++)

                fprintf(fmode, "    { /* Above Mode :  %d */\n", i);

                for (j = 0; j < 10; j++)

                    fprintf(fmode, "        {");

                    for (k = 0; k < 10; k++)
                        /* Zero counts are clamped to 1 to keep probabilities
                         * well-defined. */
                        if (!intra_mode_stats[i][j][k])
                            fprintf(fmode, " %5d, ", 1);
                            fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);

                    fprintf(fmode, "}, /* left_mode %d */\n", j);

                fprintf(fmode, "    },\n");

            fprintf(fmode, "};\n");

#if defined(SECTIONBITS_OUTPUT)
            FILE *f = fopen("tokenbits.stt", "a");

            for (i = 0; i < 28; i++)
                fprintf(f, "%8d", (int)(Sectionbits[i] / 256));

            printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
            printf("\n_frames recive_data encod_mb_row compress_frame  Total\n");
            printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);

#if CONFIG_MULTITHREAD
    vp8cx_remove_encoder_threads(cpi);

#if CONFIG_TEMPORAL_DENOISING
    vp8_denoiser_free(&cpi->denoiser);

    dealloc_compressor_data(cpi);
    vpx_free(cpi->mb.ss);
    vpx_free(cpi->cyclic_refresh_map);

    vp8_remove_common(&cpi->common);

#ifdef OUTPUT_YUV_SRC
#ifdef OUTPUT_YUV_DENOISED
    fclose(yuv_denoised_file);
/* Sum of squared differences (SSE) between one plane of the original
 * source and its reconstruction.
 *
 * The interior of the plane is covered in 16x16 tiles; vp8_mse16x16 is
 * called per tile (its accumulation into total_sse is on lines elided
 * from this view -- TODO confirm against the full source).  Right-edge
 * and bottom-edge pixels left over when cols/rows are not multiples of
 * 16 are handled with plain scalar difference loops.
 */
2448 static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
2449 unsigned char *recon, int recon_stride,
2450 unsigned int cols, unsigned int rows)
2452 unsigned int row, col;
2453 uint64_t total_sse = 0;
/* Main 16x16 tiled area. */
2456 for (row = 0; row + 16 <= rows; row += 16)
2458 for (col = 0; col + 16 <= cols; col += 16)
2462 vp8_mse16x16(orig + col, orig_stride,
2463 recon + col, recon_stride,
2468 /* Handle odd-sized width */
2471 unsigned int border_row, border_col;
2472 unsigned char *border_orig = orig;
2473 unsigned char *border_recon = recon;
/* Scalar SSE over the leftover right-edge columns of this 16-row band. */
2475 for (border_row = 0; border_row < 16; border_row++)
2477 for (border_col = col; border_col < cols; border_col++)
2479 diff = border_orig[border_col] - border_recon[border_col];
2480 total_sse += diff * diff;
2483 border_orig += orig_stride;
2484 border_recon += recon_stride;
/* Advance both plane pointers to the next 16-row band. */
2488 orig += orig_stride * 16;
2489 recon += recon_stride * 16;
2492 /* Handle odd-sized height */
2493 for (; row < rows; row++)
2495 for (col = 0; col < cols; col++)
2497 diff = orig[col] - recon[col];
2498 total_sse += diff * diff;
2501 orig += orig_stride;
2502 recon += recon_stride;
/* Clear MMX/SIMD state before returning to C float/double code. */
2505 vp8_clear_system_state();
/* Build a VPX_CODEC_PSNR_PKT for the frame just encoded and append it to
 * the encoder's output packet list.
 *
 * Index convention in the packet arrays (per the vpx_codec API):
 *   [0] = combined Y+U+V, [1] = Y plane, [2] = U plane, [3] = V plane.
 * SSE values come from calc_plane_error(); PSNR is derived from SSE and
 * sample counts with a 255.0 peak value.
 */
2510 static void generate_psnr_packet(VP8_COMP *cpi)
2512 YV12_BUFFER_CONFIG *orig = cpi->Source;
2513 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
2514 struct vpx_codec_cx_pkt pkt;
2517 unsigned int width = cpi->common.Width;
2518 unsigned int height = cpi->common.Height;
2520 pkt.kind = VPX_CODEC_PSNR_PKT;
/* Luma plane: seeds both the overall [0] and Y-only [1] slots. */
2521 sse = calc_plane_error(orig->y_buffer, orig->y_stride,
2522 recon->y_buffer, recon->y_stride,
2524 pkt.data.psnr.sse[0] = sse;
2525 pkt.data.psnr.sse[1] = sse;
2526 pkt.data.psnr.samples[0] = width * height;
2527 pkt.data.psnr.samples[1] = width * height;
/* Chroma planes are subsampled 2x in each dimension (round up for odd
 * luma dimensions). */
2529 width = (width + 1) / 2;
2530 height = (height + 1) / 2;
2532 sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
2533 recon->u_buffer, recon->uv_stride,
2535 pkt.data.psnr.sse[0] += sse;
2536 pkt.data.psnr.sse[2] = sse;
2537 pkt.data.psnr.samples[0] += width * height;
2538 pkt.data.psnr.samples[2] = width * height;
2540 sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
2541 recon->v_buffer, recon->uv_stride,
2543 pkt.data.psnr.sse[0] += sse;
2544 pkt.data.psnr.sse[3] = sse;
2545 pkt.data.psnr.samples[0] += width * height;
2546 pkt.data.psnr.samples[3] = width * height;
/* Convert all four SSE/sample pairs to PSNR (dB). */
2548 for (i = 0; i < 4; i++)
2549 pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
2550 (double)(pkt.data.psnr.sse[i]));
2552 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
/* Public API: restrict which reference frames the encoder may predict
 * from.  ref_frame_flags is a bitmask of VP8_LAST_FRAME, VP8_GOLD_FRAME
 * and VP8_ALTR_FRAME; any value above 7 is invalid (the early-return on
 * the invalid path is on a line elided from this view -- presumably
 * returns -1; TODO confirm).
 */
2556 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
2558 if (ref_frame_flags > 7)
2561 cpi->ref_frame_flags = ref_frame_flags;
/* Public API: select which reference buffers the next encoded frame will
 * refresh.  ref_frame_flags is the same 3-bit mask as in
 * vp8_use_as_reference(); values above 7 are rejected (rejection return
 * elided from this view).  All three refresh flags are cleared first, so
 * a zero mask disables every refresh.
 */
2564 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
2566 if (ref_frame_flags > 7)
2569 cpi->common.refresh_golden_frame = 0;
2570 cpi->common.refresh_alt_ref_frame = 0;
2571 cpi->common.refresh_last_frame = 0;
2573 if (ref_frame_flags & VP8_LAST_FRAME)
2574 cpi->common.refresh_last_frame = 1;
2576 if (ref_frame_flags & VP8_GOLD_FRAME)
2577 cpi->common.refresh_golden_frame = 1;
2579 if (ref_frame_flags & VP8_ALTR_FRAME)
2580 cpi->common.refresh_alt_ref_frame = 1;
/* Public API: copy one of the encoder's reference frame buffers (last,
 * golden or alt-ref, selected by ref_frame_flag) out into the caller's
 * YV12 buffer sd.  The error return for an unrecognized flag is on a
 * line elided from this view.
 */
2585 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2587 VP8_COMMON *cm = &cpi->common;
/* Map the flag to the index of the backing frame buffer. */
2590 if (ref_frame_flag == VP8_LAST_FRAME)
2591 ref_fb_idx = cm->lst_fb_idx;
2592 else if (ref_frame_flag == VP8_GOLD_FRAME)
2593 ref_fb_idx = cm->gld_fb_idx;
2594 else if (ref_frame_flag == VP8_ALTR_FRAME)
2595 ref_fb_idx = cm->alt_fb_idx;
2599 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
/* Public API: overwrite one of the encoder's reference frame buffers
 * (selected by ref_frame_flag) with the caller-supplied YV12 frame sd.
 * Mirror image of vp8_get_reference(); the error return for an
 * unrecognized flag is on a line elided from this view.
 */
2603 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2605 VP8_COMMON *cm = &cpi->common;
/* Map the flag to the index of the backing frame buffer. */
2609 if (ref_frame_flag == VP8_LAST_FRAME)
2610 ref_fb_idx = cm->lst_fb_idx;
2611 else if (ref_frame_flag == VP8_GOLD_FRAME)
2612 ref_fb_idx = cm->gld_fb_idx;
2613 else if (ref_frame_flag == VP8_ALTR_FRAME)
2614 ref_fb_idx = cm->alt_fb_idx;
2618 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
/* Public API: enable/disable persistence of entropy probability updates
 * across frames (refresh_entropy_probs).  Return value line is elided
 * from this view.
 */
2622 int vp8_update_entropy(VP8_COMP *cpi, int update)
2624 VP8_COMMON *cm = &cpi->common;
2625 cm->refresh_entropy_probs = update;
2631 #if defined(OUTPUT_YUV_SRC) || defined(OUTPUT_YUV_DENOISED)
/* Debug helper: dump a YV12 frame to an already-open raw YUV file, one
 * plane at a time, writing y_width/uv_width bytes per row and skipping
 * the stride padding.  The per-plane row loops and the pointer switch to
 * the U and V planes are on lines elided from this view.  Only compiled
 * for the OUTPUT_YUV_* debug builds.
 */
2632 void vp8_write_yuv_frame(FILE *yuv_file, YV12_BUFFER_CONFIG *s)
2634 unsigned char *src = s->y_buffer;
2635 int h = s->y_height;
2639 fwrite(src, s->y_width, 1, yuv_file);
/* Chroma plane rows (U then V; uv_stride padding skipped). */
2649 fwrite(src, s->uv_width, 1, yuv_file);
2650 src += s->uv_stride;
2659 fwrite(src, s->uv_width, 1, yuv_file);
2660 src += s->uv_stride;
/* If spatial resampling is active (horiz_scale/vert_scale nonzero),
 * scale the raw source frame sd into cpi->scaled_source, extend its
 * borders for motion search, and point cpi->Source at the scaled copy.
 * When no scaling is requested the (elided) else-path presumably leaves
 * cpi->Source on the unscaled input -- TODO confirm against full source.
 */
2666 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
2668 VP8_COMMON *cm = &cpi->common;
2670 /* are we resizing the image */
2671 if (cm->horiz_scale != 0 || cm->vert_scale != 0)
2673 #if CONFIG_SPATIAL_RESAMPLING
/* Ratio numerator/denominator pairs for each axis; the macro documents
 * that leaving them unset on some paths is intentional. */
2674 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2675 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2678 if (cm->vert_scale == 3)
2683 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2684 Scale2Ratio(cm->vert_scale, &vr, &vs);
2686 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
2687 tmp_height, hs, hr, vs, vr, 0);
2689 vp8_yv12_extend_frame_borders(&cpi->scaled_source);
2690 cpi->Source = &cpi->scaled_source;
/* One-pass CBR spatial resampling decision, evaluated at key frames.
 * Based on current buffer fullness relative to the configured up/down
 * watermarks, step the horizontal/vertical scale factors one notch
 * toward ONETWO (more downscaling) or NORMAL (no scaling).  If the
 * resulting frame size differs from the current one, reallocate the
 * compressor buffers and rescale the source.  Return value lines are
 * elided from this view (presumably whether a resize occurred).
 */
2698 static int resize_key_frame(VP8_COMP *cpi)
2700 #if CONFIG_SPATIAL_RESAMPLING
2701 VP8_COMMON *cm = &cpi->common;
2703 /* Do we need to apply resampling for one pass cbr.
2704 * In one pass this is more limited than in two pass cbr.
2705 * The test and any change is only made once per key frame sequence.
2707 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
2709 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2710 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2711 int new_width, new_height;
2713 /* If we are below the resample DOWN watermark then scale down a
2716 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2718 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
2719 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
2721 /* Should we now start scaling back up */
2722 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
2724 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
2725 cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
2728 /* Get the new height and width */
2729 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2730 Scale2Ratio(cm->vert_scale, &vr, &vs);
/* Scaled dimensions, rounded up (ceiling division by hs/vs). */
2731 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
2732 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
2734 /* If the image size has changed we need to reallocate the buffers
2735 * and resample the source image
2737 if ((cm->Width != new_width) || (cm->Height != new_height))
2739 cm->Width = new_width;
2740 cm->Height = new_height;
2741 vp8_alloc_compressor_data(cpi);
2742 scale_and_extend_source(cpi->un_scaled_source, cpi);
/* Bookkeeping after encoding an alt-ref (golden-group anchor) frame:
 * schedule the next GF interval, spread the alt-ref's bit overspend
 * across the following inter frames, reset the GF-reference activity
 * map, and mark the alt-ref as now active rather than pending.
 */
2752 static void update_alt_ref_frame_stats(VP8_COMP *cpi)
2754 VP8_COMMON *cm = &cpi->common;
2756 /* Select an interval before next GF or altref */
2757 if (!cpi->auto_gold)
2758 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2760 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
2762 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2764 /* Set the bits per frame that we should try and recover in
2765 * subsequent inter frames to account for the extra GF spend...
2766 * note that this does not apply for GF updates that occur
2767 * coincident with a key frame as the extra cost of key frames is
2768 * dealt with elsewhere.
2770 cpi->gf_overspend_bits += cpi->projected_frame_size;
2771 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2774 /* Update data structure that monitors level of reference to last GF */
2775 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2776 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2778 /* this frame refreshes means next frames don't unless specified by user */
2779 cpi->frames_since_golden = 0;
2781 /* Clear the alternate reference update pending flag. */
2782 cpi->source_alt_ref_pending = 0;
2784 /* Set the alternate reference frame active flag */
2785 cpi->source_alt_ref_active = 1;
/* Per-frame bookkeeping for golden-frame state: when this frame
 * refreshed the golden buffer, reschedule the next GF, account for the
 * GF bit overspend and reset usage counters; otherwise decrement the
 * GF/alt-ref countdowns and accumulate reference-frame usage stats.
 */
2789 static void update_golden_frame_stats(VP8_COMP *cpi)
2791 VP8_COMMON *cm = &cpi->common;
2793 /* Update the Golden frame usage counts. */
2794 if (cm->refresh_golden_frame)
2796 /* Select an interval before next GF */
2797 if (!cpi->auto_gold)
2798 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2800 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
2802 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2804 /* Set the bits per frame that we should try and recover in
2805 * subsequent inter frames to account for the extra GF spend...
2806 * note that this does not apply for GF updates that occur
2807 * coincident with a key frame as the extra cost of key frames
2808 * is dealt with elsewhere.
2810 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
2812 /* Calculate GF bits to be recovered
2813 * Projected size - av frame bits available for inter
2814 * frames for clip as a whole
2816 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
2819 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2823 /* Update data structure that monitors level of reference to last GF */
2824 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2825 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2827 /* this frame refreshes means next frames don't unless specified by
2830 cm->refresh_golden_frame = 0;
2831 cpi->frames_since_golden = 0;
/* Restart the recent reference usage counters from a uniform prior. */
2833 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
2834 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
2835 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
2836 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
2838 /* ******** Fixed Q test code only ************ */
2839 /* If we are going to use the ALT reference for the next group of
2840 * frames set a flag to say so.
2842 if (cpi->oxcf.fixed_q >= 0 &&
2843 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
2845 cpi->source_alt_ref_pending = 1;
2846 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
2849 if (!cpi->source_alt_ref_pending)
2850 cpi->source_alt_ref_active = 0;
2852 /* Decrement count down till next gf */
2853 if (cpi->frames_till_gf_update_due > 0)
2854 cpi->frames_till_gf_update_due--;
/* Non-refresh path: this frame did not update golden or alt-ref. */
2857 else if (!cpi->common.refresh_alt_ref_frame)
2859 /* Decrement count down till next gf */
2860 if (cpi->frames_till_gf_update_due > 0)
2861 cpi->frames_till_gf_update_due--;
2863 if (cpi->frames_till_alt_ref_frame)
2864 cpi->frames_till_alt_ref_frame --;
2866 cpi->frames_since_golden ++;
/* After the first frame past a GF, start accumulating how often each
 * reference type was actually chosen at the macroblock level. */
2868 if (cpi->frames_since_golden > 1)
2870 cpi->recent_ref_frame_usage[INTRA_FRAME] +=
2871 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
2872 cpi->recent_ref_frame_usage[LAST_FRAME] +=
2873 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
2874 cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
2875 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
2876 cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
2877 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
2882 /* This function updates the reference frame probability estimates that
2883 * will be used during mode selection
2885 static void update_rd_ref_frame_probs(VP8_COMP *cpi)
2887 VP8_COMMON *cm = &cpi->common;
/* Macroblock counts of which reference each MB used last frame. */
2889 const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
2890 const int rf_intra = rfct[INTRA_FRAME];
2891 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
/* Key frame: fixed priors (all-intra). */
2893 if (cm->frame_type == KEY_FRAME)
2895 cpi->prob_intra_coded = 255;
2896 cpi->prob_last_coded = 128;
2897 cpi->prob_gf_coded = 128;
/* No usage data at all: fall back to neutral defaults. */
2899 else if (!(rf_intra + rf_inter))
2901 cpi->prob_intra_coded = 63;
2902 cpi->prob_last_coded = 128;
2903 cpi->prob_gf_coded = 128;
2906 /* update reference frame costs since we can do better than what we got
2909 if (cpi->oxcf.number_of_layers == 1)
/* Heuristic overrides for single-layer encoding, keyed off how recently
 * the golden/alt-ref buffers were refreshed. */
2911 if (cpi->common.refresh_alt_ref_frame)
2913 cpi->prob_intra_coded += 40;
2914 if (cpi->prob_intra_coded > 255)
2915 cpi->prob_intra_coded = 255;
2916 cpi->prob_last_coded = 200;
2917 cpi->prob_gf_coded = 1;
2919 else if (cpi->frames_since_golden == 0)
2921 cpi->prob_last_coded = 214;
2923 else if (cpi->frames_since_golden == 1)
2925 cpi->prob_last_coded = 192;
2926 cpi->prob_gf_coded = 220;
2928 else if (cpi->source_alt_ref_active)
2930 cpi->prob_gf_coded -= 20;
2932 if (cpi->prob_gf_coded < 10)
2933 cpi->prob_gf_coded = 10;
/* With no active alt-ref, golden is effectively never coded. */
2935 if (!cpi->source_alt_ref_active)
2936 cpi->prob_gf_coded = 255;
2941 /* 1 = key, 0 = inter */
/* Heuristic scene-cut detector for one-pass encoding: decide whether the
 * frame just analyzed should be recoded as a key frame, based on intra
 * usage percentages and (in fast realtime mode) the relative change in
 * intra/prediction error.  Disabled entirely above speed 11.
 */
2942 static int decide_key_frame(VP8_COMP *cpi)
2944 VP8_COMMON *cm = &cpi->common;
2946 int code_key_frame = 0;
2950 if (cpi->Speed > 11)
2953 /* Clear down mmx registers */
2954 vp8_clear_system_state();
/* Fast path for realtime speeds >= 5 without RD: compare this frame's
 * intra/prediction error against the previous frame's. */
2956 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
2958 double change = 1.0 * abs((int)(cpi->mb.intra_error -
2959 cpi->last_intra_error)) / (1 + cpi->last_intra_error);
2960 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
2961 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
2962 double minerror = cm->MBs * 256;
2964 cpi->last_intra_error = cpi->mb.intra_error;
2965 cpi->last_prediction_error = cpi->mb.prediction_error;
/* Key-frame trigger: intra error close to prediction error, sizeable
 * prediction error, and a >25% jump in either error measure. */
2967 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
2968 && cpi->mb.prediction_error > minerror
2969 && (change > .25 || change2 > .25))
2971 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
2979 /* If the following are true we might as well code a key frame */
2980 if (((cpi->this_frame_percent_intra == 100) &&
2981 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
2982 ((cpi->this_frame_percent_intra > 95) &&
2983 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
2987 /* in addition if the following are true and this is not a golden frame
2988 * then code a key frame Note that on golden frames there often seems
2989 * to be a pop in intra usage anyway hence this restriction is
2990 * designed to prevent spurious key frames. The Intra pop needs to be
2993 else if (((cpi->this_frame_percent_intra > 60) &&
2994 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
2995 ((cpi->this_frame_percent_intra > 75) &&
2996 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
2997 ((cpi->this_frame_percent_intra > 90) &&
2998 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
3000 if (!cm->refresh_golden_frame)
3004 return code_key_frame;
3008 #if !(CONFIG_REALTIME_ONLY)
/* First pass of two-pass encoding: encode the frame at a fixed quantizer
 * (26) purely to gather first-pass statistics via vp8_first_pass(); the
 * output bitstream parameters (size/dest/frame_flags) are not meaningful
 * here.  Not built in realtime-only configurations.
 */
3009 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
3014 vp8_set_quantizer(cpi, 26);
3016 vp8_first_pass(cpi);
/* Debug helper: dump one YV12 frame to three raw files (y/u/vNNNN.raw)
 * under a "cx\" directory, one plane per file, stripping stride padding.
 * Note the Windows-style backslash paths; fopen results are not checked
 * and the fclose calls are on lines elided from this view.
 */
3021 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
3024 /* write the frame */
3029 sprintf(filename, "cx\\y%04d.raw", this_frame);
3030 yframe = fopen(filename, "wb");
3032 for (i = 0; i < frame->y_height; i++)
3033 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
3036 sprintf(filename, "cx\\u%04d.raw", this_frame);
3037 yframe = fopen(filename, "wb");
3039 for (i = 0; i < frame->uv_height; i++)
3040 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3043 sprintf(filename, "cx\\v%04d.raw", this_frame);
3044 yframe = fopen(filename, "wb");
3046 for (i = 0; i < frame->uv_height; i++)
3047 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3052 /* return of 0 means drop frame */
3054 /* Function to test for conditions that indicate we should loop
3055 * back and recode a frame.
/* Returns nonzero when the frame just encoded should be recoded at a
 * different quantizer: general over/undershoot against the byte limits,
 * or the special constrained-quality (CQ) undershoot cases.  The lines
 * that set force_recode = 1 inside each branch are elided from this
 * view.
 */
3057 static int recode_loop_test( VP8_COMP *cpi,
3058 int high_limit, int low_limit,
3059 int q, int maxq, int minq )
3061 int force_recode = 0;
3062 VP8_COMMON *cm = &cpi->common;
3064 /* Is frame recode allowed at all
3065 * Yes if either recode mode 1 is selected or mode two is selected
3066 * and the frame is a key frame. golden frame or alt_ref_frame
3068 if ( (cpi->sf.recode_loop == 1) ||
3069 ( (cpi->sf.recode_loop == 2) &&
3070 ( (cm->frame_type == KEY_FRAME) ||
3071 cm->refresh_golden_frame ||
3072 cm->refresh_alt_ref_frame ) ) )
3074 /* General over and under shoot tests */
3075 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
3076 ((cpi->projected_frame_size < low_limit) && (q > minq)) )
3080 /* Special Constrained quality tests */
3081 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3083 /* Undershoot and below auto cq level */
3084 if ( (q > cpi->cq_target_quality) &&
3085 (cpi->projected_frame_size <
3086 ((cpi->this_frame_target * 7) >> 3)))
3090 /* Severe undershoot and between auto and user cq level */
3091 else if ( (q > cpi->oxcf.cq_level) &&
3092 (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
3093 (cpi->active_best_quality > cpi->oxcf.cq_level))
/* Severe CQ undershoot also clamps active_best_quality to the user's
 * CQ level for the recode. */
3096 cpi->active_best_quality = cpi->oxcf.cq_level;
3101 return force_recode;
/* After a frame has been encoded, apply the signaled reference-buffer
 * updates: on key frames all three references point at the new frame;
 * otherwise honor the refresh_* flags and the copy_buffer_to_gf/arf
 * "copy" modes (1 = copy from last, 2 = copy from the other of
 * golden/alt-ref).  Buffer updates are done by re-pointing the
 * lst/gld/alt_fb_idx indices and maintaining the per-buffer VP8_*_FRAME
 * flag bits rather than copying pixels.  With multi-res encoding the
 * frame number each reference corresponds to is tracked in parallel,
 * and with temporal denoising the denoiser's running-average buffers
 * are kept in sync by actual frame copies.
 */
3104 static void update_reference_frames(VP8_COMP *cpi)
3106 VP8_COMMON *cm = &cpi->common;
3107 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
3109 /* At this point the new frame has been encoded.
3110 * If any buffer copy / swapping is signaled it should be done here.
3113 if (cm->frame_type == KEY_FRAME)
/* Key frame: the new buffer becomes golden and alt-ref as well. */
3115 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
3117 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3118 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3120 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
3122 #if CONFIG_MULTI_RES_ENCODING
3123 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3124 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3127 else /* For non key frames */
3129 if (cm->refresh_alt_ref_frame)
3131 assert(!cm->copy_buffer_to_arf);
3133 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
3134 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3135 cm->alt_fb_idx = cm->new_fb_idx;
3137 #if CONFIG_MULTI_RES_ENCODING
3138 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3141 else if (cm->copy_buffer_to_arf)
3143 assert(!(cm->copy_buffer_to_arf & ~0x3));
3145 if (cm->copy_buffer_to_arf == 1)
/* Mode 1: alt-ref takes on the last-frame buffer (no-op if they
 * already share an index). */
3147 if(cm->alt_fb_idx != cm->lst_fb_idx)
3149 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
3150 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3151 cm->alt_fb_idx = cm->lst_fb_idx;
3153 #if CONFIG_MULTI_RES_ENCODING
3154 cpi->current_ref_frames[ALTREF_FRAME] =
3155 cpi->current_ref_frames[LAST_FRAME];
3159 else /* if (cm->copy_buffer_to_arf == 2) */
/* Mode 2: alt-ref takes on the golden buffer. */
3161 if(cm->alt_fb_idx != cm->gld_fb_idx)
3163 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
3164 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3165 cm->alt_fb_idx = cm->gld_fb_idx;
3167 #if CONFIG_MULTI_RES_ENCODING
3168 cpi->current_ref_frames[ALTREF_FRAME] =
3169 cpi->current_ref_frames[GOLDEN_FRAME];
3175 if (cm->refresh_golden_frame)
3177 assert(!cm->copy_buffer_to_gf);
3179 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
3180 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3181 cm->gld_fb_idx = cm->new_fb_idx;
3183 #if CONFIG_MULTI_RES_ENCODING
3184 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3187 else if (cm->copy_buffer_to_gf)
/* NOTE(review): this asserts copy_buffer_to_arf rather than
 * copy_buffer_to_gf -- looks like a copy/paste slip; confirm against
 * upstream before relying on the assertion. */
3189 assert(!(cm->copy_buffer_to_arf & ~0x3));
3191 if (cm->copy_buffer_to_gf == 1)
/* Mode 1: golden takes on the last-frame buffer. */
3193 if(cm->gld_fb_idx != cm->lst_fb_idx)
3195 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
3196 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3197 cm->gld_fb_idx = cm->lst_fb_idx;
3199 #if CONFIG_MULTI_RES_ENCODING
3200 cpi->current_ref_frames[GOLDEN_FRAME] =
3201 cpi->current_ref_frames[LAST_FRAME];
3205 else /* if (cm->copy_buffer_to_gf == 2) */
/* Mode 2: golden takes on the alt-ref buffer. */
3207 if(cm->alt_fb_idx != cm->gld_fb_idx)
3209 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
3210 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3211 cm->gld_fb_idx = cm->alt_fb_idx;
3213 #if CONFIG_MULTI_RES_ENCODING
3214 cpi->current_ref_frames[GOLDEN_FRAME] =
3215 cpi->current_ref_frames[ALTREF_FRAME];
3222 if (cm->refresh_last_frame)
3224 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
3225 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
3226 cm->lst_fb_idx = cm->new_fb_idx;
3228 #if CONFIG_MULTI_RES_ENCODING
3229 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
3233 #if CONFIG_TEMPORAL_DENOISING
3234 if (cpi->oxcf.noise_sensitivity)
3236 /* we shouldn't have to keep multiple copies as we know in advance which
3237 * buffer we should start - for now to get something up and running
3238 * I've chosen to copy the buffers
3240 if (cm->frame_type == KEY_FRAME)
/* Key frame: seed every denoiser running-average buffer from the
 * source. */
3243 for (i = LAST_FRAME; i < MAX_REF_FRAMES; ++i)
3244 vp8_yv12_copy_frame(cpi->Source,
3245 &cpi->denoiser.yv12_running_avg[i]);
3247 else /* For non key frames */
3249 vp8_yv12_extend_frame_borders(
3250 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
/* Mirror each reference-buffer update into the corresponding
 * denoiser running-average buffer. */
3252 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
3254 vp8_yv12_copy_frame(
3255 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3256 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
3258 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
3260 vp8_yv12_copy_frame(
3261 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3262 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
3264 if(cm->refresh_last_frame)
3266 vp8_yv12_copy_frame(
3267 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3268 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
/* Pick a loop filter level for the just-encoded frame (fast or full
 * search depending on sf.auto_filter), time the search, apply the loop
 * filter when the chosen level is > 0, and extend the reconstructed
 * frame's borders for use as a reference.  With multithreading the
 * filter-level choice is also signaled via h_event_end_lpf.  The branch
 * that forces filter_level to 0 (its condition is on a line elided from
 * this view -- presumably "filtering disabled"; TODO confirm) skips the
 * search entirely.
 */
3277 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
3279 const FRAME_TYPE frame_type = cm->frame_type;
3283 cm->filter_level = 0;
3287 struct vpx_usec_timer timer;
3289 vp8_clear_system_state();
3291 vpx_usec_timer_start(&timer);
3292 if (cpi->sf.auto_filter == 0)
3293 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
3296 vp8cx_pick_filter_level(cpi->Source, cpi);
3298 if (cm->filter_level > 0)
3300 vp8cx_set_alt_lf_level(cpi, cm->filter_level);
3303 vpx_usec_timer_mark(&timer);
3304 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
3307 #if CONFIG_MULTITHREAD
3308 if (cpi->b_multi_threaded)
3309 sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
3312 if (cm->filter_level > 0)
3314 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
3317 vp8_yv12_extend_frame_borders(cm->frame_to_show);
3321 static void encode_frame_to_data_rate
3324 unsigned long *size,
3325 unsigned char *dest,
3326 unsigned char* dest_end,
3327 unsigned int *frame_flags
3331 int frame_over_shoot_limit;
3332 int frame_under_shoot_limit;
3337 VP8_COMMON *cm = &cpi->common;
3338 int active_worst_qchanged = 0;
3340 #if !(CONFIG_REALTIME_ONLY)
3344 int zbin_oq_low = 0;
3347 int overshoot_seen = 0;
3348 int undershoot_seen = 0;
3351 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
3352 cpi->oxcf.optimal_buffer_level / 100);
3353 int drop_mark75 = drop_mark * 2 / 3;
3354 int drop_mark50 = drop_mark / 4;
3355 int drop_mark25 = drop_mark / 8;
3358 /* Clear down mmx registers to allow floating point in what follows */
3359 vp8_clear_system_state();
3361 #if CONFIG_MULTITHREAD
3362 /* wait for the last picture loopfilter thread done */
3363 if (cpi->b_lpf_running)
3365 sem_wait(&cpi->h_event_end_lpf);
3366 cpi->b_lpf_running = 0;
3370 if(cpi->force_next_frame_intra)
3372 cm->frame_type = KEY_FRAME; /* delayed intra frame */
3373 cpi->force_next_frame_intra = 0;
3376 /* For an alt ref frame in 2 pass we skip the call to the second pass
3377 * function that sets the target bandwidth
3379 #if !(CONFIG_REALTIME_ONLY)
3383 if (cpi->common.refresh_alt_ref_frame)
3385 /* Per frame bit target for the alt ref frame */
3386 cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
3387 /* per second target bitrate */
3388 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
3389 cpi->output_framerate);
3394 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
3396 /* Default turn off buffer to buffer copying */
3397 cm->copy_buffer_to_gf = 0;
3398 cm->copy_buffer_to_arf = 0;
3400 /* Clear zbin over-quant value and mode boost values. */
3401 cpi->mb.zbin_over_quant = 0;
3402 cpi->mb.zbin_mode_boost = 0;
3404 /* Enable or disable mode based tweaking of the zbin
3405 * For 2 Pass Only used where GF/ARF prediction quality
3406 * is above a threshold
3408 cpi->mb.zbin_mode_boost_enabled = 1;
3411 if ( cpi->gfu_boost <= 400 )
3413 cpi->mb.zbin_mode_boost_enabled = 0;
3417 /* Current default encoder behaviour for the altref sign bias */
3418 if (cpi->source_alt_ref_active)
3419 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
3421 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
3423 /* Check to see if a key frame is signaled
3424 * For two pass with auto key frame enabled cm->frame_type may already
3425 * be set, but not for one pass.
3427 if ((cm->current_video_frame == 0) ||
3428 (cm->frame_flags & FRAMEFLAGS_KEY) ||
3429 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
3431 /* Key frame from VFW/auto-keyframe/first frame */
3432 cm->frame_type = KEY_FRAME;
3435 #if CONFIG_MULTI_RES_ENCODING
3436 /* In multi-resolution encoding, frame_type is decided by lowest-resolution
3437 * encoder. Same frame_type is adopted while encoding at other resolution.
3439 if (cpi->oxcf.mr_encoder_id)
3441 LOWER_RES_FRAME_INFO* low_res_frame_info
3442 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
3444 cm->frame_type = low_res_frame_info->frame_type;
3446 if(cm->frame_type != KEY_FRAME)
3448 cpi->mr_low_res_mv_avail = 1;
3449 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
3451 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
3452 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
3453 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
3455 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
3456 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
3457 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
3459 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
3460 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
3461 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
3466 /* Set various flags etc to special state if it is a key frame */
3467 if (cm->frame_type == KEY_FRAME)
3471 // Set the loop filter deltas and segmentation map update
3472 setup_features(cpi);
3474 /* The alternate reference frame cannot be active for a key frame */
3475 cpi->source_alt_ref_active = 0;
3477 /* Reset the RD threshold multipliers to default of * 1 (128) */
3478 for (i = 0; i < MAX_MODES; i++)
3480 cpi->mb.rd_thresh_mult[i] = 128;
3485 /* Experimental code for lagged compress and one pass
3486 * Initialise one_pass GF frames stats
3487 * Update stats used for GF selection
3490 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
3492 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
3493 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
3494 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
3495 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
3496 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
3497 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
3498 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
3499 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
3500 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
3504 update_rd_ref_frame_probs(cpi);
3506 if (cpi->drop_frames_allowed)
3508 /* The reset to decimation 0 is only done here for one pass.
3509 * Once it is set two pass leaves decimation on till the next kf.
3511 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
3512 cpi->decimation_factor --;
3514 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
3515 cpi->decimation_factor = 1;
3517 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
3519 cpi->decimation_factor = 3;
3521 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
3523 cpi->decimation_factor = 2;
3525 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
3527 cpi->decimation_factor = 1;
3531 /* The following decimates the frame rate according to a regular
3532 * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
3533 * prevent buffer under-run in CBR mode. Alternatively it might be
3534 * desirable in some situations to drop frame rate but throw more bits
3537 * Note that dropping a key frame can be problematic if spatial
3538 * resampling is also active
3540 if (cpi->decimation_factor > 0)
3542 switch (cpi->decimation_factor)
3545 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
3548 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3551 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3555 /* Note that we should not throw out a key frame (especially when
3556 * spatial resampling is enabled).
3558 if (cm->frame_type == KEY_FRAME)
3560 cpi->decimation_count = cpi->decimation_factor;
3562 else if (cpi->decimation_count > 0)
3564 cpi->decimation_count --;
3566 cpi->bits_off_target += cpi->av_per_frame_bandwidth;
3567 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
3568 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
3570 #if CONFIG_MULTI_RES_ENCODING
3571 vp8_store_drop_frame_info(cpi);
3574 cm->current_video_frame++;
3575 cpi->frames_since_key++;
3576 // We advance the temporal pattern for dropped frames.
3577 cpi->temporal_pattern_counter++;
3579 #if CONFIG_INTERNAL_STATS
3583 cpi->buffer_level = cpi->bits_off_target;
3585 if (cpi->oxcf.number_of_layers > 1)
3589 /* Propagate bits saved by dropping the frame to higher
3592 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
3594 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3595 lc->bits_off_target += (int)(lc->target_bandwidth /
3597 if (lc->bits_off_target > lc->maximum_buffer_size)
3598 lc->bits_off_target = lc->maximum_buffer_size;
3599 lc->buffer_level = lc->bits_off_target;
3606 cpi->decimation_count = cpi->decimation_factor;
3609 cpi->decimation_count = 0;
3611 /* Decide how big to make the frame */
3612 if (!vp8_pick_frame_size(cpi))
3614 /*TODO: 2 drop_frame and return code could be put together. */
3615 #if CONFIG_MULTI_RES_ENCODING
3616 vp8_store_drop_frame_info(cpi);
3618 cm->current_video_frame++;
3619 cpi->frames_since_key++;
3620 // We advance the temporal pattern for dropped frames.
3621 cpi->temporal_pattern_counter++;
3625 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
3626 * This has a knock on effect on active best quality as well.
3627 * For CBR if the buffer reaches its maximum level then we can no longer
3628 * save up bits for later frames so we might as well use them up
3629 * on the current frame.
3631 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
3632 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
3634 /* Max adjustment is 1/4 */
3635 int Adjustment = cpi->active_worst_quality / 4;
3641 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
3643 buff_lvl_step = (int)
3644 ((cpi->oxcf.maximum_buffer_size -
3645 cpi->oxcf.optimal_buffer_level) /
3650 ((cpi->buffer_level -
3651 cpi->oxcf.optimal_buffer_level) /
3657 cpi->active_worst_quality -= Adjustment;
3659 if(cpi->active_worst_quality < cpi->active_best_quality)
3660 cpi->active_worst_quality = cpi->active_best_quality;
3664 /* Set an active best quality and if necessary active worst quality
3665 * There is some odd behavior for one pass here that needs attention.
3667 if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
3669 vp8_clear_system_state();
3671 Q = cpi->active_worst_quality;
3673 if ( cm->frame_type == KEY_FRAME )
3675 if ( cpi->pass == 2 )
3677 if (cpi->gfu_boost > 600)
3678 cpi->active_best_quality = kf_low_motion_minq[Q];
3680 cpi->active_best_quality = kf_high_motion_minq[Q];
3682 /* Special case for key frames forced because we have reached
3683 * the maximum key frame interval. Here force the Q to a range
3684 * based on the ambient Q to reduce the risk of popping
3686 if ( cpi->this_key_frame_forced )
3688 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
3689 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
3690 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
3691 cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
3694 /* One pass more conservative */
3696 cpi->active_best_quality = kf_high_motion_minq[Q];
3699 else if (cpi->oxcf.number_of_layers==1 &&
3700 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
3702 /* Use the lower of cpi->active_worst_quality and recent
3703 * average Q as basis for GF/ARF Q limit unless last frame was
3706 if ( (cpi->frames_since_key > 1) &&
3707 (cpi->avg_frame_qindex < cpi->active_worst_quality) )
3709 Q = cpi->avg_frame_qindex;
3712 /* For constrained quality dont allow Q less than the cq level */
3713 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3714 (Q < cpi->cq_target_quality) )
3716 Q = cpi->cq_target_quality;
3719 if ( cpi->pass == 2 )
3721 if ( cpi->gfu_boost > 1000 )
3722 cpi->active_best_quality = gf_low_motion_minq[Q];
3723 else if ( cpi->gfu_boost < 400 )
3724 cpi->active_best_quality = gf_high_motion_minq[Q];
3726 cpi->active_best_quality = gf_mid_motion_minq[Q];
3728 /* Constrained quality use slightly lower active best. */
3729 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
3731 cpi->active_best_quality =
3732 cpi->active_best_quality * 15/16;
3735 /* One pass more conservative */
3737 cpi->active_best_quality = gf_high_motion_minq[Q];
3741 cpi->active_best_quality = inter_minq[Q];
3743 /* For the constant/constrained quality mode we dont want
3744 * q to fall below the cq level.
3746 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
3747 (cpi->active_best_quality < cpi->cq_target_quality) )
3749 /* If we are strongly undershooting the target rate in the last
3750 * frames then use the user passed in cq value not the auto
3753 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
3754 cpi->active_best_quality = cpi->oxcf.cq_level;
3756 cpi->active_best_quality = cpi->cq_target_quality;
3760 /* If CBR and the buffer is as full then it is reasonable to allow
3761 * higher quality on the frames to prevent bits just going to waste.
3763 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
3765 /* Note that the use of >= here eliminates the risk of a divide
3766 * by 0 error in the else if clause
3768 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
3769 cpi->active_best_quality = cpi->best_quality;
3771 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
3773 int Fraction = (int)
3774 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
3775 / (cpi->oxcf.maximum_buffer_size -
3776 cpi->oxcf.optimal_buffer_level));
3777 int min_qadjustment = ((cpi->active_best_quality -
3778 cpi->best_quality) * Fraction) / 128;
3780 cpi->active_best_quality -= min_qadjustment;
3784 /* Make sure constrained quality mode limits are adhered to for the first
3785 * few frames of one pass encodes
3787 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3789 if ( (cm->frame_type == KEY_FRAME) ||
3790 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
3792 cpi->active_best_quality = cpi->best_quality;
3794 else if (cpi->active_best_quality < cpi->cq_target_quality)
3796 cpi->active_best_quality = cpi->cq_target_quality;
3800 /* Clip the active best and worst quality values to limits */
3801 if (cpi->active_worst_quality > cpi->worst_quality)
3802 cpi->active_worst_quality = cpi->worst_quality;
3804 if (cpi->active_best_quality < cpi->best_quality)
3805 cpi->active_best_quality = cpi->best_quality;
3807 if ( cpi->active_worst_quality < cpi->active_best_quality )
3808 cpi->active_worst_quality = cpi->active_best_quality;
3810 /* Determine initial Q to try */
3811 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3813 #if !(CONFIG_REALTIME_ONLY)
3815 /* Set highest allowed value for Zbin over quant */
3816 if (cm->frame_type == KEY_FRAME)
3818 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
3819 (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
3824 zbin_oq_high = ZBIN_OQ_MAX;
3827 /* Setup background Q adjustment for error resilient mode.
3828 * For multi-layer encodes only enable this for the base layer.
3830 if (cpi->cyclic_refresh_mode_enabled)
3832 if (cpi->current_layer==0)
3833 cyclic_background_refresh(cpi, Q, 0);
3835 disable_segmentation(cpi);
3838 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
3840 #if !(CONFIG_REALTIME_ONLY)
3841 /* Limit Q range for the adaptive loop. */
3842 bottom_index = cpi->active_best_quality;
3843 top_index = cpi->active_worst_quality;
3844 q_low = cpi->active_best_quality;
3845 q_high = cpi->active_worst_quality;
3848 vp8_save_coding_context(cpi);
3852 scale_and_extend_source(cpi->un_scaled_source, cpi);
3854 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
3856 if (cpi->oxcf.noise_sensitivity > 0)
3861 switch (cpi->oxcf.noise_sensitivity)
3884 if (cm->frame_type == KEY_FRAME)
3886 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3890 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0);
3892 src = cpi->Source->y_buffer;
3894 if (cpi->Source->y_stride < 0)
3896 src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
3903 #ifdef OUTPUT_YUV_SRC
3904 vp8_write_yuv_frame(yuv_file, cpi->Source);
3909 vp8_clear_system_state();
3911 vp8_set_quantizer(cpi, Q);
3913 /* setup skip prob for costing in mode/mv decision */
3914 if (cpi->common.mb_no_coeff_skip)
3916 cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
3918 if (cm->frame_type != KEY_FRAME)
3920 if (cpi->common.refresh_alt_ref_frame)
3922 if (cpi->last_skip_false_probs[2] != 0)
3923 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3926 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
3927 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
3928 else if (cpi->last_skip_false_probs[2]!=0)
3929 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
3932 else if (cpi->common.refresh_golden_frame)
3934 if (cpi->last_skip_false_probs[1] != 0)
3935 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3938 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
3939 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
3940 else if (cpi->last_skip_false_probs[1]!=0)
3941 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
3946 if (cpi->last_skip_false_probs[0] != 0)
3947 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3950 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
3951 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
3952 else if(cpi->last_skip_false_probs[0]!=0)
3953 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
3957 /* as this is for cost estimate, let's make sure it does not
3958 * go extreme either way
3960 if (cpi->prob_skip_false < 5)
3961 cpi->prob_skip_false = 5;
3963 if (cpi->prob_skip_false > 250)
3964 cpi->prob_skip_false = 250;
3966 if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
3967 cpi->prob_skip_false = 1;
3974 FILE *f = fopen("skip.stt", "a");
3975 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
3983 if (cm->frame_type == KEY_FRAME)
3985 if(resize_key_frame(cpi))
3987 /* If the frame size has changed, need to reset Q, quantizer,
3988 * and background refresh.
3990 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
3991 if (cpi->cyclic_refresh_mode_enabled)
3993 if (cpi->current_layer==0)
3994 cyclic_background_refresh(cpi, Q, 0);
3996 disable_segmentation(cpi);
3998 vp8_set_quantizer(cpi, Q);
4001 vp8_setup_key_frame(cpi);
4006 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
4008 if(cpi->oxcf.error_resilient_mode)
4009 cm->refresh_entropy_probs = 0;
4011 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
4013 if (cm->frame_type == KEY_FRAME)
4014 cm->refresh_entropy_probs = 1;
4017 if (cm->refresh_entropy_probs == 0)
4019 /* save a copy for later refresh */
4020 vpx_memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
4023 vp8_update_coef_context(cpi);
4025 vp8_update_coef_probs(cpi);
4027 /* transform / motion compensation build reconstruction frame
4028 * +pack coef partitions
4030 vp8_encode_frame(cpi);
4032 /* cpi->projected_frame_size is not needed for RT mode */
4035 /* transform / motion compensation build reconstruction frame */
4036 vp8_encode_frame(cpi);
4038 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
4039 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
4041 vp8_clear_system_state();
4043 /* Test to see if the stats generated for this frame indicate that
4044 * we should have coded a key frame (assuming that we didn't)!
4047 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
4048 && cpi->compressor_speed != 2)
4050 #if !(CONFIG_REALTIME_ONLY)
4051 if (decide_key_frame(cpi))
4053 /* Reset all our sizing numbers and recode */
4054 cm->frame_type = KEY_FRAME;
4056 vp8_pick_frame_size(cpi);
4058 /* Clear the Alt reference frame active flag when we have
4061 cpi->source_alt_ref_active = 0;
4063 // Set the loop filter deltas and segmentation map update
4064 setup_features(cpi);
4066 vp8_restore_coding_context(cpi);
4068 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4070 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4072 /* Limit Q range for the adaptive loop. */
4073 bottom_index = cpi->active_best_quality;
4074 top_index = cpi->active_worst_quality;
4075 q_low = cpi->active_best_quality;
4076 q_high = cpi->active_worst_quality;
4086 vp8_clear_system_state();
4088 if (frame_over_shoot_limit == 0)
4089 frame_over_shoot_limit = 1;
4091 /* Are we are overshooting and up against the limit of active max Q. */
4092 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
4093 (Q == cpi->active_worst_quality) &&
4094 (cpi->active_worst_quality < cpi->worst_quality) &&
4095 (cpi->projected_frame_size > frame_over_shoot_limit))
4097 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
4099 /* If so is there any scope for relaxing it */
4100 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
4102 cpi->active_worst_quality++;
4103 /* Assume 1 qstep = about 4% on frame size. */
4104 over_size_percent = (int)(over_size_percent * 0.96);
4106 #if !(CONFIG_REALTIME_ONLY)
4107 top_index = cpi->active_worst_quality;
4109 /* If we have updated the active max Q do not call
4110 * vp8_update_rate_correction_factors() this loop.
4112 active_worst_qchanged = 1;
4115 active_worst_qchanged = 0;
4117 #if !(CONFIG_REALTIME_ONLY)
4118 /* Special case handling for forced key frames */
4119 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
4122 int kf_err = vp8_calc_ss_err(cpi->Source,
4123 &cm->yv12_fb[cm->new_fb_idx]);
4125 /* The key frame is not good enough */
4126 if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
4129 q_high = (Q > q_low) ? (Q - 1) : q_low;
4132 Q = (q_high + q_low) >> 1;
4134 /* The key frame is much better than the previous frame */
4135 else if ( kf_err < (cpi->ambient_err >> 1) )
4138 q_low = (Q < q_high) ? (Q + 1) : q_high;
4141 Q = (q_high + q_low + 1) >> 1;
4144 /* Clamp Q to upper and lower limits: */
4153 /* Is the projected frame size out of range and are we allowed
4154 * to attempt to recode.
4156 else if ( recode_loop_test( cpi,
4157 frame_over_shoot_limit, frame_under_shoot_limit,
4158 Q, top_index, bottom_index ) )
4163 /* Frame size out of permitted range. Update correction factor
4164 * & compute new Q to try...
4167 /* Frame is too large */
4168 if (cpi->projected_frame_size > cpi->this_frame_target)
4170 /* Raise Qlow as to at least the current value */
4171 q_low = (Q < q_high) ? (Q + 1) : q_high;
4173 /* If we are using over quant do the same for zbin_oq_low */
4174 if (cpi->mb.zbin_over_quant > 0)
4175 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4176 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4178 if (undershoot_seen)
4180 /* Update rate_correction_factor unless
4181 * cpi->active_worst_quality has changed.
4183 if (!active_worst_qchanged)
4184 vp8_update_rate_correction_factors(cpi, 1);
4186 Q = (q_high + q_low + 1) / 2;
4188 /* Adjust cpi->zbin_over_quant (only allowed when Q
4192 cpi->mb.zbin_over_quant = 0;
4195 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4196 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4197 cpi->mb.zbin_over_quant =
4198 (zbin_oq_high + zbin_oq_low) / 2;
4203 /* Update rate_correction_factor unless
4204 * cpi->active_worst_quality has changed.
4206 if (!active_worst_qchanged)
4207 vp8_update_rate_correction_factors(cpi, 0);
4209 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4211 while (((Q < q_low) ||
4212 (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
4215 vp8_update_rate_correction_factors(cpi, 0);
4216 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4223 /* Frame is too small */
4226 if (cpi->mb.zbin_over_quant == 0)
4227 /* Lower q_high if not using over quant */
4228 q_high = (Q > q_low) ? (Q - 1) : q_low;
4230 /* else lower zbin_oq_high */
4231 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
4232 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
4236 /* Update rate_correction_factor unless
4237 * cpi->active_worst_quality has changed.
4239 if (!active_worst_qchanged)
4240 vp8_update_rate_correction_factors(cpi, 1);
4242 Q = (q_high + q_low) / 2;
4244 /* Adjust cpi->zbin_over_quant (only allowed when Q
4248 cpi->mb.zbin_over_quant = 0;
4250 cpi->mb.zbin_over_quant =
4251 (zbin_oq_high + zbin_oq_low) / 2;
4255 /* Update rate_correction_factor unless
4256 * cpi->active_worst_quality has changed.
4258 if (!active_worst_qchanged)
4259 vp8_update_rate_correction_factors(cpi, 0);
4261 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4263 /* Special case reset for qlow for constrained quality.
4264 * This should only trigger where there is very substantial
4265 * undershoot on a frame and the auto cq level is above
4266 * the user passed in value.
4268 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4274 while (((Q > q_high) ||
4275 (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
4278 vp8_update_rate_correction_factors(cpi, 0);
4279 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4284 undershoot_seen = 1;
4287 /* Clamp Q to upper and lower limits: */
4293 /* Clamp cpi->zbin_over_quant */
4294 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
4295 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
4296 zbin_oq_high : cpi->mb.zbin_over_quant;
4304 if (cpi->is_src_frame_alt_ref)
4309 vp8_restore_coding_context(cpi);
4311 #if CONFIG_INTERNAL_STATS
4312 cpi->tot_recode_hits++;
4319 /* Experimental code for lagged and one pass
4320 * Update stats used for one pass GF selection
4323 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
4324 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
4325 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
4329 /* Special case code to reduce pulsing when key frames are forced at a
4330 * fixed interval. Note the reconstruction error if it is the frame before
4331 * the force key frame
4333 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
4335 cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
4336 &cm->yv12_fb[cm->new_fb_idx]);
4339 /* This frame's MVs are saved and will be used in next frame's MV predictor.
4340 * Last frame has one more line(add to bottom) and one more column(add to
4341 * right) than cm->mip. The edge elements are initialized to 0.
4343 #if CONFIG_MULTI_RES_ENCODING
4344 if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
4346 if(cm->show_frame) /* do not save for altref frame */
4351 /* Point to beginning of allocated MODE_INFO arrays. */
4352 MODE_INFO *tmp = cm->mip;
4354 if(cm->frame_type != KEY_FRAME)
4356 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
4358 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
4360 if(tmp->mbmi.ref_frame != INTRA_FRAME)
4361 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
4363 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
4364 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
4371 /* Count last ref frame 0,0 usage on current encoded frame. */
4375 /* Point to beginning of MODE_INFO arrays. */
4376 MODE_INFO *tmp = cm->mi;
4378 cpi->zeromv_count = 0;
4380 if(cm->frame_type != KEY_FRAME)
4382 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
4384 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
4386 if(tmp->mbmi.mode == ZEROMV)
4387 cpi->zeromv_count++;
4395 #if CONFIG_MULTI_RES_ENCODING
4396 vp8_cal_dissimilarity(cpi);
4399 /* Update the GF useage maps.
4400 * This is done after completing the compression of a frame when all
4401 * modes etc. are finalized but before loop filter
4403 if (cpi->oxcf.number_of_layers == 1)
4404 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
4406 if (cm->frame_type == KEY_FRAME)
4407 cm->refresh_last_frame = 1;
4411 FILE *f = fopen("gfactive.stt", "a");
4412 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
4417 /* For inter frames the current default behavior is that when
4418 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
4419 * This is purely an encoder decision at present.
4421 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
4422 cm->copy_buffer_to_arf = 2;
4424 cm->copy_buffer_to_arf = 0;
4426 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
4428 #if CONFIG_MULTITHREAD
4429 if (cpi->b_multi_threaded)
4431 /* start loopfilter in separate thread */
4432 sem_post(&cpi->h_event_start_lpf);
4433 cpi->b_lpf_running = 1;
4438 vp8_loopfilter_frame(cpi, cm);
4441 update_reference_frames(cpi);
4443 #ifdef OUTPUT_YUV_DENOISED
4444 vp8_write_yuv_frame(yuv_denoised_file,
4445 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
4448 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
4449 if (cpi->oxcf.error_resilient_mode)
4451 cm->refresh_entropy_probs = 0;
4455 #if CONFIG_MULTITHREAD
4456 /* wait that filter_level is picked so that we can continue with stream packing */
4457 if (cpi->b_multi_threaded)
4458 sem_wait(&cpi->h_event_end_lpf);
4461 /* build the bitstream */
4462 vp8_pack_bitstream(cpi, dest, dest_end, size);
4464 #if CONFIG_MULTITHREAD
4465 /* if PSNR packets are generated we have to wait for the lpf */
4466 if (cpi->b_lpf_running && cpi->b_calculate_psnr)
4468 sem_wait(&cpi->h_event_end_lpf);
4469 cpi->b_lpf_running = 0;
4473 /* Move storing frame_type out of the above loop since it is also
4474 * needed in motion search besides loopfilter */
4475 cm->last_frame_type = cm->frame_type;
4477 /* Update rate control heuristics */
4478 cpi->total_byte_count += (*size);
4479 cpi->projected_frame_size = (*size) << 3;
4481 if (cpi->oxcf.number_of_layers > 1)
4484 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4485 cpi->layer_context[i].total_byte_count += (*size);
4488 if (!active_worst_qchanged)
4489 vp8_update_rate_correction_factors(cpi, 2);
4491 cpi->last_q[cm->frame_type] = cm->base_qindex;
4493 if (cm->frame_type == KEY_FRAME)
4495 vp8_adjust_key_frame_context(cpi);
4498 /* Keep a record of ambient average Q. */
4499 if (cm->frame_type != KEY_FRAME)
4500 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
4502 /* Keep a record from which we can calculate the average Q excluding
4503 * GF updates and key frames
4505 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
4506 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
4510 /* Calculate the average Q for normal inter frames (not key or GFU
4513 if ( cpi->pass == 2 )
4515 cpi->ni_tot_qi += Q;
4516 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4520 /* Damp value for first few frames */
4521 if (cpi->ni_frames > 150 )
4523 cpi->ni_tot_qi += Q;
4524 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4526 /* For one pass, early in the clip ... average the current frame Q
4527 * value with the worstq entered by the user as a dampening measure
4531 cpi->ni_tot_qi += Q;
4532 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
4535 /* If the average Q is higher than what was used in the last
4536 * frame (after going through the recode loop to keep the frame
4537 * size within range) then use the last frame value - 1. The -1
4538 * is designed to stop Q and hence the data rate, from
4539 * progressively falling away during difficult sections, but at
4540 * the same time reduce the number of iterations around the
4543 if (Q > cpi->ni_av_qi)
4544 cpi->ni_av_qi = Q - 1;
4548 /* Update the buffer level variable. */
4549 /* Non-viewable frames are a special case and are treated as pure overhead. */
4550 if ( !cm->show_frame )
4551 cpi->bits_off_target -= cpi->projected_frame_size;
4553 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
4555 /* Clip the buffer level to the maximum specified buffer size */
4556 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
4557 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
4559 /* Rolling monitors of whether we are over or underspending used to
4560 * help regulate min and Max Q in two pass.
4562 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
4563 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
4564 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
4565 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
4567 /* Actual bits spent */
4568 cpi->total_actual_bits += cpi->projected_frame_size;
4571 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
4573 cpi->buffer_level = cpi->bits_off_target;
4575 /* Propagate values to higher temporal layers */
4576 if (cpi->oxcf.number_of_layers > 1)
4580 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4582 LAYER_CONTEXT *lc = &cpi->layer_context[i];
4583 int bits_off_for_this_layer =
4584 (int)(lc->target_bandwidth / lc->framerate -
4585 cpi->projected_frame_size);
4587 lc->bits_off_target += bits_off_for_this_layer;
4589 /* Clip buffer level to maximum buffer size for the layer */
4590 if (lc->bits_off_target > lc->maximum_buffer_size)
4591 lc->bits_off_target = lc->maximum_buffer_size;
4593 lc->total_actual_bits += cpi->projected_frame_size;
4594 lc->total_target_vs_actual += bits_off_for_this_layer;
4595 lc->buffer_level = lc->bits_off_target;
4599 /* Update bits left to the kf and gf groups to account for overshoot
4600 * or undershoot on these frames
4602 if (cm->frame_type == KEY_FRAME)
4604 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4606 if (cpi->twopass.kf_group_bits < 0)
4607 cpi->twopass.kf_group_bits = 0 ;
4609 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
4611 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
4613 if (cpi->twopass.gf_group_bits < 0)
4614 cpi->twopass.gf_group_bits = 0 ;
4617 if (cm->frame_type != KEY_FRAME)
4619 if (cpi->common.refresh_alt_ref_frame)
4621 cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
4622 cpi->last_skip_probs_q[2] = cm->base_qindex;
4624 else if (cpi->common.refresh_golden_frame)
4626 cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
4627 cpi->last_skip_probs_q[1] = cm->base_qindex;
4631 cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
4632 cpi->last_skip_probs_q[0] = cm->base_qindex;
4634 /* update the baseline */
4635 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
4640 #if 0 && CONFIG_INTERNAL_STATS
4642 FILE *f = fopen("tmp.stt", "a");
4644 vp8_clear_system_state();
4646 if (cpi->twopass.total_left_stats.coded_error != 0.0)
4647 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4648 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4649 "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
4650 cpi->common.current_video_frame, cpi->this_frame_target,
4651 cpi->projected_frame_size,
4652 (cpi->projected_frame_size - cpi->this_frame_target),
4653 cpi->total_target_vs_actual,
4655 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4656 cpi->total_actual_bits, cm->base_qindex,
4657 cpi->active_best_quality, cpi->active_worst_quality,
4658 cpi->ni_av_qi, cpi->cq_target_quality,
4659 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4660 cm->frame_type, cpi->gfu_boost,
4661 cpi->twopass.est_max_qcorrection_factor,
4662 cpi->twopass.bits_left,
4663 cpi->twopass.total_left_stats.coded_error,
4664 (double)cpi->twopass.bits_left /
4665 cpi->twopass.total_left_stats.coded_error,
4666 cpi->tot_recode_hits);
4668 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
4669 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
4670 "%8.2lf %"PRId64" %10.3lf %8d\n",
4671 cpi->common.current_video_frame, cpi->this_frame_target,
4672 cpi->projected_frame_size,
4673 (cpi->projected_frame_size - cpi->this_frame_target),
4674 cpi->total_target_vs_actual,
4676 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
4677 cpi->total_actual_bits, cm->base_qindex,
4678 cpi->active_best_quality, cpi->active_worst_quality,
4679 cpi->ni_av_qi, cpi->cq_target_quality,
4680 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
4681 cm->frame_type, cpi->gfu_boost,
4682 cpi->twopass.est_max_qcorrection_factor,
4683 cpi->twopass.bits_left,
4684 cpi->twopass.total_left_stats.coded_error,
4685 cpi->tot_recode_hits);
4690 FILE *fmodes = fopen("Modes.stt", "a");
4692 fprintf(fmodes, "%6d:%1d:%1d:%1d ",
4693 cpi->common.current_video_frame,
4694 cm->frame_type, cm->refresh_golden_frame,
4695 cm->refresh_alt_ref_frame);
4697 fprintf(fmodes, "\n");
4705 if (cm->refresh_golden_frame == 1)
4706 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
4708 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
4710 if (cm->refresh_alt_ref_frame == 1)
4711 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
4713 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
4716 if (cm->refresh_last_frame & cm->refresh_golden_frame)
4717 /* both refreshed */
4718 cpi->gold_is_last = 1;
4719 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
4720 /* 1 refreshed but not the other */
4721 cpi->gold_is_last = 0;
4723 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
4724 /* both refreshed */
4725 cpi->alt_is_last = 1;
4726 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
4727 /* 1 refreshed but not the other */
4728 cpi->alt_is_last = 0;
4730 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
4731 /* both refreshed */
4732 cpi->gold_is_alt = 1;
4733 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
4734 /* 1 refreshed but not the other */
4735 cpi->gold_is_alt = 0;
4737 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
4739 if (cpi->gold_is_last)
4740 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
4742 if (cpi->alt_is_last)
4743 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4745 if (cpi->gold_is_alt)
4746 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
4749 if (!cpi->oxcf.error_resilient_mode)
4751 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
4752 /* Update the alternate reference frame stats as appropriate. */
4753 update_alt_ref_frame_stats(cpi);
4755 /* Update the Golden frame stats as appropriate. */
4756 update_golden_frame_stats(cpi);
4759 if (cm->frame_type == KEY_FRAME)
4761 /* Tell the caller that the frame was coded as a key frame */
4762 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
4764 /* As this frame is a key frame the next defaults to an inter frame. */
4765 cm->frame_type = INTER_FRAME;
4767 cpi->last_frame_percent_intra = 100;
4771 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
4773 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
4776 /* Clear the one shot update flags for segmentation map and mode/ref
4777 * loop filter deltas.
4779 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
4780 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
4781 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
4784 /* Dont increment frame counters if this was an altref buffer update
4789 cm->current_video_frame++;
4790 cpi->frames_since_key++;
4791 cpi->temporal_pattern_counter++;
4794 /* reset to normal state now that we are done. */
4802 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
4803 recon_file = fopen(filename, "wb");
4804 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
4805 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
4811 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
4815 #if !(CONFIG_REALTIME_ONLY)
4816 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
4819 if (!cpi->common.refresh_alt_ref_frame)
4820 vp8_second_pass(cpi);
4822 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
4823 cpi->twopass.bits_left -= 8 * *size;
4825 if (!cpi->common.refresh_alt_ref_frame)
4827 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
4828 *cpi->oxcf.two_pass_vbrmin_section / 100);
4829 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
/* Accept a raw source frame from the application and queue it in the
 * encoder's lookahead buffer.
 *
 * frame_flags carries per-frame encode request flags, and time_stamp /
 * end_time delimit the frame's display interval; all three are stored
 * with the frame by vp8_lookahead_push().  Time spent here is
 * accumulated into cpi->time_receive_data.  The return statements sit on
 * lines not visible in this chunk — presumably 0 on success, nonzero if
 * the push fails; verify against the declaration in onyx.h.
 */
int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
    struct vpx_usec_timer timer;

    vpx_usec_timer_start(&timer);

    /* Reinit the lookahead buffer if the frame size changes */
    if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
        /* Mid-stream resize is only expected with (nearly) no lag. */
        assert(cpi->oxcf.lag_in_frames < 2);
        dealloc_raw_frame_buffers(cpi);
        alloc_raw_frame_buffers(cpi);

    /* Copy the frame into the lookahead queue, attaching the active map
     * (per-MB encode mask) when the application enabled one. */
    if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
        frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))

    vpx_usec_timer_mark(&timer);
    cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
4859 static int frame_is_reference(const VP8_COMP *cpi)
4861 const VP8_COMMON *cm = &cpi->common;
4862 const MACROBLOCKD *xd = &cpi->mb.e_mbd;
4864 return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
4865 || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
4866 || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
4867 || cm->refresh_entropy_probs
4868 || xd->mode_ref_lf_delta_update
4869 || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
/* Top-level per-frame encode entry point for the VP8 encoder.
 *
 * Pulls the next source frame from the lookahead queue (or, in two-pass
 * modes, prepares a temporally filtered altref source), adapts the
 * reference frame-rate estimate from the incoming timestamps, dispatches
 * to the appropriate encoding pass, and afterwards saves entropy
 * contexts, temporal-layer state and (when compiled in) PSNR/SSIM
 * statistics.
 *
 * Outputs: *size and dest..dest_end receive the coded frame,
 * *frame_flags / *time_stamp / *time_end describe it.  Returns
 * VPX_CODEC_CORRUPT_FRAME when an internal error longjmp'd here; other
 * return paths are on lines not visible in this chunk.
 */
int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
    struct vpx_usec_timer tsctimer;
    struct vpx_usec_timer ticktimer;
    struct vpx_usec_timer cmptimer;
    YV12_BUFFER_CONFIG *force_src_buffer = NULL;

    /* Fatal internal errors longjmp back here; report a corrupt frame. */
    if (setjmp(cpi->common.error.jmp))
        cpi->common.error.setjmp = 0;
        vp8_clear_system_state();
        return VPX_CODEC_CORRUPT_FRAME;

    cpi->common.error.setjmp = 1;

    /* Time this entire compress call. */
    vpx_usec_timer_start(&cmptimer);

#if !(CONFIG_REALTIME_ONLY)
    /* Should we code an alternate reference frame */
    if (cpi->oxcf.error_resilient_mode == 0 &&
        cpi->oxcf.play_alternate &&
        cpi->source_alt_ref_pending)
        /* Peek at the future source frame the altref will be built from. */
        if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
                                              cpi->frames_till_gf_update_due,
            /* Remember the altref source so the later pop of this same
             * frame can be recognised (is_src_frame_alt_ref below). */
            cpi->alt_ref_source = cpi->source;
            if (cpi->oxcf.arnr_max_frames > 0)
                /* Temporally filter surrounding frames into
                 * cpi->alt_ref_buffer and encode from that instead. */
                vp8_temporal_filter_prepare_c(cpi,
                                              cpi->frames_till_gf_update_due);
                force_src_buffer = &cpi->alt_ref_buffer;
            cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
            cm->refresh_alt_ref_frame = 1;
            cm->refresh_golden_frame = 0;
            cm->refresh_last_frame = 0;
            /* Clear Pending alt Ref flag. */
            cpi->source_alt_ref_pending = 0;
            cpi->is_src_frame_alt_ref = 0;

    /* Read last frame source if we are encoding first pass. */
    if (cpi->pass == 1 && cm->current_video_frame > 0)
        if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
                                                  PEEK_BACKWARD)) == NULL)

    if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
        /* Is this source frame the one earlier earmarked as the altref? */
        cpi->is_src_frame_alt_ref = cpi->alt_ref_source
                                    && (cpi->source == cpi->alt_ref_source);

        if(cpi->is_src_frame_alt_ref)
            cpi->alt_ref_source = NULL;

        /* Encode from the filtered altref buffer when one was prepared,
         * otherwise from the popped source frame itself. */
        cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
        cpi->un_scaled_source = cpi->Source;
        *time_stamp = cpi->source->ts_start;
        *time_end = cpi->source->ts_end;
        *frame_flags = cpi->source->flags;

        if (cpi->pass == 1 && cm->current_video_frame > 0)
            cpi->last_frame_unscaled_source = &cpi->last_source->img;

#if !(CONFIG_REALTIME_ONLY)
        /* On flush during first pass, emit the final stats packet once. */
        if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
            vp8_end_first_pass(cpi); /* get last stats packet */
            cpi->twopass.first_pass_done = 1;

    /* Track the earliest timestamp ever seen (streams need not start
     * at time zero). */
    if (cpi->source->ts_start < cpi->first_time_stamp_ever)
        cpi->first_time_stamp_ever = cpi->source->ts_start;
        cpi->last_end_time_stamp_seen = cpi->source->ts_start;

    /* adjust frame rates based on timestamps given */
        int64_t this_duration;

        if (cpi->source->ts_start == cpi->first_time_stamp_ever)
            this_duration = cpi->source->ts_end - cpi->source->ts_start;
            int64_t last_duration;

            this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
            last_duration = cpi->last_end_time_stamp_seen
                            - cpi->last_time_stamp_seen;
            /* do a step update if the duration changes by 10% */
                step = (int)(((this_duration - last_duration) *
                              10 / last_duration));

                /* Timestamps appear to be in 1/10,000,000 s units
                 * (10000000.0 scaling below) — confirm against the
                 * vpx_encoder timebase handling in the wrapper. */
                cpi->ref_framerate = 10000000.0 / this_duration;
                double avg_duration, interval;

                /* Average this frame's rate into the last second's average
                 * frame rate. If we haven't seen 1 second yet, then average
                 * over the whole interval seen.
                 */
                interval = (double)(cpi->source->ts_end -
                                    cpi->first_time_stamp_ever);
                if(interval > 10000000.0)
                    interval = 10000000;

                avg_duration = 10000000.0 / cpi->ref_framerate;
                avg_duration *= (interval - avg_duration + this_duration);
                avg_duration /= interval;

                cpi->ref_framerate = 10000000.0 / avg_duration;

            if (cpi->oxcf.number_of_layers > 1)
                /* Update frame rates for each layer */
                assert(cpi->oxcf.number_of_layers <= VPX_TS_MAX_LAYERS);
                for (i = 0; i < cpi->oxcf.number_of_layers &&
                            i < VPX_TS_MAX_LAYERS; ++i)
                    LAYER_CONTEXT *lc = &cpi->layer_context[i];
                    lc->framerate = cpi->ref_framerate /
                                    cpi->oxcf.rate_decimator[i];
                vp8_new_framerate(cpi, cpi->ref_framerate);

        cpi->last_time_stamp_seen = cpi->source->ts_start;
        cpi->last_end_time_stamp_seen = cpi->source->ts_end;

    /* Temporal scalability: select this frame's layer from its position
     * in the configured pattern and restore that layer's context. */
    if (cpi->oxcf.number_of_layers > 1)
        update_layer_contexts (cpi);

        /* Restore layer specific context & set frame rate */
        layer = cpi->oxcf.layer_id[
                cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
        restore_layer_context (cpi, layer);
        vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);

    if (cpi->compressor_speed == 2)
        vpx_usec_timer_start(&tsctimer);
        vpx_usec_timer_start(&ticktimer);

    /* Percentage of zero-motion MBs in the last frame (loop-filter
     * heuristic input). */
    cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
        const int num_part = (1 << cm->multi_token_partition);
        /* the available bytes in dest */
        const unsigned long dest_size = dest_end - dest;
        const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);

        unsigned char *dp = dest;

        cpi->partition_d[0] = dp;
        dp += dest_size/10; /* reserve 1/10 for control partition */
        cpi->partition_d_end[0] = dp;

        /* Split the remaining 9/10 of dest evenly among the token
         * partitions. */
        for(i = 0; i < num_part; i++)
            cpi->partition_d[i + 1] = dp;
            dp += tok_part_buff_size;
            cpi->partition_d_end[i + 1] = dp;

    /* start with a 0 size frame */

    /* Clear down mmx registers */
    vp8_clear_system_state();

    cm->frame_type = INTER_FRAME;
    cm->frame_flags = *frame_flags;

    /* An altref update does not refresh golden/last in the same frame;
     * otherwise default to refreshing last only. */
    if (cm->refresh_alt_ref_frame)
        cm->refresh_golden_frame = 0;
        cm->refresh_last_frame = 0;
        cm->refresh_golden_frame = 0;
        cm->refresh_last_frame = 1;

    /* find a free buffer for the new frame */
    for(; i < NUM_YV12_BUFFERS; i++)
        if(!cm->yv12_fb[i].flags)

    /* The buffer pool is sized so a free buffer must exist. */
    assert(i < NUM_YV12_BUFFERS );

#if !(CONFIG_REALTIME_ONLY)
        Pass1Encode(cpi, size, dest, frame_flags);
    else if (cpi->pass == 2)
        Pass2Encode(cpi, size, dest, dest_end, frame_flags);
        encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);

    if (cpi->compressor_speed == 2)
        unsigned int duration, duration2;
        vpx_usec_timer_mark(&tsctimer);
        vpx_usec_timer_mark(&ticktimer);

        duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
        duration2 = (unsigned int)((double)duration / 2);

        if (cm->frame_type != KEY_FRAME)
            /* Exponential moving average (7/8 old + 1/8 new) of inter
             * frame encode time. */
            if (cpi->avg_encode_time == 0)
                cpi->avg_encode_time = duration;
                cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;

            if (cpi->avg_pick_mode_time == 0)
                cpi->avg_pick_mode_time = duration2;
                cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;

    /* If the entropy contexts were not refreshed this frame, roll the
     * live frame contexts back to the saved copy. */
    if (cm->refresh_entropy_probs == 0)
        vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));

    /* Save the contexts separately for alt ref, gold and last. */
    /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
    if(cm->refresh_alt_ref_frame)
        vpx_memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));

    if(cm->refresh_golden_frame)
        vpx_memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));

    if(cm->refresh_last_frame)
        vpx_memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));

    /* if its a dropped frame honor the requests on subsequent frames */
        /* A frame that updates no decoder state may be dropped by the
         * application without desynchronising the decoder. */
        cpi->droppable = !frame_is_reference(cpi);

        /* return to normal state */
        cm->refresh_entropy_probs = 1;
        cm->refresh_alt_ref_frame = 0;
        cm->refresh_golden_frame = 0;
        cm->refresh_last_frame = 1;
        cm->frame_type = INTER_FRAME;

    /* Save layer specific state */
    if (cpi->oxcf.number_of_layers > 1)
        save_layer_context (cpi);

    vpx_usec_timer_mark(&cmptimer);
    cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);

    if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
        generate_psnr_packet(cpi);

#if CONFIG_INTERNAL_STATS
        cpi->bytes += *size;

        cpi->common.show_frame_mi = cpi->common.mi;

        if (cpi->b_calculate_psnr)
            YV12_BUFFER_CONFIG *orig = cpi->Source;
            YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
            int y_samples = orig->y_height * orig->y_width ;
            int uv_samples = orig->uv_height * orig->uv_width ;
            int t_samples = y_samples + 2 * uv_samples;

            /* Per-plane sum of squared error versus the reconstruction. */
            ye = calc_plane_error(orig->y_buffer, orig->y_stride,
              recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height);

            ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
              recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);

            ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
              recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height);

            sq_error = (double)(ye + ue + ve);

            frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);

            cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
            cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
            cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
            cpi->total_sq_error += sq_error;
            cpi->total += frame_psnr;

            /* Second PSNR/SSIM measurement taken on the deblocked
             * (post-processed) reconstruction. */
                YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
                double frame_psnr2, frame_ssim2 = 0;

                vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
                vp8_clear_system_state();

                ye = calc_plane_error(orig->y_buffer, orig->y_stride,
                  pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height);

                ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
                  pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);

                ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
                  pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height);

                sq_error2 = (double)(ye + ue + ve);

                frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);

                cpi->totalp_y += vpx_sse_to_psnr(y_samples,
                cpi->totalp_u += vpx_sse_to_psnr(uv_samples,
                cpi->totalp_v += vpx_sse_to_psnr(uv_samples,
                cpi->total_sq_error2 += sq_error2;
                cpi->totalp += frame_psnr2;

                frame_ssim2 = vp8_calc_ssim(cpi->Source,
                                            &cm->post_proc_buffer, 1, &weight);

                cpi->summed_quality += frame_ssim2 * weight;
                cpi->summed_weights += weight;

                /* Accumulate stats into this layer and every higher
                 * layer that contains it. */
                if (cpi->oxcf.number_of_layers > 1)
                    for (i=cpi->current_layer;
                         i<cpi->oxcf.number_of_layers; i++)
                        cpi->frames_in_layer[i]++;

                        cpi->bytes_in_layer[i] += *size;
                        cpi->sum_psnr[i] += frame_psnr;
                        cpi->sum_psnr_p[i] += frame_psnr2;
                        cpi->total_error2[i] += sq_error;
                        cpi->total_error2_p[i] += sq_error2;
                        cpi->sum_ssim[i] += frame_ssim2 * weight;
                        cpi->sum_weights[i] += weight;

        if (cpi->b_calculate_ssimg)
            double y, u, v, frame_all;
            frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,

            if (cpi->oxcf.number_of_layers > 1)
                for (i=cpi->current_layer;
                     i<cpi->oxcf.number_of_layers; i++)
                    /* Don't double-count frames already counted by the
                     * PSNR path above. */
                    if (!cpi->b_calculate_psnr)
                        cpi->frames_in_layer[i]++;

                    cpi->total_ssimg_y_in_layer[i] += y;
                    cpi->total_ssimg_u_in_layer[i] += u;
                    cpi->total_ssimg_v_in_layer[i] += v;
                    cpi->total_ssimg_all_in_layer[i] += frame_all;
                cpi->total_ssimg_y += y;
                cpi->total_ssimg_u += u;
                cpi->total_ssimg_v += v;
                cpi->total_ssimg_all += frame_all;

    /* Debug accounting of skip-coded MBs at max Q (written to skip.stt). */
    if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
        skiptruecount += cpi->skip_true_count;
        skipfalsecount += cpi->skip_false_count;

        FILE *f = fopen("skip.stt", "a");
        fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);

        if (cpi->is_src_frame_alt_ref == 1)
            fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);

    cpi->common.error.setjmp = 0;
/* Hand the most recent displayable (show) frame back to the application.
 *
 * Fills *dest with the reconstruction, post-processed when the
 * postproc path applies.  No preview exists while an altref
 * (non-displayable) frame is being produced.  Return statements are on
 * lines not visible in this chunk — presumably 0 on success, nonzero
 * otherwise; verify against the declaration in onyx.h.
 */
int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
    /* Altref frames are never shown, so there is nothing to preview. */
    if (cpi->common.refresh_alt_ref_frame)

#if CONFIG_MULTITHREAD
        /* Wait for the loop-filter worker to finish before reading the
         * reconstructed frame it is still writing. */
        if(cpi->b_lpf_running)
            sem_wait(&cpi->h_event_end_lpf);
            cpi->b_lpf_running = 0;

        cpi->common.show_frame_mi = cpi->common.mi;
        ret = vp8_post_proc_frame(&cpi->common, dest, flags);

        if (cpi->common.frame_to_show)
            /* Shallow-copy the frame descriptor (shares the underlying
             * buffer), then override with the display-size dimensions. */
            *dest = *cpi->common.frame_to_show;
            dest->y_width = cpi->common.Width;
            dest->y_height = cpi->common.Height;
            /* 4:2:0 chroma: half the luma height. */
            dest->uv_height = cpi->common.Height / 2;

        vp8_clear_system_state();
/* Configure region-of-interest coding through the segmentation feature.
 *
 * map       : per-macroblock segment ids (rows * cols entries); the
 *             NULL-map handling path is on elided lines — presumably it
 *             disables segmentation (see disable_segmentation call).
 * rows/cols : must match the coded frame's macroblock grid.
 * delta_q   : per-segment quantizer deltas in external Q units, |x| <= 63.
 * delta_lf  : per-segment loop-filter deltas, |x| <= 63.
 * threshold : per-segment encode-breakout thresholds.
 * The error-return statements for the visible validation checks sit on
 * lines not shown in this chunk.
 */
int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
    signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
    int internal_delta_q[MAX_MB_SEGMENTS];
    const int range = 63;

    // This method is currently incompatible with the cyclic refresh method
    if ( cpi->cyclic_refresh_mode_enabled )

    // Check number of rows and columns match
    if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)

    // Range check the delta Q values and convert the external Q range values
    // to internal ones.
    if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
         (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )

    // Range check the delta lf values
    if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
         (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )

        disable_segmentation(cpi);

    // Translate the external delta q values to internal values.
    for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
        internal_delta_q[i] =
            ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];

    /* Set the segmentation Map */
    set_segmentation_map(cpi, map);

    /* Activate segmentation. */
    enable_segmentation(cpi);

    /* Set up the quant segment data */
    feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
    feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
    feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
    feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];

    /* Set up the loop segment data s */
    feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
    feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
    feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
    feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];

    /* Per-segment encode breakout (early-termination) thresholds. */
    cpi->segment_encode_breakout[0] = threshold[0];
    cpi->segment_encode_breakout[1] = threshold[1];
    cpi->segment_encode_breakout[2] = threshold[2];
    cpi->segment_encode_breakout[3] = threshold[3];

    /* Initialise the feature data structure */
    set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
/* Install an application-supplied active map: one byte per macroblock
 * marking which MBs the encoder should actively code.
 *
 * The map is copied and enabled only when its dimensions match the
 * frame's macroblock grid; otherwise active-map use is disabled.
 * Return statements are on lines not visible in this chunk.
 * NOTE(review): the NULL-map case is presumably handled on elided lines
 * inside the matching branch — confirm against the full source.
 */
int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
    if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
        /* One byte per macroblock. */
        vpx_memcpy(cpi->active_map, map, rows * cols);
        cpi->active_map_enabled = 1;
        cpi->active_map_enabled = 0;
/* Set the encoder's internal spatial-resampling mode per axis.
 * Only scaling modes up to ONETWO (one-half) are accepted; the
 * rejection branches for out-of-range modes, and the success return,
 * sit on lines not visible in this chunk.
 */
int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
    if (horiz_mode <= ONETWO)
        cpi->common.horiz_scale = horiz_mode;

    if (vert_mode <= ONETWO)
        cpi->common.vert_scale = vert_mode;
/* Sum of squared error over the Y plane between source and dest,
 * accumulated in 16x16 blocks via vp8_mse16x16.
 * Assumes y_width/y_height are multiples of 16 — TODO confirm; any
 * edge-block handling, plus the accumulator declarations and the
 * return statement, are on lines not visible in this chunk.
 */
int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
    unsigned char *src = source->y_buffer;
    unsigned char *dst = dest->y_buffer;

    /* Loop through the Y plane raw and reconstruction data summing
     * (square differences)
     */
    for (i = 0; i < source->y_height; i += 16)
        for (j = 0; j < source->y_width; j += 16)
            Total += vp8_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride, &sse);

        /* Advance both planes by one 16-row strip. */
        src += 16 * source->y_stride;
        dst += 16 * dest->y_stride;
/* Return the base quantizer index used for the most recently coded
 * frame (before any per-segment or per-block delta adjustments). */
int vp8_get_quantizer(VP8_COMP *cpi)
    return cpi->common.base_qindex;