/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
12 #include "vpx_config.h"
13 #include "./vpx_scale_rtcd.h"
14 #include "./vpx_dsp_rtcd.h"
15 #include "./vp8_rtcd.h"
16 #include "vp8/common/onyxc_int.h"
17 #include "vp8/common/blockd.h"
19 #include "vp8/common/systemdependent.h"
20 #include "vp8/encoder/quantize.h"
21 #include "vp8/common/alloccommon.h"
23 #include "firstpass.h"
24 #include "vpx/internal/vpx_psnr.h"
25 #include "vpx_scale/vpx_scale.h"
26 #include "vp8/common/extend.h"
28 #include "vp8/common/quant_common.h"
29 #include "segmentation.h"
31 #include "vp8/common/postproc.h"
33 #include "vpx_mem/vpx_mem.h"
34 #include "vp8/common/reconintra.h"
35 #include "vp8/common/swapyv12buffer.h"
36 #include "vp8/common/threading.h"
37 #include "vpx_ports/vpx_timer.h"
39 #include "vpx_ports/arm.h"
41 #if CONFIG_MULTI_RES_ENCODING
42 #include "mr_dissim.h"
44 #include "encodeframe.h"
50 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
51 extern int vp8_update_coef_context(VP8_COMP *cpi);
52 extern void vp8_update_coef_probs(VP8_COMP *cpi);
55 extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
56 extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
57 extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
59 extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
60 extern void print_parms(VP8_CONFIG *ocf, char *filenam);
61 extern unsigned int vp8_get_processor_freq();
62 extern void print_tree_update_probs();
63 extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
64 extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
66 int vp8_estimate_entropy_savings(VP8_COMP *cpi);
68 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
70 extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
72 static void set_default_lf_deltas(VP8_COMP *cpi);
74 extern const int vp8_gf_interval_table[101];
76 #if CONFIG_INTERNAL_STATS
78 #include "vpx_dsp/ssim.h"
/* Debug/statistics globals. Most are only compiled in under the
 * corresponding #ifdef; the extern declarations are defined in other
 * encoder translation units.
 * NOTE(review): this chunk appears to have lost lines during extraction
 * (matching #endif lines and some declarations are missing) -- compare
 * against the upstream file before relying on the exact #ifdef structure.
 */
85 #ifdef OUTPUT_YUV_DENOISED
86 FILE *yuv_denoised_file;
96 extern int skip_true_count;
97 extern int skip_false_count;
101 #ifdef VP8_ENTROPY_STATS
102 extern int intra_mode_stats[10][10][10];
/* Per-speed frame counters and prediction/filter timing accumulators. */
106 unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
107 unsigned int tot_pm = 0;
108 unsigned int cnt_pm = 0;
109 unsigned int tot_ef = 0;
110 unsigned int cnt_ef = 0;
/* Mode usage counters gathered during encoding (MSVC-style __int64 --
 * presumably only built on Windows; TODO confirm the guarding #ifdef). */
114 extern unsigned __int64 Sectionbits[50];
115 extern int y_modes[5] ;
116 extern int uv_modes[4] ;
117 extern int b_modes[10] ;
119 extern int inter_y_modes[10] ;
120 extern int inter_uv_modes[4] ;
121 extern unsigned int inter_b_modes[15];
/* Rate/quantizer tables and helpers defined elsewhere in the encoder. */
124 extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
126 extern const int qrounding_factors[129];
127 extern const int qzbin_factors[129];
128 extern void vp8cx_init_quantizer(VP8_COMP *cpi);
129 extern const int vp8cx_base_skip_false_prob[128];
131 /* Tables relating active max Q to active min Q */
132 static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
134 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
135 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
136 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
137 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
138 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
139 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
140 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
141 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
143 static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
145 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
146 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
147 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
148 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
149 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
150 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
151 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
152 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
154 static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
156 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
157 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
158 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
159 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
160 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
161 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
162 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
163 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
165 static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
167 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
168 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
169 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
170 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
171 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
172 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
173 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
174 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
176 static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
178 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
179 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
180 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
181 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
182 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
183 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
184 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
185 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
187 static const unsigned char inter_minq[QINDEX_RANGE] =
189 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
190 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
191 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
192 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
193 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
194 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
195 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
196 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
199 #ifdef PACKET_TESTING
200 extern FILE *vpxlogc;
203 static void save_layer_context(VP8_COMP *cpi)
205 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];
207 /* Save layer dependent coding state */
208 lc->target_bandwidth = cpi->target_bandwidth;
209 lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
210 lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
211 lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
212 lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
213 lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
214 lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
215 lc->buffer_level = cpi->buffer_level;
216 lc->bits_off_target = cpi->bits_off_target;
217 lc->total_actual_bits = cpi->total_actual_bits;
218 lc->worst_quality = cpi->worst_quality;
219 lc->active_worst_quality = cpi->active_worst_quality;
220 lc->best_quality = cpi->best_quality;
221 lc->active_best_quality = cpi->active_best_quality;
222 lc->ni_av_qi = cpi->ni_av_qi;
223 lc->ni_tot_qi = cpi->ni_tot_qi;
224 lc->ni_frames = cpi->ni_frames;
225 lc->avg_frame_qindex = cpi->avg_frame_qindex;
226 lc->rate_correction_factor = cpi->rate_correction_factor;
227 lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
228 lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
229 lc->zbin_over_quant = cpi->mb.zbin_over_quant;
230 lc->inter_frame_target = cpi->inter_frame_target;
231 lc->total_byte_count = cpi->total_byte_count;
232 lc->filter_level = cpi->common.filter_level;
234 lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
236 memcpy (lc->count_mb_ref_frame_usage,
237 cpi->mb.count_mb_ref_frame_usage,
238 sizeof(cpi->mb.count_mb_ref_frame_usage));
241 static void restore_layer_context(VP8_COMP *cpi, const int layer)
243 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
245 /* Restore layer dependent coding state */
246 cpi->current_layer = layer;
247 cpi->target_bandwidth = lc->target_bandwidth;
248 cpi->oxcf.target_bandwidth = lc->target_bandwidth;
249 cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
250 cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
251 cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
252 cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
253 cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
254 cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
255 cpi->buffer_level = lc->buffer_level;
256 cpi->bits_off_target = lc->bits_off_target;
257 cpi->total_actual_bits = lc->total_actual_bits;
258 cpi->active_worst_quality = lc->active_worst_quality;
259 cpi->active_best_quality = lc->active_best_quality;
260 cpi->ni_av_qi = lc->ni_av_qi;
261 cpi->ni_tot_qi = lc->ni_tot_qi;
262 cpi->ni_frames = lc->ni_frames;
263 cpi->avg_frame_qindex = lc->avg_frame_qindex;
264 cpi->rate_correction_factor = lc->rate_correction_factor;
265 cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
266 cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
267 cpi->mb.zbin_over_quant = lc->zbin_over_quant;
268 cpi->inter_frame_target = lc->inter_frame_target;
269 cpi->total_byte_count = lc->total_byte_count;
270 cpi->common.filter_level = lc->filter_level;
272 cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
274 memcpy (cpi->mb.count_mb_ref_frame_usage,
275 lc->count_mb_ref_frame_usage,
276 sizeof(cpi->mb.count_mb_ref_frame_usage));
/* Compute val * num / denom using 64-bit intermediates so the product
 * cannot overflow for the bitrate/buffer-level magnitudes used here.
 * Returns the (truncated) result as an int.
 */
static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}
288 static void init_temporal_layer_context(VP8_COMP *cpi,
291 double prev_layer_framerate)
293 LAYER_CONTEXT *lc = &cpi->layer_context[layer];
295 lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
296 lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
298 lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
299 lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
300 lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
302 lc->starting_buffer_level =
303 rescale((int)(oxcf->starting_buffer_level),
304 lc->target_bandwidth, 1000);
306 if (oxcf->optimal_buffer_level == 0)
307 lc->optimal_buffer_level = lc->target_bandwidth / 8;
309 lc->optimal_buffer_level =
310 rescale((int)(oxcf->optimal_buffer_level),
311 lc->target_bandwidth, 1000);
313 if (oxcf->maximum_buffer_size == 0)
314 lc->maximum_buffer_size = lc->target_bandwidth / 8;
316 lc->maximum_buffer_size =
317 rescale((int)(oxcf->maximum_buffer_size),
318 lc->target_bandwidth, 1000);
320 /* Work out the average size of a frame within this layer */
322 lc->avg_frame_size_for_layer =
323 (int)((cpi->oxcf.target_bitrate[layer] -
324 cpi->oxcf.target_bitrate[layer-1]) * 1000 /
325 (lc->framerate - prev_layer_framerate));
327 lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
328 lc->active_best_quality = cpi->oxcf.best_allowed_q;
329 lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
331 lc->buffer_level = lc->starting_buffer_level;
332 lc->bits_off_target = lc->starting_buffer_level;
334 lc->total_actual_bits = 0;
338 lc->rate_correction_factor = 1.0;
339 lc->key_frame_rate_correction_factor = 1.0;
340 lc->gf_rate_correction_factor = 1.0;
341 lc->inter_frame_target = 0;
344 // Upon a run-time change in temporal layers, reset the layer context parameters
345 // for any "new" layers. For "existing" layers, let them inherit the parameters
346 // from the previous layer state (at the same layer #). In future we may want
347 // to better map the previous layer state(s) to the "new" ones.
348 static void reset_temporal_layer_change(VP8_COMP *cpi,
350 const int prev_num_layers)
353 double prev_layer_framerate = 0;
354 const int curr_num_layers = cpi->oxcf.number_of_layers;
355 // If the previous state was 1 layer, get current layer context from cpi.
356 // We need this to set the layer context for the new layers below.
357 if (prev_num_layers == 1)
359 cpi->current_layer = 0;
360 save_layer_context(cpi);
362 for (i = 0; i < curr_num_layers; i++)
364 LAYER_CONTEXT *lc = &cpi->layer_context[i];
365 if (i >= prev_num_layers)
367 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
369 // The initial buffer levels are set based on their starting levels.
370 // We could set the buffer levels based on the previous state (normalized
371 // properly by the layer bandwidths) but we would need to keep track of
372 // the previous set of layer bandwidths (i.e., target_bitrate[i])
373 // before the layer change. For now, reset to the starting levels.
374 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
375 cpi->oxcf.target_bitrate[i];
376 lc->bits_off_target = lc->buffer_level;
377 // TDOD(marpan): Should we set the rate_correction_factor and
378 // active_worst/best_quality to values derived from the previous layer
379 // state (to smooth-out quality dips/rate fluctuation at transition)?
381 // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
382 // is not set for 1 layer, and the restore_layer_context/save_context()
383 // are not called in the encoding loop, so we need to call it here to
384 // pass the layer context state to |cpi|.
385 if (curr_num_layers == 1)
387 lc->target_bandwidth = cpi->oxcf.target_bandwidth;
388 lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
389 lc->target_bandwidth / 1000;
390 lc->bits_off_target = lc->buffer_level;
391 restore_layer_context(cpi, 0);
393 prev_layer_framerate = cpi->output_framerate /
394 cpi->oxcf.rate_decimator[i];
398 static void setup_features(VP8_COMP *cpi)
400 // If segmentation enabled set the update flags
401 if ( cpi->mb.e_mbd.segmentation_enabled )
403 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
404 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
408 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
409 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
412 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
413 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
414 memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
415 memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
416 memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
417 memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
419 set_default_lf_deltas(cpi);
424 static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
/* One-time global encoder initialisation (intra predictor tables).
 * Guarded by a static flag so repeated calls are cheap no-ops.
 * NOTE(review): the flag is volatile but the check is not atomic, so
 * concurrent first calls could race -- presumably callers serialise
 * encoder creation; confirm before relying on thread-safety.
 */
void vp8_initialize_enc(void)
{
    static volatile int init_done = 0;

    if (!init_done)
    {
        vp8_init_intra_predictors();
        init_done = 1;
    }
}
/* Free all per-instance buffers owned by the compressor: MV storage,
 * segmentation/active maps, frame buffers, GF/activity maps, partition
 * info, and (multithreaded builds) the per-row mutexes.
 * NOTE(review): this chunk has lost lines during extraction (closing
 * braces and likely further vpx_free() calls are missing) -- diff
 * against upstream before treating this list as complete.
 */
437 static void dealloc_compressor_data(VP8_COMP *cpi)
439 vpx_free(cpi->tplist);
442 /* Delete last frame MV storage buffers */
446 vpx_free(cpi->lf_ref_frame_sign_bias);
447 cpi->lf_ref_frame_sign_bias = 0;
449 vpx_free(cpi->lf_ref_frame);
450 cpi->lf_ref_frame = 0;
452 /* Delete segmentation map */
453 vpx_free(cpi->segmentation_map);
454 cpi->segmentation_map = 0;
456 vpx_free(cpi->active_map);
459 vp8_de_alloc_frame_buffers(&cpi->common);
461 vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
462 vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
463 dealloc_raw_frame_buffers(cpi);
468 /* Structure used to monitor GF usage */
469 vpx_free(cpi->gf_active_flags);
470 cpi->gf_active_flags = 0;
472 /* Activity mask based per mb zbin adjustments */
473 vpx_free(cpi->mb_activity_map);
474 cpi->mb_activity_map = 0;
476 vpx_free(cpi->mb.pip);
479 #if CONFIG_MULTITHREAD
480 /* De-allocate mutex */
481 if (cpi->pmutex != NULL) {
482 VP8_COMMON *const pc = &cpi->common;
/* One mutex per macroblock row; destroy each before freeing the array. */
485 for (i = 0; i < pc->mb_rows; i++) {
486 pthread_mutex_destroy(&cpi->pmutex[i]);
488 vpx_free(cpi->pmutex);
492 vpx_free(cpi->mt_current_mb_col);
493 cpi->mt_current_mb_col = NULL;
497 static void enable_segmentation(VP8_COMP *cpi)
499 /* Set the appropriate feature bit */
500 cpi->mb.e_mbd.segmentation_enabled = 1;
501 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
502 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
504 static void disable_segmentation(VP8_COMP *cpi)
506 /* Clear the appropriate feature bit */
507 cpi->mb.e_mbd.segmentation_enabled = 0;
510 /* Valid values for a segment are 0 to 3
511 * Segmentation map is arrange as [Rows][Columns]
513 static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
515 /* Copy in the new segmentation map */
516 memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
518 /* Signal that the map should be updated. */
519 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
520 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
523 /* The values given for each segment can be either deltas (from the default
524 * value chosen for the frame) or absolute values.
526 * Valid range for abs values is:
527 * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
528 * Valid range for delta values are:
529 * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
531 * abs_delta = SEGMENT_DELTADATA (deltas)
532 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
535 static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
537 cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
538 memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
542 /* A simple function to cyclically refresh the background at a lower Q */
/* Marks up to cyclic_refresh_mode_max_mbs_perframe macroblocks as
 * segment 1 (boosted at cyclic_refresh_q, loop filter adjusted by
 * lf_adjustment), cycling through the frame across successive frames.
 * NOTE(review): the extraction lost lines here (the do-loop body that
 * sets seg_map[i]=1 / advances i, plus several braces and part of one
 * condition) -- compare against upstream before editing logic.
 */
543 static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
545 unsigned char *seg_map = cpi->segmentation_map;
546 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
548 int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
549 int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
/* Refresh segment is coded at roughly half the frame Q. */
551 cpi->cyclic_refresh_q = Q / 2;
553 if (cpi->oxcf.screen_content_mode) {
554 // Modify quality ramp-up based on Q. Above some Q level, increase the
555 // number of blocks to be refreshed, and reduce it below the threshold.
556 // Turn-off under certain conditions (i.e., away from key frame, and if
557 // we are at good quality (low Q) and most of the blocks were skipped-encoded
558 // in previous frame.
559 int qp_thresh = (cpi->oxcf.screen_content_mode == 2) ? 80 : 100;
560 if (Q >= qp_thresh) {
561 cpi->cyclic_refresh_mode_max_mbs_perframe =
562 (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
563 } else if (cpi->frames_since_key > 250 &&
565 cpi->mb.skip_true_count > (int)(0.95 * mbs_in_frame)) {
566 cpi->cyclic_refresh_mode_max_mbs_perframe = 0;
568 cpi->cyclic_refresh_mode_max_mbs_perframe =
569 (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
571 block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
574 // Set every macroblock to be eligible for update.
575 // For key frame this will reset seg map to 0.
576 memset(cpi->segmentation_map, 0, mbs_in_frame);
578 if (cpi->common.frame_type != KEY_FRAME && block_count > 0)
580 /* Cycle through the macro_block rows */
581 /* MB loop to set local segmentation map */
582 i = cpi->cyclic_refresh_mode_index;
583 assert(i < mbs_in_frame);
586 /* If the MB is as a candidate for clean up then mark it for
587 * possible boost/refresh (segment 1) The segment id may get
588 * reset to 0 later if the MB gets coded anything other than
589 * last frame 0,0 as only (last frame 0,0) MBs are eligible for
590 * refresh : that is to say Mbs likely to be background blocks.
592 if (cpi->cyclic_refresh_map[i] == 0)
597 else if (cpi->cyclic_refresh_map[i] < 0)
598 cpi->cyclic_refresh_map[i]++;
/* Wrap the scan position at the end of the frame. */
601 if (i == mbs_in_frame)
605 while(block_count && i != cpi->cyclic_refresh_mode_index);
/* Remember where to resume the cyclic scan on the next frame. */
607 cpi->cyclic_refresh_mode_index = i;
609 #if CONFIG_TEMPORAL_DENOISING
610 if (cpi->oxcf.noise_sensitivity > 0) {
611 if (cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive &&
612 Q < (int)cpi->denoiser.denoise_pars.qp_thresh &&
613 (cpi->frames_since_key >
614 2 * cpi->denoiser.denoise_pars.consec_zerolast)) {
615 // Under aggressive denoising, use segmentation to turn off loop
616 // filter below some qp thresh. The filter is reduced for all
617 // blocks that have been encoded as ZEROMV LAST x frames in a row,
618 // where x is set by cpi->denoiser.denoise_pars.consec_zerolast.
619 // This is to avoid "dot" artifacts that can occur from repeated
620 // loop filtering on noisy input source.
621 cpi->cyclic_refresh_q = Q;
622 // lf_adjustment = -MAX_LOOP_FILTER;
624 for (i = 0; i < mbs_in_frame; ++i) {
625 seg_map[i] = (cpi->consec_zero_last[i] >
626 cpi->denoiser.denoise_pars.consec_zerolast) ? 1 : 0;
633 /* Activate segmentation. */
634 cpi->mb.e_mbd.update_mb_segmentation_map = 1;
635 cpi->mb.e_mbd.update_mb_segmentation_data = 1;
636 enable_segmentation(cpi);
638 /* Set up the quant segment data */
639 feature_data[MB_LVL_ALT_Q][0] = 0;
640 feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
641 feature_data[MB_LVL_ALT_Q][2] = 0;
642 feature_data[MB_LVL_ALT_Q][3] = 0;
644 /* Set up the loop segment data */
645 feature_data[MB_LVL_ALT_LF][0] = 0;
646 feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
647 feature_data[MB_LVL_ALT_LF][2] = 0;
648 feature_data[MB_LVL_ALT_LF][3] = 0;
650 /* Initialise the feature data structure */
651 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
655 static void set_default_lf_deltas(VP8_COMP *cpi)
657 cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
658 cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
660 memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
661 memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
663 /* Test of ref frame deltas */
664 cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
665 cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
666 cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
667 cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
669 cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
671 if(cpi->oxcf.Mode == MODE_REALTIME)
672 cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
674 cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
676 cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
677 cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
/* Convenience macros for mapping speed and mode into a continuous
 * speed axis used by the tables below: GOOD-quality speeds come first,
 * REALTIME speeds are offset past them.
 * NOTE(review): RT offset of 7 reconstructed from upstream -- confirm.
 */
#define GOOD(x) (x+1)
#define RT(x)   (x+7)

/* Walk a list of (value, speed-threshold) pairs and return the last
 * value whose threshold |speed| has reached.  Every map must end with
 * an INT_MAX threshold so the loop always terminates.
 */
static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while(speed >= *map++);
    return res;
}
/* Speed maps consumed by speed_map(): alternating (value, threshold)
 * pairs on the GOOD()/RT() speed axis, terminated by INT_MAX.
 * NOTE(review): several tables below are missing their closing "};"
 * and possibly trailing entries -- lost during extraction; restore from
 * upstream before compiling.
 */
697 static const int thresh_mult_map_znn[] = {
698 /* map common to zero, nearest, and near */
699 0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
702 static const int thresh_mult_map_vhpred[] = {
703 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
704 RT(7), INT_MAX, INT_MAX
707 static const int thresh_mult_map_bpred[] = {
708 2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
709 RT(6), INT_MAX, INT_MAX
712 static const int thresh_mult_map_tm[] = {
713 1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
714 RT(7), INT_MAX, INT_MAX
717 static const int thresh_mult_map_new1[] = {
718 1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
721 static const int thresh_mult_map_new2[] = {
722 1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
726 static const int thresh_mult_map_split1[] = {
727 2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
728 RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
731 static const int thresh_mult_map_split2[] = {
732 5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
733 RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
736 static const int mode_check_freq_map_zn2[] = {
737 /* {zero,nearest}{2,3} */
738 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
741 static const int mode_check_freq_map_vhbpred[] = {
742 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
745 static const int mode_check_freq_map_near2[] = {
746 0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
750 static const int mode_check_freq_map_new1[] = {
751 0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
754 static const int mode_check_freq_map_new2[] = {
755 0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
759 static const int mode_check_freq_map_split1[] = {
760 0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
763 static const int mode_check_freq_map_split2[] = {
764 0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
/* Configure all encoder speed/quality trade-off features for the
 * current compressor mode and speed: mode-search thresholds and check
 * frequencies (via the speed_map tables above), motion-search, DCT,
 * quantizer and loop-filter choices, then hook up the matching
 * function pointers.
 * NOTE(review): large portions of this function (the switch skeleton,
 * several case bodies, braces and blank lines) were lost during
 * extraction -- the lines below are a partial transcript; restore the
 * control structure from upstream before editing logic.
 */
767 void vp8_set_speed_features(VP8_COMP *cpi)
769 SPEED_FEATURES *sf = &cpi->sf;
770 int Mode = cpi->compressor_speed;
771 int Speed = cpi->Speed;
773 VP8_COMMON *cm = &cpi->common;
774 int last_improved_quant = sf->improved_quant;
777 /* Initialise default mode frequency sampling variables */
778 for (i = 0; i < MAX_MODES; i ++)
780 cpi->mode_check_freq[i] = 0;
783 cpi->mb.mbs_tested_so_far = 0;
784 cpi->mb.mbs_zero_last_dot_suppress = 0;
786 /* best quality defaults */
788 sf->search_method = NSTEP;
789 sf->improved_quant = 1;
790 sf->improved_dct = 1;
793 sf->quarter_pixel_search = 1;
794 sf->half_pixel_search = 1;
795 sf->iterative_sub_pixel = 1;
796 sf->optimize_coefficients = 1;
797 sf->use_fastquant_for_pick = 0;
798 sf->no_skip_block4x4_search = 1;
801 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
802 sf->improved_mv_pred = 1;
804 /* default thresholds to 0 */
805 for (i = 0; i < MAX_MODES; i++)
806 sf->thresh_mult[i] = 0;
808 /* Count enabled references */
810 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
812 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
814 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
817 /* Convert speed to continuous range, with clamping */
/* Per-mode RD threshold multipliers, looked up on the speed axis. */
829 sf->thresh_mult[THR_ZERO1] =
830 sf->thresh_mult[THR_NEAREST1] =
831 sf->thresh_mult[THR_NEAR1] =
832 sf->thresh_mult[THR_DC] = 0; /* always */
834 sf->thresh_mult[THR_ZERO2] =
835 sf->thresh_mult[THR_ZERO3] =
836 sf->thresh_mult[THR_NEAREST2] =
837 sf->thresh_mult[THR_NEAREST3] =
838 sf->thresh_mult[THR_NEAR2] =
839 sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);
841 sf->thresh_mult[THR_V_PRED] =
842 sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
843 sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
844 sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
845 sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
846 sf->thresh_mult[THR_NEW2] =
847 sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
848 sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
849 sf->thresh_mult[THR_SPLIT2] =
850 sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);
852 // Special case for temporal layers.
853 // Reduce the thresholds for zero/nearest/near for GOLDEN, if GOLDEN is
854 // used as second reference. We don't modify thresholds for ALTREF case
855 // since ALTREF is usually used as long-term reference in temporal layers.
856 if ((cpi->Speed <= 6) &&
857 (cpi->oxcf.number_of_layers > 1) &&
858 (cpi->ref_frame_flags & VP8_LAST_FRAME) &&
859 (cpi->ref_frame_flags & VP8_GOLD_FRAME)) {
860 if (cpi->closest_reference_frame == GOLDEN_FRAME) {
861 sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 3;
862 sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 3;
863 sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 3;
865 sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 1;
866 sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 1;
867 sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 1;
/* How often each mode is actually checked, also speed-mapped. */
871 cpi->mode_check_freq[THR_ZERO1] =
872 cpi->mode_check_freq[THR_NEAREST1] =
873 cpi->mode_check_freq[THR_NEAR1] =
874 cpi->mode_check_freq[THR_TM] =
875 cpi->mode_check_freq[THR_DC] = 0; /* always */
877 cpi->mode_check_freq[THR_ZERO2] =
878 cpi->mode_check_freq[THR_ZERO3] =
879 cpi->mode_check_freq[THR_NEAREST2] =
880 cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
881 mode_check_freq_map_zn2);
883 cpi->mode_check_freq[THR_NEAR2] =
884 cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
885 mode_check_freq_map_near2);
887 cpi->mode_check_freq[THR_V_PRED] =
888 cpi->mode_check_freq[THR_H_PRED] =
889 cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
890 mode_check_freq_map_vhbpred);
891 cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
892 mode_check_freq_map_new1);
893 cpi->mode_check_freq[THR_NEW2] =
894 cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
895 mode_check_freq_map_new2);
896 cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
897 mode_check_freq_map_split1);
898 cpi->mode_check_freq[THR_SPLIT2] =
899 cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
900 mode_check_freq_map_split2);
904 #if !CONFIG_REALTIME_ONLY
905 case 0: /* best quality mode */
907 sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
913 /* Disable coefficient optimization above speed 0 */
914 sf->optimize_coefficients = 0;
915 sf->use_fastquant_for_pick = 1;
916 sf->no_skip_block4x4_search = 0;
923 sf->improved_quant = 0;
924 sf->improved_dct = 0;
926 /* Only do recode loop on key frames, golden frames and
936 sf->recode_loop = 0; /* recode loop off */
937 sf->RD = 0; /* Turn rd off */
943 sf->auto_filter = 0; /* Faster selection of loop filter */
949 sf->optimize_coefficients = 0;
952 sf->iterative_sub_pixel = 1;
953 sf->search_method = NSTEP;
957 sf->improved_quant = 0;
958 sf->improved_dct = 0;
960 sf->use_fastquant_for_pick = 1;
961 sf->no_skip_block4x4_search = 0;
966 sf->auto_filter = 0; /* Faster selection of loop filter */
976 sf->auto_filter = 0; /* Faster selection of loop filter */
977 sf->search_method = HEX;
978 sf->iterative_sub_pixel = 0;
/* Adaptive speed-up driven by the distribution of MB error values. */
983 unsigned int sum = 0;
984 unsigned int total_mbs = cm->MBs;
986 unsigned int total_skip;
990 if (cpi->oxcf.encode_breakout > 2000)
991 min = cpi->oxcf.encode_breakout;
995 for (i = 0; i < min; i++)
997 sum += cpi->mb.error_bins[i];
1003 /* i starts from 2 to make sure thresh started from 2048 */
1004 for (; i < 1024; i++)
1006 sum += cpi->mb.error_bins[i];
1008 if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
1020 sf->thresh_mult[THR_NEW1 ] = thresh;
1021 sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
1022 sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
1027 sf->thresh_mult[THR_NEW2] = thresh << 1;
1028 sf->thresh_mult[THR_NEAREST2 ] = thresh;
1029 sf->thresh_mult[THR_NEAR2 ] = thresh;
1034 sf->thresh_mult[THR_NEW3] = thresh << 1;
1035 sf->thresh_mult[THR_NEAREST3 ] = thresh;
1036 sf->thresh_mult[THR_NEAR3 ] = thresh;
1039 sf->improved_mv_pred = 0;
1043 sf->quarter_pixel_search = 0;
1045 if(cm->version == 0)
1047 cm->filter_type = NORMAL_LOOPFILTER;
1050 cm->filter_type = SIMPLE_LOOPFILTER;
1054 cm->filter_type = SIMPLE_LOOPFILTER;
1057 /* This has a big hit on quality. Last resort */
1059 sf->half_pixel_search = 0;
1061 memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
1065 /* Slow quant, dct and trellis not worthwhile for first pass
1066 * so make sure they are always turned off.
1068 if ( cpi->pass == 1 )
1070 sf->improved_quant = 0;
1071 sf->optimize_coefficients = 0;
1072 sf->improved_dct = 0;
/* Hook up motion-search / DCT / quantizer function pointers to match
 * the features selected above. */
1075 if (cpi->sf.search_method == NSTEP)
1077 vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1079 else if (cpi->sf.search_method == DIAMOND)
1081 vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
1084 if (cpi->sf.improved_dct)
1086 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1087 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1091 /* No fast FDCT defined for any platform at this time. */
1092 cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
1093 cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
1096 cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
1098 if (cpi->sf.improved_quant)
1100 cpi->mb.quantize_b = vp8_regular_quantize_b;
1104 cpi->mb.quantize_b = vp8_fast_quantize_b;
1106 if (cpi->sf.improved_quant != last_improved_quant)
1107 vp8cx_init_quantizer(cpi);
1109 if (cpi->sf.iterative_sub_pixel == 1)
1111 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
1113 else if (cpi->sf.quarter_pixel_search)
1115 cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
1117 else if (cpi->sf.half_pixel_search)
1119 cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
1123 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1126 if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
1127 cpi->mb.optimize = 1;
1129 cpi->mb.optimize = 0;
1131 if (cpi->common.full_pixel)
1132 cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
1135 frames_at_speed[cpi->Speed]++;
/* Allocate the raw-source lookahead queue and, when temporal alt-ref
 * filtering is compiled in (VP8_TEMPORAL_ALT_REF), the altref working
 * frame buffer.  On allocation failure this longjmps out via
 * vpx_internal_error() on the common error context.
 * NOTE(review): this listing is truncated (embedded line numbers jump);
 * some statements and braces are not visible here. */
1141 static void alloc_raw_frame_buffers(VP8_COMP *cpi)
1143 #if VP8_TEMPORAL_ALT_REF
/* Round dimensions up to a whole number of 16x16 macroblocks. */
1144 int width = (cpi->oxcf.Width + 15) & ~15;
1145 int height = (cpi->oxcf.Height + 15) & ~15;
1148 cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
1149 cpi->oxcf.lag_in_frames);
/* Presumably reached when vp8_lookahead_init() returned NULL — the
 * guarding condition is not visible in this truncated listing. */
1151 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1152 "Failed to allocate lag buffers");
1154 #if VP8_TEMPORAL_ALT_REF
1156 if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
1157 width, height, VP8BORDERINPIXELS))
1158 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1159 "Failed to allocate altref buffer");
/* Release what alloc_raw_frame_buffers() created: the temporal-altref
 * frame buffer (when compiled in) and the lookahead queue. */
1165 static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
1167 #if VP8_TEMPORAL_ALT_REF
1168 vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
1170 vp8_lookahead_destroy(cpi->lookahead);
/* (Re)allocate the per-macroblock PARTITION_INFO array with one extra
 * row and column of padding; mb.pi is then offset one stride + 1 past
 * the start so mb.pi[-1] style accesses stay in bounds.
 * Returns non-zero on allocation failure (the return statements are not
 * visible in this truncated listing). */
1174 static int vp8_alloc_partition_data(VP8_COMP *cpi)
/* Free any previous allocation first — safe on NULL. */
1176 vpx_free(cpi->mb.pip);
1178 cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
1179 (cpi->common.mb_rows + 1),
1180 sizeof(PARTITION_INFO));
1184 cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
/* Allocate (or reallocate, on a resolution change) all per-frame-size
 * encoder state: frame buffers, partition data, token buffer, GF usage
 * maps, MV-prediction history, segmentation/active maps, multithread
 * row mutexes, token lists and the denoiser.  Any allocation failure
 * longjmps out via vpx_internal_error()/CHECK_MEM_ERROR.
 * NOTE(review): this listing is truncated; some braces/statements are
 * not visible. */
1189 void vp8_alloc_compressor_data(VP8_COMP *cpi)
1191 VP8_COMMON *cm = & cpi->common;
1193 int width = cm->Width;
1194 int height = cm->Height;
1195 #if CONFIG_MULTITHREAD
/* Remember the old row count so the old per-row mutexes can be
 * destroyed before reallocating below. */
1196 int prev_mb_rows = cm->mb_rows;
1199 if (vp8_alloc_frame_buffers(cm, width, height))
1200 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1201 "Failed to allocate frame buffers");
1203 if (vp8_alloc_partition_data(cpi))
1204 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1205 "Failed to allocate partition data");
/* Round the working dimensions up to a multiple of 16. */
1208 if ((width & 0xf) != 0)
1209 width += 16 - (width & 0xf);
1211 if ((height & 0xf) != 0)
1212 height += 16 - (height & 0xf);
1215 if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
1216 width, height, VP8BORDERINPIXELS))
1217 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1218 "Failed to allocate last frame buffer");
1220 if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
1221 width, height, VP8BORDERINPIXELS))
1222 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1223 "Failed to allocate scaled source buffer");
/* Token buffer: with on-the-fly bitpacking only a small fixed pool is
 * needed; otherwise enough for every MB in the frame (24 blocks x 16
 * coeffs per MB). */
1228 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
1229 unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
1231 unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
1233 CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
1236 /* Data used for real time vc mode to see if gf needs refreshing */
1237 cpi->zeromv_count = 0;
1240 /* Structures used to monitor GF usage */
1241 vpx_free(cpi->gf_active_flags);
1242 CHECK_MEM_ERROR(cpi->gf_active_flags,
1243 vpx_calloc(sizeof(*cpi->gf_active_flags),
1244 cm->mb_rows * cm->mb_cols));
1245 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
1247 vpx_free(cpi->mb_activity_map);
1248 CHECK_MEM_ERROR(cpi->mb_activity_map,
1249 vpx_calloc(sizeof(*cpi->mb_activity_map),
1250 cm->mb_rows * cm->mb_cols));
1252 /* allocate memory for storing last frame's MVs for MV prediction. */
/* +2 in each dimension gives a one-MB border around the frame. */
1253 vpx_free(cpi->lfmv);
1254 CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1255 sizeof(*cpi->lfmv)));
1256 vpx_free(cpi->lf_ref_frame_sign_bias);
1257 CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
1258 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1259 sizeof(*cpi->lf_ref_frame_sign_bias)));
1260 vpx_free(cpi->lf_ref_frame);
1261 CHECK_MEM_ERROR(cpi->lf_ref_frame,
1262 vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
1263 sizeof(*cpi->lf_ref_frame)));
1265 /* Create the encoder segmentation map and set all entries to 0 */
1266 vpx_free(cpi->segmentation_map);
1267 CHECK_MEM_ERROR(cpi->segmentation_map,
1268 vpx_calloc(cm->mb_rows * cm->mb_cols,
1269 sizeof(*cpi->segmentation_map)));
1270 cpi->cyclic_refresh_mode_index = 0;
/* Active map: every MB starts out "active" (1). */
1271 vpx_free(cpi->active_map);
1272 CHECK_MEM_ERROR(cpi->active_map,
1273 vpx_calloc(cm->mb_rows * cm->mb_cols,
1274 sizeof(*cpi->active_map)));
1275 memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));
/* Multithread sync range grows with frame width: wider frames allow a
 * larger inter-row synchronization distance. */
1277 #if CONFIG_MULTITHREAD
1279 cpi->mt_sync_range = 1;
1280 else if (width <= 1280)
1281 cpi->mt_sync_range = 4;
1282 else if (width <= 2560)
1283 cpi->mt_sync_range = 8;
1285 cpi->mt_sync_range = 16;
1287 if (cpi->oxcf.multi_threaded > 1)
1291 /* De-allocate and re-allocate mutex */
1292 if (cpi->pmutex != NULL) {
1293 for (i = 0; i < prev_mb_rows; i++) {
1294 pthread_mutex_destroy(&cpi->pmutex[i]);
1296 vpx_free(cpi->pmutex);
1300 CHECK_MEM_ERROR(cpi->pmutex, vpx_malloc(sizeof(*cpi->pmutex) *
1303 for (i = 0; i < cm->mb_rows; i++) {
1304 pthread_mutex_init(&cpi->pmutex[i], NULL);
1308 vpx_free(cpi->mt_current_mb_col);
1309 CHECK_MEM_ERROR(cpi->mt_current_mb_col,
1310 vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
/* Per-row token lists used by the packing stage. */
1315 vpx_free(cpi->tplist);
1316 CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
1318 #if CONFIG_TEMPORAL_DENOISING
/* Re-create the denoiser at the new dimensions when it is enabled. */
1319 if (cpi->oxcf.noise_sensitivity > 0) {
1320 vp8_denoiser_free(&cpi->denoiser);
1321 vp8_denoiser_allocate(&cpi->denoiser, width, height,
1322 cm->mb_rows, cm->mb_cols,
1323 cpi->oxcf.noise_sensitivity);
/* Monotonic mapping from the 0-63 external quantizer scale exposed to
 * the application onto the internal 0-127 VP8 quantizer index.  The
 * step size grows toward the high end (coarser external steps map to
 * bigger internal jumps). */
1330 static const int q_trans[] =
1332 0, 1, 2, 3, 4, 5, 7, 8,
1333 9, 10, 12, 13, 15, 17, 18, 19,
1334 20, 21, 23, 24, 25, 26, 27, 28,
1335 29, 30, 31, 33, 35, 37, 39, 41,
1336 43, 45, 47, 49, 51, 53, 55, 57,
1337 59, 61, 64, 67, 70, 73, 76, 79,
1338 82, 85, 88, 91, 94, 97, 100, 103,
1339 106, 109, 112, 115, 118, 121, 124, 127,
/* Inverse of q_trans[]: return the smallest external q (0-63) whose
 * internal quantizer index is >= x.  (The return statements and the
 * fallback for x above the table maximum are not visible in this
 * truncated listing.) */
1342 int vp8_reverse_trans(int x)
1346 for (i = 0; i < 64; i++)
1347 if (q_trans[i] >= x)
/* Install a new frame rate and recompute everything derived from it:
 * per-frame bandwidth targets, the minimum frame bandwidth (as a
 * percentage of the average, per two_pass_vbrmin_section), and the
 * maximum golden/alt-ref frame interval. */
1352 void vp8_new_framerate(VP8_COMP *cpi, double framerate)
1357 cpi->framerate = framerate;
1358 cpi->output_framerate = framerate;
1359 cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
1360 cpi->output_framerate);
1361 cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
1362 cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
1363 cpi->oxcf.two_pass_vbrmin_section / 100);
1365 /* Set Maximum gf/arf interval */
/* Roughly half a second of frames, floored at 12. */
1366 cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
1368 if(cpi->max_gf_interval < 12)
1369 cpi->max_gf_interval = 12;
1371 /* Extended interval for genuinely static scenes */
1372 cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
1374 /* Special conditions when altr ref frame enabled in lagged compress mode */
/* The GF/ARF interval cannot exceed the lookahead depth. */
1375 if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
1377 if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1378 cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1380 if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
1381 cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
1384 if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
1385 cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
/* One-time configuration of a freshly created compressor: seed the
 * frame rate from the timebase, set default reference-frame flags,
 * apply the full config via vp8_change_config(), and initialise rate
 * control state, temporal-layer contexts and the fixed-point divide
 * table.
 * NOTE(review): this listing is truncated; some braces/statements are
 * not visible. */
1389 static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1391 VP8_COMMON *cm = &cpi->common;
1396 cpi->auto_adjust_gold_quantizer = 1;
1398 cm->version = oxcf->Version;
1399 vp8_setup_version(cm);
1401 /* Frame rate is not available on the first frame, as it's derived from
1402 * the observed timestamps. The actual value used here doesn't matter
1403 * too much, as it will adapt quickly.
1405 if (oxcf->timebase.num > 0) {
1406 cpi->framerate = (double)(oxcf->timebase.den) /
1407 (double)(oxcf->timebase.num);
1409 cpi->framerate = 30;
1412 /* If the reciprocal of the timebase seems like a reasonable framerate,
1413 * then use that as a guess, otherwise use 30.
1415 if (cpi->framerate > 180)
1416 cpi->framerate = 30;
1418 cpi->ref_framerate = cpi->framerate;
/* All three reference frames usable by default. */
1420 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
1422 cm->refresh_golden_frame = 0;
1423 cm->refresh_last_frame = 1;
1424 cm->refresh_entropy_probs = 1;
1426 /* change includes all joint functionality */
1427 vp8_change_config(cpi, oxcf);
1429 /* Initialize active best and worst q and average q values. */
1430 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1431 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1432 cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
1434 /* Initialise the starting buffer levels */
1435 cpi->buffer_level = cpi->oxcf.starting_buffer_level;
1436 cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
1438 cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
1439 cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
1440 cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
1441 cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
1443 cpi->total_actual_bits = 0;
1444 cpi->total_target_vs_actual = 0;
1446 /* Temporal scalabilty */
/* Each layer's frame rate is the previous layer's divided by its
 * rate decimator. */
1447 if (cpi->oxcf.number_of_layers > 1)
1450 double prev_layer_framerate=0;
1452 for (i=0; i<cpi->oxcf.number_of_layers; i++)
1454 init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
1455 prev_layer_framerate = cpi->output_framerate /
1456 cpi->oxcf.rate_decimator[i];
1460 #if VP8_TEMPORAL_ALT_REF
/* Precompute 0x80000/i reciprocals used by the temporal filter;
 * index 0 is unused and left as 0. */
1464 cpi->fixed_divide[0] = 0;
1466 for (i = 1; i < 512; i++)
1467 cpi->fixed_divide[i] = 0x80000 / i;
/* Refresh each temporal layer's context (frame rate, target bandwidth,
 * buffer levels, average frame size) after a configuration change.
 * Buffer levels given in milliseconds are rescaled against the layer's
 * target bandwidth; a value of 0 falls back to bandwidth/8. */
1472 static void update_layer_contexts (VP8_COMP *cpi)
1474 VP8_CONFIG *oxcf = &cpi->oxcf;
1476 /* Update snapshots of the layer contexts to reflect new parameters */
1477 if (oxcf->number_of_layers > 1)
1480 double prev_layer_framerate=0;
1482 assert(oxcf->number_of_layers <= VPX_TS_MAX_LAYERS);
1483 for (i = 0; i < oxcf->number_of_layers && i < VPX_TS_MAX_LAYERS; ++i)
1485 LAYER_CONTEXT *lc = &cpi->layer_context[i];
1488 cpi->ref_framerate / oxcf->rate_decimator[i];
/* target_bitrate[] is in kbit/s; lc->target_bandwidth in bit/s. */
1489 lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
1491 lc->starting_buffer_level = rescale(
1492 (int)oxcf->starting_buffer_level_in_ms,
1493 lc->target_bandwidth, 1000);
1495 if (oxcf->optimal_buffer_level == 0)
1496 lc->optimal_buffer_level = lc->target_bandwidth / 8;
1498 lc->optimal_buffer_level = rescale(
1499 (int)oxcf->optimal_buffer_level_in_ms,
1500 lc->target_bandwidth, 1000);
1502 if (oxcf->maximum_buffer_size == 0)
1503 lc->maximum_buffer_size = lc->target_bandwidth / 8;
1505 lc->maximum_buffer_size = rescale(
1506 (int)oxcf->maximum_buffer_size_in_ms,
1507 lc->target_bandwidth, 1000);
1509 /* Work out the average size of a frame within this layer */
/* Layer i's own bitrate share is the delta over layer i-1, spread
 * across the frames unique to this layer's rate. */
1511 lc->avg_frame_size_for_layer =
1512 (int)((oxcf->target_bitrate[i] -
1513 oxcf->target_bitrate[i-1]) * 1000 /
1514 (lc->framerate - prev_layer_framerate));
1516 prev_layer_framerate = lc->framerate;
/* Apply a (possibly changed) configuration to a live encoder.  Handles
 * mode/speed selection, quantizer-range translation, buffer level
 * rescaling, temporal-layer changes, and reallocation when the frame
 * size changed.  Called both at creation (from init_config) and on any
 * later vpx_codec_enc_config_set().
 * NOTE(review): this listing is truncated; several conditions, braces
 * and case labels are not visible. */
1521 void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
1523 VP8_COMMON *cm = &cpi->common;
1524 int last_w, last_h, prev_number_of_layers;
1532 #if CONFIG_MULTITHREAD
1533 /* wait for the last picture loopfilter thread done */
1534 if (cpi->b_lpf_running)
1536 sem_wait(&cpi->h_event_end_lpf);
1537 cpi->b_lpf_running = 0;
1541 if (cm->version != oxcf->Version)
1543 cm->version = oxcf->Version;
1544 vp8_setup_version(cm);
/* Remember previous geometry/layering so changes can be detected
 * after the new config is copied in. */
1547 last_w = cpi->oxcf.Width;
1548 last_h = cpi->oxcf.Height;
1549 prev_number_of_layers = cpi->oxcf.number_of_layers;
/* Map the requested encoding mode to a compressor speed class and
 * clamp cpu_used to the range valid for that mode. */
1553 switch (cpi->oxcf.Mode)
1558 cpi->compressor_speed = 2;
1560 if (cpi->oxcf.cpu_used < -16)
1562 cpi->oxcf.cpu_used = -16;
1565 if (cpi->oxcf.cpu_used > 16)
1566 cpi->oxcf.cpu_used = 16;
1570 case MODE_GOODQUALITY:
1572 cpi->compressor_speed = 1;
1574 if (cpi->oxcf.cpu_used < -5)
1576 cpi->oxcf.cpu_used = -5;
1579 if (cpi->oxcf.cpu_used > 5)
1580 cpi->oxcf.cpu_used = 5;
1584 case MODE_BESTQUALITY:
1586 cpi->compressor_speed = 0;
1589 case MODE_FIRSTPASS:
1591 cpi->compressor_speed = 1;
1593 case MODE_SECONDPASS:
1595 cpi->compressor_speed = 1;
1597 if (cpi->oxcf.cpu_used < -5)
1599 cpi->oxcf.cpu_used = -5;
1602 if (cpi->oxcf.cpu_used > 5)
1603 cpi->oxcf.cpu_used = 5;
1606 case MODE_SECONDPASS_BEST:
1608 cpi->compressor_speed = 0;
1613 cpi->auto_worst_q = 1;
/* Translate external 0-63 quantizer values to internal 0-127 index
 * space via q_trans[]. */
1615 cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
1616 cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
1617 cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
1619 if (oxcf->fixed_q >= 0)
1621 if (oxcf->worst_allowed_q < 0)
1622 cpi->oxcf.fixed_q = q_trans[0];
1624 cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
1626 if (oxcf->alt_q < 0)
1627 cpi->oxcf.alt_q = q_trans[0];
1629 cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
1631 if (oxcf->key_q < 0)
1632 cpi->oxcf.key_q = q_trans[0];
1634 cpi->oxcf.key_q = q_trans[oxcf->key_q];
1636 if (oxcf->gold_q < 0)
1637 cpi->oxcf.gold_q = q_trans[0];
1639 cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
1643 cpi->baseline_gf_interval =
1644 cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
/* On-the-fly bitpacking forces the maximum token partition count. */
1646 #if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
1647 cpi->oxcf.token_partitions = 3;
1650 if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
1651 cm->multi_token_partition =
1652 (TOKEN_PARTITION) cpi->oxcf.token_partitions;
1654 setup_features(cpi);
1659 for (i = 0; i < MAX_MB_SEGMENTS; i++)
1660 cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1663 /* At the moment the first order values may not be > MAXQ */
1664 if (cpi->oxcf.fixed_q > MAXQ)
1665 cpi->oxcf.fixed_q = MAXQ;
1667 /* local file playback mode == really big buffer */
1668 if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
1670 cpi->oxcf.starting_buffer_level = 60000;
1671 cpi->oxcf.optimal_buffer_level = 60000;
1672 cpi->oxcf.maximum_buffer_size = 240000;
1673 cpi->oxcf.starting_buffer_level_in_ms = 60000;
1674 cpi->oxcf.optimal_buffer_level_in_ms = 60000;
1675 cpi->oxcf.maximum_buffer_size_in_ms = 240000;
1678 /* Convert target bandwidth from Kbit/s to Bit/s */
1679 cpi->oxcf.target_bandwidth *= 1000;
/* Buffer levels arrive in milliseconds; rescale to bits using the
 * target bandwidth. */
1681 cpi->oxcf.starting_buffer_level =
1682 rescale((int)cpi->oxcf.starting_buffer_level,
1683 cpi->oxcf.target_bandwidth, 1000);
1685 /* Set or reset optimal and maximum buffer levels. */
1686 if (cpi->oxcf.optimal_buffer_level == 0)
1687 cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
1689 cpi->oxcf.optimal_buffer_level =
1690 rescale((int)cpi->oxcf.optimal_buffer_level,
1691 cpi->oxcf.target_bandwidth, 1000);
1693 if (cpi->oxcf.maximum_buffer_size == 0)
1694 cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
1696 cpi->oxcf.maximum_buffer_size =
1697 rescale((int)cpi->oxcf.maximum_buffer_size,
1698 cpi->oxcf.target_bandwidth, 1000);
1699 // Under a configuration change, where maximum_buffer_size may change,
1700 // keep buffer level clipped to the maximum allowed buffer size.
1701 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
1702 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
1703 cpi->buffer_level = cpi->bits_off_target;
1706 /* Set up frame rate and related parameters rate control values. */
1707 vp8_new_framerate(cpi, cpi->framerate);
1709 /* Set absolute upper and lower quality limits */
1710 cpi->worst_quality = cpi->oxcf.worst_allowed_q;
1711 cpi->best_quality = cpi->oxcf.best_allowed_q;
1713 /* active values should only be modified if out of new range */
1714 if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
1716 cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
1719 else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
1721 cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
1723 if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
1725 cpi->active_best_quality = cpi->oxcf.best_allowed_q;
1728 else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
1730 cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
1733 cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
1735 cpi->cq_target_quality = cpi->oxcf.cq_level;
1737 /* Only allow dropped frames in buffered mode */
1738 cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
1740 cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
1742 // Check if the number of temporal layers has changed, and if so reset the
1743 // pattern counter and set/initialize the temporal layer context for the
1744 // new layer configuration.
1745 if (cpi->oxcf.number_of_layers != prev_number_of_layers)
1747 // If the number of temporal layers are changed we must start at the
1748 // base of the pattern cycle, so set the layer id to 0 and reset
1749 // the temporal pattern counter.
1750 if (cpi->temporal_layer_id > 0) {
1751 cpi->temporal_layer_id = 0;
1753 cpi->temporal_pattern_counter = 0;
1754 reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
/* Latch the initial dimensions once; later frames may only shrink. */
1757 if (!cpi->initial_width)
1759 cpi->initial_width = cpi->oxcf.Width;
1760 cpi->initial_height = cpi->oxcf.Height;
1763 cm->Width = cpi->oxcf.Width;
1764 cm->Height = cpi->oxcf.Height;
1765 assert(cm->Width <= cpi->initial_width);
1766 assert(cm->Height <= cpi->initial_height);
1768 /* TODO(jkoleszar): if an internal spatial resampling is active,
1769 * and we downsize the input image, maybe we should clear the
1770 * internal scale immediately rather than waiting for it to
1774 /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
1775 if (cpi->oxcf.Sharpness > 7)
1776 cpi->oxcf.Sharpness = 7;
1778 cm->sharpness_level = cpi->oxcf.Sharpness;
/* When spatial scaling is active, derive the coded size from the
 * configured ratios, rounding up to the next whole pixel. */
1780 if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
1782 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
1783 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
1785 Scale2Ratio(cm->horiz_scale, &hr, &hs);
1786 Scale2Ratio(cm->vert_scale, &vr, &vs);
1788 /* always go to the next whole number */
1789 cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
1790 cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
1793 if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
1794 cpi->force_next_frame_intra = 1;
/* Reallocate all size-dependent buffers if the 16-aligned frame size
 * no longer matches the current reference buffers. */
1796 if (((cm->Width + 15) & 0xfffffff0) !=
1797 cm->yv12_fb[cm->lst_fb_idx].y_width ||
1798 ((cm->Height + 15) & 0xfffffff0) !=
1799 cm->yv12_fb[cm->lst_fb_idx].y_height ||
1800 cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
1802 dealloc_raw_frame_buffers(cpi);
1803 alloc_raw_frame_buffers(cpi);
1804 vp8_alloc_compressor_data(cpi);
1807 if (cpi->oxcf.fixed_q >= 0)
1809 cpi->last_q[0] = cpi->oxcf.fixed_q;
1810 cpi->last_q[1] = cpi->oxcf.fixed_q;
1813 cpi->Speed = cpi->oxcf.cpu_used;
1815 /* force to allowlag to 0 if lag_in_frames is 0; */
1816 if (cpi->oxcf.lag_in_frames == 0)
1818 cpi->oxcf.allow_lag = 0;
1820 /* Limit on lag buffers as these are not currently dynamically allocated */
1821 else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
1822 cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
1825 cpi->alt_ref_source = NULL;
1826 cpi->is_src_frame_alt_ref = 0;
1828 #if CONFIG_TEMPORAL_DENOISING
/* Lazily allocate the denoiser the first time noise sensitivity is
 * turned on. */
1829 if (cpi->oxcf.noise_sensitivity)
1831 if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
1833 int width = (cpi->oxcf.Width + 15) & ~15;
1834 int height = (cpi->oxcf.Height + 15) & ~15;
1835 vp8_denoiser_allocate(&cpi->denoiser, width, height,
1836 cm->mb_rows, cm->mb_cols,
1837 cpi->oxcf.noise_sensitivity);
1843 /* Experimental RD Code */
1844 cpi->frame_distortion = 0;
1845 cpi->last_frame_distortion = 0;
/* Fallback log2f for toolchains without one: log(x)/ln(2). */
1851 #define M_LOG2_E 0.693147180559945309417
1853 #define log2f(x) (log (x) / (float) M_LOG2_E)
/* Fill the symmetric MV SAD-cost tables: cost grows logarithmically
 * with MV magnitude, identical for positive and negative offsets and
 * for both components.  The tables are indexed [-mvfp_max..mvfp_max]
 * (the caller passes pointers to the table midpoints).
 * NOTE(review): the loop head and i's initialisation are not visible
 * in this truncated listing — only the do/while tail is. */
1855 static void cal_mvsadcosts(int *mvsadcost[2])
/* Cost of the zero vector. */
1859 mvsadcost [0] [0] = 300;
1860 mvsadcost [1] [0] = 300;
1864 double z = 256 * (2 * (log2f(8 * i) + .6));
1865 mvsadcost [0][i] = (int) z;
1866 mvsadcost [1][i] = (int) z;
1867 mvsadcost [0][-i] = (int) z;
1868 mvsadcost [1][-i] = (int) z;
1870 while (++i <= mvfp_max);
/* Allocate and fully initialise a VP8 encoder instance.  Sets up the
 * error-handling setjmp context, configuration, rate-control state,
 * cyclic refresh, statistics, first/second-pass state, speed features,
 * encoder threads, the SAD/variance function-pointer tables, RD cost
 * tables and the quantizer/loop filter.  Returns the new VP8_COMP
 * (returned NULL/error paths are not visible in this truncated
 * listing). */
1873 struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
/* 32-byte alignment for SIMD-accessed members. */
1880 cpi = vpx_memalign(32, sizeof(VP8_COMP));
1881 /* Check that the CPI instance is valid */
1887 memset(cpi, 0, sizeof(VP8_COMP));
/* Any vpx_internal_error() during init longjmps back here; tear the
 * half-built instance down and bail out. */
1889 if (setjmp(cm->error.jmp))
1891 cpi->common.error.setjmp = 0;
1892 vp8_remove_compressor(&cpi);
1896 cpi->common.error.setjmp = 1;
1898 CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
1900 vp8_create_common(&cpi->common);
1902 init_config(cpi, oxcf);
1904 memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
1905 cpi->common.current_video_frame = 0;
1906 cpi->temporal_pattern_counter = 0;
1907 cpi->temporal_layer_id = -1;
1908 cpi->kf_overspend_bits = 0;
1909 cpi->kf_bitrate_adjustment = 0;
1910 cpi->frames_till_gf_update_due = 0;
1911 cpi->gf_overspend_bits = 0;
1912 cpi->non_gf_bitrate_adjustment = 0;
/* Starting coding probabilities (out of 255). */
1913 cpi->prob_last_coded = 128;
1914 cpi->prob_gf_coded = 128;
1915 cpi->prob_intra_coded = 63;
1917 /* Prime the recent reference frame usage counters.
1918 * Hereafter they will be maintained as a sort of moving average
1920 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
1921 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
1922 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
1923 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
1925 /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
1926 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
1928 cpi->twopass.gf_decay_rate = 0;
1929 cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
1931 cpi->gold_is_last = 0 ;
1932 cpi->alt_is_last = 0 ;
1933 cpi->gold_is_alt = 0 ;
1935 cpi->active_map_enabled = 0;
1938 /* Experimental code for lagged and one pass */
1939 /* Initialise one_pass GF frames stats */
1940 /* Update stats used for GF selection */
1943 cpi->one_pass_frame_index = 0;
1945 for (i = 0; i < MAX_LAG_BUFFERS; i++)
1947 cpi->one_pass_frame_stats[i].frames_so_far = 0;
1948 cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
1949 cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
1950 cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
1951 cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
1952 cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
1953 cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
1954 cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
1955 cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
1960 cpi->mse_source_denoised = 0;
1962 /* Should we use the cyclic refresh method.
1963 * Currently this is tied to error resilliant mode
1965 cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
/* Fraction of MBs refreshed per frame depends on layer count. */
1966 cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 7;
1967 if (cpi->oxcf.number_of_layers == 1) {
1968 cpi->cyclic_refresh_mode_max_mbs_perframe =
1969 (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
1970 } else if (cpi->oxcf.number_of_layers == 2) {
1971 cpi->cyclic_refresh_mode_max_mbs_perframe =
1972 (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
1974 cpi->cyclic_refresh_mode_index = 0;
1975 cpi->cyclic_refresh_q = 32;
1977 if (cpi->cyclic_refresh_mode_enabled)
1979 CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
1982 cpi->cyclic_refresh_map = (signed char *) NULL;
/* Per-MB counters of consecutive zero-motion-on-last-frame events. */
1984 CHECK_MEM_ERROR(cpi->consec_zero_last,
1985 vpx_calloc(cm->mb_rows * cm->mb_cols, 1));
1986 CHECK_MEM_ERROR(cpi->consec_zero_last_mvbias,
1987 vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
1989 #ifdef VP8_ENTROPY_STATS
1990 init_context_counters();
1993 /*Initialize the feed-forward activity masking.*/
1994 cpi->activity_avg = 90<<12;
1996 /* Give a sensible default for the first frame. */
1997 cpi->frames_since_key = 8;
1998 cpi->key_frame_frequency = cpi->oxcf.key_freq;
1999 cpi->this_key_frame_forced = 0;
2000 cpi->next_key_frame_forced = 0;
2002 cpi->source_alt_ref_pending = 0;
2003 cpi->source_alt_ref_active = 0;
2004 cpi->common.refresh_alt_ref_frame = 0;
2006 cpi->force_maxqp = 0;
2008 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
2009 #if CONFIG_INTERNAL_STATS
2010 cpi->b_calculate_ssimg = 0;
2015 if (cpi->b_calculate_psnr)
2017 cpi->total_sq_error = 0.0;
2018 cpi->total_sq_error2 = 0.0;
2023 cpi->totalp_y = 0.0;
2024 cpi->totalp_u = 0.0;
2025 cpi->totalp_v = 0.0;
2027 cpi->tot_recode_hits = 0;
2028 cpi->summed_quality = 0;
2029 cpi->summed_weights = 0;
2032 if (cpi->b_calculate_ssimg)
2034 cpi->total_ssimg_y = 0;
2035 cpi->total_ssimg_u = 0;
2036 cpi->total_ssimg_v = 0;
2037 cpi->total_ssimg_all = 0;
/* Sentinel: "no timestamp seen yet". */
2042 cpi->first_time_stamp_ever = 0x7FFFFFFF;
2044 cpi->frames_till_gf_update_due = 0;
2045 cpi->key_frame_count = 1;
2047 cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
2050 cpi->total_byte_count = 0;
2052 cpi->drop_frame = 0;
2054 cpi->rate_correction_factor = 1.0;
2055 cpi->key_frame_rate_correction_factor = 1.0;
2056 cpi->gf_rate_correction_factor = 1.0;
2057 cpi->twopass.est_max_qcorrection_factor = 1.0;
2059 for (i = 0; i < KEY_FRAME_CONTEXT; i++)
2061 cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
2064 #ifdef OUTPUT_YUV_SRC
2065 yuv_file = fopen("bd.yuv", "ab");
2067 #ifdef OUTPUT_YUV_DENOISED
2068 yuv_denoised_file = fopen("denoised.yuv", "ab");
2072 framepsnr = fopen("framepsnr.stt", "a");
2073 kf_list = fopen("kf_list.stt", "w");
2076 cpi->output_pkt_list = oxcf->output_pkt_list;
/* Pass-specific initialisation: first-pass stats gathering, or
 * second-pass consumption of the supplied stats packets. */
2078 #if !CONFIG_REALTIME_ONLY
2082 vp8_init_first_pass(cpi);
2084 else if (cpi->pass == 2)
2086 size_t packet_sz = sizeof(FIRSTPASS_STATS);
2087 int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
2089 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
2090 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
2091 cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
2092 + (packets - 1) * packet_sz);
2093 vp8_init_second_pass(cpi);
2098 if (cpi->compressor_speed == 2)
2100 cpi->avg_encode_time = 0;
2101 cpi->avg_pick_mode_time = 0;
2104 vp8_set_speed_features(cpi);
2106 /* Set starting values of RD threshold multipliers (128 = *1) */
2107 for (i = 0; i < MAX_MODES; i++)
2109 cpi->mb.rd_thresh_mult[i] = 128;
2112 #ifdef VP8_ENTROPY_STATS
2113 init_mv_ref_counts();
2116 #if CONFIG_MULTITHREAD
2117 if(vp8cx_create_encoder_threads(cpi))
2119 vp8_remove_compressor(&cpi);
/* SAD / variance / sub-pel variance function tables per block size. */
2124 cpi->fn_ptr[BLOCK_16X16].sdf = vpx_sad16x16;
2125 cpi->fn_ptr[BLOCK_16X16].vf = vpx_variance16x16;
2126 cpi->fn_ptr[BLOCK_16X16].svf = vpx_sub_pixel_variance16x16;
2127 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vpx_variance_halfpixvar16x16_h;
2128 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vpx_variance_halfpixvar16x16_v;
2129 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vpx_variance_halfpixvar16x16_hv;
2130 cpi->fn_ptr[BLOCK_16X16].sdx3f = vpx_sad16x16x3;
2131 cpi->fn_ptr[BLOCK_16X16].sdx8f = vpx_sad16x16x8;
2132 cpi->fn_ptr[BLOCK_16X16].sdx4df = vpx_sad16x16x4d;
2134 cpi->fn_ptr[BLOCK_16X8].sdf = vpx_sad16x8;
2135 cpi->fn_ptr[BLOCK_16X8].vf = vpx_variance16x8;
2136 cpi->fn_ptr[BLOCK_16X8].svf = vpx_sub_pixel_variance16x8;
2137 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
2138 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
2139 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
2140 cpi->fn_ptr[BLOCK_16X8].sdx3f = vpx_sad16x8x3;
2141 cpi->fn_ptr[BLOCK_16X8].sdx8f = vpx_sad16x8x8;
2142 cpi->fn_ptr[BLOCK_16X8].sdx4df = vpx_sad16x8x4d;
2144 cpi->fn_ptr[BLOCK_8X16].sdf = vpx_sad8x16;
2145 cpi->fn_ptr[BLOCK_8X16].vf = vpx_variance8x16;
2146 cpi->fn_ptr[BLOCK_8X16].svf = vpx_sub_pixel_variance8x16;
2147 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
2148 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
2149 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
2150 cpi->fn_ptr[BLOCK_8X16].sdx3f = vpx_sad8x16x3;
2151 cpi->fn_ptr[BLOCK_8X16].sdx8f = vpx_sad8x16x8;
2152 cpi->fn_ptr[BLOCK_8X16].sdx4df = vpx_sad8x16x4d;
2154 cpi->fn_ptr[BLOCK_8X8].sdf = vpx_sad8x8;
2155 cpi->fn_ptr[BLOCK_8X8].vf = vpx_variance8x8;
2156 cpi->fn_ptr[BLOCK_8X8].svf = vpx_sub_pixel_variance8x8;
2157 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
2158 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
2159 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
2160 cpi->fn_ptr[BLOCK_8X8].sdx3f = vpx_sad8x8x3;
2161 cpi->fn_ptr[BLOCK_8X8].sdx8f = vpx_sad8x8x8;
2162 cpi->fn_ptr[BLOCK_8X8].sdx4df = vpx_sad8x8x4d;
2164 cpi->fn_ptr[BLOCK_4X4].sdf = vpx_sad4x4;
2165 cpi->fn_ptr[BLOCK_4X4].vf = vpx_variance4x4;
2166 cpi->fn_ptr[BLOCK_4X4].svf = vpx_sub_pixel_variance4x4;
2167 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
2168 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
2169 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
2170 cpi->fn_ptr[BLOCK_4X4].sdx3f = vpx_sad4x4x3;
2171 cpi->fn_ptr[BLOCK_4X4].sdx8f = vpx_sad4x4x8;
2172 cpi->fn_ptr[BLOCK_4X4].sdx4df = vpx_sad4x4x4d;
2174 #if ARCH_X86 || ARCH_X86_64
2175 cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
2176 cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
2177 cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
2178 cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
2179 cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
2182 cpi->full_search_sad = vp8_full_search_sad;
2183 cpi->diamond_search_sad = vp8_diamond_search_sad;
2184 cpi->refining_search_sad = vp8_refining_search_sad;
2186 /* make sure frame 1 is okay */
2187 cpi->mb.error_bins[0] = cpi->common.MBs;
2189 /* vp8cx_init_quantizer() is first called here. Add check in
2190 * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
2191 * called later when needed. This will avoid unnecessary calls of
2192 * vp8cx_init_quantizer() for every frame.
2194 vp8cx_init_quantizer(cpi);
2196 vp8_loop_filter_init(cm);
2198 cpi->common.error.setjmp = 0;
2200 #if CONFIG_MULTI_RES_ENCODING
2202 /* Calculate # of MBs in a row in lower-resolution level image. */
2203 if (cpi->oxcf.mr_encoder_id > 0)
2204 vp8_cal_low_res_mb_cols(cpi);
2208 /* setup RD costs to MACROBLOCK struct */
/* Point the MB cost tables at the midpoints of the symmetric RD
 * tables so they can be indexed by signed MV components. */
2210 cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
2211 cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
2212 cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
2213 cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];
2215 cal_mvsadcosts(cpi->mb.mvsadcost);
2217 cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
2218 cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
2219 cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
2220 cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
2221 cpi->mb.token_costs = cpi->rd_costs.token_costs;
2223 /* setup block ptrs & offsets */
2224 vp8_setup_block_ptrs(&cpi->mb);
2225 vp8_setup_block_dptrs(&cpi->mb.e_mbd);
/* Destroy an encoder instance and release everything it owns.
 * If at least one frame was encoded, the compile-time statistics
 * sections below first append their summaries (PSNR/SSIM, entropy
 * counters, mode counts, speed/timing) to various ".stt" side files.
 * NOTE(review): this listing is a sampled excerpt - brace, #endif and
 * declaration lines (e.g. 'int i;', closing fclose(f)) are elided
 * relative to the full source; confirm against the complete file. */
2231 void vp8_remove_compressor(VP8_COMP **ptr)
2233 VP8_COMP *cpi = *ptr;
/* Only dump statistics when the encoder exists and actually coded frames. */
2238 if (cpi && (cpi->common.current_video_frame > 0))
2240 #if !CONFIG_REALTIME_ONLY
/* Finalize second-pass rate-control bookkeeping. */
2244 vp8_end_second_pass(cpi);
2249 #ifdef VP8_ENTROPY_STATS
2250 print_context_counters();
2251 print_tree_update_probs();
2252 print_mode_context();
2255 #if CONFIG_INTERNAL_STATS
/* Append a whole-clip quality/bitrate summary to opsnr.stt. */
2259 FILE *f = fopen("opsnr.stt", "a");
/* Timestamps are in 100ns units (1/10,000,000 s); convert to seconds. */
2260 double time_encoded = (cpi->last_end_time_stamp_seen
2261 - cpi->first_time_stamp_ever) / 10000000.000;
2262 double total_encode_time = (cpi->time_receive_data +
2263 cpi->time_compress_data) / 1000.000;
/* Average data rate in kilobits per second. */
2264 double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;
2266 if (cpi->b_calculate_psnr)
/* Per-layer report when temporal layers are in use, else one clip-wide row. */
2268 if (cpi->oxcf.number_of_layers > 1)
2272 fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2273 "GLPsnrP\tVPXSSIM\t\n");
2274 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2276 double dr = (double)cpi->bytes_in_layer[i] *
2277 8.0 / 1000.0 / time_encoded;
/* 3/2 * W * H = luma plus both half-resolution chroma planes. */
2278 double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
2279 cpi->common.Width * cpi->common.Height;
2281 vpx_sse_to_psnr(samples, 255.0,
2282 cpi->total_error2[i]);
2283 double total_psnr2 =
2284 vpx_sse_to_psnr(samples, 255.0,
2285 cpi->total_error2_p[i]);
2286 double total_ssim = 100 * pow(cpi->sum_ssim[i] /
2287 cpi->sum_weights[i], 8.0);
2289 fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2292 cpi->sum_psnr[i] / cpi->frames_in_layer[i],
2294 cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
2295 total_psnr2, total_ssim);
/* Single-layer path: clip-wide PSNR/SSIM from accumulated SSE. */
2300 double samples = 3.0 / 2 * cpi->count *
2301 cpi->common.Width * cpi->common.Height;
2302 double total_psnr = vpx_sse_to_psnr(samples, 255.0,
2303 cpi->total_sq_error);
2304 double total_psnr2 = vpx_sse_to_psnr(samples, 255.0,
2305 cpi->total_sq_error2);
2306 double total_ssim = 100 * pow(cpi->summed_quality /
2307 cpi->summed_weights, 8.0);
2309 fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
2310 "GLPsnrP\tVPXSSIM\t Time(us)\n");
2311 fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2313 dr, cpi->total / cpi->count, total_psnr,
2314 cpi->totalp / cpi->count, total_psnr2,
2315 total_ssim, total_encode_time);
/* Optional SSIM-G report, again per-layer or clip-wide. */
2319 if (cpi->b_calculate_ssimg)
2321 if (cpi->oxcf.number_of_layers > 1)
2325 fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2327 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
2329 double dr = (double)cpi->bytes_in_layer[i] *
2330 8.0 / 1000.0 / time_encoded;
2331 fprintf(f, "%5d\t%7.3f\t%6.4f\t"
2332 "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
2334 cpi->total_ssimg_y_in_layer[i] /
2335 cpi->frames_in_layer[i],
2336 cpi->total_ssimg_u_in_layer[i] /
2337 cpi->frames_in_layer[i],
2338 cpi->total_ssimg_v_in_layer[i] /
2339 cpi->frames_in_layer[i],
2340 cpi->total_ssimg_all_in_layer[i] /
2341 cpi->frames_in_layer[i],
2347 fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
2349 fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
2350 cpi->total_ssimg_y / cpi->count,
2351 cpi->total_ssimg_u / cpi->count,
2352 cpi->total_ssimg_v / cpi->count,
2353 cpi->total_ssimg_all / cpi->count, total_encode_time);
/* Q-range / skip-decision counters. */
2359 f = fopen("qskip.stt", "a");
2360 fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
/* Speed-2 (realtime) encoders also log the per-speed frame histogram. */
2371 if (cpi->compressor_speed == 2)
2374 FILE *f = fopen("cxspeed.stt", "a");
2375 cnt_pm /= cpi->common.MBs;
2377 for (i = 0; i < 16; i++)
2378 fprintf(f, "%5d", frames_at_speed[i]);
/* Mode-usage statistics (intra / inter / sub-block partitions). */
2389 extern int count_mb_seg[4];
2390 FILE *f = fopen("modes.stt", "a");
2391 double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
2392 fprintf(f, "intra_mode in Intra Frames:\n");
2393 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
2394 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
2399 for (i = 0; i < 10; i++)
2400 fprintf(f, "%8d, ", b_modes[i]);
2406 fprintf(f, "Modes in Inter Frames:\n");
2407 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
2408 inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
2409 inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
2410 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
2415 for (i = 0; i < 15; i++)
2416 fprintf(f, "%8d, ", inter_b_modes[i]);
2421 fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
2422 fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
2430 #ifdef VP8_ENTROPY_STATS
/* Emit the collected key-frame B-mode counts as a compilable C table. */
2433 FILE *fmode = fopen("modecontext.c", "w");
2435 fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
2436 fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
2437 fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
2439 for (i = 0; i < 10; i++)
2442 fprintf(fmode, " { /* Above Mode : %d */\n", i);
2444 for (j = 0; j < 10; j++)
2447 fprintf(fmode, " {");
2449 for (k = 0; k < 10; k++)
/* Never emit a zero count - substitute 1 so later probability
 * derivation does not divide by zero. */
2451 if (!intra_mode_stats[i][j][k])
2452 fprintf(fmode, " %5d, ", 1);
2454 fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
2457 fprintf(fmode, "}, /* left_mode %d */\n", j);
2461 fprintf(fmode, " },\n");
2465 fprintf(fmode, "};\n");
2471 #if defined(SECTIONBITS_OUTPUT)
2476 FILE *f = fopen("tokenbits.stt", "a");
2478 for (i = 0; i < 28; i++)
2479 fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
/* Console timing summary (values in milliseconds). */
2489 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2490 printf("\n_frames recive_data encod_mb_row compress_frame Total\n");
2491 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
/* --- Actual teardown: threads, denoiser, buffers, common state. --- */
2497 #if CONFIG_MULTITHREAD
2498 vp8cx_remove_encoder_threads(cpi);
2501 #if CONFIG_TEMPORAL_DENOISING
2502 vp8_denoiser_free(&cpi->denoiser);
2504 dealloc_compressor_data(cpi);
2505 vpx_free(cpi->mb.ss);
2507 vpx_free(cpi->cyclic_refresh_map);
2508 vpx_free(cpi->consec_zero_last);
2509 vpx_free(cpi->consec_zero_last_mvbias);
2511 vp8_remove_common(&cpi->common);
2515 #ifdef OUTPUT_YUV_SRC
2518 #ifdef OUTPUT_YUV_DENOISED
2519 fclose(yuv_denoised_file);
/* Sum of squared differences between two planes of size cols x rows.
 * The bulk of the plane is covered with the optimized vpx_mse16x16()
 * on 16x16 tiles; the right and bottom borders that do not fill a full
 * 16x16 block are handled with scalar loops.
 * NOTE(review): excerpt - the per-block accumulation into total_sse
 * (the '&sse' out-argument and 'total_sse += sse') is elided here. */
2538 static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
2539 unsigned char *recon, int recon_stride,
2540 unsigned int cols, unsigned int rows)
2542 unsigned int row, col;
2543 uint64_t total_sse = 0;
/* Full 16x16 blocks via the SIMD-capable MSE kernel. */
2546 for (row = 0; row + 16 <= rows; row += 16)
2548 for (col = 0; col + 16 <= cols; col += 16)
2552 vpx_mse16x16(orig + col, orig_stride,
2553 recon + col, recon_stride,
2558 /* Handle odd-sized width */
2561 unsigned int border_row, border_col;
2562 unsigned char *border_orig = orig;
2563 unsigned char *border_recon = recon;
/* Scalar SSD over the partial-width strip of this 16-row band. */
2565 for (border_row = 0; border_row < 16; border_row++)
2567 for (border_col = col; border_col < cols; border_col++)
2569 diff = border_orig[border_col] - border_recon[border_col];
2570 total_sse += diff * diff;
2573 border_orig += orig_stride;
2574 border_recon += recon_stride;
/* Advance both planes to the next 16-row band. */
2578 orig += orig_stride * 16;
2579 recon += recon_stride * 16;
2582 /* Handle odd-sized height */
2583 for (; row < rows; row++)
2585 for (col = 0; col < cols; col++)
2587 diff = orig[col] - recon[col];
2588 total_sse += diff * diff;
2591 orig += orig_stride;
2592 recon += recon_stride;
/* Reset MMX/x87 state after SIMD usage before returning. */
2595 vp8_clear_system_state();
/* Build a VPX_CODEC_PSNR_PKT for the just-encoded frame and append it
 * to the application's packet list. sse[0]/samples[0] accumulate the
 * combined Y+U+V totals; indices 1..3 hold the per-plane Y, U, V
 * values. PSNR is derived from SSE with peak value 255.
 * NOTE(review): excerpt - declarations of 'sse', 'i' and the trailing
 * width/height arguments to calc_plane_error() are elided here. */
2600 static void generate_psnr_packet(VP8_COMP *cpi)
2602 YV12_BUFFER_CONFIG *orig = cpi->Source;
2603 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
2604 struct vpx_codec_cx_pkt pkt;
2607 unsigned int width = cpi->common.Width;
2608 unsigned int height = cpi->common.Height;
2610 pkt.kind = VPX_CODEC_PSNR_PKT;
/* Luma plane: full display resolution. */
2611 sse = calc_plane_error(orig->y_buffer, orig->y_stride,
2612 recon->y_buffer, recon->y_stride,
2614 pkt.data.psnr.sse[0] = sse;
2615 pkt.data.psnr.sse[1] = sse;
2616 pkt.data.psnr.samples[0] = width * height;
2617 pkt.data.psnr.samples[1] = width * height;
/* Chroma planes are half resolution, rounded up for odd dimensions. */
2619 width = (width + 1) / 2;
2620 height = (height + 1) / 2;
2622 sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
2623 recon->u_buffer, recon->uv_stride,
2625 pkt.data.psnr.sse[0] += sse;
2626 pkt.data.psnr.sse[2] = sse;
2627 pkt.data.psnr.samples[0] += width * height;
2628 pkt.data.psnr.samples[2] = width * height;
2630 sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
2631 recon->v_buffer, recon->uv_stride,
2633 pkt.data.psnr.sse[0] += sse;
2634 pkt.data.psnr.sse[3] = sse;
2635 pkt.data.psnr.samples[0] += width * height;
2636 pkt.data.psnr.samples[3] = width * height;
/* Convert each SSE/sample pair to dB. */
2638 for (i = 0; i < 4; i++)
2639 pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
2640 (double)(pkt.data.psnr.sse[i]));
2642 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
/* Select which reference frames (bitmask of VP8_LAST_FRAME /
 * VP8_GOLD_FRAME / VP8_ALTR_FRAME, so valid values are 0..7) the
 * encoder may use for prediction. Values > 7 are rejected
 * (excerpt elides the error return). */
2646 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
2648 if (ref_frame_flags > 7)
2651 cpi->ref_frame_flags = ref_frame_flags;
/* Select which reference buffers the next encoded frame refreshes.
 * ref_frame_flags is the same 3-bit mask as vp8_use_as_reference();
 * values > 7 are rejected (excerpt elides the error return). All
 * refresh flags are cleared first, then set per mask bit. */
2654 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
2656 if (ref_frame_flags > 7)
2659 cpi->common.refresh_golden_frame = 0;
2660 cpi->common.refresh_alt_ref_frame = 0;
2661 cpi->common.refresh_last_frame = 0;
2663 if (ref_frame_flags & VP8_LAST_FRAME)
2664 cpi->common.refresh_last_frame = 1;
2666 if (ref_frame_flags & VP8_GOLD_FRAME)
2667 cpi->common.refresh_golden_frame = 1;
2669 if (ref_frame_flags & VP8_ALTR_FRAME)
2670 cpi->common.refresh_alt_ref_frame = 1;
/* Copy the requested reference frame (last/golden/altref) out of the
 * encoder's frame-buffer pool into caller-supplied storage 'sd'.
 * NOTE(review): excerpt - the 'int ref_fb_idx;' declaration and the
 * unknown-flag error return are elided here. */
2675 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2677 VP8_COMMON *cm = &cpi->common;
/* Map the public flag to the internal frame-buffer index. */
2680 if (ref_frame_flag == VP8_LAST_FRAME)
2681 ref_fb_idx = cm->lst_fb_idx;
2682 else if (ref_frame_flag == VP8_GOLD_FRAME)
2683 ref_fb_idx = cm->gld_fb_idx;
2684 else if (ref_frame_flag == VP8_ALTR_FRAME)
2685 ref_fb_idx = cm->alt_fb_idx;
2689 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
/* Overwrite the selected reference frame (last/golden/altref) in the
 * encoder's frame-buffer pool with caller-supplied image data 'sd'.
 * Mirror of vp8_get_reference() with the copy direction reversed.
 * NOTE(review): excerpt - 'int ref_fb_idx;' and the unknown-flag
 * error return are elided here. */
2693 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
2695 VP8_COMMON *cm = &cpi->common;
2699 if (ref_frame_flag == VP8_LAST_FRAME)
2700 ref_fb_idx = cm->lst_fb_idx;
2701 else if (ref_frame_flag == VP8_GOLD_FRAME)
2702 ref_fb_idx = cm->gld_fb_idx;
2703 else if (ref_frame_flag == VP8_ALTR_FRAME)
2704 ref_fb_idx = cm->alt_fb_idx;
2708 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
/* Enable/disable persistence of entropy probability updates across
 * frames ('update' is stored directly into refresh_entropy_probs). */
2712 int vp8_update_entropy(VP8_COMP *cpi, int update)
2714 VP8_COMMON *cm = &cpi->common;
2715 cm->refresh_entropy_probs = update;
#if defined(OUTPUT_YUV_SRC) || defined(OUTPUT_YUV_DENOISED)
/* Debug helper: dump a YV12 buffer to 'yuv_file' as raw planar YUV,
 * one row at a time so stride padding is not written.
 * NOTE(review): excerpt - the row loops ('do {...} while (--h);') and
 * the reassignment of src/h for the U and V planes are elided here. */
2722 void vp8_write_yuv_frame(FILE *yuv_file, YV12_BUFFER_CONFIG *s)
2724 unsigned char *src = s->y_buffer;
2725 int h = s->y_height;
/* Luma rows. */
2729 fwrite(src, s->y_width, 1, yuv_file);
/* Chroma rows (uv_width bytes per row, advancing by uv_stride). */
2739 fwrite(src, s->uv_width, 1, yuv_file);
2740 src += s->uv_stride;
2749 fwrite(src, s->uv_width, 1, yuv_file);
2750 src += s->uv_stride;
/* If spatial resampling is active (non-zero horiz/vert scale), scale
 * the input frame 'sd' into cpi->scaled_source, extend its borders,
 * and repoint cpi->Source at the scaled copy. Otherwise (elided else
 * branch) the source is presumably used as-is - confirm in full file. */
2756 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
2758 VP8_COMMON *cm = &cpi->common;
2760 /* are we resizing the image */
2761 if (cm->horiz_scale != 0 || cm->vert_scale != 0)
2763 #if CONFIG_SPATIAL_RESAMPLING
/* hr/hs and vr/vs are ratio numerator/denominator pairs filled in by
 * Scale2Ratio(); the macro suppresses false uninitialized warnings. */
2764 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2765 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
/* NOTE(review): the body for the vert_scale == 3 special case and the
 * tmp_height computation are elided in this excerpt. */
2768 if (cm->vert_scale == 3)
2773 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2774 Scale2Ratio(cm->vert_scale, &vr, &vs);
2776 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
2777 tmp_height, hs, hr, vs, vr, 0);
2779 vp8_yv12_extend_frame_borders(&cpi->scaled_source);
2780 cpi->Source = &cpi->scaled_source;
/* One-pass CBR spatial resampling decision, evaluated at key frames.
 * Depending on buffer fullness relative to the down/up watermarks the
 * scale factors are stepped one notch toward ONETWO (down) or NORMAL
 * (up); if the resulting dimensions differ from the current ones the
 * compressor buffers are reallocated and the source rescaled.
 * Returns whether a resize occurred (elided return statements). */
2788 static int resize_key_frame(VP8_COMP *cpi)
2790 #if CONFIG_SPATIAL_RESAMPLING
2791 VP8_COMMON *cm = &cpi->common;
2793 /* Do we need to apply resampling for one pass cbr.
2794 * In one pass this is more limited than in two pass cbr.
2795 * The test and any change is only made once per key frame sequence.
2797 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
2799 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
2800 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
2801 int new_width, new_height;
2803 /* If we are below the resample DOWN watermark then scale down a
2806 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
/* Step one notch toward maximum downscale (ONETWO), clamped. */
2808 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
2809 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
2811 /* Should we now start scaling back up */
2812 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
/* Step one notch back toward full resolution (NORMAL), clamped. */
2814 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
2815 cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
2818 /* Get the new height and width */
2819 Scale2Ratio(cm->horiz_scale, &hr, &hs);
2820 Scale2Ratio(cm->vert_scale, &vr, &vs);
/* Round-up integer scaling of the configured dimensions. */
2821 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
2822 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
2824 /* If the image size has changed we need to reallocate the buffers
2825 * and resample the source image
2827 if ((cm->Width != new_width) || (cm->Height != new_height))
2829 cm->Width = new_width;
2830 cm->Height = new_height;
2831 vp8_alloc_compressor_data(cpi);
2832 scale_and_extend_source(cpi->un_scaled_source, cpi);
/* Bookkeeping performed after encoding an alt-ref (ARF) update frame:
 * schedules the next GF interval, records the GF overspend so later
 * inter frames can recover the bits, marks all macroblocks as
 * GF-referencing, and flips the ARF pending/active flags. */
2842 static void update_alt_ref_frame_stats(VP8_COMP *cpi)
2844 VP8_COMMON *cm = &cpi->common;
2846 /* Select an interval before next GF or altref */
2847 if (!cpi->auto_gold)
2848 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2850 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
2852 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2854 /* Set the bits per frame that we should try and recover in
2855 * subsequent inter frames to account for the extra GF spend...
2856 * note that his does not apply for GF updates that occur
2857 * coincident with a key frame as the extra cost of key frames is
2858 * dealt with elsewhere.
2860 cpi->gf_overspend_bits += cpi->projected_frame_size;
2861 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2864 /* Update data structure that monitors level of reference to last GF */
2865 memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2866 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2868 /* this frame refreshes means next frames don't unless specified by user */
2869 cpi->frames_since_golden = 0;
2871 /* Clear the alternate reference update pending flag. */
2872 cpi->source_alt_ref_pending = 0;
2874 /* Set the alternate reference frame active flag */
2875 cpi->source_alt_ref_active = 1;
/* Bookkeeping performed after each frame for golden-frame (GF) state:
 * on a GF refresh it reschedules the GF interval, accounts the GF
 * overspend for later recovery, resets reference-usage monitors and
 * may arm the next ARF; on non-refresh frames it decrements the GF/ARF
 * countdowns and accumulates per-reference usage statistics. */
2879 static void update_golden_frame_stats(VP8_COMP *cpi)
2881 VP8_COMMON *cm = &cpi->common;
2883 /* Update the Golden frame usage counts. */
2884 if (cm->refresh_golden_frame)
2886 /* Select an interval before next GF */
2887 if (!cpi->auto_gold)
2888 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
2890 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
2892 cpi->current_gf_interval = cpi->frames_till_gf_update_due;
2894 /* Set the bits per frame that we should try and recover in
2895 * subsequent inter frames to account for the extra GF spend...
2896 * note that his does not apply for GF updates that occur
2897 * coincident with a key frame as the extra cost of key frames
2898 * is dealt with elsewhere.
2900 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
2902 /* Calcluate GF bits to be recovered
2903 * Projected size - av frame bits available for inter
2904 * frames for clip as a whole
2906 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
2909 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
2913 /* Update data structure that monitors level of reference to last GF */
2914 memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
2915 cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2917 /* this frame refreshes means next frames don't unless specified by
2920 cm->refresh_golden_frame = 0;
2921 cpi->frames_since_golden = 0;
/* Reset usage counters; they re-accumulate below on later frames. */
2923 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
2924 cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
2925 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
2926 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
2928 /* ******** Fixed Q test code only ************ */
2929 /* If we are going to use the ALT reference for the next group of
2930 * frames set a flag to say so.
2932 if (cpi->oxcf.fixed_q >= 0 &&
2933 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
2935 cpi->source_alt_ref_pending = 1;
2936 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
2939 if (!cpi->source_alt_ref_pending)
2940 cpi->source_alt_ref_active = 0;
2942 /* Decrement count down till next gf */
2943 if (cpi->frames_till_gf_update_due > 0)
2944 cpi->frames_till_gf_update_due--;
/* Non-GF, non-ARF frame: just run the countdowns and usage stats. */
2947 else if (!cpi->common.refresh_alt_ref_frame)
2949 /* Decrement count down till next gf */
2950 if (cpi->frames_till_gf_update_due > 0)
2951 cpi->frames_till_gf_update_due--;
2953 if (cpi->frames_till_alt_ref_frame)
2954 cpi->frames_till_alt_ref_frame --;
2956 cpi->frames_since_golden ++;
/* Skip the first frame after a GF so its own stats are excluded. */
2958 if (cpi->frames_since_golden > 1)
2960 cpi->recent_ref_frame_usage[INTRA_FRAME] +=
2961 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
2962 cpi->recent_ref_frame_usage[LAST_FRAME] +=
2963 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
2964 cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
2965 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
2966 cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
2967 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
/* This function updates the reference frame probability estimates that
 * will be used during mode selection
 */
/* Sets prob_intra_coded / prob_last_coded / prob_gf_coded from the
 * previous frame's per-reference macroblock usage counts, with fixed
 * values for key frames and for the degenerate no-counts case, then
 * applies single-layer heuristic adjustments tied to the GF/ARF cycle.
 * NOTE(review): excerpt - the general counts-based probability update
 * between lines 2993 and 2996 is elided here. */
2975 static void update_rd_ref_frame_probs(VP8_COMP *cpi)
2977 VP8_COMMON *cm = &cpi->common;
2979 const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
2980 const int rf_intra = rfct[INTRA_FRAME];
2981 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
2983 if (cm->frame_type == KEY_FRAME)
2985 cpi->prob_intra_coded = 255;
2986 cpi->prob_last_coded = 128;
2987 cpi->prob_gf_coded = 128;
2989 else if (!(rf_intra + rf_inter))
/* No usable counts: fall back to mild defaults. */
2991 cpi->prob_intra_coded = 63;
2992 cpi->prob_last_coded = 128;
2993 cpi->prob_gf_coded = 128;
2996 /* update reference frame costs since we can do better than what we got
2999 if (cpi->oxcf.number_of_layers == 1)
3001 if (cpi->common.refresh_alt_ref_frame)
/* ARF update frames are nearly always inter/GF coded. */
3003 cpi->prob_intra_coded += 40;
3004 if (cpi->prob_intra_coded > 255)
3005 cpi->prob_intra_coded = 255;
3006 cpi->prob_last_coded = 200;
3007 cpi->prob_gf_coded = 1;
3009 else if (cpi->frames_since_golden == 0)
3011 cpi->prob_last_coded = 214;
3013 else if (cpi->frames_since_golden == 1)
3015 cpi->prob_last_coded = 192;
3016 cpi->prob_gf_coded = 220;
3018 else if (cpi->source_alt_ref_active)
/* Deep into a GF group with an active ARF: GF usage decays. */
3020 cpi->prob_gf_coded -= 20;
3022 if (cpi->prob_gf_coded < 10)
3023 cpi->prob_gf_coded = 10;
3025 if (!cpi->source_alt_ref_active)
3026 cpi->prob_gf_coded = 255;
#if !CONFIG_REALTIME_ONLY
/* 1 = key, 0 = inter */
/* Heuristic key-frame decision for the one-pass encoder, based on the
 * fraction of intra-coded macroblocks in this vs the previous frame,
 * plus a fast-path test (speed >= 5, non-RD) on relative changes in
 * intra/prediction error. Speeds > 11 never insert key frames. */
3033 static int decide_key_frame(VP8_COMP *cpi)
3035 VP8_COMMON *cm = &cpi->common;
3037 int code_key_frame = 0;
/* Very high speeds: skip the analysis entirely (elided return). */
3041 if (cpi->Speed > 11)
3044 /* Clear down mmx registers */
3045 vp8_clear_system_state();
3047 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
/* Relative change in intra and inter prediction error vs last frame. */
3049 double change = 1.0 * abs((int)(cpi->mb.intra_error -
3050 cpi->last_intra_error)) / (1 + cpi->last_intra_error);
3051 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
3052 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
3053 double minerror = cm->MBs * 256;
3055 cpi->last_intra_error = cpi->mb.intra_error;
3056 cpi->last_prediction_error = cpi->mb.prediction_error;
/* Key frame when intra cost is close to inter cost and the error
 * statistics moved noticeably since the previous frame. */
3058 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
3059 && cpi->mb.prediction_error > minerror
3060 && (change > .25 || change2 > .25))
3062 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
3070 /* If the following are true we might as well code a key frame */
3071 if (((cpi->this_frame_percent_intra == 100) &&
3072 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
3073 ((cpi->this_frame_percent_intra > 95) &&
3074 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
3078 /* in addition if the following are true and this is not a golden frame
3079 * then code a key frame Note that on golden frames there often seems
3080 * to be a pop in intra useage anyway hence this restriction is
3081 * designed to prevent spurious key frames. The Intra pop needs to be
3084 else if (((cpi->this_frame_percent_intra > 60) &&
3085 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
3086 ((cpi->this_frame_percent_intra > 75) &&
3087 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
3088 ((cpi->this_frame_percent_intra > 90) &&
3089 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
3091 if (!cm->refresh_golden_frame)
3095 return code_key_frame;
/* First pass of the two-pass encoder: encode at a fixed quantizer (26)
 * purely to gather statistics via vp8_first_pass(); no bitstream
 * output is consumed from this pass ('size'/'dest' unused here). */
3099 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
3104 vp8_set_quantizer(cpi, 26);
3106 vp8_first_pass(cpi);
/* Debug helper: dump one YV12 frame to three raw files
 * (cx\y%04d.raw, cx\u%04d.raw, cx\v%04d.raw - Windows-style paths),
 * writing row by row to skip stride padding.
 * NOTE(review): fopen() results are not checked and the fclose()
 * calls are elided in this excerpt. */
3111 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
3114 /* write the frame */
3119 sprintf(filename, "cx\\y%04d.raw", this_frame);
3120 yframe = fopen(filename, "wb");
3122 for (i = 0; i < frame->y_height; i++)
3123 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
3126 sprintf(filename, "cx\\u%04d.raw", this_frame);
3127 yframe = fopen(filename, "wb");
3129 for (i = 0; i < frame->uv_height; i++)
3130 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
3133 sprintf(filename, "cx\\v%04d.raw", this_frame);
3134 yframe = fopen(filename, "wb");
3136 for (i = 0; i < frame->uv_height; i++)
3137 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
/* return of 0 means drop frame */

#if !CONFIG_REALTIME_ONLY
/* Function to test for conditions that indeicate we should loop
 * back and recode a frame.
 */
/* Returns non-zero if the frame should be re-encoded: either the
 * projected size over/undershoots [low_limit, high_limit] while q can
 * still move within [minq, maxq], or (constrained-quality mode only)
 * the undershoot rules around cq_level fire. Recode is gated on
 * sf.recode_loop (mode 1 = always allowed, mode 2 = only for
 * key/golden/alt-ref frames). */
3148 static int recode_loop_test( VP8_COMP *cpi,
3149 int high_limit, int low_limit,
3150 int q, int maxq, int minq )
3152 int force_recode = 0;
3153 VP8_COMMON *cm = &cpi->common;
3155 /* Is frame recode allowed at all
3156 * Yes if either recode mode 1 is selected or mode two is selcted
3157 * and the frame is a key frame. golden frame or alt_ref_frame
3159 if ( (cpi->sf.recode_loop == 1) ||
3160 ( (cpi->sf.recode_loop == 2) &&
3161 ( (cm->frame_type == KEY_FRAME) ||
3162 cm->refresh_golden_frame ||
3163 cm->refresh_alt_ref_frame ) ) )
3165 /* General over and under shoot tests */
3166 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
3167 ((cpi->projected_frame_size < low_limit) && (q > minq)) )
3171 /* Special Constrained quality tests */
3172 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
3174 /* Undershoot and below auto cq level */
3175 if ( (q > cpi->cq_target_quality) &&
3176 (cpi->projected_frame_size <
3177 ((cpi->this_frame_target * 7) >> 3)))
3181 /* Severe undershoot and between auto and user cq level */
3182 else if ( (q > cpi->oxcf.cq_level) &&
3183 (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
3184 (cpi->active_best_quality > cpi->oxcf.cq_level))
/* Side effect: clamp active_best_quality down to the user cq level. */
3187 cpi->active_best_quality = cpi->oxcf.cq_level;
3192 return force_recode;
#endif // !CONFIG_REALTIME_ONLY
/* After a frame is encoded, propagate it into the reference buffer
 * pool: key frames become last+golden+altref; otherwise the
 * refresh_* flags and the copy_buffer_to_arf/gf signals (1 = copy
 * from last, 2 = copy from the other of gf/arf) select which buffer
 * indices and VP8_*_FRAME flag bits move to the new frame. With the
 * temporal denoiser enabled, the running-average buffers are kept in
 * step with the same refresh decisions. */
3196 static void update_reference_frames(VP8_COMP *cpi)
3198 VP8_COMMON *cm = &cpi->common;
3199 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
3201 /* At this point the new frame has been encoded.
3202 * If any buffer copy / swapping is signaled it should be done here.
3205 if (cm->frame_type == KEY_FRAME)
/* Key frame refreshes golden and altref unconditionally. */
3207 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
3209 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3210 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3212 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
3214 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3215 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3217 else /* For non key frames */
3219 if (cm->refresh_alt_ref_frame)
3221 assert(!cm->copy_buffer_to_arf);
3223 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
3224 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3225 cm->alt_fb_idx = cm->new_fb_idx;
3227 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
3229 else if (cm->copy_buffer_to_arf)
3231 assert(!(cm->copy_buffer_to_arf & ~0x3));
3233 if (cm->copy_buffer_to_arf == 1)
/* Alias altref onto the last-frame buffer (no pixel copy). */
3235 if(cm->alt_fb_idx != cm->lst_fb_idx)
3237 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
3238 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3239 cm->alt_fb_idx = cm->lst_fb_idx;
3241 cpi->current_ref_frames[ALTREF_FRAME] =
3242 cpi->current_ref_frames[LAST_FRAME];
3245 else /* if (cm->copy_buffer_to_arf == 2) */
/* Alias altref onto the golden buffer. */
3247 if(cm->alt_fb_idx != cm->gld_fb_idx)
3249 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
3250 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
3251 cm->alt_fb_idx = cm->gld_fb_idx;
3253 cpi->current_ref_frames[ALTREF_FRAME] =
3254 cpi->current_ref_frames[GOLDEN_FRAME];
3259 if (cm->refresh_golden_frame)
3261 assert(!cm->copy_buffer_to_gf);
3263 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
3264 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3265 cm->gld_fb_idx = cm->new_fb_idx;
3267 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
3269 else if (cm->copy_buffer_to_gf)
/* NOTE(review): this asserts copy_buffer_to_arf inside the
 * copy_buffer_to_gf branch - looks like a copy-paste slip; it
 * presumably should check cm->copy_buffer_to_gf. Confirm upstream. */
3271 assert(!(cm->copy_buffer_to_arf & ~0x3));
3273 if (cm->copy_buffer_to_gf == 1)
/* Alias golden onto the last-frame buffer. */
3275 if(cm->gld_fb_idx != cm->lst_fb_idx)
3277 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
3278 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3279 cm->gld_fb_idx = cm->lst_fb_idx;
3281 cpi->current_ref_frames[GOLDEN_FRAME] =
3282 cpi->current_ref_frames[LAST_FRAME];
3285 else /* if (cm->copy_buffer_to_gf == 2) */
/* Alias golden onto the altref buffer. */
3287 if(cm->alt_fb_idx != cm->gld_fb_idx)
3289 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
3290 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
3291 cm->gld_fb_idx = cm->alt_fb_idx;
3293 cpi->current_ref_frames[GOLDEN_FRAME] =
3294 cpi->current_ref_frames[ALTREF_FRAME];
3300 if (cm->refresh_last_frame)
3302 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
3303 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
3304 cm->lst_fb_idx = cm->new_fb_idx;
3306 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
3309 #if CONFIG_TEMPORAL_DENOISING
3310 if (cpi->oxcf.noise_sensitivity)
3312 /* we shouldn't have to keep multiple copies as we know in advance which
3313 * buffer we should start - for now to get something up and running
3314 * I've chosen to copy the buffers
3316 if (cm->frame_type == KEY_FRAME)
/* Reset every running-average buffer to the key-frame source. */
3319 for (i = LAST_FRAME; i < MAX_REF_FRAMES; ++i)
3320 vp8_yv12_copy_frame(cpi->Source,
3321 &cpi->denoiser.yv12_running_avg[i]);
3323 else /* For non key frames */
3325 vp8_yv12_extend_frame_borders(
3326 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
3328 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
3330 vp8_yv12_copy_frame(
3331 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3332 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
3334 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
3336 vp8_yv12_copy_frame(
3337 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3338 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
3340 if(cm->refresh_last_frame)
3342 vp8_yv12_copy_frame(
3343 &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
3344 &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
/* Mode 4 additionally keeps a copy of the raw source for next frame. */
3347 if (cpi->oxcf.noise_sensitivity == 4)
3348 vp8_yv12_copy_frame(cpi->Source, &cpi->denoiser.yv12_last_source);
/* Average per-block MSE between 'source' and 'dest' luma planes,
 * sampled every |skip| 16x16 blocks and restricted to blocks that have
 * been ZERO_LAST for at least min_consec_zero_last frames. Returns 0
 * unless at least ~1/16 of all blocks contributed (elided return 0).
 * NOTE(review): excerpt - the 'skip' parameter, and the Total/
 * num_blocks declarations and num_blocks increment, are elided here. */
3355 static int measure_square_diff_partial(YV12_BUFFER_CONFIG *source,
3356 YV12_BUFFER_CONFIG *dest,
3363 int min_consec_zero_last = 10;
/* >> 8 == divide by 16x16: total count of 16x16 blocks in the plane. */
3364 int tot_num_blocks = (source->y_height * source->y_width) >> 8;
3365 unsigned char *src = source->y_buffer;
3366 unsigned char *dst = dest->y_buffer;
3368 /* Loop through the Y plane, every |skip| blocks along rows and colmumns,
3369 * summing the square differences, and only for blocks that have been
3370 * zero_last mode at least |x| frames in a row.
3372 for (i = 0; i < source->y_height; i += 16 * skip)
3374 int block_index_row = (i >> 4) * cpi->common.mb_cols;
3375 for (j = 0; j < source->y_width; j += 16 * skip)
3377 int index = block_index_row + (j >> 4);
3378 if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
3380 Total += vpx_mse16x16(src + j,
3382 dst + j, dest->y_stride,
3387 src += 16 * skip * source->y_stride;
3388 dst += 16 * skip * dest->y_stride;
3390 // Only return non-zero if we have at least ~1/16 samples for estimate.
3391 if (num_blocks > (tot_num_blocks >> 4)) {
3392 return (Total / num_blocks);
#if CONFIG_TEMPORAL_DENOISING
/* Periodically (called every 8 frames per the comment below) measures
 * a contrast-normalized mean-square source difference over stable
 * (long-ZEROMV) blocks, folds it into a recursive average, and after
 * num_mode_change samples switches the denoiser between normal
 * (kDenoiserOnYUV) and aggressive (kDenoiserOnYUVAggressive) modes
 * based on that metric, the average QP and the target bitrate.
 * NOTE(review): excerpt - declarations of i/j/total/num_blocks/skip,
 * the sse/variance out-arguments, and the per-block normalization
 * 'total += variance/act' lines are elided here. */
3399 static void process_denoiser_mode_change(VP8_COMP *cpi) {
3400 const VP8_COMMON *const cm = &cpi->common;
3404 // Number of blocks skipped along row/column in computing the
3405 // nmse (normalized mean square error) of source.
3407 // Only select blocks for computing nmse that have been encoded
3408 // as ZERO LAST min_consec_zero_last frames in a row.
3409 // Scale with number of temporal layers.
3410 int min_consec_zero_last = 12 / cpi->oxcf.number_of_layers;
3411 // Decision is tested for changing the denoising mode every
3412 // num_mode_change times this function is called. Note that this
3413 // function called every 8 frames, so (8 * num_mode_change) is number
3414 // of frames where denoising mode change is tested for switch.
3415 int num_mode_change = 20;
3416 // Framerate factor, to compensate for larger mse at lower framerates.
3417 // Use ref_framerate, which is full source framerate for temporal layers.
3418 // TODO(marpan): Adjust this factor.
3419 int fac_framerate = cpi->ref_framerate < 25.0f ? 80 : 100;
3420 int tot_num_blocks = cm->mb_rows * cm->mb_cols;
3421 int ystride = cpi->Source->y_stride;
3422 unsigned char *src = cpi->Source->y_buffer;
3423 unsigned char *dst = cpi->denoiser.yv12_last_source.y_buffer;
// Flat mid-grey 16-pixel row used as reference for variance-as-contrast.
3424 static const unsigned char const_source[16] = {
3425 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
3427 int bandwidth = (int)(cpi->target_bandwidth);
3428 // For temporal layers, use full bandwidth (top layer).
3429 if (cpi->oxcf.number_of_layers > 1) {
3430 LAYER_CONTEXT *lc = &cpi->layer_context[cpi->oxcf.number_of_layers - 1];
3431 bandwidth = (int)(lc->target_bandwidth);
3433 // Loop through the Y plane, every skip blocks along rows and columns,
3434 // summing the normalized mean square error, only for blocks that have
3435 // been encoded as ZEROMV LAST at least min_consec_zero_last least frames in
3436 // a row and have small sum difference between current and previous frame.
3437 // Normalization here is by the contrast of the current frame block.
3438 for (i = 0; i < cm->Height; i += 16 * skip) {
3439 int block_index_row = (i >> 4) * cm->mb_cols;
3440 for (j = 0; j < cm->Width; j += 16 * skip) {
3441 int index = block_index_row + (j >> 4);
3442 if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
3444 const unsigned int var = vpx_variance16x16(src + j,
3449 // Only consider this block as valid for noise measurement
3450 // if the sum_diff average of the current and previous frame
3451 // is small (to avoid effects from lighting change).
3452 if ((sse - var) < 128) {
3454 const unsigned int act = vpx_variance16x16(src + j,
3465 src += 16 * skip * ystride;
3466 dst += 16 * skip * ystride;
3468 total = total * fac_framerate / 100;
3470 // Only consider this frame as valid sample if we have computed nmse over
3471 // at least ~1/16 blocks, and Total > 0 (Total == 0 can happen if the
3472 // application inputs duplicate frames, or contrast is all zero).
3474 (num_blocks > (tot_num_blocks >> 4))) {
3475 // Update the recursive mean square source_diff.
3476 total = (total << 8) / num_blocks;
3477 if (cpi->denoiser.nmse_source_diff_count == 0) {
3478 // First sample in new interval.
3479 cpi->denoiser.nmse_source_diff = total;
3480 cpi->denoiser.qp_avg = cm->base_qindex;
3482 // For subsequent samples, use average with weight ~1/4 for new sample.
3483 cpi->denoiser.nmse_source_diff = (int)((total +
3484 3 * cpi->denoiser.nmse_source_diff) >> 2);
3485 cpi->denoiser.qp_avg = (int)((cm->base_qindex +
3486 3 * cpi->denoiser.qp_avg) >> 2);
3488 cpi->denoiser.nmse_source_diff_count++;
3490 // Check for changing the denoiser mode, when we have obtained #samples =
3491 // num_mode_change. Condition the change also on the bitrate and QP.
3492 if (cpi->denoiser.nmse_source_diff_count == num_mode_change) {
3493 // Check for going up: from normal to aggressive mode.
3494 if ((cpi->denoiser.denoiser_mode == kDenoiserOnYUV) &&
3495 (cpi->denoiser.nmse_source_diff >
3496 cpi->denoiser.threshold_aggressive_mode) &&
3497 (cpi->denoiser.qp_avg < cpi->denoiser.qp_threshold_up &&
3498 bandwidth > cpi->denoiser.bitrate_threshold)) {
3499 vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUVAggressive);
3501 // Check for going down: from aggressive to normal mode.
3502 if (((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
3503 (cpi->denoiser.nmse_source_diff <
3504 cpi->denoiser.threshold_aggressive_mode)) ||
3505 ((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
3506 (cpi->denoiser.qp_avg > cpi->denoiser.qp_threshold_down ||
3507 bandwidth < cpi->denoiser.bitrate_threshold))) {
3508 vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
3511 // Reset metric and counter for next interval.
3512 cpi->denoiser.nmse_source_diff = 0;
3513 cpi->denoiser.qp_avg = 0;
3514 cpi->denoiser.nmse_source_diff_count = 0;
/* Choose the loop-filter level for the just-encoded frame (fast or
 * full search depending on sf.auto_filter; the denoised running
 * average is used as the reference source when the temporal denoiser
 * is active on non-key frames), then apply the loop filter - but only
 * when the frame actually updates at least one reference buffer -
 * and finally extend the borders of the frame to show. Under
 * CONFIG_MULTITHREAD, posts h_event_end_lpf as soon as filter_level
 * is decided so waiting threads can proceed. */
3519 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
3521 const FRAME_TYPE frame_type = cm->frame_type;
3523 int update_any_ref_buffers = 1;
3524 if (cpi->common.refresh_last_frame == 0 &&
3525 cpi->common.refresh_golden_frame == 0 &&
3526 cpi->common.refresh_alt_ref_frame == 0) {
3527 update_any_ref_buffers = 0;
/* NOTE(review): excerpt - the branch that leads to filter_level = 0
 * (filtering disabled) is elided here. */
3532 cm->filter_level = 0;
3536 struct vpx_usec_timer timer;
3538 vp8_clear_system_state();
/* Time the filter-level search for the time_pick_lpf statistic. */
3540 vpx_usec_timer_start(&timer);
3541 if (cpi->sf.auto_filter == 0) {
3542 #if CONFIG_TEMPORAL_DENOISING
3543 if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
3544 // Use the denoised buffer for selecting base loop filter level.
3545 // Denoised signal for current frame is stored in INTRA_FRAME.
3546 // No denoising on key frames.
3547 vp8cx_pick_filter_level_fast(
3548 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi);
3550 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
3553 vp8cx_pick_filter_level_fast(cpi->Source, cpi);
/* auto_filter != 0: full filter-level search (elided else header). */
3556 #if CONFIG_TEMPORAL_DENOISING
3557 if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
3558 // Use the denoised buffer for selecting base loop filter level.
3559 // Denoised signal for current frame is stored in INTRA_FRAME.
3560 // No denoising on key frames.
3561 vp8cx_pick_filter_level(
3562 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi);
3564 vp8cx_pick_filter_level(cpi->Source, cpi);
3567 vp8cx_pick_filter_level(cpi->Source, cpi);
3572 if (cm->filter_level > 0)
3574 vp8cx_set_alt_lf_level(cpi, cm->filter_level);
3577 vpx_usec_timer_mark(&timer);
3578 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
3581 #if CONFIG_MULTITHREAD
3582 if (cpi->b_multi_threaded)
3583 sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
3586 // No need to apply loop-filter if the encoded frame does not update
3587 // any reference buffers.
3588 if (cm->filter_level > 0 && update_any_ref_buffers)
3590 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
3593 vp8_yv12_extend_frame_borders(cm->frame_to_show);
3597 static void encode_frame_to_data_rate
3600 unsigned long *size,
3601 unsigned char *dest,
3602 unsigned char* dest_end,
3603 unsigned int *frame_flags
3607 int frame_over_shoot_limit;
3608 int frame_under_shoot_limit;
3613 VP8_COMMON *cm = &cpi->common;
3614 int active_worst_qchanged = 0;
3616 #if !CONFIG_REALTIME_ONLY
3620 int zbin_oq_low = 0;
3623 int overshoot_seen = 0;
3624 int undershoot_seen = 0;
3627 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
3628 cpi->oxcf.optimal_buffer_level / 100);
3629 int drop_mark75 = drop_mark * 2 / 3;
3630 int drop_mark50 = drop_mark / 4;
3631 int drop_mark25 = drop_mark / 8;
3634 /* Clear down mmx registers to allow floating point in what follows */
3635 vp8_clear_system_state();
3637 #if CONFIG_MULTITHREAD
3638 /* wait for the last picture loopfilter thread done */
3639 if (cpi->b_lpf_running)
3641 sem_wait(&cpi->h_event_end_lpf);
3642 cpi->b_lpf_running = 0;
3646 if(cpi->force_next_frame_intra)
3648 cm->frame_type = KEY_FRAME; /* delayed intra frame */
3649 cpi->force_next_frame_intra = 0;
3652 /* For an alt ref frame in 2 pass we skip the call to the second pass
3653 * function that sets the target bandwidth
3655 #if !CONFIG_REALTIME_ONLY
3659 if (cpi->common.refresh_alt_ref_frame)
3661 /* Per frame bit target for the alt ref frame */
3662 cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
3663 /* per second target bitrate */
3664 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
3665 cpi->output_framerate);
3670 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
3672 /* Default turn off buffer to buffer copying */
3673 cm->copy_buffer_to_gf = 0;
3674 cm->copy_buffer_to_arf = 0;
3676 /* Clear zbin over-quant value and mode boost values. */
3677 cpi->mb.zbin_over_quant = 0;
3678 cpi->mb.zbin_mode_boost = 0;
3680 /* Enable or disable mode based tweaking of the zbin
3681 * For 2 Pass Only used where GF/ARF prediction quality
3682 * is above a threshold
3684 cpi->mb.zbin_mode_boost_enabled = 1;
3687 if ( cpi->gfu_boost <= 400 )
3689 cpi->mb.zbin_mode_boost_enabled = 0;
3693 /* Current default encoder behaviour for the altref sign bias */
3694 if (cpi->source_alt_ref_active)
3695 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
3697 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
3699 /* Check to see if a key frame is signaled
3700 * For two pass with auto key frame enabled cm->frame_type may already
3701 * be set, but not for one pass.
3703 if ((cm->current_video_frame == 0) ||
3704 (cm->frame_flags & FRAMEFLAGS_KEY) ||
3705 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
3707 /* Key frame from VFW/auto-keyframe/first frame */
3708 cm->frame_type = KEY_FRAME;
3709 #if CONFIG_TEMPORAL_DENOISING
3710 if (cpi->oxcf.noise_sensitivity == 4) {
3711 // For adaptive mode, reset denoiser to normal mode on key frame.
3712 vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
3717 #if CONFIG_MULTI_RES_ENCODING
3718 if (cpi->oxcf.mr_total_resolutions > 1) {
3719 LOWER_RES_FRAME_INFO* low_res_frame_info
3720 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
3722 if (cpi->oxcf.mr_encoder_id) {
3724 // TODO(marpan): This constraint shouldn't be needed, as we would like
3725 // to allow for key frame setting (forced or periodic) defined per
3726 // spatial layer. For now, keep this in.
3727 cm->frame_type = low_res_frame_info->frame_type;
3729 // Check if lower resolution is available for motion vector reuse.
3730 if(cm->frame_type != KEY_FRAME)
3732 cpi->mr_low_res_mv_avail = 1;
3733 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
3735 if (cpi->ref_frame_flags & VP8_LAST_FRAME)
3736 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
3737 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
3739 if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
3740 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
3741 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
3743 // Don't use altref to determine whether low res is available.
3744 // TODO (marpan): Should we make this type of condition on a
3745 // per-reference frame basis?
3747 if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
3748 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
3749 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
3754 // On a key frame: For the lowest resolution, keep track of the key frame
3755 // counter value. For the higher resolutions, reset the current video
3756 // frame counter to that of the lowest resolution.
3757 // This is done to the handle the case where we may stop/start encoding
3758 // higher layer(s). The restart-encoding of higher layer is only signaled
3759 // by a key frame for now.
3760 // TODO (marpan): Add flag to indicate restart-encoding of higher layer.
3761 if (cm->frame_type == KEY_FRAME) {
3762 if (cpi->oxcf.mr_encoder_id) {
3763 // If the initial starting value of the buffer level is zero (this can
3764 // happen because we may have not started encoding this higher stream),
3765 // then reset it to non-zero value based on |starting_buffer_level|.
3766 if (cpi->common.current_video_frame == 0 && cpi->buffer_level == 0) {
3768 cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
3769 cpi->buffer_level = cpi->oxcf.starting_buffer_level;
3770 for (i = 0; i < cpi->oxcf.number_of_layers; i++) {
3771 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3772 lc->bits_off_target = lc->starting_buffer_level;
3773 lc->buffer_level = lc->starting_buffer_level;
3776 cpi->common.current_video_frame =
3777 low_res_frame_info->key_frame_counter_value;
3779 low_res_frame_info->key_frame_counter_value =
3780 cpi->common.current_video_frame;
3787 // Find the reference frame closest to the current frame.
3788 cpi->closest_reference_frame = LAST_FRAME;
3789 if(cm->frame_type != KEY_FRAME) {
3791 MV_REFERENCE_FRAME closest_ref = INTRA_FRAME;
3792 if (cpi->ref_frame_flags & VP8_LAST_FRAME) {
3793 closest_ref = LAST_FRAME;
3794 } else if (cpi->ref_frame_flags & VP8_GOLD_FRAME) {
3795 closest_ref = GOLDEN_FRAME;
3796 } else if (cpi->ref_frame_flags & VP8_ALTR_FRAME) {
3797 closest_ref = ALTREF_FRAME;
3799 for(i = 1; i <= 3; i++) {
3800 vpx_ref_frame_type_t ref_frame_type = (vpx_ref_frame_type_t)
3802 if (cpi->ref_frame_flags & ref_frame_type) {
3803 if ((cm->current_video_frame - cpi->current_ref_frames[i]) <
3804 (cm->current_video_frame - cpi->current_ref_frames[closest_ref])) {
3809 cpi->closest_reference_frame = closest_ref;
3812 /* Set various flags etc to special state if it is a key frame */
3813 if (cm->frame_type == KEY_FRAME)
3817 // Set the loop filter deltas and segmentation map update
3818 setup_features(cpi);
3820 /* The alternate reference frame cannot be active for a key frame */
3821 cpi->source_alt_ref_active = 0;
3823 /* Reset the RD threshold multipliers to default of * 1 (128) */
3824 for (i = 0; i < MAX_MODES; i++)
3826 cpi->mb.rd_thresh_mult[i] = 128;
3829 // Reset the zero_last counter to 0 on key frame.
3830 memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
3831 memset(cpi->consec_zero_last_mvbias, 0,
3832 (cpi->common.mb_rows * cpi->common.mb_cols));
3836 /* Experimental code for lagged compress and one pass
3837 * Initialise one_pass GF frames stats
3838 * Update stats used for GF selection
3841 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
3843 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
3844 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
3845 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
3846 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
3847 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
3848 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
3849 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
3850 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
3851 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
3855 update_rd_ref_frame_probs(cpi);
3857 if (cpi->drop_frames_allowed)
3859 /* The reset to decimation 0 is only done here for one pass.
3860 * Once it is set two pass leaves decimation on till the next kf.
3862 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
3863 cpi->decimation_factor --;
3865 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
3866 cpi->decimation_factor = 1;
3868 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
3870 cpi->decimation_factor = 3;
3872 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
3874 cpi->decimation_factor = 2;
3876 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
3878 cpi->decimation_factor = 1;
3882 /* The following decimates the frame rate according to a regular
3883 * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
3884 * prevent buffer under-run in CBR mode. Alternatively it might be
3885 * desirable in some situations to drop frame rate but throw more bits
3888 * Note that dropping a key frame can be problematic if spatial
3889 * resampling is also active
3891 if (cpi->decimation_factor > 0)
3893 switch (cpi->decimation_factor)
3896 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
3899 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3902 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
3906 /* Note that we should not throw out a key frame (especially when
3907 * spatial resampling is enabled).
3909 if (cm->frame_type == KEY_FRAME)
3911 cpi->decimation_count = cpi->decimation_factor;
3913 else if (cpi->decimation_count > 0)
3915 cpi->decimation_count --;
3917 cpi->bits_off_target += cpi->av_per_frame_bandwidth;
3918 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
3919 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
3921 #if CONFIG_MULTI_RES_ENCODING
3922 vp8_store_drop_frame_info(cpi);
3925 cm->current_video_frame++;
3926 cpi->frames_since_key++;
3927 // We advance the temporal pattern for dropped frames.
3928 cpi->temporal_pattern_counter++;
3930 #if CONFIG_INTERNAL_STATS
3934 cpi->buffer_level = cpi->bits_off_target;
3936 if (cpi->oxcf.number_of_layers > 1)
3940 /* Propagate bits saved by dropping the frame to higher
3943 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
3945 LAYER_CONTEXT *lc = &cpi->layer_context[i];
3946 lc->bits_off_target += (int)(lc->target_bandwidth /
3948 if (lc->bits_off_target > lc->maximum_buffer_size)
3949 lc->bits_off_target = lc->maximum_buffer_size;
3950 lc->buffer_level = lc->bits_off_target;
3957 cpi->decimation_count = cpi->decimation_factor;
3960 cpi->decimation_count = 0;
3962 /* Decide how big to make the frame */
3963 if (!vp8_pick_frame_size(cpi))
3965 /*TODO: 2 drop_frame and return code could be put together. */
3966 #if CONFIG_MULTI_RES_ENCODING
3967 vp8_store_drop_frame_info(cpi);
3969 cm->current_video_frame++;
3970 cpi->frames_since_key++;
3971 // We advance the temporal pattern for dropped frames.
3972 cpi->temporal_pattern_counter++;
3976 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
3977 * This has a knock on effect on active best quality as well.
3978 * For CBR if the buffer reaches its maximum level then we can no longer
3979 * save up bits for later frames so we might as well use them up
3980 * on the current frame.
3982 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
3983 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
3985 /* Max adjustment is 1/4 */
3986 int Adjustment = cpi->active_worst_quality / 4;
3992 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
3994 buff_lvl_step = (int)
3995 ((cpi->oxcf.maximum_buffer_size -
3996 cpi->oxcf.optimal_buffer_level) /
4001 ((cpi->buffer_level -
4002 cpi->oxcf.optimal_buffer_level) /
4008 cpi->active_worst_quality -= Adjustment;
4010 if(cpi->active_worst_quality < cpi->active_best_quality)
4011 cpi->active_worst_quality = cpi->active_best_quality;
4015 /* Set an active best quality and if necessary active worst quality
4016 * There is some odd behavior for one pass here that needs attention.
4018 if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
4020 vp8_clear_system_state();
4022 Q = cpi->active_worst_quality;
4024 if ( cm->frame_type == KEY_FRAME )
4026 if ( cpi->pass == 2 )
4028 if (cpi->gfu_boost > 600)
4029 cpi->active_best_quality = kf_low_motion_minq[Q];
4031 cpi->active_best_quality = kf_high_motion_minq[Q];
4033 /* Special case for key frames forced because we have reached
4034 * the maximum key frame interval. Here force the Q to a range
4035 * based on the ambient Q to reduce the risk of popping
4037 if ( cpi->this_key_frame_forced )
4039 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
4040 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
4041 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
4042 cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
4045 /* One pass more conservative */
4047 cpi->active_best_quality = kf_high_motion_minq[Q];
4050 else if (cpi->oxcf.number_of_layers==1 &&
4051 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
4053 /* Use the lower of cpi->active_worst_quality and recent
4054 * average Q as basis for GF/ARF Q limit unless last frame was
4057 if ( (cpi->frames_since_key > 1) &&
4058 (cpi->avg_frame_qindex < cpi->active_worst_quality) )
4060 Q = cpi->avg_frame_qindex;
4063 /* For constrained quality dont allow Q less than the cq level */
4064 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4065 (Q < cpi->cq_target_quality) )
4067 Q = cpi->cq_target_quality;
4070 if ( cpi->pass == 2 )
4072 if ( cpi->gfu_boost > 1000 )
4073 cpi->active_best_quality = gf_low_motion_minq[Q];
4074 else if ( cpi->gfu_boost < 400 )
4075 cpi->active_best_quality = gf_high_motion_minq[Q];
4077 cpi->active_best_quality = gf_mid_motion_minq[Q];
4079 /* Constrained quality use slightly lower active best. */
4080 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
4082 cpi->active_best_quality =
4083 cpi->active_best_quality * 15/16;
4086 /* One pass more conservative */
4088 cpi->active_best_quality = gf_high_motion_minq[Q];
4092 cpi->active_best_quality = inter_minq[Q];
4094 /* For the constant/constrained quality mode we dont want
4095 * q to fall below the cq level.
4097 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4098 (cpi->active_best_quality < cpi->cq_target_quality) )
4100 /* If we are strongly undershooting the target rate in the last
4101 * frames then use the user passed in cq value not the auto
4104 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
4105 cpi->active_best_quality = cpi->oxcf.cq_level;
4107 cpi->active_best_quality = cpi->cq_target_quality;
4111 /* If CBR and the buffer is as full then it is reasonable to allow
4112 * higher quality on the frames to prevent bits just going to waste.
4114 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
4116 /* Note that the use of >= here elliminates the risk of a devide
4117 * by 0 error in the else if clause
4119 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
4120 cpi->active_best_quality = cpi->best_quality;
4122 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
4124 int Fraction = (int)
4125 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
4126 / (cpi->oxcf.maximum_buffer_size -
4127 cpi->oxcf.optimal_buffer_level));
4128 int min_qadjustment = ((cpi->active_best_quality -
4129 cpi->best_quality) * Fraction) / 128;
4131 cpi->active_best_quality -= min_qadjustment;
4135 /* Make sure constrained quality mode limits are adhered to for the first
4136 * few frames of one pass encodes
4138 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
4140 if ( (cm->frame_type == KEY_FRAME) ||
4141 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
4143 cpi->active_best_quality = cpi->best_quality;
4145 else if (cpi->active_best_quality < cpi->cq_target_quality)
4147 cpi->active_best_quality = cpi->cq_target_quality;
4151 /* Clip the active best and worst quality values to limits */
4152 if (cpi->active_worst_quality > cpi->worst_quality)
4153 cpi->active_worst_quality = cpi->worst_quality;
4155 if (cpi->active_best_quality < cpi->best_quality)
4156 cpi->active_best_quality = cpi->best_quality;
4158 if ( cpi->active_worst_quality < cpi->active_best_quality )
4159 cpi->active_worst_quality = cpi->active_best_quality;
4161 /* Determine initial Q to try */
4162 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4164 #if !CONFIG_REALTIME_ONLY
4166 /* Set highest allowed value for Zbin over quant */
4167 if (cm->frame_type == KEY_FRAME)
4169 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
4170 (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
4175 zbin_oq_high = ZBIN_OQ_MAX;
4178 /* Setup background Q adjustment for error resilient mode.
4179 * For multi-layer encodes only enable this for the base layer.
4181 if (cpi->cyclic_refresh_mode_enabled)
4183 // Special case for screen_content_mode with golden frame updates.
4184 int disable_cr_gf = (cpi->oxcf.screen_content_mode == 2 &&
4185 cm->refresh_golden_frame);
4186 if (cpi->current_layer == 0 && cpi->force_maxqp == 0 && !disable_cr_gf)
4187 cyclic_background_refresh(cpi, Q, 0);
4189 disable_segmentation(cpi);
4192 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4194 #if !CONFIG_REALTIME_ONLY
4195 /* Limit Q range for the adaptive loop. */
4196 bottom_index = cpi->active_best_quality;
4197 top_index = cpi->active_worst_quality;
4198 q_low = cpi->active_best_quality;
4199 q_high = cpi->active_worst_quality;
4202 vp8_save_coding_context(cpi);
4206 scale_and_extend_source(cpi->un_scaled_source, cpi);
4208 #if CONFIG_TEMPORAL_DENOISING && CONFIG_POSTPROC
4209 // Option to apply spatial blur under the aggressive or adaptive
4210 // (temporal denoising) mode.
4211 if (cpi->oxcf.noise_sensitivity >= 3) {
4212 if (cpi->denoiser.denoise_pars.spatial_blur != 0) {
4213 vp8_de_noise(cm, cpi->Source, cpi->Source,
4214 cpi->denoiser.denoise_pars.spatial_blur, 1, 0, 0);
4219 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
4221 if (cpi->oxcf.noise_sensitivity > 0)
4226 switch (cpi->oxcf.noise_sensitivity)
4249 if (cm->frame_type == KEY_FRAME)
4251 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0, 1);
4255 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0, 1);
4257 src = cpi->Source->y_buffer;
4259 if (cpi->Source->y_stride < 0)
4261 src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
4269 #ifdef OUTPUT_YUV_SRC
4270 vp8_write_yuv_frame(yuv_file, cpi->Source);
4275 vp8_clear_system_state();
4277 vp8_set_quantizer(cpi, Q);
4279 /* setup skip prob for costing in mode/mv decision */
4280 if (cpi->common.mb_no_coeff_skip)
4282 cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
4284 if (cm->frame_type != KEY_FRAME)
4286 if (cpi->common.refresh_alt_ref_frame)
4288 if (cpi->last_skip_false_probs[2] != 0)
4289 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
4292 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
4293 cpi->prob_skip_false = cpi->last_skip_false_probs[2];
4294 else if (cpi->last_skip_false_probs[2]!=0)
4295 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
4298 else if (cpi->common.refresh_golden_frame)
4300 if (cpi->last_skip_false_probs[1] != 0)
4301 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
4304 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
4305 cpi->prob_skip_false = cpi->last_skip_false_probs[1];
4306 else if (cpi->last_skip_false_probs[1]!=0)
4307 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
4312 if (cpi->last_skip_false_probs[0] != 0)
4313 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
4316 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
4317 cpi->prob_skip_false = cpi->last_skip_false_probs[0];
4318 else if(cpi->last_skip_false_probs[0]!=0)
4319 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
4323 /* as this is for cost estimate, let's make sure it does not
4324 * go extreme eitehr way
4326 if (cpi->prob_skip_false < 5)
4327 cpi->prob_skip_false = 5;
4329 if (cpi->prob_skip_false > 250)
4330 cpi->prob_skip_false = 250;
4332 if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
4333 cpi->prob_skip_false = 1;
4340 FILE *f = fopen("skip.stt", "a");
4341 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
4349 if (cm->frame_type == KEY_FRAME)
4351 if(resize_key_frame(cpi))
4353 /* If the frame size has changed, need to reset Q, quantizer,
4354 * and background refresh.
4356 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4357 if (cpi->cyclic_refresh_mode_enabled)
4359 if (cpi->current_layer==0)
4360 cyclic_background_refresh(cpi, Q, 0);
4362 disable_segmentation(cpi);
4364 // Reset the zero_last counter to 0 on key frame.
4365 memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
4366 memset(cpi->consec_zero_last_mvbias, 0,
4367 (cpi->common.mb_rows * cpi->common.mb_cols));
4368 vp8_set_quantizer(cpi, Q);
4371 vp8_setup_key_frame(cpi);
4376 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
4378 if(cpi->oxcf.error_resilient_mode)
4379 cm->refresh_entropy_probs = 0;
4381 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
4383 if (cm->frame_type == KEY_FRAME)
4384 cm->refresh_entropy_probs = 1;
4387 if (cm->refresh_entropy_probs == 0)
4389 /* save a copy for later refresh */
4390 memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
4393 vp8_update_coef_context(cpi);
4395 vp8_update_coef_probs(cpi);
4397 /* transform / motion compensation build reconstruction frame
4398 * +pack coef partitions
4400 vp8_encode_frame(cpi);
4402 /* cpi->projected_frame_size is not needed for RT mode */
4405 /* transform / motion compensation build reconstruction frame */
4406 vp8_encode_frame(cpi);
4408 if (cpi->oxcf.screen_content_mode == 2) {
4409 if (vp8_drop_encodedframe_overshoot(cpi, Q))
4413 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
4414 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
4416 vp8_clear_system_state();
4418 /* Test to see if the stats generated for this frame indicate that
4419 * we should have coded a key frame (assuming that we didn't)!
4422 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
4423 && cpi->compressor_speed != 2)
4425 #if !CONFIG_REALTIME_ONLY
4426 if (decide_key_frame(cpi))
4428 /* Reset all our sizing numbers and recode */
4429 cm->frame_type = KEY_FRAME;
4431 vp8_pick_frame_size(cpi);
4433 /* Clear the Alt reference frame active flag when we have
4436 cpi->source_alt_ref_active = 0;
4438 // Set the loop filter deltas and segmentation map update
4439 setup_features(cpi);
4441 vp8_restore_coding_context(cpi);
4443 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4445 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
4447 /* Limit Q range for the adaptive loop. */
4448 bottom_index = cpi->active_best_quality;
4449 top_index = cpi->active_worst_quality;
4450 q_low = cpi->active_best_quality;
4451 q_high = cpi->active_worst_quality;
4461 vp8_clear_system_state();
4463 if (frame_over_shoot_limit == 0)
4464 frame_over_shoot_limit = 1;
4466 /* Are we are overshooting and up against the limit of active max Q. */
4467 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
4468 (Q == cpi->active_worst_quality) &&
4469 (cpi->active_worst_quality < cpi->worst_quality) &&
4470 (cpi->projected_frame_size > frame_over_shoot_limit))
4472 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
4474 /* If so is there any scope for relaxing it */
4475 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
4477 cpi->active_worst_quality++;
4478 /* Assume 1 qstep = about 4% on frame size. */
4479 over_size_percent = (int)(over_size_percent * 0.96);
4481 #if !CONFIG_REALTIME_ONLY
4482 top_index = cpi->active_worst_quality;
4483 #endif // !CONFIG_REALTIME_ONLY
4484 /* If we have updated the active max Q do not call
4485 * vp8_update_rate_correction_factors() this loop.
4487 active_worst_qchanged = 1;
4490 active_worst_qchanged = 0;
4492 #if !CONFIG_REALTIME_ONLY
4493 /* Special case handling for forced key frames */
4494 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
4497 int kf_err = vp8_calc_ss_err(cpi->Source,
4498 &cm->yv12_fb[cm->new_fb_idx]);
4500 /* The key frame is not good enough */
4501 if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
4504 q_high = (Q > q_low) ? (Q - 1) : q_low;
4507 Q = (q_high + q_low) >> 1;
4509 /* The key frame is much better than the previous frame */
4510 else if ( kf_err < (cpi->ambient_err >> 1) )
4513 q_low = (Q < q_high) ? (Q + 1) : q_high;
4516 Q = (q_high + q_low + 1) >> 1;
4519 /* Clamp Q to upper and lower limits: */
4528 /* Is the projected frame size out of range and are we allowed
4529 * to attempt to recode.
4531 else if ( recode_loop_test( cpi,
4532 frame_over_shoot_limit, frame_under_shoot_limit,
4533 Q, top_index, bottom_index ) )
4538 /* Frame size out of permitted range. Update correction factor
4539 * & compute new Q to try...
4542 /* Frame is too large */
4543 if (cpi->projected_frame_size > cpi->this_frame_target)
4545 /* Raise Qlow as to at least the current value */
4546 q_low = (Q < q_high) ? (Q + 1) : q_high;
4548 /* If we are using over quant do the same for zbin_oq_low */
4549 if (cpi->mb.zbin_over_quant > 0)
4550 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4551 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4553 if (undershoot_seen)
4555 /* Update rate_correction_factor unless
4556 * cpi->active_worst_quality has changed.
4558 if (!active_worst_qchanged)
4559 vp8_update_rate_correction_factors(cpi, 1);
4561 Q = (q_high + q_low + 1) / 2;
4563 /* Adjust cpi->zbin_over_quant (only allowed when Q
4567 cpi->mb.zbin_over_quant = 0;
4570 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
4571 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
4572 cpi->mb.zbin_over_quant =
4573 (zbin_oq_high + zbin_oq_low) / 2;
4578 /* Update rate_correction_factor unless
4579 * cpi->active_worst_quality has changed.
4581 if (!active_worst_qchanged)
4582 vp8_update_rate_correction_factors(cpi, 0);
4584 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4586 while (((Q < q_low) ||
4587 (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
4590 vp8_update_rate_correction_factors(cpi, 0);
4591 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4598 /* Frame is too small */
4601 if (cpi->mb.zbin_over_quant == 0)
4602 /* Lower q_high if not using over quant */
4603 q_high = (Q > q_low) ? (Q - 1) : q_low;
4605 /* else lower zbin_oq_high */
4606 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
4607 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
4611 /* Update rate_correction_factor unless
4612 * cpi->active_worst_quality has changed.
4614 if (!active_worst_qchanged)
4615 vp8_update_rate_correction_factors(cpi, 1);
4617 Q = (q_high + q_low) / 2;
4619 /* Adjust cpi->zbin_over_quant (only allowed when Q
4623 cpi->mb.zbin_over_quant = 0;
4625 cpi->mb.zbin_over_quant =
4626 (zbin_oq_high + zbin_oq_low) / 2;
4630 /* Update rate_correction_factor unless
4631 * cpi->active_worst_quality has changed.
4633 if (!active_worst_qchanged)
4634 vp8_update_rate_correction_factors(cpi, 0);
4636 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4638 /* Special case reset for qlow for constrained quality.
4639 * This should only trigger where there is very substantial
4640 * undershoot on a frame and the auto cq level is above
4641 * the user passsed in value.
4643 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
4649 while (((Q > q_high) ||
4650 (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
4653 vp8_update_rate_correction_factors(cpi, 0);
4654 Q = vp8_regulate_q(cpi, cpi->this_frame_target);
4659 undershoot_seen = 1;
4662 /* Clamp Q to upper and lower limits: */
4668 /* Clamp cpi->zbin_over_quant */
4669 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
4670 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
4671 zbin_oq_high : cpi->mb.zbin_over_quant;
4679 if (cpi->is_src_frame_alt_ref)
4684 vp8_restore_coding_context(cpi);
4686 #if CONFIG_INTERNAL_STATS
4687 cpi->tot_recode_hits++;
4694 /* Experimental code for lagged and one pass
4695 * Update stats used for one pass GF selection
4698 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
4699 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
4700 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
4704 /* Special case code to reduce pulsing when key frames are forced at a
4705 * fixed interval. Note the reconstruction error if it is the frame before
4706 * the force key frame
4708 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
4710 cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
4711 &cm->yv12_fb[cm->new_fb_idx]);
4714 /* This frame's MVs are saved and will be used in next frame's MV predictor.
4715 * Last frame has one more line(add to bottom) and one more column(add to
4716 * right) than cm->mip. The edge elements are initialized to 0.
4718 #if CONFIG_MULTI_RES_ENCODING
4719 if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
4721 if(cm->show_frame) /* do not save for altref frame */
4726 /* Point to beginning of allocated MODE_INFO arrays. */
4727 MODE_INFO *tmp = cm->mip;
4729 if(cm->frame_type != KEY_FRAME)
4731 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
4733 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
4735 if(tmp->mbmi.ref_frame != INTRA_FRAME)
4736 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
4738 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
4739 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
4746 /* Count last ref frame 0,0 usage on current encoded frame. */
4750 /* Point to beginning of MODE_INFO arrays. */
4751 MODE_INFO *tmp = cm->mi;
4753 cpi->zeromv_count = 0;
4755 if(cm->frame_type != KEY_FRAME)
4757 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
4759 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
4761 if (tmp->mbmi.mode == ZEROMV &&
4762 tmp->mbmi.ref_frame == LAST_FRAME)
4763 cpi->zeromv_count++;
4771 #if CONFIG_MULTI_RES_ENCODING
4772 vp8_cal_dissimilarity(cpi);
4775 /* Update the GF useage maps.
4776 * This is done after completing the compression of a frame when all
4777 * modes etc. are finalized but before loop filter
4779 if (cpi->oxcf.number_of_layers == 1)
4780 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
4782 if (cm->frame_type == KEY_FRAME)
4783 cm->refresh_last_frame = 1;
4787 FILE *f = fopen("gfactive.stt", "a");
4788 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
4793 /* For inter frames the current default behavior is that when
4794 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
4795 * This is purely an encoder decision at present.
4797 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
4798 cm->copy_buffer_to_arf = 2;
4800 cm->copy_buffer_to_arf = 0;
4802 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
4804 #if CONFIG_TEMPORAL_DENOISING
4805 // Get some measure of the amount of noise, by measuring the (partial) mse
4806 // between source and denoised buffer, for y channel. Partial refers to
4807 // computing the sse for a sub-sample of the frame (i.e., skip x blocks along row/column),
4808 // and only for blocks in that set that are consecutive ZEROMV_LAST mode.
4809 // Do this every ~8 frames, to further reduce complexity.
4810 // TODO(marpan): Keep this for now for the case cpi->oxcf.noise_sensitivity < 4,
4811 // should be removed in favor of the process_denoiser_mode_change() function below.
4812 if (cpi->oxcf.noise_sensitivity > 0 &&
4813 cpi->oxcf.noise_sensitivity < 4 &&
4814 !cpi->oxcf.screen_content_mode &&
4815 cpi->frames_since_key%8 == 0 &&
4816 cm->frame_type != KEY_FRAME) {
4817 cpi->mse_source_denoised = measure_square_diff_partial(
4818 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi->Source, cpi);
4821 // For the adaptive denoising mode (noise_sensitivity == 4), sample the mse
4822 // of source diff (between current and previous frame), and determine if we
4823 // should switch the denoiser mode. Sampling refers to computing the mse for
4824 // a sub-sample of the frame (i.e., skip x blocks along row/column), and
4825 // only for blocks in that set that have used ZEROMV LAST, along with some
4826 // constraint on the sum diff between blocks. This process is called every
4827 // ~8 frames, to further reduce complexity.
4828 if (cpi->oxcf.noise_sensitivity == 4 &&
4829 !cpi->oxcf.screen_content_mode &&
4830 cpi->frames_since_key % 8 == 0 &&
4831 cm->frame_type != KEY_FRAME) {
4832 process_denoiser_mode_change(cpi);
4836 #if CONFIG_MULTITHREAD
4837 if (cpi->b_multi_threaded)
4839 /* start loopfilter in separate thread */
4840 sem_post(&cpi->h_event_start_lpf);
4841 cpi->b_lpf_running = 1;
4846 vp8_loopfilter_frame(cpi, cm);
4849 update_reference_frames(cpi);
4851 #ifdef OUTPUT_YUV_DENOISED
4852 vp8_write_yuv_frame(yuv_denoised_file,
4853 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
4856 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
4857 if (cpi->oxcf.error_resilient_mode)
4859 cm->refresh_entropy_probs = 0;
4863 #if CONFIG_MULTITHREAD
4865 /* wait until filter_level is picked so that we can continue with stream packing */
4865 if (cpi->b_multi_threaded)
4866 sem_wait(&cpi->h_event_end_lpf);
4869 /* build the bitstream */
4870 vp8_pack_bitstream(cpi, dest, dest_end, size);
4872 #if CONFIG_MULTITHREAD
4873 /* if PSNR packets are generated we have to wait for the lpf */
4874 if (cpi->b_lpf_running && cpi->b_calculate_psnr)
4876 sem_wait(&cpi->h_event_end_lpf);
4877 cpi->b_lpf_running = 0;
4881 /* Move storing frame_type out of the above loop since it is also
4882 * needed in motion search besides loopfilter */
4883 cm->last_frame_type = cm->frame_type;
4885 /* Update rate control heuristics */
4886 cpi->total_byte_count += (*size);
4887 cpi->projected_frame_size = (*size) << 3;
4889 if (cpi->oxcf.number_of_layers > 1)
4892 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4893 cpi->layer_context[i].total_byte_count += (*size);
4896 if (!active_worst_qchanged)
4897 vp8_update_rate_correction_factors(cpi, 2);
4899 cpi->last_q[cm->frame_type] = cm->base_qindex;
4901 if (cm->frame_type == KEY_FRAME)
4903 vp8_adjust_key_frame_context(cpi);
4906 /* Keep a record of ambient average Q. */
4907 if (cm->frame_type != KEY_FRAME)
4908 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
4910 /* Keep a record from which we can calculate the average Q excluding
4911 * GF updates and key frames
4913 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
4914 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
4918 /* Calculate the average Q for normal inter frames (not key or GFU
4921 if ( cpi->pass == 2 )
4923 cpi->ni_tot_qi += Q;
4924 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4928 /* Damp value for first few frames */
4929 if (cpi->ni_frames > 150 )
4931 cpi->ni_tot_qi += Q;
4932 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
4934 /* For one pass, early in the clip ... average the current frame Q
4935 * value with the worstq entered by the user as a dampening measure
4939 cpi->ni_tot_qi += Q;
4940 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
4943 /* If the average Q is higher than what was used in the last
4944 * frame (after going through the recode loop to keep the frame
4945 * size within range) then use the last frame value - 1. The -1
4946 * is designed to stop Q and hence the data rate, from
4947 * progressively falling away during difficult sections, but at
4948 * the same time reduce the number of iterations around the
4951 if (Q > cpi->ni_av_qi)
4952 cpi->ni_av_qi = Q - 1;
4956 /* Update the buffer level variable. */
4957 /* Non-viewable frames are a special case and are treated as pure overhead. */
4958 if ( !cm->show_frame )
4959 cpi->bits_off_target -= cpi->projected_frame_size;
4961 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
4963 /* Clip the buffer level to the maximum specified buffer size */
4964 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
4965 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
4967 // If the frame dropper is not enabled, don't let the buffer level go below
4968 // some threshold, given here by -|maximum_buffer_size|. For now we only do
4969 // this for screen content input.
4970 if (cpi->drop_frames_allowed == 0 && cpi->oxcf.screen_content_mode &&
4971 cpi->bits_off_target < -cpi->oxcf.maximum_buffer_size)
4972 cpi->bits_off_target = -cpi->oxcf.maximum_buffer_size;
4974 /* Rolling monitors of whether we are over or underspending used to
4975 * help regulate min and Max Q in two pass.
4977 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
4978 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
4979 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
4980 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
4982 /* Actual bits spent */
4983 cpi->total_actual_bits += cpi->projected_frame_size;
4986 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
4988 cpi->buffer_level = cpi->bits_off_target;
4990 /* Propagate values to higher temporal layers */
4991 if (cpi->oxcf.number_of_layers > 1)
4995 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
4997 LAYER_CONTEXT *lc = &cpi->layer_context[i];
4998 int bits_off_for_this_layer =
4999 (int)(lc->target_bandwidth / lc->framerate -
5000 cpi->projected_frame_size);
5002 lc->bits_off_target += bits_off_for_this_layer;
5004 /* Clip buffer level to maximum buffer size for the layer */
5005 if (lc->bits_off_target > lc->maximum_buffer_size)
5006 lc->bits_off_target = lc->maximum_buffer_size;
5008 lc->total_actual_bits += cpi->projected_frame_size;
5009 lc->total_target_vs_actual += bits_off_for_this_layer;
5010 lc->buffer_level = lc->bits_off_target;
5014 /* Update bits left to the kf and gf groups to account for overshoot
5015 * or undershoot on these frames
5017 if (cm->frame_type == KEY_FRAME)
5019 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
5021 if (cpi->twopass.kf_group_bits < 0)
5022 cpi->twopass.kf_group_bits = 0 ;
5024 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
5026 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
5028 if (cpi->twopass.gf_group_bits < 0)
5029 cpi->twopass.gf_group_bits = 0 ;
5032 if (cm->frame_type != KEY_FRAME)
5034 if (cpi->common.refresh_alt_ref_frame)
5036 cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
5037 cpi->last_skip_probs_q[2] = cm->base_qindex;
5039 else if (cpi->common.refresh_golden_frame)
5041 cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
5042 cpi->last_skip_probs_q[1] = cm->base_qindex;
5046 cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
5047 cpi->last_skip_probs_q[0] = cm->base_qindex;
5049 /* update the baseline */
5050 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
5055 #if 0 && CONFIG_INTERNAL_STATS
5057 FILE *f = fopen("tmp.stt", "a");
5059 vp8_clear_system_state();
5061 if (cpi->twopass.total_left_stats.coded_error != 0.0)
5062 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
5063 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
5064 "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
5065 cpi->common.current_video_frame, cpi->this_frame_target,
5066 cpi->projected_frame_size,
5067 (cpi->projected_frame_size - cpi->this_frame_target),
5068 cpi->total_target_vs_actual,
5070 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
5071 cpi->total_actual_bits, cm->base_qindex,
5072 cpi->active_best_quality, cpi->active_worst_quality,
5073 cpi->ni_av_qi, cpi->cq_target_quality,
5074 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
5075 cm->frame_type, cpi->gfu_boost,
5076 cpi->twopass.est_max_qcorrection_factor,
5077 cpi->twopass.bits_left,
5078 cpi->twopass.total_left_stats.coded_error,
5079 (double)cpi->twopass.bits_left /
5080 cpi->twopass.total_left_stats.coded_error,
5081 cpi->tot_recode_hits);
5083 fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
5084 "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
5085 "%8.2lf %"PRId64" %10.3lf %8d\n",
5086 cpi->common.current_video_frame, cpi->this_frame_target,
5087 cpi->projected_frame_size,
5088 (cpi->projected_frame_size - cpi->this_frame_target),
5089 cpi->total_target_vs_actual,
5091 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
5092 cpi->total_actual_bits, cm->base_qindex,
5093 cpi->active_best_quality, cpi->active_worst_quality,
5094 cpi->ni_av_qi, cpi->cq_target_quality,
5095 cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
5096 cm->frame_type, cpi->gfu_boost,
5097 cpi->twopass.est_max_qcorrection_factor,
5098 cpi->twopass.bits_left,
5099 cpi->twopass.total_left_stats.coded_error,
5100 cpi->tot_recode_hits);
5105 FILE *fmodes = fopen("Modes.stt", "a");
5107 fprintf(fmodes, "%6d:%1d:%1d:%1d ",
5108 cpi->common.current_video_frame,
5109 cm->frame_type, cm->refresh_golden_frame,
5110 cm->refresh_alt_ref_frame);
5112 fprintf(fmodes, "\n");
5120 if (cm->refresh_golden_frame == 1)
5121 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
5123 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
5125 if (cm->refresh_alt_ref_frame == 1)
5126 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
5128 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
5131 if (cm->refresh_last_frame & cm->refresh_golden_frame)
5132 /* both refreshed */
5133 cpi->gold_is_last = 1;
5134 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
5135 /* 1 refreshed but not the other */
5136 cpi->gold_is_last = 0;
5138 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
5139 /* both refreshed */
5140 cpi->alt_is_last = 1;
5141 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
5142 /* 1 refreshed but not the other */
5143 cpi->alt_is_last = 0;
5145 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
5146 /* both refreshed */
5147 cpi->gold_is_alt = 1;
5148 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
5149 /* 1 refreshed but not the other */
5150 cpi->gold_is_alt = 0;
5152 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
5154 if (cpi->gold_is_last)
5155 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
5157 if (cpi->alt_is_last)
5158 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
5160 if (cpi->gold_is_alt)
5161 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
5164 if (!cpi->oxcf.error_resilient_mode)
5166 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
5167 /* Update the alternate reference frame stats as appropriate. */
5168 update_alt_ref_frame_stats(cpi);
5170 /* Update the Golden frame stats as appropriate. */
5171 update_golden_frame_stats(cpi);
5174 if (cm->frame_type == KEY_FRAME)
5176 /* Tell the caller that the frame was coded as a key frame */
5177 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
5179 /* As this frame is a key frame the next defaults to an inter frame. */
5180 cm->frame_type = INTER_FRAME;
5182 cpi->last_frame_percent_intra = 100;
5186 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
5188 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
5191 /* Clear the one shot update flags for segmentation map and mode/ref
5192 * loop filter deltas.
5194 cpi->mb.e_mbd.update_mb_segmentation_map = 0;
5195 cpi->mb.e_mbd.update_mb_segmentation_data = 0;
5196 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
5199 /* Dont increment frame counters if this was an altref buffer update
5204 cm->current_video_frame++;
5205 cpi->frames_since_key++;
5206 cpi->temporal_pattern_counter++;
5209 /* reset to normal state now that we are done. */
5217 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
5218 recon_file = fopen(filename, "wb");
5219 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
5220 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
5226 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
5230 #if !CONFIG_REALTIME_ONLY
5231 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
5234 if (!cpi->common.refresh_alt_ref_frame)
5235 vp8_second_pass(cpi);
5237 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
5238 cpi->twopass.bits_left -= 8 * *size;
5240 if (!cpi->common.refresh_alt_ref_frame)
5242 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
5243 *cpi->oxcf.two_pass_vbrmin_section / 100);
5244 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
/* Accept a raw source frame from the application and push it into the
 * encoder's lookahead queue.
 *
 * Also times the operation and accumulates it into time_receive_data.
 * NOTE(review): the result-variable handling (res) is elided from this
 * excerpt; the visible logic is the push itself and the timing.
 */
5249 int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
5251 struct vpx_usec_timer timer;
5254 vpx_usec_timer_start(&timer);
5256 /* Reinit the lookahead buffer if the frame size changes */
5257 if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
/* Resolution changes are only supported with (almost) no lag. */
5259 assert(cpi->oxcf.lag_in_frames < 2);
5260 dealloc_raw_frame_buffers(cpi);
5261 alloc_raw_frame_buffers(cpi);
/* Copy the source into the lookahead ring; the active map (if enabled)
 * travels with the frame. */
5264 if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
5265 frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))
5267 vpx_usec_timer_mark(&timer);
5268 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
5274 static int frame_is_reference(const VP8_COMP *cpi)
5276 const VP8_COMMON *cm = &cpi->common;
5277 const MACROBLOCKD *xd = &cpi->mb.e_mbd;
5279 return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
5280 || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
5281 || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
5282 || cm->refresh_entropy_probs
5283 || xd->mode_ref_lf_delta_update
5284 || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
/* Top-level per-frame encode entry point.
 *
 * Pops the next source frame from the lookahead queue (or synthesizes an
 * ARNR-filtered alt-ref frame), updates the frame-rate estimate from the
 * incoming timestamps, dispatches to the pass-specific encoder, then
 * snapshots entropy contexts, saves layer state, and (when enabled)
 * accumulates PSNR/SSIM statistics.
 *
 * NOTE(review): this excerpt is elided (brace/else-only lines missing),
 * so some control-flow detail is not visible here.
 */
5288 int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
5291 struct vpx_usec_timer tsctimer;
5292 struct vpx_usec_timer ticktimer;
5293 struct vpx_usec_timer cmptimer;
5294 YV12_BUFFER_CONFIG *force_src_buffer = NULL;
/* Internal fatal errors longjmp back here; report a corrupt frame. */
5301 if (setjmp(cpi->common.error.jmp))
5303 cpi->common.error.setjmp = 0;
5304 vp8_clear_system_state();
5305 return VPX_CODEC_CORRUPT_FRAME;
5308 cpi->common.error.setjmp = 1;
5310 vpx_usec_timer_start(&cmptimer);
5314 #if !CONFIG_REALTIME_ONLY
5315 /* Should we code an alternate reference frame */
5316 if (cpi->oxcf.error_resilient_mode == 0 &&
5317 cpi->oxcf.play_alternate &&
5318 cpi->source_alt_ref_pending)
/* Peek ahead to the frame the GF update is due on; if ARNR filtering is
 * enabled, the filtered result in alt_ref_buffer becomes the source. */
5320 if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
5321 cpi->frames_till_gf_update_due,
5324 cpi->alt_ref_source = cpi->source;
5325 if (cpi->oxcf.arnr_max_frames > 0)
5327 vp8_temporal_filter_prepare_c(cpi,
5328 cpi->frames_till_gf_update_due);
5329 force_src_buffer = &cpi->alt_ref_buffer;
5331 cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
5332 cm->refresh_alt_ref_frame = 1;
5333 cm->refresh_golden_frame = 0;
5334 cm->refresh_last_frame = 0;
5336 /* Clear Pending alt Ref flag. */
5337 cpi->source_alt_ref_pending = 0;
5338 cpi->is_src_frame_alt_ref = 0;
5345 /* Read last frame source if we are encoding first pass. */
5346 if (cpi->pass == 1 && cm->current_video_frame > 0)
5348 if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
5349 PEEK_BACKWARD)) == NULL)
/* Pop the next frame to encode (may be NULL when the queue is empty). */
5354 if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
5358 cpi->is_src_frame_alt_ref = cpi->alt_ref_source
5359 && (cpi->source == cpi->alt_ref_source);
5361 if(cpi->is_src_frame_alt_ref)
5362 cpi->alt_ref_source = NULL;
/* Publish timestamps and flags for this source back to the caller. */
5368 cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
5369 cpi->un_scaled_source = cpi->Source;
5370 *time_stamp = cpi->source->ts_start;
5371 *time_end = cpi->source->ts_end;
5372 *frame_flags = cpi->source->flags;
5374 if (cpi->pass == 1 && cm->current_video_frame > 0)
5376 cpi->last_frame_unscaled_source = &cpi->last_source->img;
5382 #if !CONFIG_REALTIME_ONLY
/* On flush at the end of pass 1, emit the final stats packet once. */
5384 if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
5386 vp8_end_first_pass(cpi); /* get last stats packet */
5387 cpi->twopass.first_pass_done = 1;
5395 if (cpi->source->ts_start < cpi->first_time_stamp_ever)
5397 cpi->first_time_stamp_ever = cpi->source->ts_start;
5398 cpi->last_end_time_stamp_seen = cpi->source->ts_start;
5401 /* adjust frame rates based on timestamps given */
5404 int64_t this_duration;
5407 if (cpi->source->ts_start == cpi->first_time_stamp_ever)
5409 this_duration = cpi->source->ts_end - cpi->source->ts_start;
5414 int64_t last_duration;
5416 this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
5417 last_duration = cpi->last_end_time_stamp_seen
5418 - cpi->last_time_stamp_seen;
5419 /* do a step update if the duration changes by 10% */
5421 step = (int)(((this_duration - last_duration) *
5422 10 / last_duration));
/* Timestamps are in 1/10,000,000 of a second units. */
5428 cpi->ref_framerate = 10000000.0 / this_duration;
5431 double avg_duration, interval;
5433 /* Average this frame's rate into the last second's average
5434 * frame rate. If we haven't seen 1 second yet, then average
5435 * over the whole interval seen.
5437 interval = (double)(cpi->source->ts_end -
5438 cpi->first_time_stamp_ever);
5439 if(interval > 10000000.0)
5440 interval = 10000000;
5442 avg_duration = 10000000.0 / cpi->ref_framerate;
5443 avg_duration *= (interval - avg_duration + this_duration);
5444 avg_duration /= interval;
5446 cpi->ref_framerate = 10000000.0 / avg_duration;
5448 #if CONFIG_MULTI_RES_ENCODING
5449 if (cpi->oxcf.mr_total_resolutions > 1) {
5450 LOWER_RES_FRAME_INFO* low_res_frame_info = (LOWER_RES_FRAME_INFO*)
5451 cpi->oxcf.mr_low_res_mode_info;
5452 // Frame rate should be the same for all spatial layers in
5453 // multi-res-encoding (simulcast), so we constrain the frame for
5454 // higher layers to be that of lowest resolution. This is needed
5455 // as the application may decide to skip encoding a high layer and
5456 // then start again, in which case a big jump in time-stamps will
5457 // be received for that high layer, which will yield an incorrect
5458 // frame rate (from time-stamp adjustment in above calculation).
5459 if (cpi->oxcf.mr_encoder_id) {
5460 cpi->ref_framerate = low_res_frame_info->low_res_framerate;
5463 // Keep track of frame rate for lowest resolution.
5464 low_res_frame_info->low_res_framerate = cpi->ref_framerate;
/* Propagate the new reference frame rate to each temporal layer. */
5468 if (cpi->oxcf.number_of_layers > 1)
5472 /* Update frame rates for each layer */
5473 assert(cpi->oxcf.number_of_layers <= VPX_TS_MAX_LAYERS);
5474 for (i = 0; i < cpi->oxcf.number_of_layers &&
5475 i < VPX_TS_MAX_LAYERS; ++i)
5477 LAYER_CONTEXT *lc = &cpi->layer_context[i];
5478 lc->framerate = cpi->ref_framerate /
5479 cpi->oxcf.rate_decimator[i];
5483 vp8_new_framerate(cpi, cpi->ref_framerate);
5486 cpi->last_time_stamp_seen = cpi->source->ts_start;
5487 cpi->last_end_time_stamp_seen = cpi->source->ts_end;
/* Select the temporal layer for this frame (explicit id if set,
 * otherwise from the configured periodic pattern) and restore its
 * rate-control context. */
5490 if (cpi->oxcf.number_of_layers > 1)
5494 update_layer_contexts (cpi);
5496 /* Restore layer specific context & set frame rate */
5497 if (cpi->temporal_layer_id >= 0) {
5498 layer = cpi->temporal_layer_id;
5500 layer = cpi->oxcf.layer_id[
5501 cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
5503 restore_layer_context (cpi, layer);
5504 vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
5507 if (cpi->compressor_speed == 2)
5509 vpx_usec_timer_start(&tsctimer);
5510 vpx_usec_timer_start(&ticktimer);
5513 cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;
5515 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
/* On-the-fly bitpacking: carve dest into one control partition (1/10 of
 * the buffer) plus num_part token partitions. */
5518 const int num_part = (1 << cm->multi_token_partition);
5519 /* the available bytes in dest */
5520 const unsigned long dest_size = dest_end - dest;
5521 const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);
5523 unsigned char *dp = dest;
5525 cpi->partition_d[0] = dp;
5526 dp += dest_size/10; /* reserve 1/10 for control partition */
5527 cpi->partition_d_end[0] = dp;
5529 for(i = 0; i < num_part; i++)
5531 cpi->partition_d[i + 1] = dp;
5532 dp += tok_part_buff_size;
5533 cpi->partition_d_end[i + 1] = dp;
5538 /* start with a 0 size frame */
5541 /* Clear down mmx registers */
5542 vp8_clear_system_state();
5544 cm->frame_type = INTER_FRAME;
5545 cm->frame_flags = *frame_flags;
/* Default refresh pattern: alt-ref frames touch only the ARF buffer,
 * ordinary frames refresh the last-frame buffer. */
5549 if (cm->refresh_alt_ref_frame)
5551 cm->refresh_golden_frame = 0;
5552 cm->refresh_last_frame = 0;
5556 cm->refresh_golden_frame = 0;
5557 cm->refresh_last_frame = 1;
5561 /* find a free buffer for the new frame */
5564 for(; i < NUM_YV12_BUFFERS; i++)
5566 if(!cm->yv12_fb[i].flags)
5573 assert(i < NUM_YV12_BUFFERS );
5575 #if !CONFIG_REALTIME_ONLY
/* Dispatch by encoding pass (1 = first pass stats, 2 = final two-pass,
 * otherwise single pass). */
5579 Pass1Encode(cpi, size, dest, frame_flags);
5581 else if (cpi->pass == 2)
5583 Pass2Encode(cpi, size, dest, dest_end, frame_flags);
5587 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
5589 if (cpi->compressor_speed == 2)
5591 unsigned int duration, duration2;
5592 vpx_usec_timer_mark(&tsctimer);
5593 vpx_usec_timer_mark(&ticktimer);
5595 duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
5596 duration2 = (unsigned int)((double)duration / 2);
/* Exponential moving averages (7/8 old, 1/8 new) of encode timing,
 * used by the adaptive speed selection. */
5598 if (cm->frame_type != KEY_FRAME)
5600 if (cpi->avg_encode_time == 0)
5601 cpi->avg_encode_time = duration;
5603 cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
5610 if (cpi->avg_pick_mode_time == 0)
5611 cpi->avg_pick_mode_time = duration2;
5613 cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
/* If this frame did not commit its entropy update, roll the contexts
 * back to the saved copy. */
5619 if (cm->refresh_entropy_probs == 0)
5621 memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
5624 /* Save the contexts separately for alt ref, gold and last. */
5625 /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
5626 if(cm->refresh_alt_ref_frame)
5627 memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));
5629 if(cm->refresh_golden_frame)
5630 memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));
5632 if(cm->refresh_last_frame)
5633 memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));
5635 /* if its a dropped frame honor the requests on subsequent frames */
5638 cpi->droppable = !frame_is_reference(cpi);
5640 /* return to normal state */
5641 cm->refresh_entropy_probs = 1;
5642 cm->refresh_alt_ref_frame = 0;
5643 cm->refresh_golden_frame = 0;
5644 cm->refresh_last_frame = 1;
5645 cm->frame_type = INTER_FRAME;
5649 /* Save layer specific state */
5650 if (cpi->oxcf.number_of_layers > 1)
5651 save_layer_context (cpi);
5653 vpx_usec_timer_mark(&cmptimer);
5654 cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
5656 if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
5658 generate_psnr_packet(cpi);
5661 #if CONFIG_INTERNAL_STATS
5665 cpi->bytes += *size;
5669 cpi->common.show_frame_mi = cpi->common.mi;
/* PSNR accounting: plane-wise SSE on the reconstructed frame, plus a
 * second measurement after deblocking post-processing. */
5672 if (cpi->b_calculate_psnr)
5676 YV12_BUFFER_CONFIG *orig = cpi->Source;
5677 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
5678 unsigned int y_width = cpi->common.Width;
5679 unsigned int y_height = cpi->common.Height;
5680 unsigned int uv_width = (y_width + 1) / 2;
5681 unsigned int uv_height = (y_height + 1) / 2;
5682 int y_samples = y_height * y_width;
5683 int uv_samples = uv_height * uv_width;
5684 int t_samples = y_samples + 2 * uv_samples;
5687 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5688 recon->y_buffer, recon->y_stride, y_width, y_height);
5690 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5691 recon->u_buffer, recon->uv_stride, uv_width, uv_height);
5693 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5694 recon->v_buffer, recon->uv_stride, uv_width, uv_height);
5696 sq_error = (double)(ye + ue + ve);
5698 frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);
5700 cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
5701 cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
5702 cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
5703 cpi->total_sq_error += sq_error;
5704 cpi->total += frame_psnr;
5707 YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
5709 double frame_psnr2, frame_ssim2 = 0;
/* Deblock into the post-proc buffer and re-measure PSNR/SSIM there. */
5712 vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
5713 vp8_clear_system_state();
5715 ye = calc_plane_error(orig->y_buffer, orig->y_stride,
5716 pp->y_buffer, pp->y_stride, y_width, y_height);
5718 ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
5719 pp->u_buffer, pp->uv_stride, uv_width, uv_height);
5721 ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
5722 pp->v_buffer, pp->uv_stride, uv_width, uv_height);
5724 sq_error2 = (double)(ye + ue + ve);
5726 frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);
5728 cpi->totalp_y += vpx_sse_to_psnr(y_samples,
5730 cpi->totalp_u += vpx_sse_to_psnr(uv_samples,
5732 cpi->totalp_v += vpx_sse_to_psnr(uv_samples,
5734 cpi->total_sq_error2 += sq_error2;
5735 cpi->totalp += frame_psnr2;
5737 frame_ssim2 = vpx_calc_ssim(cpi->Source,
5738 &cm->post_proc_buffer, &weight);
5740 cpi->summed_quality += frame_ssim2 * weight;
5741 cpi->summed_weights += weight;
/* A frame coded in layer L contributes stats to layer L and all
 * higher layers. */
5743 if (cpi->oxcf.number_of_layers > 1)
5747 for (i=cpi->current_layer;
5748 i<cpi->oxcf.number_of_layers; i++)
5750 cpi->frames_in_layer[i]++;
5752 cpi->bytes_in_layer[i] += *size;
5753 cpi->sum_psnr[i] += frame_psnr;
5754 cpi->sum_psnr_p[i] += frame_psnr2;
5755 cpi->total_error2[i] += sq_error;
5756 cpi->total_error2_p[i] += sq_error2;
5757 cpi->sum_ssim[i] += frame_ssim2 * weight;
5758 cpi->sum_weights[i] += weight;
5765 if (cpi->b_calculate_ssimg)
5767 double y, u, v, frame_all;
5768 frame_all = vpx_calc_ssimg(cpi->Source, cm->frame_to_show,
5771 if (cpi->oxcf.number_of_layers > 1)
5775 for (i=cpi->current_layer;
5776 i<cpi->oxcf.number_of_layers; i++)
/* Avoid double-counting frames when PSNR stats already did it. */
5778 if (!cpi->b_calculate_psnr)
5779 cpi->frames_in_layer[i]++;
5781 cpi->total_ssimg_y_in_layer[i] += y;
5782 cpi->total_ssimg_u_in_layer[i] += u;
5783 cpi->total_ssimg_v_in_layer[i] += v;
5784 cpi->total_ssimg_all_in_layer[i] += frame_all;
5789 cpi->total_ssimg_y += y;
5790 cpi->total_ssimg_u += u;
5791 cpi->total_ssimg_v += v;
5792 cpi->total_ssimg_all += frame_all;
/* Debug-only skip statistics, appended to skip.stt. */
5801 if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
5803 skiptruecount += cpi->skip_true_count;
5804 skipfalsecount += cpi->skip_false_count;
5812 FILE *f = fopen("skip.stt", "a");
5813 fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
5815 if (cpi->is_src_frame_alt_ref == 1)
5816 fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
/* Normal exit: disarm the error longjmp target. */
5824 cpi->common.error.setjmp = 0;
/* Hand the most recently shown frame back to the application for preview.
 *
 * Refuses while an alt-ref frame is being coded (it is not displayable).
 * With CONFIG_POSTPROC the frame goes through vp8_post_proc_frame;
 * otherwise the reconstruction buffer is shallow-copied into *dest.
 * NOTE(review): the ret-value paths and #else/#endif lines are elided
 * from this excerpt.
 */
5829 int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
5831 if (cpi->common.refresh_alt_ref_frame)
5837 #if CONFIG_MULTITHREAD
/* The loop filter may still be running on its own thread; wait for it
 * before touching the frame. */
5838 if(cpi->b_lpf_running)
5840 sem_wait(&cpi->h_event_end_lpf);
5841 cpi->b_lpf_running = 0;
5846 cpi->common.show_frame_mi = cpi->common.mi;
5847 ret = vp8_post_proc_frame(&cpi->common, dest, flags);
5851 if (cpi->common.frame_to_show)
/* Shallow copy; dest aliases the encoder's reconstruction buffer. */
5853 *dest = *cpi->common.frame_to_show;
5854 dest->y_width = cpi->common.Width;
5855 dest->y_height = cpi->common.Height;
5856 dest->uv_height = cpi->common.Height / 2;
5865 vp8_clear_system_state();
/* Configure region-of-interest coding from an application-supplied
 * segment map.
 *
 * map        - per-macroblock segment ids (rows*cols), NULL disables ROI
 * rows/cols  - must match the encoder's macroblock dimensions
 * delta_q    - per-segment quantizer deltas, external Q scale, |x| <= 63
 * delta_lf   - per-segment loop-filter deltas, |x| <= 63
 * threshold  - per-segment encode-breakout thresholds
 * NOTE(review): the early-return statements for the validation failures
 * are elided from this excerpt.
 */
5870 int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
5872 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
5873 int internal_delta_q[MAX_MB_SEGMENTS];
5874 const int range = 63;
5877 // This method is currently incompatible with the cyclic refresh method
5878 if ( cpi->cyclic_refresh_mode_enabled )
5881 // Check number of rows and columns match
5882 if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
5885 // Range check the delta Q values and convert the external Q range values
5886 // to internal ones.
5887 if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
5888 (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )
5891 // Range check the delta lf values
5892 if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
5893 (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )
/* A NULL map turns segmentation off entirely. */
5898 disable_segmentation(cpi);
5902 // Translate the external delta q values to internal values.
5903 for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
5904 internal_delta_q[i] =
5905 ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
5907 /* Set the segmentation Map */
5908 set_segmentation_map(cpi, map);
5910 /* Activate segmentation. */
5911 enable_segmentation(cpi);
5913 /* Set up the quant segment data */
5914 feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
5915 feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
5916 feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
5917 feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
5919 /* Set up the loop segment data s */
5920 feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
5921 feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
5922 feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
5923 feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
/* Per-segment encode breakout (early-skip) thresholds. */
5925 cpi->segment_encode_breakout[0] = threshold[0];
5926 cpi->segment_encode_breakout[1] = threshold[1];
5927 cpi->segment_encode_breakout[2] = threshold[2];
5928 cpi->segment_encode_breakout[3] = threshold[3];
5930 /* Initialise the feature data structure */
5931 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
/* Install an application-supplied active (encode/skip) macroblock map.
 *
 * The map dimensions must match the encoder's macroblock grid.  A valid
 * map is copied and activated; the disabling path (map == NULL) and the
 * dimension-mismatch return are elided from this excerpt.
 */
5936 int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
5938 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
5942 memcpy(cpi->active_map, map, rows * cols);
5943 cpi->active_map_enabled = 1;
5946 cpi->active_map_enabled = 0;
/* Set the internal spatial-scaling mode for width and height.
 *
 * Each mode is accepted only if it is within the supported range
 * (<= ONETWO, i.e. down to one-half); the rejection returns are elided
 * from this excerpt.
 */
5956 int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
5958 if (horiz_mode <= ONETWO)
5959 cpi->common.horiz_scale = horiz_mode;
5963 if (vert_mode <= ONETWO)
5964 cpi->common.vert_scale = vert_mode;
/* Sum-of-squared-error between the Y planes of two frames.
 *
 * Walks both luma planes in 16x16 blocks, accumulating vpx_mse16x16 for
 * each block.  Widths/heights that are not multiples of 16 would include
 * border pixels from the padded buffers.  The accumulator declaration
 * and final return are elided from this excerpt.
 */
5973 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
5978 unsigned char *src = source->y_buffer;
5979 unsigned char *dst = dest->y_buffer;
5981 /* Loop through the Y plane raw and reconstruction data summing
5982 * (square differences)
5984 for (i = 0; i < source->y_height; i += 16)
5986 for (j = 0; j < source->y_width; j += 16)
5989 Total += vpx_mse16x16(src + j, source->y_stride,
5990 dst + j, dest->y_stride, &sse);
/* Advance both planes one 16-pixel row of blocks. */
5993 src += 16 * source->y_stride;
5994 dst += 16 * dest->y_stride;
/* Return the base quantizer index used for the most recently coded frame. */
6001 int vp8_get_quantizer(VP8_COMP *cpi)
6003 return cpi->common.base_qindex;