2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
15 #include "./vpx_config.h"
16 #include "./vpx_scale_rtcd.h"
17 #include "vpx/internal/vpx_psnr.h"
18 #include "vpx_ports/vpx_timer.h"
20 #include "vp9/common/vp9_alloccommon.h"
21 #include "vp9/common/vp9_filter.h"
22 #include "vp9/common/vp9_idct.h"
23 #if CONFIG_VP9_POSTPROC
24 #include "vp9/common/vp9_postproc.h"
26 #include "vp9/common/vp9_reconinter.h"
27 #include "vp9/common/vp9_reconintra.h"
28 #include "vp9/common/vp9_systemdependent.h"
29 #include "vp9/common/vp9_tile_common.h"
31 #include "vp9/encoder/vp9_aq_complexity.h"
32 #include "vp9/encoder/vp9_aq_cyclicrefresh.h"
33 #include "vp9/encoder/vp9_aq_variance.h"
34 #include "vp9/encoder/vp9_bitstream.h"
35 #include "vp9/encoder/vp9_context_tree.h"
36 #include "vp9/encoder/vp9_encodeframe.h"
37 #include "vp9/encoder/vp9_encodemv.h"
38 #include "vp9/encoder/vp9_firstpass.h"
39 #include "vp9/encoder/vp9_mbgraph.h"
40 #include "vp9/encoder/vp9_encoder.h"
41 #include "vp9/encoder/vp9_picklpf.h"
42 #include "vp9/encoder/vp9_ratectrl.h"
43 #include "vp9/encoder/vp9_rd.h"
44 #include "vp9/encoder/vp9_segmentation.h"
45 #include "vp9/encoder/vp9_speed_features.h"
46 #if CONFIG_INTERNAL_STATS
47 #include "vp9/encoder/vp9_ssim.h"
49 #include "vp9/encoder/vp9_temporal_filter.h"
50 #include "vp9/encoder/vp9_resize.h"
51 #include "vp9/encoder/vp9_svc_layercontext.h"
53 void vp9_coef_tree_initialize();
55 #define SHARP_FILTER_QTHRESH 0 /* Q threshold for 8-tap sharp filter */
57 #define ALTREF_HIGH_PRECISION_MV 1 // Whether to use high precision mv
58 // for altref computation.
59 #define HIGH_PRECISION_MV_QTHRESH 200 // Q threshold for high precision
60 // mv. Choose a very high value for
61 // now so that HIGH_PRECISION is always
64 // #define OUTPUT_YUV_REC
66 #ifdef OUTPUT_YUV_DENOISED
67 FILE *yuv_denoised_file = NULL;
// Converts a VPX_SCALING mode into a scaling ratio expressed as *hr / *hs.
// NOTE(review): the function body (a switch over `mode`) is elided from this
// excerpt; only the signature is visible here.
static INLINE void Scale2Ratio(VPX_SCALING mode, int *hr, int *hs) {
105 void vp9_set_high_precision_mv(VP9_COMP *cpi, int allow_high_precision_mv) {
106 MACROBLOCK *const mb = &cpi->mb;
107 cpi->common.allow_high_precision_mv = allow_high_precision_mv;
108 if (cpi->common.allow_high_precision_mv) {
109 mb->mvcost = mb->nmvcost_hp;
110 mb->mvsadcost = mb->nmvsadcost_hp;
112 mb->mvcost = mb->nmvcost;
113 mb->mvsadcost = mb->nmvsadcost;
117 static void setup_frame(VP9_COMP *cpi) {
118 VP9_COMMON *const cm = &cpi->common;
119 // Set up entropy context depending on frame type. The decoder mandates
120 // the use of the default context, index 0, for keyframes and inter
121 // frames where the error_resilient_mode or intra_only flag is set. For
122 // other inter-frames the encoder currently uses only two contexts;
123 // context 1 for ALTREF frames and context 0 for the others.
124 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
125 vp9_setup_past_independence(cm);
128 cm->frame_context_idx = cpi->refresh_alt_ref_frame;
131 if (cm->frame_type == KEY_FRAME) {
132 if (!is_two_pass_svc(cpi))
133 cpi->refresh_golden_frame = 1;
134 cpi->refresh_alt_ref_frame = 1;
135 vp9_zero(cpi->interp_filter_selected);
137 cm->fc = cm->frame_contexts[cm->frame_context_idx];
138 vp9_zero(cpi->interp_filter_selected[0]);
// One-time global initialization of encoder lookup tables: neighbour maps,
// intra predictors, entropy trees, tokenizer, rate-control min-Q LUTs, MV
// and mode entropy tables, and the temporal filter. Guarded by a static
// flag so repeated calls are no-ops.
// NOTE(review): this excerpt elides the `if (!init_done)` guard, at least
// one initializer call, the `init_done = 1;` assignment, and the closing
// braces; the visible lines below are kept byte-for-byte.
void vp9_initialize_enc() {
  static int init_done = 0;
  vp9_init_neighbors();
  vp9_init_intra_predictors();
  vp9_coef_tree_initialize();
  vp9_tokenize_initialize();
  vp9_rc_init_minq_luts();
  vp9_entropy_mv_init();
  vp9_entropy_mode_init();
  vp9_temporal_filter_init();
// Releases all heap allocations owned by the compressor instance: maps,
// MV cost tables, cyclic-refresh state, frame buffers, the lookahead queue,
// the partition-choice tree, and per-layer two-pass stats buffers. Pointers
// are nulled after freeing to guard against double-free on repeated calls.
// NOTE(review): this excerpt elides the loop-index declaration (`int i;`)
// and several closing braces; visible lines are kept byte-for-byte.
static void dealloc_compressor_data(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  // Delete segmentation map
  vpx_free(cpi->segmentation_map);
  cpi->segmentation_map = NULL;
  vpx_free(cm->last_frame_seg_map);
  cm->last_frame_seg_map = NULL;
  vpx_free(cpi->coding_context.last_frame_seg_map_copy);
  cpi->coding_context.last_frame_seg_map_copy = NULL;
  vpx_free(cpi->complexity_map);
  cpi->complexity_map = NULL;
  // Four pairs of MV cost tables (regular/high-precision, cost/SAD-cost),
  // each MV_VALS entries, allocated in vp9_create_compressor().
  vpx_free(cpi->nmvcosts[0]);
  vpx_free(cpi->nmvcosts[1]);
  cpi->nmvcosts[0] = NULL;
  cpi->nmvcosts[1] = NULL;
  vpx_free(cpi->nmvcosts_hp[0]);
  vpx_free(cpi->nmvcosts_hp[1]);
  cpi->nmvcosts_hp[0] = NULL;
  cpi->nmvcosts_hp[1] = NULL;
  vpx_free(cpi->nmvsadcosts[0]);
  vpx_free(cpi->nmvsadcosts[1]);
  cpi->nmvsadcosts[0] = NULL;
  cpi->nmvsadcosts[1] = NULL;
  vpx_free(cpi->nmvsadcosts_hp[0]);
  vpx_free(cpi->nmvsadcosts_hp[1]);
  cpi->nmvsadcosts_hp[0] = NULL;
  cpi->nmvsadcosts_hp[1] = NULL;
  vp9_cyclic_refresh_free(cpi->cyclic_refresh);
  cpi->cyclic_refresh = NULL;
  vp9_free_ref_frame_buffers(cm);
  vp9_free_context_buffers(cm);
  vp9_free_frame_buffer(&cpi->last_frame_uf);
  vp9_free_frame_buffer(&cpi->scaled_source);
  vp9_free_frame_buffer(&cpi->scaled_last_source);
  vp9_free_frame_buffer(&cpi->alt_ref_buffer);
  vp9_lookahead_destroy(cpi->lookahead);
  vp9_free_pc_tree(cpi);
  // Per-spatial-layer two-pass stats buffers (SVC).
  for (i = 0; i < cpi->svc.number_spatial_layers; ++i) {
    LAYER_CONTEXT *const lc = &cpi->svc.layer_context[i];
    vpx_free(lc->rc_twopass_stats_in.buf);
    lc->rc_twopass_stats_in.buf = NULL;
    lc->rc_twopass_stats_in.sz = 0;
  if (cpi->source_diff_var != NULL) {
    vpx_free(cpi->source_diff_var);
    cpi->source_diff_var = NULL;
  for (i = 0; i < MAX_LAG_BUFFERS; ++i) {
    vp9_free_frame_buffer(&cpi->svc.scaled_frames[i]);
  // Zero the descriptors so a later free pass sees them as empty.
  vpx_memset(&cpi->svc.scaled_frames[0], 0,
             MAX_LAG_BUFFERS * sizeof(cpi->svc.scaled_frames[0]));
231 static void save_coding_context(VP9_COMP *cpi) {
232 CODING_CONTEXT *const cc = &cpi->coding_context;
233 VP9_COMMON *cm = &cpi->common;
235 // Stores a snapshot of key state variables which can subsequently be
236 // restored with a call to vp9_restore_coding_context. These functions are
237 // intended for use in a re-code loop in vp9_compress_frame where the
238 // quantizer value is adjusted between loop iterations.
239 vp9_copy(cc->nmvjointcost, cpi->mb.nmvjointcost);
241 vpx_memcpy(cc->nmvcosts[0], cpi->nmvcosts[0],
242 MV_VALS * sizeof(*cpi->nmvcosts[0]));
243 vpx_memcpy(cc->nmvcosts[1], cpi->nmvcosts[1],
244 MV_VALS * sizeof(*cpi->nmvcosts[1]));
245 vpx_memcpy(cc->nmvcosts_hp[0], cpi->nmvcosts_hp[0],
246 MV_VALS * sizeof(*cpi->nmvcosts_hp[0]));
247 vpx_memcpy(cc->nmvcosts_hp[1], cpi->nmvcosts_hp[1],
248 MV_VALS * sizeof(*cpi->nmvcosts_hp[1]));
250 vp9_copy(cc->segment_pred_probs, cm->seg.pred_probs);
252 vpx_memcpy(cpi->coding_context.last_frame_seg_map_copy,
253 cm->last_frame_seg_map, (cm->mi_rows * cm->mi_cols));
255 vp9_copy(cc->last_ref_lf_deltas, cm->lf.last_ref_deltas);
256 vp9_copy(cc->last_mode_lf_deltas, cm->lf.last_mode_deltas);
261 static void restore_coding_context(VP9_COMP *cpi) {
262 CODING_CONTEXT *const cc = &cpi->coding_context;
263 VP9_COMMON *cm = &cpi->common;
265 // Restore key state variables to the snapshot state stored in the
266 // previous call to vp9_save_coding_context.
267 vp9_copy(cpi->mb.nmvjointcost, cc->nmvjointcost);
269 vpx_memcpy(cpi->nmvcosts[0], cc->nmvcosts[0],
270 MV_VALS * sizeof(*cc->nmvcosts[0]));
271 vpx_memcpy(cpi->nmvcosts[1], cc->nmvcosts[1],
272 MV_VALS * sizeof(*cc->nmvcosts[1]));
273 vpx_memcpy(cpi->nmvcosts_hp[0], cc->nmvcosts_hp[0],
274 MV_VALS * sizeof(*cc->nmvcosts_hp[0]));
275 vpx_memcpy(cpi->nmvcosts_hp[1], cc->nmvcosts_hp[1],
276 MV_VALS * sizeof(*cc->nmvcosts_hp[1]));
278 vp9_copy(cm->seg.pred_probs, cc->segment_pred_probs);
280 vpx_memcpy(cm->last_frame_seg_map,
281 cpi->coding_context.last_frame_seg_map_copy,
282 (cm->mi_rows * cm->mi_cols));
284 vp9_copy(cm->lf.last_ref_deltas, cc->last_ref_lf_deltas);
285 vp9_copy(cm->lf.last_mode_deltas, cc->last_mode_lf_deltas);
// Configures static (two-segment) segmentation around ARF groups: disables
// and clears segmentation on keyframes and ARF frames (re-enabling via
// mbgraph analysis), applies per-segment Q/loop-filter deltas on frames in
// an active ARF group, and forces ALTREF reference / skip on overlay
// frames at high Q.
// NOTE(review): this excerpt elides several lines (the `qi_delta`
// declaration, some if/else headers, and closing braces); visible lines
// are kept byte-for-byte.
static void configure_static_seg_features(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  struct segmentation *const seg = &cm->seg;
  // "High Q" threshold used to gate the reference/skip segment features.
  int high_q = (int)(rc->avg_q > 48.0);
  // Disable and clear down for KF
  if (cm->frame_type == KEY_FRAME) {
    // Clear down the global segmentation map
    vpx_memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_data = 0;
    cpi->static_mb_pct = 0;
    // Disable segmentation
    vp9_disable_segmentation(seg);
    // Clear down the segment features.
    vp9_clearall_segfeatures(seg);
  } else if (cpi->refresh_alt_ref_frame) {
    // If this is an alt ref frame
    // Clear down the global segmentation map
    vpx_memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_data = 0;
    cpi->static_mb_pct = 0;
    // Disable segmentation and individual segment features by default
    vp9_disable_segmentation(seg);
    vp9_clearall_segfeatures(seg);
    // Scan frames from current to arf frame.
    // This function re-enables segmentation if appropriate.
    vp9_update_mbgraph_stats(cpi);
    // If segmentation was enabled set those features needed for the
      seg->update_data = 1;
      // Lower Q (by ~12.5%) and soften the loop filter for segment 1.
      qi_delta = vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875);
      vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
      vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
      vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
      vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
      // Where relevant assume segment data is delta data
      seg->abs_delta = SEGMENT_DELTADATA;
  } else if (seg->enabled) {
    // All other frames if segmentation has been enabled
    // First normal frame in a valid gf or alt ref group
    if (rc->frames_since_golden == 0) {
      // Set up segment features for normal frames in an arf group
      if (rc->source_alt_ref_active) {
        seg->update_data = 1;
        seg->abs_delta = SEGMENT_DELTADATA;
        // Raise Q (by ~12.5%) for segment 1 on normal frames in the group.
        qi_delta = vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125);
        vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
        vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
        vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
        vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
        // Segment coding disabled for compred testing
        if (high_q || (cpi->static_mb_pct == 100)) {
          vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
          vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
          vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
      // Disable segmentation and clear down features if alt ref
      // is not active for this group
        vp9_disable_segmentation(seg);
        vpx_memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
        seg->update_data = 0;
        vp9_clearall_segfeatures(seg);
    } else if (rc->is_src_frame_alt_ref) {
      // Special case where we are coding over the top of a previous
      // Segment coding disabled for compred testing
      // Enable ref frame features for segment 0 as well
      vp9_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
      vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
      // All mbs should use ALTREF_FRAME
      vp9_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
      vp9_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
      vp9_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
      vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
      // Skip all MBs if high Q (0,0 mv and skip coeffs)
      vp9_enable_segfeature(seg, 0, SEG_LVL_SKIP);
      vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
      // Enable data update
      seg->update_data = 1;
      // No updates.. leave things as they are.
      seg->update_data = 0;
412 static void update_reference_segmentation_map(VP9_COMP *cpi) {
413 VP9_COMMON *const cm = &cpi->common;
414 MODE_INFO **mi_8x8_ptr = cm->mi_grid_visible;
415 uint8_t *cache_ptr = cm->last_frame_seg_map;
418 for (row = 0; row < cm->mi_rows; row++) {
419 MODE_INFO **mi_8x8 = mi_8x8_ptr;
420 uint8_t *cache = cache_ptr;
421 for (col = 0; col < cm->mi_cols; col++, mi_8x8++, cache++)
422 cache[0] = mi_8x8[0]->mbmi.segment_id;
423 mi_8x8_ptr += cm->mi_stride;
424 cache_ptr += cm->mi_cols;
428 static void alloc_raw_frame_buffers(VP9_COMP *cpi) {
429 VP9_COMMON *cm = &cpi->common;
430 const VP9EncoderConfig *oxcf = &cpi->oxcf;
432 cpi->lookahead = vp9_lookahead_init(oxcf->width, oxcf->height,
433 cm->subsampling_x, cm->subsampling_y,
434 #if CONFIG_VP9_HIGHBITDEPTH
435 cm->use_highbitdepth,
437 oxcf->lag_in_frames);
439 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
440 "Failed to allocate lag buffers");
442 if (vp9_realloc_frame_buffer(&cpi->alt_ref_buffer,
443 oxcf->width, oxcf->height,
444 cm->subsampling_x, cm->subsampling_y,
445 #if CONFIG_VP9_HIGHBITDEPTH
446 cm->use_highbitdepth,
448 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL))
449 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
450 "Failed to allocate altref buffer");
453 static void alloc_ref_frame_buffers(VP9_COMP *cpi) {
454 VP9_COMMON *const cm = &cpi->common;
455 if (vp9_alloc_ref_frame_buffers(cm, cm->width, cm->height))
456 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
457 "Failed to allocate frame buffers");
460 static void alloc_util_frame_buffers(VP9_COMP *cpi) {
461 VP9_COMMON *const cm = &cpi->common;
462 if (vp9_realloc_frame_buffer(&cpi->last_frame_uf,
463 cm->width, cm->height,
464 cm->subsampling_x, cm->subsampling_y,
465 #if CONFIG_VP9_HIGHBITDEPTH
466 cm->use_highbitdepth,
468 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL))
469 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
470 "Failed to allocate last frame buffer");
472 if (vp9_realloc_frame_buffer(&cpi->scaled_source,
473 cm->width, cm->height,
474 cm->subsampling_x, cm->subsampling_y,
475 #if CONFIG_VP9_HIGHBITDEPTH
476 cm->use_highbitdepth,
478 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL))
479 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
480 "Failed to allocate scaled source buffer");
482 if (vp9_realloc_frame_buffer(&cpi->scaled_last_source,
483 cm->width, cm->height,
484 cm->subsampling_x, cm->subsampling_y,
485 #if CONFIG_VP9_HIGHBITDEPTH
486 cm->use_highbitdepth,
488 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL))
489 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
490 "Failed to allocate scaled last source buffer");
493 void vp9_alloc_compressor_data(VP9_COMP *cpi) {
494 VP9_COMMON *cm = &cpi->common;
496 vp9_alloc_context_buffers(cm, cm->width, cm->height);
501 unsigned int tokens = get_token_alloc(cm->mb_rows, cm->mb_cols);
502 CHECK_MEM_ERROR(cm, cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
505 vp9_setup_pc_tree(&cpi->common, cpi);
508 static void update_frame_size(VP9_COMP *cpi) {
509 VP9_COMMON *const cm = &cpi->common;
510 MACROBLOCKD *const xd = &cpi->mb.e_mbd;
512 vp9_set_mb_mi(cm, cm->width, cm->height);
513 vp9_init_context_buffers(cm);
514 init_macroblockd(cm, xd);
516 if (is_two_pass_svc(cpi)) {
517 if (vp9_realloc_frame_buffer(&cpi->alt_ref_buffer,
518 cm->width, cm->height,
519 cm->subsampling_x, cm->subsampling_y,
520 #if CONFIG_VP9_HIGHBITDEPTH
521 cm->use_highbitdepth,
523 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL))
524 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
525 "Failed to reallocate alt_ref_buffer");
529 void vp9_new_framerate(VP9_COMP *cpi, double framerate) {
530 cpi->framerate = framerate < 0.1 ? 30 : framerate;
531 vp9_rc_update_framerate(cpi);
534 static void set_tile_limits(VP9_COMP *cpi) {
535 VP9_COMMON *const cm = &cpi->common;
537 int min_log2_tile_cols, max_log2_tile_cols;
538 vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
540 cm->log2_tile_cols = clamp(cpi->oxcf.tile_columns,
541 min_log2_tile_cols, max_log2_tile_cols);
542 cm->log2_tile_rows = cpi->oxcf.tile_rows;
// Initializes the encoder's reference frame buffer index bookkeeping.
// NOTE(review): the function body is elided from this excerpt; only the
// signature is visible here.
static void init_buffer_indices(VP9_COMP *cpi) {
// One-time configuration of a freshly created compressor from the user
// config: copies basic stream parameters, allocates size-dependent data,
// sets up scalability layer counts, and then runs the shared
// vp9_change_config() path for everything that can also change later.
// NOTE(review): this excerpt elides several lines (likely including the
// initial `cpi->oxcf = *oxcf;` copy and some closing braces) — confirm
// against the full file; visible lines are kept byte-for-byte.
static void init_config(struct VP9_COMP *cpi, VP9EncoderConfig *oxcf) {
  VP9_COMMON *const cm = &cpi->common;
  cpi->framerate = oxcf->init_framerate;
  cm->profile = oxcf->profile;
  cm->bit_depth = oxcf->bit_depth;
  cm->color_space = UNKNOWN;
  cm->width = oxcf->width;
  cm->height = oxcf->height;
  vp9_alloc_compressor_data(cpi);
  // Spatial scalability.
  cpi->svc.number_spatial_layers = oxcf->ss_number_layers;
  // Temporal scalability.
  cpi->svc.number_temporal_layers = oxcf->ts_number_layers;
  // Layer contexts are needed for CBR temporal layering and for any
  // two-pass scalable encode.
  if ((cpi->svc.number_temporal_layers > 1 && cpi->oxcf.rc_mode == VPX_CBR) ||
      ((cpi->svc.number_temporal_layers > 1 ||
        cpi->svc.number_spatial_layers > 1) &&
       cpi->oxcf.pass == 2)) {
    vp9_init_layer_context(cpi);
  // change includes all joint functionality
  vp9_change_config(cpi, oxcf);
  cpi->static_mb_pct = 0;
  cpi->ref_frame_flags = 0;
  init_buffer_indices(cpi);
  set_tile_limits(cpi);
588 static void set_rc_buffer_sizes(RATE_CONTROL *rc,
589 const VP9EncoderConfig *oxcf) {
590 const int64_t bandwidth = oxcf->target_bandwidth;
591 const int64_t starting = oxcf->starting_buffer_level_ms;
592 const int64_t optimal = oxcf->optimal_buffer_level_ms;
593 const int64_t maximum = oxcf->maximum_buffer_size_ms;
595 rc->starting_buffer_level = starting * bandwidth / 1000;
596 rc->optimal_buffer_level = (optimal == 0) ? bandwidth / 8
597 : optimal * bandwidth / 1000;
598 rc->maximum_buffer_size = (maximum == 0) ? bandwidth / 8
599 : maximum * bandwidth / 1000;
// Applies a (possibly mid-stream) configuration change: profile/bit-depth,
// refresh flags, segmentation reset, encode-breakout thresholds, RC buffer
// levels and quality limits, display/coded size, SVC layer updates, tile
// limits, and (when enabled) the temporal denoiser buffers.
// NOTE(review): this excerpt elides several lines (loop-index declaration,
// some else/closing braces, an `#endif`, and likely the `cpi->oxcf = *oxcf;`
// copy); visible lines are kept byte-for-byte.
void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) {
  VP9_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  if (cm->profile != oxcf->profile)
    cm->profile = oxcf->profile;
  cm->bit_depth = oxcf->bit_depth;
  // Profiles 0/1 are 8-bit only; profiles 2/3 require >8-bit.
  if (cm->profile <= PROFILE_1)
    assert(cm->bit_depth == VPX_BITS_8);
    assert(cm->bit_depth > VPX_BITS_8);
  rc->baseline_gf_interval = DEFAULT_GF_INTERVAL;
  cpi->refresh_golden_frame = 0;
  cpi->refresh_last_frame = 1;
  cm->refresh_frame_context = 1;
  cm->reset_frame_context = 0;
  vp9_reset_segment_features(&cm->seg);
  vp9_set_high_precision_mv(cpi, 0);
  for (i = 0; i < MAX_SEGMENTS; i++)
    cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
  cpi->encode_breakout = cpi->oxcf.encode_breakout;
  set_rc_buffer_sizes(rc, &cpi->oxcf);
  // Under a configuration change, where maximum_buffer_size may change,
  // keep buffer level clipped to the maximum allowed buffer size.
  rc->bits_off_target = MIN(rc->bits_off_target, rc->maximum_buffer_size);
  rc->buffer_level = MIN(rc->buffer_level, rc->maximum_buffer_size);
  // Set up frame rate and related parameters rate control values.
  vp9_new_framerate(cpi, cpi->framerate);
  // Set absolute upper and lower quality limits
  rc->worst_quality = cpi->oxcf.worst_allowed_q;
  rc->best_quality = cpi->oxcf.best_allowed_q;
  cm->interp_filter = cpi->sf.default_interp_filter;
  cm->display_width = cpi->oxcf.width;
  cm->display_height = cpi->oxcf.height;
  if (cpi->initial_width) {
    // Increasing the size of the frame beyond the first seen frame, or some
    // otherwise signaled maximum size, is not supported.
    // TODO(jkoleszar): exit gracefully.
    assert(cm->width <= cpi->initial_width);
    assert(cm->height <= cpi->initial_height);
  update_frame_size(cpi);
  if ((cpi->svc.number_temporal_layers > 1 &&
       cpi->oxcf.rc_mode == VPX_CBR) ||
      ((cpi->svc.number_temporal_layers > 1 ||
        cpi->svc.number_spatial_layers > 1) &&
       cpi->oxcf.pass == 2)) {
    vp9_update_layer_context_change_config(cpi,
                                           (int)cpi->oxcf.target_bandwidth);
  cpi->alt_ref_source = NULL;
  rc->is_src_frame_alt_ref = 0;
  // Experimental RD Code
  cpi->frame_distortion = 0;
  cpi->last_frame_distortion = 0;
  set_tile_limits(cpi);
  cpi->ext_refresh_frame_flags_pending = 0;
  cpi->ext_refresh_frame_context_pending = 0;
#if CONFIG_VP9_TEMPORAL_DENOISING
  if (cpi->oxcf.noise_sensitivity > 0) {
    vp9_denoiser_alloc(&(cpi->denoiser), cm->width, cm->height,
                       cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
                       cm->use_highbitdepth,
                       VP9_ENC_BORDER_IN_PIXELS);
699 #define M_LOG2_E 0.693147180559945309417
701 #define log2f(x) (log (x) / (float) M_LOG2_E)
// Initializes the SAD-domain costs for the four motion-vector "joint"
// classes (index 0: both components zero; 1: only one component nonzero
// (hor); 2: only one nonzero (vert); 3: both nonzero). The values are
// fixed constants; the zero-MV class is costed highest.
static void cal_nmvjointsadcost(int *mvjointsadcost) {
  mvjointsadcost[0] = 600;
  mvjointsadcost[1] = 300;
  mvjointsadcost[2] = 300;
  mvjointsadcost[3] = 300;
}
710 static void cal_nmvsadcosts(int *mvsadcost[2]) {
717 double z = 256 * (2 * (log2f(8 * i) + .6));
718 mvsadcost[0][i] = (int)z;
719 mvsadcost[1][i] = (int)z;
720 mvsadcost[0][-i] = (int)z;
721 mvsadcost[1][-i] = (int)z;
722 } while (++i <= MV_MAX);
725 static void cal_nmvsadcosts_hp(int *mvsadcost[2]) {
732 double z = 256 * (2 * (log2f(8 * i) + .6));
733 mvsadcost[0][i] = (int)z;
734 mvsadcost[1][i] = (int)z;
735 mvsadcost[0][-i] = (int)z;
736 mvsadcost[1][-i] = (int)z;
737 } while (++i <= MV_MAX);
// Allocates and fully initializes a VP9_COMP encoder instance from the
// given config: configuration, rate control, all per-instance maps and MV
// cost tables, internal-stats accumulators, first/second-pass (and
// spatial-SVC two-pass) stats plumbing, speed features, the per-block-size
// SAD/variance function pointer table, the quantizer, and the loop filter.
// On any internal allocation failure the setjmp error path tears the
// instance down via vp9_remove_compressor().
// NOTE(review): this excerpt elides many lines (local declarations, else
// branches, closing braces, the final `return cpi;`); visible lines are
// kept byte-for-byte.
VP9_COMP *vp9_create_compressor(VP9EncoderConfig *oxcf) {
  VP9_COMP *const cpi = vpx_memalign(32, sizeof(VP9_COMP));
  VP9_COMMON *const cm = cpi != NULL ? &cpi->common : NULL;
  // Error trampoline: CHECK_MEM_ERROR and vpx_internal_error longjmp here.
  if (setjmp(cm->error.jmp)) {
    cm->error.setjmp = 0;
    vp9_remove_compressor(cpi);
  cm->error.setjmp = 1;
  init_config(cpi, oxcf);
  vp9_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
  cm->current_video_frame = 0;
  cpi->skippable_frame = 0;
  // Create the encoder segmentation map and set all entries to 0
  CHECK_MEM_ERROR(cm, cpi->segmentation_map,
                  vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
  // Create a complexity map used for rd adjustment
  CHECK_MEM_ERROR(cm, cpi->complexity_map,
                  vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
  // Create a map used for cyclic background refresh.
  CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
                  vp9_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
  // And a place holder structure is the coding context
  // for use if we want to save and restore it
  CHECK_MEM_ERROR(cm, cpi->coding_context.last_frame_seg_map_copy,
                  vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
  // Heap-allocated MV cost tables, freed in dealloc_compressor_data().
  CHECK_MEM_ERROR(cm, cpi->nmvcosts[0],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[0])));
  CHECK_MEM_ERROR(cm, cpi->nmvcosts[1],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[1])));
  CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[0],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[0])));
  CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[1],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[1])));
  CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[0],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[0])));
  CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[1],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[1])));
  CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[0],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[0])));
  CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[1],
                  vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[1])));
  for (i = 0; i < (sizeof(cpi->mbgraph_stats) /
                   sizeof(cpi->mbgraph_stats[0])); i++) {
    CHECK_MEM_ERROR(cm, cpi->mbgraph_stats[i].mb_stats,
                    sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
#if CONFIG_FP_MB_STATS
  cpi->use_fp_mb_stats = 0;
  if (cpi->use_fp_mb_stats) {
    // a place holder used to store the first pass mb stats in the first pass
    CHECK_MEM_ERROR(cm, cpi->twopass.frame_mb_stats_buf,
                    vpx_calloc(cm->MBs * sizeof(uint8_t), 1));
    cpi->twopass.frame_mb_stats_buf = NULL;
  cpi->refresh_alt_ref_frame = 0;
  // Note that at the moment multi_arf will not work with svc.
  // For the current check in all the execution paths are defaulted to 0
  // pending further tuning and testing. The code is left in place here
  // as a place holder in regard to the required paths.
  cpi->multi_arf_last_grp_enabled = 0;
  if (oxcf->pass == 2) {
    cpi->multi_arf_allowed = 0;
    cpi->multi_arf_enabled = 0;
    // Disable by default for now.
    cpi->multi_arf_allowed = 0;
    cpi->multi_arf_enabled = 0;
  cpi->multi_arf_allowed = 0;
  cpi->multi_arf_enabled = 0;
  cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
#if CONFIG_INTERNAL_STATS
  cpi->b_calculate_ssimg = 0;
  // Zero the PSNR/SSIM accumulators reported at teardown.
  if (cpi->b_calculate_psnr) {
    cpi->total_sq_error = 0;
    cpi->total_samples = 0;
    cpi->totalp_sq_error = 0;
    cpi->totalp_samples = 0;
    cpi->tot_recode_hits = 0;
    cpi->summed_quality = 0;
    cpi->summed_weights = 0;
    cpi->summedp_quality = 0;
    cpi->summedp_weights = 0;
  if (cpi->b_calculate_ssimg) {
    cpi->total_ssimg_y = 0;
    cpi->total_ssimg_u = 0;
    cpi->total_ssimg_v = 0;
    cpi->total_ssimg_all = 0;
  cpi->first_time_stamp_ever = INT64_MAX;
  // Point the MB-level cost table pointers at the centers of the allocated
  // arrays so they can be indexed by signed MV components.
  cal_nmvjointsadcost(cpi->mb.nmvjointsadcost);
  cpi->mb.nmvcost[0] = &cpi->nmvcosts[0][MV_MAX];
  cpi->mb.nmvcost[1] = &cpi->nmvcosts[1][MV_MAX];
  cpi->mb.nmvsadcost[0] = &cpi->nmvsadcosts[0][MV_MAX];
  cpi->mb.nmvsadcost[1] = &cpi->nmvsadcosts[1][MV_MAX];
  cal_nmvsadcosts(cpi->mb.nmvsadcost);
  cpi->mb.nmvcost_hp[0] = &cpi->nmvcosts_hp[0][MV_MAX];
  cpi->mb.nmvcost_hp[1] = &cpi->nmvcosts_hp[1][MV_MAX];
  cpi->mb.nmvsadcost_hp[0] = &cpi->nmvsadcosts_hp[0][MV_MAX];
  cpi->mb.nmvsadcost_hp[1] = &cpi->nmvsadcosts_hp[1][MV_MAX];
  cal_nmvsadcosts_hp(cpi->mb.nmvsadcost_hp);
#if CONFIG_VP9_TEMPORAL_DENOISING
#ifdef OUTPUT_YUV_DENOISED
  yuv_denoised_file = fopen("denoised.yuv", "ab");
#ifdef OUTPUT_YUV_REC
  yuv_rec_file = fopen("rec.yuv", "wb");
  framepsnr = fopen("framepsnr.stt", "a");
  kf_list = fopen("kf_list.stt", "w");
  cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
  if (oxcf->pass == 1) {
    vp9_init_first_pass(cpi);
  } else if (oxcf->pass == 2) {
    const size_t packet_sz = sizeof(FIRSTPASS_STATS);
    const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
    if (cpi->svc.number_spatial_layers > 1
        || cpi->svc.number_temporal_layers > 1) {
      // Scalable two-pass: split the combined stats stream into per-layer
      // buffers. The last packets (one per layer) carry layer totals.
      FIRSTPASS_STATS *const stats = oxcf->two_pass_stats_in.buf;
      FIRSTPASS_STATS *stats_copy[VPX_SS_MAX_LAYERS] = {0};
      for (i = 0; i < oxcf->ss_number_layers; ++i) {
        FIRSTPASS_STATS *const last_packet_for_layer =
            &stats[packets - oxcf->ss_number_layers + i];
        const int layer_id = (int)last_packet_for_layer->spatial_layer_id;
        const int packets_in_layer = (int)last_packet_for_layer->count + 1;
        if (layer_id >= 0 && layer_id < oxcf->ss_number_layers) {
          LAYER_CONTEXT *const lc = &cpi->svc.layer_context[layer_id];
          vpx_free(lc->rc_twopass_stats_in.buf);
          lc->rc_twopass_stats_in.sz = packets_in_layer * packet_sz;
          CHECK_MEM_ERROR(cm, lc->rc_twopass_stats_in.buf,
                          vpx_malloc(lc->rc_twopass_stats_in.sz));
          lc->twopass.stats_in_start = lc->rc_twopass_stats_in.buf;
          lc->twopass.stats_in = lc->twopass.stats_in_start;
          lc->twopass.stats_in_end = lc->twopass.stats_in_start
              + packets_in_layer - 1;
          stats_copy[layer_id] = lc->rc_twopass_stats_in.buf;
      // Demultiplex each packet into its layer's buffer.
      for (i = 0; i < packets; ++i) {
        const int layer_id = (int)stats[i].spatial_layer_id;
        if (layer_id >= 0 && layer_id < oxcf->ss_number_layers
            && stats_copy[layer_id] != NULL) {
          *stats_copy[layer_id] = stats[i];
          ++stats_copy[layer_id];
      vp9_init_second_pass_spatial_svc(cpi);
#if CONFIG_FP_MB_STATS
      if (cpi->use_fp_mb_stats) {
        const size_t psz = cpi->common.MBs * sizeof(uint8_t);
        const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
        cpi->twopass.firstpass_mb_stats.mb_stats_start =
            oxcf->firstpass_mb_stats_in.buf;
        cpi->twopass.firstpass_mb_stats.mb_stats_end =
            cpi->twopass.firstpass_mb_stats.mb_stats_start +
            (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
      cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
      cpi->twopass.stats_in = cpi->twopass.stats_in_start;
      cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
      vp9_init_second_pass(cpi);
  vp9_set_speed_features(cpi);
  // Allocate memory to store variances for a frame.
  CHECK_MEM_ERROR(cm, cpi->source_diff_var,
                  vpx_calloc(cm->MBs, sizeof(diff)));
  cpi->source_var_thresh = 0;
  cpi->frames_till_next_var_check = 0;
  // Default rd threshold factors for mode selection
  for (i = 0; i < BLOCK_SIZES; ++i) {
    for (j = 0; j < MAX_MODES; ++j)
      cpi->rd.thresh_freq_fact[i][j] = 32;
// Helper macro: fill one entry of the per-block-size SAD/variance
// function-pointer table.
#define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX3F, SDX8F, SDX4DF)\
    cpi->fn_ptr[BT].sdf            = SDF; \
    cpi->fn_ptr[BT].sdaf           = SDAF; \
    cpi->fn_ptr[BT].vf             = VF; \
    cpi->fn_ptr[BT].svf            = SVF; \
    cpi->fn_ptr[BT].svaf           = SVAF; \
    cpi->fn_ptr[BT].sdx3f          = SDX3F; \
    cpi->fn_ptr[BT].sdx8f          = SDX8F; \
    cpi->fn_ptr[BT].sdx4df         = SDX4DF;
  BFP(BLOCK_32X16, vp9_sad32x16, vp9_sad32x16_avg,
      vp9_variance32x16, vp9_sub_pixel_variance32x16,
      vp9_sub_pixel_avg_variance32x16, NULL, NULL, vp9_sad32x16x4d)
  BFP(BLOCK_16X32, vp9_sad16x32, vp9_sad16x32_avg,
      vp9_variance16x32, vp9_sub_pixel_variance16x32,
      vp9_sub_pixel_avg_variance16x32, NULL, NULL, vp9_sad16x32x4d)
  BFP(BLOCK_64X32, vp9_sad64x32, vp9_sad64x32_avg,
      vp9_variance64x32, vp9_sub_pixel_variance64x32,
      vp9_sub_pixel_avg_variance64x32, NULL, NULL, vp9_sad64x32x4d)
  BFP(BLOCK_32X64, vp9_sad32x64, vp9_sad32x64_avg,
      vp9_variance32x64, vp9_sub_pixel_variance32x64,
      vp9_sub_pixel_avg_variance32x64, NULL, NULL, vp9_sad32x64x4d)
  BFP(BLOCK_32X32, vp9_sad32x32, vp9_sad32x32_avg,
      vp9_variance32x32, vp9_sub_pixel_variance32x32,
      vp9_sub_pixel_avg_variance32x32, vp9_sad32x32x3, vp9_sad32x32x8,
  BFP(BLOCK_64X64, vp9_sad64x64, vp9_sad64x64_avg,
      vp9_variance64x64, vp9_sub_pixel_variance64x64,
      vp9_sub_pixel_avg_variance64x64, vp9_sad64x64x3, vp9_sad64x64x8,
  BFP(BLOCK_16X16, vp9_sad16x16, vp9_sad16x16_avg,
      vp9_variance16x16, vp9_sub_pixel_variance16x16,
      vp9_sub_pixel_avg_variance16x16, vp9_sad16x16x3, vp9_sad16x16x8,
  BFP(BLOCK_16X8, vp9_sad16x8, vp9_sad16x8_avg,
      vp9_variance16x8, vp9_sub_pixel_variance16x8,
      vp9_sub_pixel_avg_variance16x8,
      vp9_sad16x8x3, vp9_sad16x8x8, vp9_sad16x8x4d)
  BFP(BLOCK_8X16, vp9_sad8x16, vp9_sad8x16_avg,
      vp9_variance8x16, vp9_sub_pixel_variance8x16,
      vp9_sub_pixel_avg_variance8x16,
      vp9_sad8x16x3, vp9_sad8x16x8, vp9_sad8x16x4d)
  BFP(BLOCK_8X8, vp9_sad8x8, vp9_sad8x8_avg,
      vp9_variance8x8, vp9_sub_pixel_variance8x8,
      vp9_sub_pixel_avg_variance8x8,
      vp9_sad8x8x3, vp9_sad8x8x8, vp9_sad8x8x4d)
  BFP(BLOCK_8X4, vp9_sad8x4, vp9_sad8x4_avg,
      vp9_variance8x4, vp9_sub_pixel_variance8x4,
      vp9_sub_pixel_avg_variance8x4, NULL, vp9_sad8x4x8, vp9_sad8x4x4d)
  BFP(BLOCK_4X8, vp9_sad4x8, vp9_sad4x8_avg,
      vp9_variance4x8, vp9_sub_pixel_variance4x8,
      vp9_sub_pixel_avg_variance4x8, NULL, vp9_sad4x8x8, vp9_sad4x8x4d)
  BFP(BLOCK_4X4, vp9_sad4x4, vp9_sad4x4_avg,
      vp9_variance4x4, vp9_sub_pixel_variance4x4,
      vp9_sub_pixel_avg_variance4x4,
      vp9_sad4x4x3, vp9_sad4x4x8, vp9_sad4x4x4d)
  /* vp9_init_quantizer() is first called here. Add check in
   * vp9_frame_init_quantizer() so that vp9_init_quantizer is only
   * called later when needed. This will avoid unnecessary calls of
   * vp9_init_quantizer() for every frame.
  vp9_init_quantizer(cpi);
  vp9_loop_filter_init(cm);
  cm->error.setjmp = 0;
// Tears down a compressor instance: when internal stats are enabled and at
// least one frame was coded, appends PSNR/SSIM and timing summaries to
// "opsnr.stt" and stdout; then frees the denoiser, all compressor data,
// mbgraph stats, first-pass MB stats, common state, and any debug dump
// files.
// NOTE(review): this excerpt elides several lines (local declarations,
// `#endif`s, closing braces, fclose(f), and the final vpx_free(cpi));
// visible lines are kept byte-for-byte.
void vp9_remove_compressor(VP9_COMP *cpi) {
  if (cpi && (cpi->common.current_video_frame > 0)) {
#if CONFIG_INTERNAL_STATS
    vp9_clear_system_state();
    // printf("\n8x8-4x4:%d-%d\n", cpi->t8x8_count, cpi->t4x4_count);
    if (cpi->oxcf.pass != 1) {
      FILE *f = fopen("opsnr.stt", "a");
      // Timestamps are in 1/10,000,000 s units; timers in microseconds.
      double time_encoded = (cpi->last_end_time_stamp_seen
                             - cpi->first_time_stamp_ever) / 10000000.000;
      double total_encode_time = (cpi->time_receive_data +
                                  cpi->time_compress_data) / 1000.000;
      double dr = (double)cpi->bytes * (double) 8 / (double)1000
      if (cpi->b_calculate_psnr) {
        const double total_psnr =
            vpx_sse_to_psnr((double)cpi->total_samples, 255.0,
                            (double)cpi->total_sq_error);
        const double totalp_psnr =
            vpx_sse_to_psnr((double)cpi->totalp_samples, 255.0,
                            (double)cpi->totalp_sq_error);
        const double total_ssim = 100 * pow(cpi->summed_quality /
                                            cpi->summed_weights, 8.0);
        const double totalp_ssim = 100 * pow(cpi->summedp_quality /
                                             cpi->summedp_weights, 8.0);
        fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
                "VPXSSIM\tVPSSIMP\t Time(ms)\n");
        fprintf(f, "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t%8.0f\n",
                dr, cpi->total / cpi->count, total_psnr,
                cpi->totalp / cpi->count, totalp_psnr, total_ssim, totalp_ssim,
      if (cpi->b_calculate_ssimg) {
        fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t Time(ms)\n");
        fprintf(f, "%7.2f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
                cpi->total_ssimg_y / cpi->count,
                cpi->total_ssimg_u / cpi->count,
                cpi->total_ssimg_v / cpi->count,
                cpi->total_ssimg_all / cpi->count, total_encode_time);
    printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
    printf("\n_frames recive_data encod_mb_row compress_frame Total\n");
    printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame,
           cpi->time_receive_data / 1000, cpi->time_encode_sb_row / 1000,
           cpi->time_compress_data / 1000,
           (cpi->time_receive_data + cpi->time_compress_data) / 1000);
#if CONFIG_VP9_TEMPORAL_DENOISING
  if (cpi->oxcf.noise_sensitivity > 0) {
    vp9_denoiser_free(&(cpi->denoiser));
  dealloc_compressor_data(cpi);
  for (i = 0; i < sizeof(cpi->mbgraph_stats) /
              sizeof(cpi->mbgraph_stats[0]); ++i) {
    vpx_free(cpi->mbgraph_stats[i].mb_stats);
#if CONFIG_FP_MB_STATS
  if (cpi->use_fp_mb_stats) {
    vpx_free(cpi->twopass.frame_mb_stats_buf);
    cpi->twopass.frame_mb_stats_buf = NULL;
  vp9_remove_common(&cpi->common);
#if CONFIG_VP9_TEMPORAL_DENOISING
#ifdef OUTPUT_YUV_DENOISED
  fclose(yuv_denoised_file);
#ifdef OUTPUT_YUV_REC
  fclose(yuv_rec_file);
// Returns the total sum of squared error between planes a and b of the
// given dimensions. The width%16 column strip and height%16 row strip
// remainders are handled with a generic variance() call; the 16x16-aligned
// interior is accumulated tile-by-tile with vp9_mse16x16.
1184 static int64_t get_sse(const uint8_t *a, int a_stride,
1185 const uint8_t *b, int b_stride,
1186 int width, int height) {
1187 const int dw = width % 16;
1188 const int dh = height % 16;
1189 int64_t total_sse = 0;
1190 unsigned int sse = 0;
// Right-edge strip (dw x height), when width is not a multiple of 16.
1195 variance(&a[width - dw], a_stride, &b[width - dw], b_stride,
1196 dw, height, &sse, &sum);
// Bottom-edge strip ((width - dw) x dh), when height is not a multiple
// of 16.
1201 variance(&a[(height - dh) * a_stride], a_stride,
1202 &b[(height - dh) * b_stride], b_stride,
1203 width - dw, dh, &sse, &sum);
// 16x16 tiles over the aligned interior.
1207 for (y = 0; y < height / 16; ++y) {
1208 const uint8_t *pa = a;
1209 const uint8_t *pb = b;
1210 for (x = 0; x < width / 16; ++x) {
1211 vp9_mse16x16(pa, a_stride, pb, b_stride, &sse);
1226 double psnr[4]; // total/y/u/v
1227 uint64_t sse[4]; // total/y/u/v
1228 uint32_t samples[4]; // total/y/u/v
// Fills a PSNR struct with SSE, sample count and PSNR for each plane of
// source a vs. reconstruction b, plus the combined (all-plane) figures.
// Slot 0 of each output array is the combined total; slots 1..3 are y/u/v.
1231 static void calc_psnr(const YV12_BUFFER_CONFIG *a, const YV12_BUFFER_CONFIG *b,
1233 const int widths[3] = {a->y_width, a->uv_width, a->uv_width };
1234 const int heights[3] = {a->y_height, a->uv_height, a->uv_height};
1235 const uint8_t *a_planes[3] = {a->y_buffer, a->u_buffer, a->v_buffer };
1236 const int a_strides[3] = {a->y_stride, a->uv_stride, a->uv_stride};
1237 const uint8_t *b_planes[3] = {b->y_buffer, b->u_buffer, b->v_buffer };
1238 const int b_strides[3] = {b->y_stride, b->uv_stride, b->uv_stride};
1240 uint64_t total_sse = 0;
1241 uint32_t total_samples = 0;
1243 for (i = 0; i < 3; ++i) {
1244 const int w = widths[i];
1245 const int h = heights[i];
1246 const uint32_t samples = w * h;
1247 const uint64_t sse = get_sse(a_planes[i], a_strides[i],
1248 b_planes[i], b_strides[i],
// Per-plane results go in slot 1 + i; slot 0 is reserved for the total.
1250 psnr->sse[1 + i] = sse;
1251 psnr->samples[1 + i] = samples;
1252 psnr->psnr[1 + i] = vpx_sse_to_psnr(samples, 255.0, (double)sse);
1255 total_samples += samples;
// Combined figures across all three planes.
1258 psnr->sse[0] = total_sse;
1259 psnr->samples[0] = total_samples;
1260 psnr->psnr[0] = vpx_sse_to_psnr((double)total_samples, 255.0,
// Computes PSNR between the (possibly scaled) source and the frame being
// shown, then appends a VPX_CODEC_PSNR_PKT carrying the four
// total/y/u/v entries to the encoder's output packet list.
1264 static void generate_psnr_packet(VP9_COMP *cpi) {
1265 struct vpx_codec_cx_pkt pkt;
1268 calc_psnr(cpi->Source, cpi->common.frame_to_show, &psnr);
1269 for (i = 0; i < 4; ++i) {
1270 pkt.data.psnr.samples[i] = psnr.samples[i];
1271 pkt.data.psnr.sse[i] = psnr.sse[i];
1272 pkt.data.psnr.psnr[i] = psnr.psnr[i];
1274 pkt.kind = VPX_CODEC_PSNR_PKT;
1275 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
// Records which reference frames may be used for prediction of the next
// frame. Only the low three bits (LAST/GOLD/ALT) are meaningful, so any
// value above 7 is rejected.
1278 int vp9_use_as_reference(VP9_COMP *cpi, int ref_frame_flags) {
1279 if (ref_frame_flags > 7)
1282 cpi->ref_frame_flags = ref_frame_flags;
// Latches externally requested reference-buffer refresh flags. The
// pending flag makes set_ext_overrides() apply them to exactly one
// subsequently encoded frame.
1286 void vp9_update_reference(VP9_COMP *cpi, int ref_frame_flags) {
1287 cpi->ext_refresh_golden_frame = (ref_frame_flags & VP9_GOLD_FLAG) != 0;
1288 cpi->ext_refresh_alt_ref_frame = (ref_frame_flags & VP9_ALT_FLAG) != 0;
1289 cpi->ext_refresh_last_frame = (ref_frame_flags & VP9_LAST_FLAG) != 0;
1290 cpi->ext_refresh_frame_flags_pending = 1;
// Maps a single VP9_REFFRAME flag (LAST/GOLD/ALT) to the corresponding
// reference frame buffer; returns NULL when the flag names no valid
// single reference.
1293 static YV12_BUFFER_CONFIG *get_vp9_ref_frame_buffer(VP9_COMP *cpi,
1294 VP9_REFFRAME ref_frame_flag) {
1295 MV_REFERENCE_FRAME ref_frame = NONE;
1296 if (ref_frame_flag == VP9_LAST_FLAG)
1297 ref_frame = LAST_FRAME;
1298 else if (ref_frame_flag == VP9_GOLD_FLAG)
1299 ref_frame = GOLDEN_FRAME;
1300 else if (ref_frame_flag == VP9_ALT_FLAG)
1301 ref_frame = ALTREF_FRAME;
1303 return ref_frame == NONE ? NULL : get_ref_frame_buffer(cpi, ref_frame);
// Copies the reference buffer selected by ref_frame_flag into the
// caller-provided frame sd.
1306 int vp9_copy_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
1307 YV12_BUFFER_CONFIG *sd) {
1308 YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
1310 vp8_yv12_copy_frame(cfg, sd);
// Overwrites the reference buffer selected by ref_frame_flag with the
// caller-provided frame sd (inverse of vp9_copy_reference_enc).
1317 int vp9_set_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
1318 YV12_BUFFER_CONFIG *sd) {
1319 YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
1321 vp8_yv12_copy_frame(sd, cfg);
// Latches an external request to enable/disable frame-context (entropy)
// updates; applied to the next frame via set_ext_overrides().
1328 int vp9_update_entropy(VP9_COMP * cpi, int update) {
1329 cpi->ext_refresh_frame_context = update;
1330 cpi->ext_refresh_frame_context_pending = 1;
1334 #if CONFIG_VP9_TEMPORAL_DENOISING
1335 #if defined(OUTPUT_YUV_DENOISED)
1336 // The denoiser buffer is allocated as a YUV 440 buffer. This function writes it
1337 // as YUV 420. We simply use the top-left pixels of the UV buffers, since we do
1338 // not denoise the UV channels at this time. If ever we implement UV channel
1339 // denoising we will have to modify this.
1340 void vp9_write_yuv_frame_420(YV12_BUFFER_CONFIG *s, FILE *f) {
1341 uint8_t *src = s->y_buffer;
1342 int h = s->y_height;
// Luma plane: written at full resolution, one row per iteration.
1345 fwrite(src, s->y_width, 1, f);
// U plane: half the rows, half-width rows, per the 440->420 note above.
1350 h = s->uv_height / 2;
1353 fwrite(src, s->uv_width / 2, 1, f);
1354 src += s->uv_stride + s->uv_width / 2;
// V plane: same treatment as U.
1358 h = s->uv_height / 2;
1361 fwrite(src, s->uv_width / 2, 1, f);
1362 src += s->uv_stride + s->uv_width / 2;
1368 #ifdef OUTPUT_YUV_REC
// Debug helper: dumps the reconstructed (shown) frame to yuv_rec_file,
// plane by plane, then flushes so the file is valid after every frame.
1369 void vp9_write_yuv_rec_frame(VP9_COMMON *cm) {
1370 YV12_BUFFER_CONFIG *s = cm->frame_to_show;
1371 uint8_t *src = s->y_buffer;
// Luma rows.
1375 fwrite(src, s->y_width, 1, yuv_rec_file);
// U plane rows.
1383 fwrite(src, s->uv_width, 1, yuv_rec_file);
1384 src += s->uv_stride;
// V plane rows.
1391 fwrite(src, s->uv_width, 1, yuv_rec_file);
1392 src += s->uv_stride;
1395 fflush(yuv_rec_file);
// Rescales src into dst using the generic (non-normative) resampler
// vp9_resize_plane on each of the three planes, then regenerates dst's
// extended border pixels. Used where bit-exact normative scaling is not
// required.
1399 static void scale_and_extend_frame_nonnormative(const YV12_BUFFER_CONFIG *src,
1400 YV12_BUFFER_CONFIG *dst) {
1401 // TODO(dkovalev): replace YV12_BUFFER_CONFIG with vpx_image_t
1403 const uint8_t *const srcs[3] = {src->y_buffer, src->u_buffer, src->v_buffer};
1404 const int src_strides[3] = {src->y_stride, src->uv_stride, src->uv_stride};
1405 const int src_widths[3] = {src->y_crop_width, src->uv_crop_width,
1406 src->uv_crop_width };
1407 const int src_heights[3] = {src->y_crop_height, src->uv_crop_height,
1408 src->uv_crop_height};
1409 uint8_t *const dsts[3] = {dst->y_buffer, dst->u_buffer, dst->v_buffer};
1410 const int dst_strides[3] = {dst->y_stride, dst->uv_stride, dst->uv_stride};
1411 const int dst_widths[3] = {dst->y_crop_width, dst->uv_crop_width,
1412 dst->uv_crop_width};
1413 const int dst_heights[3] = {dst->y_crop_height, dst->uv_crop_height,
1414 dst->uv_crop_height};
1416 for (i = 0; i < MAX_MB_PLANE; ++i)
1417 vp9_resize_plane(srcs[i], src_heights[i], src_widths[i], src_strides[i],
1418 dsts[i], dst_heights[i], dst_widths[i], dst_strides[i]);
1420 vp9_extend_frame_borders(dst);
// Rescales src into dst using the normative 8-tap (EIGHTTAP) convolution.
// The destination is processed in 16x16 tiles (16/factor per side for the
// subsampled chroma planes); the border pixels of dst are regenerated at
// the end.
1423 static void scale_and_extend_frame(const YV12_BUFFER_CONFIG *src,
1424 YV12_BUFFER_CONFIG *dst) {
1425 const int src_w = src->y_crop_width;
1426 const int src_h = src->y_crop_height;
1427 const int dst_w = dst->y_crop_width;
1428 const int dst_h = dst->y_crop_height;
1429 const uint8_t *const srcs[3] = {src->y_buffer, src->u_buffer, src->v_buffer};
1430 const int src_strides[3] = {src->y_stride, src->uv_stride, src->uv_stride};
1431 uint8_t *const dsts[3] = {dst->y_buffer, dst->u_buffer, dst->v_buffer};
1432 const int dst_strides[3] = {dst->y_stride, dst->uv_stride, dst->uv_stride};
1433 const InterpKernel *const kernel = vp9_get_interp_kernel(EIGHTTAP);
1436 for (y = 0; y < dst_h; y += 16) {
1437 for (x = 0; x < dst_w; x += 16) {
1438 for (i = 0; i < MAX_MB_PLANE; ++i) {
// factor is 1 for the luma plane, 2 for the subsampled chroma planes.
1439 const int factor = (i == 0 || i == 3 ? 1 : 2);
// Source position in q4 (1/16-pel) units, used to pick the filter phase.
1440 const int x_q4 = x * (16 / factor) * src_w / dst_w;
1441 const int y_q4 = y * (16 / factor) * src_h / dst_h;
1442 const int src_stride = src_strides[i];
1443 const int dst_stride = dst_strides[i];
1444 const uint8_t *src_ptr = srcs[i] + (y / factor) * src_h / dst_h *
1445 src_stride + (x / factor) * src_w / dst_w;
1446 uint8_t *dst_ptr = dsts[i] + (y / factor) * dst_stride + (x / factor);
// Steps of 16 * src/dst ratio give the subpel increment per output pixel.
1448 vp9_convolve8(src_ptr, src_stride, dst_ptr, dst_stride,
1449 kernel[x_q4 & 0xf], 16 * src_w / dst_w,
1450 kernel[y_q4 & 0xf], 16 * src_h / dst_h,
1451 16 / factor, 16 / factor);
1456 vp9_extend_frame_borders(dst);
1459 // Function to test for conditions that indicate we should loop
1460 // back and recode a frame.
// Returns non-zero when the projected frame size is unacceptable:
// always when the hard max_frame_bandwidth is exceeded; otherwise only
// when recoding is allowed for this frame type and either the size is
// outside [low_limit, high_limit] with q still adjustable, or (VPX_CQ
// mode) the frame undershot while q sits above the configured cq level.
1461 static int recode_loop_test(const VP9_COMP *cpi,
1462 int high_limit, int low_limit,
1463 int q, int maxq, int minq) {
1464 const VP9_COMMON *const cm = &cpi->common;
1465 const RATE_CONTROL *const rc = &cpi->rc;
1466 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
1467 int force_recode = 0;
1469 // Special case trap if maximum allowed frame size exceeded.
1470 if (rc->projected_frame_size > rc->max_frame_bandwidth) {
1473 // Is frame recode allowed.
1474 // Yes if either recode mode 1 is selected or mode 2 is selected
1475 // and the frame is a key frame, golden frame or alt_ref_frame
1476 } else if ((cpi->sf.recode_loop == ALLOW_RECODE) ||
1477 ((cpi->sf.recode_loop == ALLOW_RECODE_KFARFGF) &&
1478 (cm->frame_type == KEY_FRAME ||
1479 cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame))) {
1480 // General over and under shoot tests
1481 if ((rc->projected_frame_size > high_limit && q < maxq) ||
1482 (rc->projected_frame_size < low_limit && q > minq)) {
1484 } else if (cpi->oxcf.rc_mode == VPX_CQ) {
1485 // Deal with frame undershoot and whether or not we are
1486 // below the automatically set cq level.
1487 if (q > oxcf->cq_level &&
1488 rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
1493 return force_recode;
// Performs the post-encode reference buffer bookkeeping: points the
// LAST/GOLDEN/ALTREF slots of the reference map at the newly coded frame
// according to the refresh flags, handles the golden-frame-preserved-as-ARF
// swap, records the interpolation filters chosen per reference, and lets
// the temporal denoiser mirror the same buffer updates.
1496 void vp9_update_reference_frames(VP9_COMP *cpi) {
1497 VP9_COMMON * const cm = &cpi->common;
1499 // At this point the new frame has been encoded.
1500 // If any buffer copy / swapping is signaled it should be done here.
1501 if (cm->frame_type == KEY_FRAME) {
// A key frame refreshes both GOLDEN and ALTREF with the new frame.
1502 ref_cnt_fb(cm->frame_bufs,
1503 &cm->ref_frame_map[cpi->gld_fb_idx], cm->new_fb_idx);
1504 ref_cnt_fb(cm->frame_bufs,
1505 &cm->ref_frame_map[cpi->alt_fb_idx], cm->new_fb_idx);
1506 } else if (vp9_preserve_existing_gf(cpi)) {
1507 // We have decided to preserve the previously existing golden frame as our
1508 // new ARF frame. However, in the short term in function
1509 // vp9_bitstream.c::get_refresh_mask() we left it in the GF slot and, if
1510 // we're updating the GF with the current decoded frame, we save it to the
1511 // ARF slot instead.
1512 // We now have to update the ARF with the current frame and swap gld_fb_idx
1513 // and alt_fb_idx so that, overall, we've stored the old GF in the new ARF
1514 // slot and, if we're updating the GF, the current frame becomes the new GF.
1517 ref_cnt_fb(cm->frame_bufs,
1518 &cm->ref_frame_map[cpi->alt_fb_idx], cm->new_fb_idx);
1520 tmp = cpi->alt_fb_idx;
1521 cpi->alt_fb_idx = cpi->gld_fb_idx;
1522 cpi->gld_fb_idx = tmp;
// Two-pass SVC keeps the base layer's view of these indices in sync.
1524 if (is_two_pass_svc(cpi)) {
1525 cpi->svc.layer_context[0].gold_ref_idx = cpi->gld_fb_idx;
1526 cpi->svc.layer_context[0].alt_ref_idx = cpi->alt_fb_idx;
1528 } else { /* For non key/golden frames */
1529 if (cpi->refresh_alt_ref_frame) {
1530 int arf_idx = cpi->alt_fb_idx;
// With multiple ARFs, the GF group dictates which ARF slot to update.
1531 if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
1532 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
1533 arf_idx = gf_group->arf_update_idx[gf_group->index];
1536 ref_cnt_fb(cm->frame_bufs,
1537 &cm->ref_frame_map[arf_idx], cm->new_fb_idx);
// Slot 0 of interp_filter_selected holds the current frame's counts.
1538 vpx_memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
1539 cpi->interp_filter_selected[0],
1540 sizeof(cpi->interp_filter_selected[0]));
1543 if (cpi->refresh_golden_frame) {
1544 ref_cnt_fb(cm->frame_bufs,
1545 &cm->ref_frame_map[cpi->gld_fb_idx], cm->new_fb_idx);
1546 if (!cpi->rc.is_src_frame_alt_ref)
1547 vpx_memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
1548 cpi->interp_filter_selected[0],
1549 sizeof(cpi->interp_filter_selected[0]));
// Overlay of an ARF: the GF inherits the ARF's filter statistics.
1551 vpx_memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
1552 cpi->interp_filter_selected[ALTREF_FRAME],
1553 sizeof(cpi->interp_filter_selected[ALTREF_FRAME]));
1557 if (cpi->refresh_last_frame) {
1558 ref_cnt_fb(cm->frame_bufs,
1559 &cm->ref_frame_map[cpi->lst_fb_idx], cm->new_fb_idx);
1560 if (!cpi->rc.is_src_frame_alt_ref)
1561 vpx_memcpy(cpi->interp_filter_selected[LAST_FRAME],
1562 cpi->interp_filter_selected[0],
1563 sizeof(cpi->interp_filter_selected[0]));
// Keep the denoiser's running-average buffers consistent with the
// reference updates performed above.
1565 #if CONFIG_VP9_TEMPORAL_DENOISING
1566 if (cpi->oxcf.noise_sensitivity > 0) {
1567 vp9_denoiser_update_frame_info(&cpi->denoiser,
1569 cpi->common.frame_type,
1570 cpi->refresh_alt_ref_frame,
1571 cpi->refresh_golden_frame,
1572 cpi->refresh_last_frame);
// Chooses a loop-filter strength for the current frame (timed for the
// internal stats), applies the in-loop deblocking filter when the chosen
// level is non-zero, then refreshes the frame's inner border extension.
1577 static void loopfilter_frame(VP9_COMP *cpi, VP9_COMMON *cm) {
1578 MACROBLOCKD *xd = &cpi->mb.e_mbd;
1579 struct loopfilter *lf = &cm->lf;
1581 lf->filter_level = 0;
1583 struct vpx_usec_timer timer;
1585 vp9_clear_system_state();
1587 vpx_usec_timer_start(&timer);
// Filter-level search; strategy comes from the active speed features.
1589 vp9_pick_filter_level(cpi->Source, cpi, cpi->sf.lpf_pick);
1591 vpx_usec_timer_mark(&timer);
1592 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
1595 if (lf->filter_level > 0) {
1596 vp9_loop_filter_frame(cm->frame_to_show, cm, xd, lf->filter_level, 0, 0);
1599 vp9_extend_frame_inner_borders(cm->frame_to_show);
// Ensures each active reference frame matches the current coded frame
// size. References whose crop dimensions differ are scaled into a freshly
// allocated frame buffer; matching references are reused directly with
// their ref_count bumped. scaled_ref_idx records the buffer to use per
// reference.
1602 void vp9_scale_references(VP9_COMP *cpi) {
1603 VP9_COMMON *cm = &cpi->common;
1604 MV_REFERENCE_FRAME ref_frame;
1605 const VP9_REFFRAME ref_mask[3] = {VP9_LAST_FLAG, VP9_GOLD_FLAG, VP9_ALT_FLAG};
1607 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
1608 const int idx = cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)];
1609 const YV12_BUFFER_CONFIG *const ref = &cm->frame_bufs[idx].buf;
1611 // Need to convert from VP9_REFFRAME to index into ref_mask (subtract 1).
1612 if ((cpi->ref_frame_flags & ref_mask[ref_frame - 1]) &&
1613 (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height)) {
1614 const int new_fb = get_free_fb(cm);
1615 vp9_realloc_frame_buffer(&cm->frame_bufs[new_fb].buf,
1616 cm->width, cm->height,
1617 cm->subsampling_x, cm->subsampling_y,
1618 #if CONFIG_VP9_HIGHBITDEPTH
1619 cm->use_highbitdepth,
1621 VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL);
1622 scale_and_extend_frame(ref, &cm->frame_bufs[new_fb].buf);
1623 cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
// Size already matches: reference the existing buffer directly.
1625 cpi->scaled_ref_idx[ref_frame - 1] = idx;
1626 cm->frame_bufs[idx].ref_count++;
// Drops the references taken by vp9_scale_references() on the three
// (possibly scaled) reference buffers.
1631 static void release_scaled_references(VP9_COMP *cpi) {
1632 VP9_COMMON *cm = &cpi->common;
1635 for (i = 0; i < 3; i++)
1636 cm->frame_bufs[cpi->scaled_ref_idx[i]].ref_count--;
// Collapses a full per-token count array into the model-token form:
// ZERO/ONE/TWO are kept, tokens THREE..(EOB-1) are folded into the
// TWO_TOKEN bucket, and EOB maps to EOB_MODEL_TOKEN.
1639 static void full_to_model_count(unsigned int *model_count,
1640 unsigned int *full_count) {
1642 model_count[ZERO_TOKEN] = full_count[ZERO_TOKEN];
1643 model_count[ONE_TOKEN] = full_count[ONE_TOKEN];
1644 model_count[TWO_TOKEN] = full_count[TWO_TOKEN];
1645 for (n = THREE_TOKEN; n < EOB_TOKEN; ++n)
1646 model_count[TWO_TOKEN] += full_count[n];
1647 model_count[EOB_MODEL_TOKEN] = full_count[EOB_TOKEN];
// Applies full_to_model_count over every (plane type, ref type, band,
// context) cell of a coefficient count table.
1650 static void full_to_model_counts(vp9_coeff_count_model *model_count,
1651 vp9_coeff_count *full_count) {
1654 for (i = 0; i < PLANE_TYPES; ++i)
1655 for (j = 0; j < REF_TYPES; ++j)
1656 for (k = 0; k < COEF_BANDS; ++k)
1657 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
1658 full_to_model_count(model_count[i][j][k][l], full_count[i][j][k][l]);
// Compiled out by default (#if 0): per-frame rate-control debug dump to
// "tmp.stt" plus a per-frame mode-usage dump to "Modes.stt". Kept for
// developers to re-enable locally.
1661 #if 0 && CONFIG_INTERNAL_STATS
1662 static void output_frame_level_debug_stats(VP9_COMP *cpi) {
1663 VP9_COMMON *const cm = &cpi->common;
// Truncate on the first frame, append afterwards.
1664 FILE *const f = fopen("tmp.stt", cm->current_video_frame ? "a" : "w");
1667 vp9_clear_system_state();
1669 recon_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
1671 if (cpi->twopass.total_left_stats.coded_error != 0.0)
1672 fprintf(f, "%10u %10d %10d %10d %10d"
1673 "%10"PRId64" %10"PRId64" %10"PRId64" %10"PRId64" %10d "
1674 "%7.2lf %7.2lf %7.2lf %7.2lf %7.2lf"
1675 "%6d %6d %5d %5d %5d "
1676 "%10"PRId64" %10.3lf"
1677 "%10lf %8u %10d %10d %10d\n",
1678 cpi->common.current_video_frame, cpi->rc.this_frame_target,
1679 cpi->rc.projected_frame_size,
1680 cpi->rc.projected_frame_size / cpi->common.MBs,
1681 (cpi->rc.projected_frame_size - cpi->rc.this_frame_target),
1682 cpi->rc.vbr_bits_off_target,
1683 cpi->rc.total_target_vs_actual,
1684 (cpi->rc.starting_buffer_level - cpi->rc.bits_off_target),
1685 cpi->rc.total_actual_bits, cm->base_qindex,
1686 vp9_convert_qindex_to_q(cm->base_qindex),
1687 (double)vp9_dc_quant(cm->base_qindex, 0) / 4.0,
1688 vp9_convert_qindex_to_q(cpi->twopass.active_worst_quality),
1690 vp9_convert_qindex_to_q(cpi->oxcf.cq_level),
1691 cpi->refresh_last_frame, cpi->refresh_golden_frame,
1692 cpi->refresh_alt_ref_frame, cm->frame_type, cpi->rc.gfu_boost,
1693 cpi->twopass.bits_left,
1694 cpi->twopass.total_left_stats.coded_error,
1695 cpi->twopass.bits_left /
1696 (1 + cpi->twopass.total_left_stats.coded_error),
1697 cpi->tot_recode_hits, recon_err, cpi->rc.kf_boost,
1698 cpi->twopass.kf_zeromotion_pct);
// Mode-usage counts per frame.
1703 FILE *const fmodes = fopen("Modes.stt", "a");
1706 fprintf(fmodes, "%6d:%1d:%1d:%1d ", cpi->common.current_video_frame,
1707 cm->frame_type, cpi->refresh_golden_frame,
1708 cpi->refresh_alt_ref_frame);
1710 for (i = 0; i < MAX_MODES; ++i)
1711 fprintf(fmodes, "%5d ", cpi->mode_chosen_counts[i]);
1713 fprintf(fmodes, "\n");
// Single-shot frame encode used when recoding is disallowed: sets the
// quantizer, applies the selected adaptive-quantization mode, and runs
// one encode pass with no size-based retry loop.
1720 static void encode_without_recode_loop(VP9_COMP *cpi,
1722 VP9_COMMON *const cm = &cpi->common;
1723 vp9_clear_system_state();
1724 vp9_set_quantizer(cm, q);
1726 // Variance adaptive and in frame q adjustment experiments are mutually
1728 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
1729 vp9_vaq_frame_setup(cpi);
1730 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
1731 vp9_setup_in_frame_q_adj(cpi);
1732 } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
1733 vp9_cyclic_refresh_setup(cpi);
1735 // transform / motion compensation build reconstruction frame
1736 vp9_encode_frame(cpi);
1738 // Update the skip mb flag probabilities based on the distribution
1739 // seen in the last encoder iteration.
1740 // update_base_skip_probs(cpi);
1741 vp9_clear_system_state();
// Iterative frame encode: encodes at quantizer q, estimates the packed
// size with a trial bitstream pack, and re-encodes with an adjusted q
// (binary-search style between q_low/q_high, guided by the rate
// correction factors) until the frame size falls inside the computed
// bounds or no further recode is warranted. Forced key frames get a
// dedicated path that also targets a reconstruction-error window.
1744 static void encode_with_recode_loop(VP9_COMP *cpi,
1750 VP9_COMMON *const cm = &cpi->common;
1751 RATE_CONTROL *const rc = &cpi->rc;
1754 int overshoot_seen = 0;
1755 int undershoot_seen = 0;
1756 int q_low = bottom_index, q_high = top_index;
1757 int frame_over_shoot_limit;
1758 int frame_under_shoot_limit;
1760 // Decide frame size bounds
1761 vp9_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
1762 &frame_under_shoot_limit,
1763 &frame_over_shoot_limit);
1766 vp9_clear_system_state();
1768 vp9_set_quantizer(cm, q);
1770 if (loop_count == 0)
1773 // Variance adaptive and in frame q adjustment experiments are mutually
1775 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
1776 vp9_vaq_frame_setup(cpi);
1777 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
1778 vp9_setup_in_frame_q_adj(cpi);
1781 // transform / motion compensation build reconstruction frame
1782 vp9_encode_frame(cpi);
1784 // Update the skip mb flag probabilities based on the distribution
1785 // seen in the last encoder iteration.
1786 // update_base_skip_probs(cpi);
1788 vp9_clear_system_state();
1790 // Dummy pack of the bitstream using up to date stats to get an
1791 // accurate estimate of output frame size to determine if we need
1793 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
1794 save_coding_context(cpi);
1795 if (!cpi->sf.use_nonrd_pick_mode)
1796 vp9_pack_bitstream(cpi, dest, size);
// size is in bytes; projected_frame_size is tracked in bits.
1798 rc->projected_frame_size = (int)(*size) << 3;
1799 restore_coding_context(cpi);
1801 if (frame_over_shoot_limit == 0)
1802 frame_over_shoot_limit = 1;
// Constant-quality mode never recodes for size.
1805 if (cpi->oxcf.rc_mode == VPX_Q) {
1808 if ((cm->frame_type == KEY_FRAME) &&
1809 rc->this_key_frame_forced &&
1810 (rc->projected_frame_size < rc->max_frame_bandwidth)) {
1812 int kf_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
// Target window: between half of and the full ambient error.
1814 int high_err_target = cpi->ambient_err;
1815 int low_err_target = cpi->ambient_err >> 1;
1817 // Prevent possible divide by zero error below for perfect KF
1820 // The key frame is not good enough or we can afford
1821 // to make it better without undue risk of popping.
1822 if ((kf_err > high_err_target &&
1823 rc->projected_frame_size <= frame_over_shoot_limit) ||
1824 (kf_err > low_err_target &&
1825 rc->projected_frame_size <= frame_under_shoot_limit)) {
// Lower q_high to force a better-quality re-encode.
1827 q_high = q > q_low ? q - 1 : q_low;
1830 q = (q * high_err_target) / kf_err;
1831 q = MIN(q, (q_high + q_low) >> 1);
1832 } else if (kf_err < low_err_target &&
1833 rc->projected_frame_size >= frame_under_shoot_limit) {
1834 // The key frame is much better than the previous frame
1836 q_low = q < q_high ? q + 1 : q_high;
1839 q = (q * low_err_target) / kf_err;
1840 q = MIN(q, (q_high + q_low + 1) >> 1);
1843 // Clamp Q to upper and lower limits:
1844 q = clamp(q, q_low, q_high);
1847 } else if (recode_loop_test(
1848 cpi, frame_over_shoot_limit, frame_under_shoot_limit,
1849 q, MAX(q_high, top_index), bottom_index)) {
1850 // Is the projected frame size out of range and are we allowed
1851 // to attempt to recode.
1855 // Frame size out of permitted range:
1856 // Update correction factor & compute new Q to try...
1858 // Frame is too large
1859 if (rc->projected_frame_size > rc->this_frame_target) {
1860 // Special case if the projected size is > the max allowed.
1861 if (rc->projected_frame_size >= rc->max_frame_bandwidth)
1862 q_high = rc->worst_quality;
1864 // Raise Qlow as to at least the current value
1865 q_low = q < q_high ? q + 1 : q_high;
// Once both directions of miss have been seen, bisect instead of
// re-estimating from the rate model.
1867 if (undershoot_seen || loop_count > 1) {
1868 // Update rate_correction_factor unless
1869 vp9_rc_update_rate_correction_factors(cpi, 1);
1871 q = (q_high + q_low + 1) / 2;
1873 // Update rate_correction_factor unless
1874 vp9_rc_update_rate_correction_factors(cpi, 0);
1876 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
1877 bottom_index, MAX(q_high, top_index));
// Re-regulate until q is within the allowed range (bounded retries).
1879 while (q < q_low && retries < 10) {
1880 vp9_rc_update_rate_correction_factors(cpi, 0);
1881 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
1882 bottom_index, MAX(q_high, top_index));
1889 // Frame is too small
1890 q_high = q > q_low ? q - 1 : q_low;
1892 if (overshoot_seen || loop_count > 1) {
1893 vp9_rc_update_rate_correction_factors(cpi, 1);
1894 q = (q_high + q_low) / 2;
1896 vp9_rc_update_rate_correction_factors(cpi, 0);
1897 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
1898 bottom_index, top_index);
1899 // Special case reset for qlow for constrained quality.
1900 // This should only trigger where there is very substantial
1901 // undershoot on a frame and the auto cq level is above
1902 // the user passed in value.
1903 if (cpi->oxcf.rc_mode == VPX_CQ &&
1908 while (q > q_high && retries < 10) {
1909 vp9_rc_update_rate_correction_factors(cpi, 0);
1910 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
1911 bottom_index, top_index);
1916 undershoot_seen = 1;
1919 // Clamp Q to upper and lower limits:
1920 q = clamp(q, q_low, q_high);
1928 // Special case for overlay frame.
1929 if (rc->is_src_frame_alt_ref &&
1930 rc->projected_frame_size < rc->max_frame_bandwidth)
1936 #if CONFIG_INTERNAL_STATS
1937 cpi->tot_recode_hits++;
// Returns the set of reference frames worth searching for this frame,
// starting from all three (LAST|GOLD|ALT) and clearing flags for
// references that alias another buffer (e.g. golden == last) or, for the
// golden frame, when no GF update is scheduled outside two-pass SVC.
1943 static int get_ref_frame_flags(const VP9_COMP *cpi) {
1944 const int *const map = cpi->common.ref_frame_map;
// Two references pointing at the same buffer are duplicates; keep one.
1945 const int gold_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx];
1946 const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx];
1947 const int gold_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx];
1948 int flags = VP9_ALT_FLAG | VP9_GOLD_FLAG | VP9_LAST_FLAG;
1951 flags &= ~VP9_GOLD_FLAG;
1953 if (cpi->rc.frames_till_gf_update_due == INT_MAX && !is_two_pass_svc(cpi))
1954 flags &= ~VP9_GOLD_FLAG;
1957 flags &= ~VP9_ALT_FLAG;
1960 flags &= ~VP9_ALT_FLAG;
1965 static void set_ext_overrides(VP9_COMP *cpi) {
1966 // Overrides the defaults with the externally supplied values with
1967 // vp9_update_reference() and vp9_update_entropy() calls
1968 // Note: The overrides are valid only for the next frame passed
1969 // to encode_frame_to_data_rate() function
// Each pending flag is one-shot: it is cleared as soon as the override
// has been applied here.
1970 if (cpi->ext_refresh_frame_context_pending) {
1971 cpi->common.refresh_frame_context = cpi->ext_refresh_frame_context;
1972 cpi->ext_refresh_frame_context_pending = 0;
1974 if (cpi->ext_refresh_frame_flags_pending) {
1975 cpi->refresh_last_frame = cpi->ext_refresh_last_frame;
1976 cpi->refresh_golden_frame = cpi->ext_refresh_golden_frame;
1977 cpi->refresh_alt_ref_frame = cpi->ext_refresh_alt_ref_frame;
1978 cpi->ext_refresh_frame_flags_pending = 0;
// Returns a frame matching the coded frame size: when the unscaled input
// differs from the coded dimensions (mi_cols/mi_rows * MI_SIZE) it is
// resampled into 'scaled' and that buffer is used instead.
1982 YV12_BUFFER_CONFIG *vp9_scale_if_required(VP9_COMMON *cm,
1983 YV12_BUFFER_CONFIG *unscaled,
1984 YV12_BUFFER_CONFIG *scaled) {
1985 if (cm->mi_cols * MI_SIZE != unscaled->y_width ||
1986 cm->mi_rows * MI_SIZE != unscaled->y_height) {
1987 scale_and_extend_frame_nonnormative(unscaled, scaled);
1994 static int is_skippable_frame(const VP9_COMP *cpi) {
1995 // If the current frame does not have non-zero motion vector detected in the
1996 // first pass, and neither do its previous and forward frames, then this frame
1997 // can be skipped for partition check, and the partition size is assigned
1998 // according to the variance
// Two-pass SVC reads the stats of the active spatial layer; otherwise the
// encoder-wide two-pass stats are used.
1999 const SVC *const svc = &cpi->svc;
2000 const TWO_PASS *const twopass = is_two_pass_svc(cpi) ?
2001 &svc->layer_context[svc->spatial_layer_id].twopass : &cpi->twopass;
// pcnt_inter - pcnt_motion == 1 means every block was inter-coded with
// zero motion in the first pass; require this for the previous, current
// and next stats entries (with bounds checks on the stats window).
2003 return (!frame_is_intra_only(&cpi->common) &&
2004 twopass->stats_in - 2 > twopass->stats_in_start &&
2005 twopass->stats_in < twopass->stats_in_end &&
2006 (twopass->stats_in - 1)->pcnt_inter - (twopass->stats_in - 1)->pcnt_motion
2008 (twopass->stats_in - 2)->pcnt_inter - (twopass->stats_in - 2)->pcnt_motion
2010 twopass->stats_in->pcnt_inter - twopass->stats_in->pcnt_motion == 1);
// Sets the ALTREF reference sign bias for the current frame: active when
// an ARF source frame is in use and this frame is not itself refreshing
// the ARF (with an extra low-level ARF carve-out in multi-ARF mode).
2013 static void set_arf_sign_bias(VP9_COMP *cpi) {
2014 VP9_COMMON *const cm = &cpi->common;
2017 if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
2018 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2019 arf_sign_bias = cpi->rc.source_alt_ref_active &&
2020 (!cpi->refresh_alt_ref_frame ||
2021 (gf_group->rf_level[gf_group->index] == GF_ARF_LOW));
2024 (cpi->rc.source_alt_ref_active && !cpi->refresh_alt_ref_frame);
2026 cm->ref_frame_sign_bias[ALTREF_FRAME] = arf_sign_bias;
// Initializes the motion search step parameter. The default search range
// is derived from frame resolution; with auto step size enabled, INTER
// frames instead cap the range at twice the largest MV magnitude seen in
// the previous frame.
2029 static void set_mv_search_params(VP9_COMP *cpi) {
2030 const VP9_COMMON *const cm = &cpi->common;
2031 const unsigned int max_mv_def = MIN(cm->width, cm->height);
2033 // Default based on max resolution.
2034 cpi->mv_step_param = vp9_init_search_range(max_mv_def);
2036 if (cpi->sf.mv.auto_mv_step_size) {
2037 if (frame_is_intra_only(cm)) {
2038 // Initialize max_mv_magnitude for use in the first INTER frame
2039 // after a key/intra-only frame.
2040 cpi->max_mv_magnitude = max_mv_def;
2043 // Allow mv_steps to correspond to twice the max mv magnitude found
2044 // in the previous frame, capped by the default max_mv_magnitude based
2046 cpi->mv_step_param =
2047 vp9_init_search_range(MIN(max_mv_def, 2 * cpi->max_mv_magnitude));
// Reset; re-accumulated during this frame's motion search.
2048 cpi->max_mv_magnitude = 0;
// Builds a bitmask of interpolation filters (EIGHTTAP..EIGHTTAP_SHARP)
// that were essentially unused by the previous frame's references: a bit
// is set for a filter that was never picked for LAST and accounted for
// well under 2% (1/50) of picks on GOLDEN and ALTREF. The result is
// consumed as sf.interp_filter_search_mask to prune the filter search.
2054 int setup_interp_filter_search_mask(VP9_COMP *cpi) {
2055 INTERP_FILTER ifilter;
2056 int ref_total[MAX_REF_FRAMES] = {0};
2057 MV_REFERENCE_FRAME ref;
// No pruning right after a key frame or when refreshing the ARF.
2059 if (cpi->common.last_frame_type == KEY_FRAME ||
2060 cpi->refresh_alt_ref_frame)
// Total filter picks per reference frame.
2062 for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
2063 for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter)
2064 ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
2066 for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter) {
2067 if ((ref_total[LAST_FRAME] &&
2068 cpi->interp_filter_selected[LAST_FRAME][ifilter] == 0) &&
2069 (ref_total[GOLDEN_FRAME] == 0 ||
2070 cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 50
2071 < ref_total[GOLDEN_FRAME]) &&
2072 (ref_total[ALTREF_FRAME] == 0 ||
2073 cpi->interp_filter_selected[ALTREF_FRAME][ifilter] * 50
2074 < ref_total[ALTREF_FRAME]))
2075 mask |= 1 << ifilter;
2080 static void encode_frame_to_data_rate(VP9_COMP *cpi,
2083 unsigned int *frame_flags) {
2084 VP9_COMMON *const cm = &cpi->common;
2085 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
2086 struct segmentation *const seg = &cm->seg;
2092 set_ext_overrides(cpi);
2094 cpi->Source = vp9_scale_if_required(cm, cpi->un_scaled_source,
2095 &cpi->scaled_source);
2097 if (cpi->unscaled_last_source != NULL)
2098 cpi->Last_Source = vp9_scale_if_required(cm, cpi->unscaled_last_source,
2099 &cpi->scaled_last_source);
2101 vp9_scale_references(cpi);
2103 vp9_clear_system_state();
2105 // Enable or disable mode based tweaking of the zbin.
2106 // For 2 pass only used where GF/ARF prediction quality
2107 // is above a threshold.
2108 cpi->zbin_mode_boost = 0;
2109 cpi->zbin_mode_boost_enabled = 0;
2111 // Set the arf sign bias for this frame.
2112 set_arf_sign_bias(cpi);
2114 // Set default state for segment based loop filter update flags.
2115 cm->lf.mode_ref_delta_update = 0;
2117 set_mv_search_params(cpi);
2119 if (cpi->oxcf.pass == 2 &&
2120 cpi->sf.adaptive_interp_filter_search)
2121 cpi->sf.interp_filter_search_mask =
2122 setup_interp_filter_search_mask(cpi);
2125 // Set various flags etc to special state if it is a key frame.
2126 if (frame_is_intra_only(cm)) {
2127 // Reset the loop filter deltas and segmentation map.
2128 vp9_reset_segment_features(&cm->seg);
2130 // If segmentation is enabled force a map update for key frames.
2132 seg->update_map = 1;
2133 seg->update_data = 1;
2136 // The alternate reference frame cannot be active for a key frame.
2137 cpi->rc.source_alt_ref_active = 0;
2139 cm->error_resilient_mode = oxcf->error_resilient_mode;
2141 // By default, encoder assumes decoder can use prev_mi.
2142 if (cm->error_resilient_mode) {
2143 cm->frame_parallel_decoding_mode = 1;
2144 cm->reset_frame_context = 0;
2145 cm->refresh_frame_context = 0;
2146 } else if (cm->intra_only) {
2147 cm->frame_parallel_decoding_mode = oxcf->frame_parallel_decoding_mode;
2148 // Only reset the current context.
2149 cm->reset_frame_context = 2;
2152 if (is_two_pass_svc(cpi) && cm->error_resilient_mode == 0) {
2153 cm->frame_context_idx =
2154 cpi->svc.spatial_layer_id * cpi->svc.number_temporal_layers +
2155 cpi->svc.temporal_layer_id;
2157 // The probs will be updated based on the frame type of its previous
2158 // frame if frame_parallel_decoding_mode is 0. The type may vary for
2159 // the frame after a key frame in base layer since we may drop enhancement
2160 // layers. So set frame_parallel_decoding_mode to 1 in this case.
2161 if (cpi->svc.number_temporal_layers == 1) {
2162 if (cpi->svc.spatial_layer_id == 0 &&
2163 cpi->svc.layer_context[0].last_frame_type == KEY_FRAME)
2164 cm->frame_parallel_decoding_mode = 1;
2166 cm->frame_parallel_decoding_mode = 0;
2167 } else if (cpi->svc.spatial_layer_id == 0) {
2168 // Find the 2nd frame in temporal base layer and 1st frame in temporal
2169 // enhancement layers from the key frame.
2171 for (i = 0; i < cpi->svc.number_temporal_layers; ++i) {
2172 if (cpi->svc.layer_context[0].frames_from_key_frame == 1 << i) {
2173 cm->frame_parallel_decoding_mode = 1;
2177 if (i == cpi->svc.number_temporal_layers)
2178 cm->frame_parallel_decoding_mode = 0;
2182 // Configure experimental use of segmentation for enhanced coding of
2183 // static regions if indicated.
2184 // Only allowed in second pass of two pass (as requires lagged coding)
2185 // and if the relevant speed feature flag is set.
2186 if (oxcf->pass == 2 && cpi->sf.static_segmentation)
2187 configure_static_seg_features(cpi);
2189 // Check if the current frame is skippable for the partition search in the
2190 // second pass according to the first pass stats
2191 if (oxcf->pass == 2 &&
2192 (!cpi->use_svc || is_two_pass_svc(cpi))) {
2193 cpi->skippable_frame = is_skippable_frame(cpi);
2196 // For 1 pass CBR, check if we are dropping this frame.
2197 // Never drop on key frame.
2198 if (oxcf->pass == 0 &&
2199 oxcf->rc_mode == VPX_CBR &&
2200 cm->frame_type != KEY_FRAME) {
2201 if (vp9_rc_drop_frame(cpi)) {
2202 vp9_rc_postencode_update_drop_frame(cpi);
2203 ++cm->current_video_frame;
2208 vp9_clear_system_state();
2210 #if CONFIG_VP9_POSTPROC
2211 if (oxcf->noise_sensitivity > 0) {
2213 switch (oxcf->noise_sensitivity) {
2231 vp9_denoise(cpi->Source, cpi->Source, l);
2235 #if CONFIG_INTERNAL_STATS
2238 for (i = 0; i < MAX_MODES; ++i)
2239 cpi->mode_chosen_counts[i] = 0;
2243 vp9_set_speed_features(cpi);
2245 vp9_set_rd_speed_thresholds(cpi);
2246 vp9_set_rd_speed_thresholds_sub8x8(cpi);
2248 // Decide q and q bounds.
2249 q = vp9_rc_pick_q_and_bounds(cpi, &bottom_index, &top_index);
2251 if (!frame_is_intra_only(cm)) {
2252 cm->interp_filter = cpi->sf.default_interp_filter;
2253 /* TODO: Decide this more intelligently */
2254 vp9_set_high_precision_mv(cpi, q < HIGH_PRECISION_MV_QTHRESH);
2257 if (cpi->sf.recode_loop == DISALLOW_RECODE) {
2258 encode_without_recode_loop(cpi, q);
2260 encode_with_recode_loop(cpi, size, dest, q, bottom_index, top_index);
2263 #if CONFIG_VP9_TEMPORAL_DENOISING
2264 #ifdef OUTPUT_YUV_DENOISED
2265 if (oxcf->noise_sensitivity > 0) {
2266 vp9_write_yuv_frame_420(&cpi->denoiser.running_avg_y[INTRA_FRAME],
2273 // Special case code to reduce pulsing when key frames are forced at a
2274 // fixed interval. Note the reconstruction error if it is the frame before
2275 // the force key frame
2276 if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
2277 cpi->ambient_err = vp9_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
2280 // If the encoder forced a KEY_FRAME decision
2281 if (cm->frame_type == KEY_FRAME)
2282 cpi->refresh_last_frame = 1;
2284 cm->frame_to_show = get_frame_new_buffer(cm);
2286 // Pick the loop filter level for the frame.
2287 loopfilter_frame(cpi, cm);
2289 // build the bitstream
2290 vp9_pack_bitstream(cpi, dest, size);
2292 if (cm->seg.update_map)
2293 update_reference_segmentation_map(cpi);
2295 release_scaled_references(cpi);
2296 vp9_update_reference_frames(cpi);
2298 for (t = TX_4X4; t <= TX_32X32; t++)
2299 full_to_model_counts(cm->counts.coef[t], cpi->coef_counts[t]);
2301 if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode)
2302 vp9_adapt_coef_probs(cm);
2304 if (!frame_is_intra_only(cm)) {
2305 if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode) {
2306 vp9_adapt_mode_probs(cm);
2307 vp9_adapt_mv_probs(cm, cm->allow_high_precision_mv);
2311 if (cpi->refresh_golden_frame == 1)
2312 cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
2314 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
2316 if (cpi->refresh_alt_ref_frame == 1)
2317 cpi->frame_flags |= FRAMEFLAGS_ALTREF;
2319 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
2321 cpi->ref_frame_flags = get_ref_frame_flags(cpi);
2323 cm->last_frame_type = cm->frame_type;
2324 vp9_rc_postencode_update(cpi, *size);
2327 output_frame_level_debug_stats(cpi);
2330 if (cm->frame_type == KEY_FRAME) {
2331 // Tell the caller that the frame was coded as a key frame
2332 *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
2334 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
2337 // Clear the one shot update flags for segmentation map and mode/ref loop
2339 cm->seg.update_map = 0;
2340 cm->seg.update_data = 0;
2341 cm->lf.mode_ref_delta_update = 0;
2343 // keep track of the last coded dimensions
2344 cm->last_width = cm->width;
2345 cm->last_height = cm->height;
2347 // reset to normal state now that we are done.
2348 if (!cm->show_existing_frame) {
2349 if (is_two_pass_svc(cpi) && cm->error_resilient_mode == 0)
2350 cm->last_show_frame = 0;
2352 cm->last_show_frame = cm->show_frame;
2355 if (cm->show_frame) {
2356 vp9_swap_mi_and_prev_mi(cm);
2358 // Don't increment frame counters if this was an altref buffer
2359 // update not a real frame
2360 ++cm->current_video_frame;
2362 vp9_inc_frame_in_layer(cpi);
2365 if (is_two_pass_svc(cpi))
2366 cpi->svc.layer_context[cpi->svc.spatial_layer_id].last_frame_type =
2370 static void SvcEncode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
2371 unsigned int *frame_flags) {
2372 vp9_rc_get_svc_params(cpi);
2373 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
2376 static void Pass0Encode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
2377 unsigned int *frame_flags) {
2378 if (cpi->oxcf.rc_mode == VPX_CBR) {
2379 vp9_rc_get_one_pass_cbr_params(cpi);
2381 vp9_rc_get_one_pass_vbr_params(cpi);
2383 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
2386 static void Pass2Encode(VP9_COMP *cpi, size_t *size,
2387 uint8_t *dest, unsigned int *frame_flags) {
2388 cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
2390 vp9_rc_get_second_pass_params(cpi);
2391 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
2393 vp9_twopass_postencode_update(cpi);
2396 static void init_motion_estimation(VP9_COMP *cpi) {
2397 int y_stride = cpi->scaled_source.y_stride;
2399 if (cpi->sf.mv.search_method == NSTEP) {
2400 vp9_init3smotion_compensation(&cpi->ss_cfg, y_stride);
2401 } else if (cpi->sf.mv.search_method == DIAMOND) {
2402 vp9_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
2406 static void check_initial_width(VP9_COMP *cpi, int subsampling_x,
2407 int subsampling_y) {
2408 VP9_COMMON *const cm = &cpi->common;
2410 if (!cpi->initial_width) {
2411 cm->subsampling_x = subsampling_x;
2412 cm->subsampling_y = subsampling_y;
2414 alloc_raw_frame_buffers(cpi);
2415 alloc_ref_frame_buffers(cpi);
2416 alloc_util_frame_buffers(cpi);
2418 init_motion_estimation(cpi);
2420 cpi->initial_width = cm->width;
2421 cpi->initial_height = cm->height;
// Accept one raw input frame from the application and push it into the
// encoder's lookahead queue. Also performs lazy buffer initialization and
// validates that the frame's chroma subsampling is legal for the profile.
// NOTE(review): this extraction appears to have dropped several lines of
// this function (the `res` declaration, #else/#endif branches and returns);
// the comments below describe only what is visible.
int vp9_receive_raw_frame(VP9_COMP *cpi, unsigned int frame_flags,
                          YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
  VP9_COMMON *cm = &cpi->common;
  struct vpx_usec_timer timer;
  // Derive subsampling from the buffer geometry: a chroma plane narrower /
  // shorter than luma implies 2x subsampling in that dimension.
  const int subsampling_x = sd->uv_width < sd->y_width;
  const int subsampling_y = sd->uv_height < sd->y_height;
  check_initial_width(cpi, subsampling_x, subsampling_y);
  // Time how long it takes to hand the frame to the lookahead.
  vpx_usec_timer_start(&timer);
#if CONFIG_SPATIAL_SVC
  if (is_two_pass_svc(cpi))
    res = vp9_svc_lookahead_push(cpi, cpi->lookahead, sd, time_stamp, end_time,
    res = vp9_lookahead_push(cpi->lookahead,
                             sd, time_stamp, end_time, frame_flags);
  vpx_usec_timer_mark(&timer);
  cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
  // Profiles 0 and 2 only permit 4:2:0 input.
  if ((cm->profile == PROFILE_0 || cm->profile == PROFILE_2) &&
      (subsampling_x != 1 || subsampling_y != 1)) {
    vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
                       "Non-4:2:0 color space requires profile 1 or 3");
  // Conversely, profiles 1 and 3 are for non-4:2:0 input.
  if ((cm->profile == PROFILE_1 || cm->profile == PROFILE_3) &&
      (subsampling_x == 1 && subsampling_y == 1)) {
    vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
                       "4:2:0 color space requires profile 0 or 2");
2469 static int frame_is_reference(const VP9_COMP *cpi) {
2470 const VP9_COMMON *cm = &cpi->common;
2472 return cm->frame_type == KEY_FRAME ||
2473 cpi->refresh_last_frame ||
2474 cpi->refresh_golden_frame ||
2475 cpi->refresh_alt_ref_frame ||
2476 cm->refresh_frame_context ||
2477 cm->lf.mode_ref_delta_update ||
2478 cm->seg.update_map ||
2479 cm->seg.update_data;
// Re-estimate the encoder's frame rate from the source timestamps. A large
// step change in frame duration resets the estimate; otherwise the new
// duration is blended into a running average over roughly the last second.
// NOTE(review): the extraction appears to have dropped lines here (the
// `step` declaration, the else-branch braces, and the step/average branch
// structure); comments describe only the visible statements.
void adjust_frame_rate(VP9_COMP *cpi,
                       const struct lookahead_entry *source) {
  int64_t this_duration;
  // First frame ever: duration is simply the frame's own timestamp span.
  if (source->ts_start == cpi->first_time_stamp_ever) {
    this_duration = source->ts_end - source->ts_start;
    int64_t last_duration = cpi->last_end_time_stamp_seen
                                - cpi->last_time_stamp_seen;
    // Subsequent frames: duration measured from the previous frame's end.
    this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
    // do a step update if the duration changes by 10%
      step = (int)((this_duration - last_duration) * 10 / last_duration);
  if (this_duration) {
      // Step change detected: adopt the instantaneous rate directly.
      // Timestamps are in 1/10,000,000-second units.
      vp9_new_framerate(cpi, 10000000.0 / this_duration);
    // Average this frame's rate into the last second's average
    // frame rate. If we haven't seen 1 second yet, then average
    // over the whole interval seen.
    const double interval = MIN((double)(source->ts_end
                                 - cpi->first_time_stamp_ever), 10000000.0);
    double avg_duration = 10000000.0 / cpi->framerate;
    avg_duration *= (interval - avg_duration + this_duration);
    avg_duration /= interval;
    vp9_new_framerate(cpi, 10000000.0 / avg_duration);
  // Remember this frame's timestamps for the next duration computation.
  cpi->last_time_stamp_seen = source->ts_start;
  cpi->last_end_time_stamp_seen = source->ts_end;
2521 // Returns 0 if this is not an alt ref else the offset of the source frame
2522 // used as the arf midpoint.
2523 static int get_arf_src_index(VP9_COMP *cpi) {
2524 RATE_CONTROL *const rc = &cpi->rc;
2525 int arf_src_index = 0;
2526 if (is_altref_enabled(cpi)) {
2527 if (cpi->oxcf.pass == 2) {
2528 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2529 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
2530 arf_src_index = gf_group->arf_src_offset[gf_group->index];
2532 } else if (rc->source_alt_ref_pending) {
2533 arf_src_index = rc->frames_till_gf_update_due;
2536 return arf_src_index;
2539 static void check_src_altref(VP9_COMP *cpi,
2540 const struct lookahead_entry *source) {
2541 RATE_CONTROL *const rc = &cpi->rc;
2543 if (cpi->oxcf.pass == 2) {
2544 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2545 rc->is_src_frame_alt_ref =
2546 (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
2548 rc->is_src_frame_alt_ref = cpi->alt_ref_source &&
2549 (source == cpi->alt_ref_source);
2552 if (rc->is_src_frame_alt_ref) {
2553 // Current frame is an ARF overlay frame.
2554 cpi->alt_ref_source = NULL;
2556 // Don't refresh the last buffer for an ARF overlay frame. It will
2557 // become the GF so preserve last as an alternative prediction option.
2558 cpi->refresh_last_frame = 0;
// Top-level per-frame entry point: pull a source frame from the lookahead,
// optionally synthesize a filtered alt-ref frame, dispatch to the
// appropriate encode path (first pass / second pass / SVC / one pass), and
// update timing, layer and statistics state.
// NOTE(review): this extraction has dropped many lines of this function
// (declarations such as `arf_src_index`, `i`, `psnr`, `psnr2`; returns;
// closing braces; #else/#endif directives). Comments below annotate only
// the visible statements.
int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
                            size_t *size, uint8_t *dest,
                            int64_t *time_stamp, int64_t *time_end, int flush) {
  const VP9EncoderConfig *const oxcf = &cpi->oxcf;
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  RATE_CONTROL *const rc = &cpi->rc;
  struct vpx_usec_timer cmptimer;
  YV12_BUFFER_CONFIG *force_src_buffer = NULL;
  struct lookahead_entry *last_source = NULL;
  struct lookahead_entry *source = NULL;
  MV_REFERENCE_FRAME ref_frame;
  // Two-pass SVC: restore the per-layer encoder state before coding.
  if (is_two_pass_svc(cpi) && oxcf->pass == 2) {
#if CONFIG_SPATIAL_SVC
    vp9_svc_lookahead_peek(cpi, cpi->lookahead, 0, 1);
    vp9_restore_layer_context(cpi);
  vpx_usec_timer_start(&cmptimer);
  vp9_set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV);
  // Default per-frame flags: refresh only the last-frame buffer and keep
  // updating the probability context.
  cm->reset_frame_context = 0;
  cm->refresh_frame_context = 1;
  cpi->refresh_last_frame = 1;
  cpi->refresh_golden_frame = 0;
  cpi->refresh_alt_ref_frame = 0;
  // Should we encode an arf frame.
  arf_src_index = get_arf_src_index(cpi);
  if (arf_src_index) {
    assert(arf_src_index <= rc->frames_to_key);
#if CONFIG_SPATIAL_SVC
    if (is_two_pass_svc(cpi))
      source = vp9_svc_lookahead_peek(cpi, cpi->lookahead, arf_src_index, 0);
      source = vp9_lookahead_peek(cpi->lookahead, arf_src_index);
    if (source != NULL) {
      cpi->alt_ref_source = source;
#if CONFIG_SPATIAL_SVC
      if (is_two_pass_svc(cpi) && cpi->svc.spatial_layer_id > 0) {
        // Reference a hidden frame from a lower layer
        for (i = cpi->svc.spatial_layer_id - 1; i >= 0; --i) {
          if (oxcf->ss_play_alternate[i]) {
            cpi->gld_fb_idx = cpi->svc.layer_context[i].alt_ref_idx;
        cpi->svc.layer_context[cpi->svc.spatial_layer_id].has_alt_frame = 1;
      if (oxcf->arnr_max_frames > 0) {
        // Produce the filtered ARF frame.
        vp9_temporal_filter(cpi, arf_src_index);
        vp9_extend_frame_borders(&cpi->alt_ref_buffer);
        force_src_buffer = &cpi->alt_ref_buffer;
      // An alt-ref frame refreshes only the alt-ref buffer and is never
      // itself a source overlay.
      cpi->refresh_alt_ref_frame = 1;
      cpi->refresh_golden_frame = 0;
      cpi->refresh_last_frame = 0;
      rc->is_src_frame_alt_ref = 0;
      rc->source_alt_ref_pending = 0;
      rc->source_alt_ref_pending = 0;
  // Get last frame source.
  if (cm->current_video_frame > 0) {
#if CONFIG_SPATIAL_SVC
    if (is_two_pass_svc(cpi))
      last_source = vp9_svc_lookahead_peek(cpi, cpi->lookahead, -1, 0);
      last_source = vp9_lookahead_peek(cpi->lookahead, -1);
    if (last_source == NULL)
  // Read in the source frame.
#if CONFIG_SPATIAL_SVC
  if (is_two_pass_svc(cpi))
    source = vp9_svc_lookahead_pop(cpi, cpi->lookahead, flush);
    source = vp9_lookahead_pop(cpi->lookahead, flush);
  if (source != NULL) {
    // Check to see if the frame should be encoded as an arf overlay.
    check_src_altref(cpi, source);
  // Use the filtered ARF buffer as the source when one was produced above.
  cpi->un_scaled_source = cpi->Source = force_src_buffer ? force_src_buffer
  cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
  *time_stamp = source->ts_start;
  *time_end = source->ts_end;
  *frame_flags = (source->flags & VPX_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
  // Flushing at the end of pass 1: emit the final stats packet exactly once.
  if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
    vp9_end_first_pass(cpi); /* get last stats packet */
    cpi->twopass.first_pass_done = 1;
  if (source->ts_start < cpi->first_time_stamp_ever) {
    cpi->first_time_stamp_ever = source->ts_start;
    cpi->last_end_time_stamp_seen = source->ts_start;
  // Clear down mmx registers
  vp9_clear_system_state();
  // adjust frame rates based on timestamps given
  if (cm->show_frame) {
    adjust_frame_rate(cpi, source);
  // Temporal-layer CBR: switch to the current layer's framerate and state.
  if (cpi->svc.number_temporal_layers > 1 &&
      oxcf->rc_mode == VPX_CBR) {
    vp9_update_temporal_layer_framerate(cpi);
    vp9_restore_layer_context(cpi);
  // start with a 0 size frame
  /* find a free buffer for the new frame, releasing the reference previously
  cm->frame_bufs[cm->new_fb_idx].ref_count--;
  cm->new_fb_idx = get_free_fb(cm);
  // Multi-ARF (non-SVC): pick the alt-ref buffer index from the GF group.
  if (!cpi->use_svc && cpi->multi_arf_allowed) {
    if (cm->frame_type == KEY_FRAME) {
      init_buffer_indices(cpi);
    } else if (oxcf->pass == 2) {
      const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
      cpi->alt_fb_idx = gf_group->arf_ref_idx[gf_group->index];
  cpi->frame_flags = *frame_flags;
  if (oxcf->pass == 2 &&
      cm->current_video_frame == 0 &&
      oxcf->allow_spatial_resampling &&
      oxcf->rc_mode == VPX_VBR) {
    // Internal scaling is triggered on the first frame.
    vp9_set_size_literal(cpi, oxcf->scaled_frame_width,
                         oxcf->scaled_frame_height);
  // Reset the frame pointers to the current frame size
  vp9_realloc_frame_buffer(get_frame_new_buffer(cm),
                           cm->width, cm->height,
                           cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
                           cm->use_highbitdepth,
                           VP9_ENC_BORDER_IN_PIXELS, NULL, NULL, NULL);
  alloc_util_frame_buffers(cpi);
  init_motion_estimation(cpi);
  // Set up scale factors for each active reference frame; extend borders
  // on references that will be scaled during prediction.
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    const int idx = cm->ref_frame_map[get_ref_frame_idx(cpi, ref_frame)];
    YV12_BUFFER_CONFIG *const buf = &cm->frame_bufs[idx].buf;
    RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - 1];
    vp9_setup_scale_factors_for_frame(&ref_buf->sf,
                                      buf->y_crop_width, buf->y_crop_height,
                                      cm->width, cm->height);
    if (vp9_is_scaled(&ref_buf->sf))
      vp9_extend_frame_borders(buf);
  set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
  if (oxcf->aq_mode == VARIANCE_AQ) {
  // Dispatch to the encode path: first pass, two-pass second pass, SVC,
  // or plain one-pass.
  if (oxcf->pass == 1 &&
      (!cpi->use_svc || is_two_pass_svc(cpi))) {
    const int lossless = is_lossless_requested(oxcf);
    // Lossless coding swaps in the Walsh-Hadamard transform pair.
    cpi->mb.fwd_txm4x4 = lossless ? vp9_fwht4x4 : vp9_fdct4x4;
    cpi->mb.itxm_add = lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
    vp9_first_pass(cpi, source);
  } else if (oxcf->pass == 2 &&
      (!cpi->use_svc || is_two_pass_svc(cpi))) {
    Pass2Encode(cpi, size, dest, frame_flags);
  } else if (cpi->use_svc) {
    SvcEncode(cpi, size, dest, frame_flags);
    Pass0Encode(cpi, size, dest, frame_flags);
  if (cm->refresh_frame_context)
    cm->frame_contexts[cm->frame_context_idx] = cm->fc;
  // Frame was dropped, release scaled references.
    release_scaled_references(cpi);
  cpi->droppable = !frame_is_reference(cpi);
  // Save layer specific state.
  if ((cpi->svc.number_temporal_layers > 1 &&
      oxcf->rc_mode == VPX_CBR) ||
      ((cpi->svc.number_temporal_layers > 1 ||
      cpi->svc.number_spatial_layers > 1) &&
    vp9_save_layer_context(cpi);
  vpx_usec_timer_mark(&cmptimer);
  cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
  if (cpi->b_calculate_psnr && oxcf->pass != 1 && cm->show_frame)
    generate_psnr_packet(cpi);
#if CONFIG_INTERNAL_STATS
  // Internal-statistics build: accumulate PSNR/SSIM totals for shown frames.
  if (oxcf->pass != 1) {
    cpi->bytes += (int)(*size);
    if (cm->show_frame) {
      if (cpi->b_calculate_psnr) {
        YV12_BUFFER_CONFIG *orig = cpi->Source;
        YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
        YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
        calc_psnr(orig, recon, &psnr);
        cpi->total += psnr.psnr[0];
        cpi->total_y += psnr.psnr[1];
        cpi->total_u += psnr.psnr[2];
        cpi->total_v += psnr.psnr[3];
        cpi->total_sq_error += psnr.sse[0];
        cpi->total_samples += psnr.samples[0];
          double frame_ssim2 = 0, weight = 0;
#if CONFIG_VP9_POSTPROC
          // TODO(agrange) Add resizing of post-proc buffer in here when the
          // encoder is changed to use on-demand buffer allocation.
          vp9_deblock(cm->frame_to_show, &cm->post_proc_buffer,
                      cm->lf.filter_level * 10 / 6);
          vp9_clear_system_state();
          // Post-processed PSNR accumulators (the "p" totals).
          calc_psnr(orig, pp, &psnr2);
          cpi->totalp += psnr2.psnr[0];
          cpi->totalp_y += psnr2.psnr[1];
          cpi->totalp_u += psnr2.psnr[2];
          cpi->totalp_v += psnr2.psnr[3];
          cpi->totalp_sq_error += psnr2.sse[0];
          cpi->totalp_samples += psnr2.samples[0];
          frame_ssim2 = vp9_calc_ssim(orig, recon, &weight);
          cpi->summed_quality += frame_ssim2 * weight;
          cpi->summed_weights += weight;
          frame_ssim2 = vp9_calc_ssim(orig, &cm->post_proc_buffer, &weight);
          cpi->summedp_quality += frame_ssim2 * weight;
          cpi->summedp_weights += weight;
          FILE *f = fopen("q_used.stt", "a");
          fprintf(f, "%5d : Y%f7.3:U%f7.3:V%f7.3:F%f7.3:S%7.3f\n",
                  cpi->common.current_video_frame, y2, u2, v2,
                  frame_psnr2, frame_ssim2);
      if (cpi->b_calculate_ssimg) {
        double y, u, v, frame_all;
        frame_all = vp9_calc_ssimg(cpi->Source, cm->frame_to_show, &y, &u, &v);
        cpi->total_ssimg_y += y;
        cpi->total_ssimg_u += u;
        cpi->total_ssimg_v += v;
        cpi->total_ssimg_all += frame_all;
// Hand the application a preview of the most recently coded shown frame.
// With postproc enabled the frame is run through vp9_post_proc_frame;
// otherwise the reconstruction buffer is shallow-copied with its display
// dimensions patched in.
// NOTE(review): the extraction dropped lines here (the `ret` declaration,
// returns and #else branches); comments describe the visible statements.
int vp9_get_preview_raw_frame(VP9_COMP *cpi, YV12_BUFFER_CONFIG *dest,
                              vp9_ppflags_t *flags) {
  VP9_COMMON *cm = &cpi->common;
#if !CONFIG_VP9_POSTPROC
  // No preview is available until a frame has been shown.
  if (!cm->show_frame) {
#if CONFIG_VP9_POSTPROC
    ret = vp9_post_proc_frame(cm, dest, flags);
    if (cm->frame_to_show) {
      // Shallow copy; dest aliases the internal reconstruction buffer.
      *dest = *cm->frame_to_show;
      dest->y_width = cm->width;
      dest->y_height = cm->height;
      dest->uv_width = cm->width >> cm->subsampling_x;
      dest->uv_height = cm->height >> cm->subsampling_y;
#endif  // !CONFIG_VP9_POSTPROC
    vp9_clear_system_state();
// Install an application-supplied active map: MB-resolution entries are
// expanded to mi (8x8) resolution, inverted into the segmentation map
// (0 in the map marks an active region), and the SKIP feature is enabled
// for inactive segments. The map dimensions must match the coded frame.
// NOTE(review): the extraction dropped lines here (loop-variable
// declarations, returns and closing braces).
int vp9_set_active_map(VP9_COMP *cpi, unsigned char *map, int rows, int cols) {
  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
    const int mi_rows = cpi->common.mi_rows;
    const int mi_cols = cpi->common.mi_cols;
      for (r = 0; r < mi_rows; r++) {
        for (c = 0; c < mi_cols; c++) {
          // Each 16x16 MB entry covers a 2x2 group of mi units (>> 1).
          cpi->segmentation_map[r * mi_cols + c] =
              !map[(r >> 1) * cols + (c >> 1)];
      vp9_enable_segfeature(&cpi->common.seg, 1, SEG_LVL_SKIP);
      vp9_enable_segmentation(&cpi->common.seg);
      vp9_disable_segmentation(&cpi->common.seg);
// Apply internal down-scaling: convert the VPX_SCALING modes to numerator/
// denominator ratios and derive the new coded dimensions from the
// configured input size, rounding up. The result must not exceed the
// dimensions the encoder was initialized with.
// NOTE(review): the extraction dropped the return statements of this
// function.
int vp9_set_internal_size(VP9_COMP *cpi,
                          VPX_SCALING horiz_mode, VPX_SCALING vert_mode) {
  VP9_COMMON *cm = &cpi->common;
  int hr = 0, hs = 0, vr = 0, vs = 0;
  // Reject scaling modes beyond the supported range.
  if (horiz_mode > ONETWO || vert_mode > ONETWO)
  Scale2Ratio(horiz_mode, &hr, &hs);
  Scale2Ratio(vert_mode, &vr, &vs);
  // always go to the next whole number
  cm->width = (hs - 1 + cpi->oxcf.width * hr) / hs;
  cm->height = (vs - 1 + cpi->oxcf.height * vr) / vs;
  assert(cm->width <= cpi->initial_width);
  assert(cm->height <= cpi->initial_height);
  update_frame_size(cpi);
// Set an explicit coded frame size, clamping it into the supported range:
// no smaller than 1/5 of the initial dimensions (plus one) and no larger
// than the initial dimensions. Out-of-range requests are adjusted with a
// warning printed to stdout.
// NOTE(review): the extraction dropped lines here (the `if (width)` /
// `if (height)` guards, the width assignment, closing braces and the
// return); comments describe the visible statements.
int vp9_set_size_literal(VP9_COMP *cpi, unsigned int width,
                         unsigned int height) {
  VP9_COMMON *cm = &cpi->common;
  // Assume 4:2:0 input for the lazy one-time buffer allocation.
  check_initial_width(cpi, 1, 1);
    // Clamp width to [initial_width / 5 + 1, initial_width].
    if (cm->width * 5 < cpi->initial_width) {
      cm->width = cpi->initial_width / 5 + 1;
      printf("Warning: Desired width too small, changed to %d\n", cm->width);
    if (cm->width > cpi->initial_width) {
      cm->width = cpi->initial_width;
      printf("Warning: Desired width too large, changed to %d\n", cm->width);
    cm->height = height;
    // Clamp height to [initial_height / 5 + 1, initial_height].
    if (cm->height * 5 < cpi->initial_height) {
      cm->height = cpi->initial_height / 5 + 1;
      printf("Warning: Desired height too small, changed to %d\n", cm->height);
    if (cm->height > cpi->initial_height) {
      cm->height = cpi->initial_height;
      printf("Warning: Desired height too large, changed to %d\n", cm->height);
  assert(cm->width <= cpi->initial_width);
  assert(cm->height <= cpi->initial_height);
  update_frame_size(cpi);
3002 void vp9_set_svc(VP9_COMP *cpi, int use_svc) {
3003 cpi->use_svc = use_svc;
3007 int vp9_get_y_sse(const YV12_BUFFER_CONFIG *a, const YV12_BUFFER_CONFIG *b) {
3008 assert(a->y_crop_width == b->y_crop_width);
3009 assert(a->y_crop_height == b->y_crop_height);
3011 return (int)get_sse(a->y_buffer, a->y_stride, b->y_buffer, b->y_stride,
3012 a->y_crop_width, a->y_crop_height);
3016 int vp9_get_quantizer(VP9_COMP *cpi) {
3017 return cpi->common.base_qindex;
// Translate the VP8-compatible per-frame encoding flags into the encoder's
// reference-usage, reference-update and entropy-update settings. Each
// NO_REF_*/NO_UPD_* bit toggles the corresponding VP9_*_FLAG out of the
// respective mask before it is applied.
// NOTE(review): the extraction dropped the initializers of `ref` and `upd`
// and several closing braces.
void vp9_apply_encoding_flags(VP9_COMP *cpi, vpx_enc_frame_flags_t flags) {
  // Which reference buffers this frame may predict from.
  if (flags & (VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF |
               VP8_EFLAG_NO_REF_ARF)) {
    if (flags & VP8_EFLAG_NO_REF_LAST)
      ref ^= VP9_LAST_FLAG;
    if (flags & VP8_EFLAG_NO_REF_GF)
      ref ^= VP9_GOLD_FLAG;
    if (flags & VP8_EFLAG_NO_REF_ARF)
      ref ^= VP9_ALT_FLAG;
    vp9_use_as_reference(cpi, ref);
  // Which reference buffers this frame refreshes.
  if (flags & (VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
               VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_FORCE_GF |
               VP8_EFLAG_FORCE_ARF)) {
    if (flags & VP8_EFLAG_NO_UPD_LAST)
      upd ^= VP9_LAST_FLAG;
    if (flags & VP8_EFLAG_NO_UPD_GF)
      upd ^= VP9_GOLD_FLAG;
    if (flags & VP8_EFLAG_NO_UPD_ARF)
      upd ^= VP9_ALT_FLAG;
    vp9_update_reference(cpi, upd);
  // Optionally freeze the entropy context for this frame.
  if (flags & VP8_EFLAG_NO_UPD_ENTROPY) {
    vp9_update_entropy(cpi, 0);