/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "./vp9_rtcd.h"
#include "./vpx_config.h"
#include "./vpx_dsp_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "vpx_dsp/psnr.h"
#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_dsp/vpx_filter.h"
#if CONFIG_INTERNAL_STATS
#include "vpx_dsp/ssim.h"
#endif  // CONFIG_INTERNAL_STATS
#include "vpx_ports/mem.h"
#include "vpx_ports/system_state.h"
#include "vpx_ports/vpx_timer.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_filter.h"
#include "vp9/common/vp9_idct.h"
#if CONFIG_VP9_POSTPROC
#include "vp9/common/vp9_postproc.h"
#endif  // CONFIG_VP9_POSTPROC
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/encoder/vp9_alt_ref_aq.h"
#include "vp9/encoder/vp9_aq_360.h"
#include "vp9/encoder/vp9_aq_complexity.h"
#include "vp9/encoder/vp9_aq_cyclicrefresh.h"
#include "vp9/encoder/vp9_aq_variance.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_context_tree.h"
#include "vp9/encoder/vp9_encodeframe.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_encoder.h"
#include "vp9/encoder/vp9_extend.h"
#include "vp9/encoder/vp9_ethread.h"
#include "vp9/encoder/vp9_firstpass.h"
#include "vp9/encoder/vp9_mbgraph.h"
#include "vp9/encoder/vp9_multi_thread.h"
#include "vp9/encoder/vp9_noise_estimate.h"
#include "vp9/encoder/vp9_picklpf.h"
#include "vp9/encoder/vp9_ratectrl.h"
#include "vp9/encoder/vp9_rd.h"
#include "vp9/encoder/vp9_resize.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_skin_detection.h"
#include "vp9/encoder/vp9_speed_features.h"
#include "vp9/encoder/vp9_svc_layercontext.h"
#include "vp9/encoder/vp9_temporal_filter.h"
65 #define AM_SEGMENT_ID_INACTIVE 7
66 #define AM_SEGMENT_ID_ACTIVE 0
68 #define ALTREF_HIGH_PRECISION_MV 1 // Whether to use high precision mv
69 // for altref computation.
#define HIGH_PRECISION_MV_QTHRESH 200  // Q threshold for high precision
                                       // mv. Choose a very high value for
                                       // now so that HIGH_PRECISION is always
                                       // chosen.
75 #define FRAME_SIZE_FACTOR 128 // empirical params for context model threshold
76 #define FRAME_RATE_FACTOR 8
78 #ifdef OUTPUT_YUV_DENOISED
79 FILE *yuv_denoised_file = NULL;
81 #ifdef OUTPUT_YUV_SKINMAP
82 static FILE *yuv_skinmap_file = NULL;
94 #ifdef ENABLE_KF_DENOISE
95 // Test condition for spatial denoise of source.
96 static int is_spatial_denoise_enabled(VP9_COMP *cpi) {
97 VP9_COMMON *const cm = &cpi->common;
98 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
100 return (oxcf->pass != 1) && !is_lossless_requested(&cpi->oxcf) &&
101 frame_is_intra_only(cm);
105 // compute adaptive threshold for skip recoding
106 static int compute_context_model_thresh(const VP9_COMP *const cpi) {
107 const VP9_COMMON *const cm = &cpi->common;
108 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
109 const int frame_size = (cm->width * cm->height) >> 10;
110 const int bitrate = (int)(oxcf->target_bandwidth >> 10);
111 const int qindex_factor = cm->base_qindex + (MAXQ >> 1);
113 // This equation makes the threshold adaptive to frame size.
114 // Coding gain obtained by recoding comes from alternate frames of large
115 // content change. We skip recoding if the difference of previous and current
116 // frame context probability model is less than a certain threshold.
117 // The first component is the most critical part to guarantee adaptivity.
118 // Other parameters are estimated based on normal setting of hd resolution
119 // parameters. e.g frame_size = 1920x1080, bitrate = 8000, qindex_factor < 50
121 ((FRAME_SIZE_FACTOR * frame_size - FRAME_RATE_FACTOR * bitrate) *
128 // compute the total cost difference between current
129 // and previous frame context prob model.
130 static int compute_context_model_diff(const VP9_COMMON *const cm) {
131 const FRAME_CONTEXT *const pre_fc =
132 &cm->frame_contexts[cm->frame_context_idx];
133 const FRAME_CONTEXT *const cur_fc = cm->fc;
134 const FRAME_COUNTS *counts = &cm->counts;
135 vpx_prob pre_last_prob, cur_last_prob;
137 int i, j, k, l, m, n;
140 for (i = 0; i < BLOCK_SIZE_GROUPS; ++i) {
141 for (j = 0; j < INTRA_MODES - 1; ++j) {
142 diff += (int)counts->y_mode[i][j] *
143 (pre_fc->y_mode_prob[i][j] - cur_fc->y_mode_prob[i][j]);
145 pre_last_prob = MAX_PROB - pre_fc->y_mode_prob[i][INTRA_MODES - 2];
146 cur_last_prob = MAX_PROB - cur_fc->y_mode_prob[i][INTRA_MODES - 2];
148 diff += (int)counts->y_mode[i][INTRA_MODES - 1] *
149 (pre_last_prob - cur_last_prob);
153 for (i = 0; i < INTRA_MODES; ++i) {
154 for (j = 0; j < INTRA_MODES - 1; ++j) {
155 diff += (int)counts->uv_mode[i][j] *
156 (pre_fc->uv_mode_prob[i][j] - cur_fc->uv_mode_prob[i][j]);
158 pre_last_prob = MAX_PROB - pre_fc->uv_mode_prob[i][INTRA_MODES - 2];
159 cur_last_prob = MAX_PROB - cur_fc->uv_mode_prob[i][INTRA_MODES - 2];
161 diff += (int)counts->uv_mode[i][INTRA_MODES - 1] *
162 (pre_last_prob - cur_last_prob);
166 for (i = 0; i < PARTITION_CONTEXTS; ++i) {
167 for (j = 0; j < PARTITION_TYPES - 1; ++j) {
168 diff += (int)counts->partition[i][j] *
169 (pre_fc->partition_prob[i][j] - cur_fc->partition_prob[i][j]);
171 pre_last_prob = MAX_PROB - pre_fc->partition_prob[i][PARTITION_TYPES - 2];
172 cur_last_prob = MAX_PROB - cur_fc->partition_prob[i][PARTITION_TYPES - 2];
174 diff += (int)counts->partition[i][PARTITION_TYPES - 1] *
175 (pre_last_prob - cur_last_prob);
179 for (i = 0; i < TX_SIZES; ++i) {
180 for (j = 0; j < PLANE_TYPES; ++j) {
181 for (k = 0; k < REF_TYPES; ++k) {
182 for (l = 0; l < COEF_BANDS; ++l) {
183 for (m = 0; m < BAND_COEFF_CONTEXTS(l); ++m) {
184 for (n = 0; n < UNCONSTRAINED_NODES; ++n) {
185 diff += (int)counts->coef[i][j][k][l][m][n] *
186 (pre_fc->coef_probs[i][j][k][l][m][n] -
187 cur_fc->coef_probs[i][j][k][l][m][n]);
192 pre_fc->coef_probs[i][j][k][l][m][UNCONSTRAINED_NODES - 1];
195 cur_fc->coef_probs[i][j][k][l][m][UNCONSTRAINED_NODES - 1];
197 diff += (int)counts->coef[i][j][k][l][m][UNCONSTRAINED_NODES] *
198 (pre_last_prob - cur_last_prob);
205 // switchable_interp_prob
206 for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; ++i) {
207 for (j = 0; j < SWITCHABLE_FILTERS - 1; ++j) {
208 diff += (int)counts->switchable_interp[i][j] *
209 (pre_fc->switchable_interp_prob[i][j] -
210 cur_fc->switchable_interp_prob[i][j]);
213 MAX_PROB - pre_fc->switchable_interp_prob[i][SWITCHABLE_FILTERS - 2];
215 MAX_PROB - cur_fc->switchable_interp_prob[i][SWITCHABLE_FILTERS - 2];
217 diff += (int)counts->switchable_interp[i][SWITCHABLE_FILTERS - 1] *
218 (pre_last_prob - cur_last_prob);
222 for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
223 for (j = 0; j < INTER_MODES - 1; ++j) {
224 diff += (int)counts->inter_mode[i][j] *
225 (pre_fc->inter_mode_probs[i][j] - cur_fc->inter_mode_probs[i][j]);
227 pre_last_prob = MAX_PROB - pre_fc->inter_mode_probs[i][INTER_MODES - 2];
228 cur_last_prob = MAX_PROB - cur_fc->inter_mode_probs[i][INTER_MODES - 2];
230 diff += (int)counts->inter_mode[i][INTER_MODES - 1] *
231 (pre_last_prob - cur_last_prob);
235 for (i = 0; i < INTRA_INTER_CONTEXTS; ++i) {
236 diff += (int)counts->intra_inter[i][0] *
237 (pre_fc->intra_inter_prob[i] - cur_fc->intra_inter_prob[i]);
239 pre_last_prob = MAX_PROB - pre_fc->intra_inter_prob[i];
240 cur_last_prob = MAX_PROB - cur_fc->intra_inter_prob[i];
242 diff += (int)counts->intra_inter[i][1] * (pre_last_prob - cur_last_prob);
246 for (i = 0; i < COMP_INTER_CONTEXTS; ++i) {
247 diff += (int)counts->comp_inter[i][0] *
248 (pre_fc->comp_inter_prob[i] - cur_fc->comp_inter_prob[i]);
250 pre_last_prob = MAX_PROB - pre_fc->comp_inter_prob[i];
251 cur_last_prob = MAX_PROB - cur_fc->comp_inter_prob[i];
253 diff += (int)counts->comp_inter[i][1] * (pre_last_prob - cur_last_prob);
257 for (i = 0; i < REF_CONTEXTS; ++i) {
258 for (j = 0; j < 2; ++j) {
259 diff += (int)counts->single_ref[i][j][0] *
260 (pre_fc->single_ref_prob[i][j] - cur_fc->single_ref_prob[i][j]);
262 pre_last_prob = MAX_PROB - pre_fc->single_ref_prob[i][j];
263 cur_last_prob = MAX_PROB - cur_fc->single_ref_prob[i][j];
266 (int)counts->single_ref[i][j][1] * (pre_last_prob - cur_last_prob);
271 for (i = 0; i < REF_CONTEXTS; ++i) {
272 diff += (int)counts->comp_ref[i][0] *
273 (pre_fc->comp_ref_prob[i] - cur_fc->comp_ref_prob[i]);
275 pre_last_prob = MAX_PROB - pre_fc->comp_ref_prob[i];
276 cur_last_prob = MAX_PROB - cur_fc->comp_ref_prob[i];
278 diff += (int)counts->comp_ref[i][1] * (pre_last_prob - cur_last_prob);
282 for (i = 0; i < TX_SIZE_CONTEXTS; ++i) {
284 for (j = 0; j < TX_SIZES - 1; ++j) {
285 diff += (int)counts->tx.p32x32[i][j] *
286 (pre_fc->tx_probs.p32x32[i][j] - cur_fc->tx_probs.p32x32[i][j]);
288 pre_last_prob = MAX_PROB - pre_fc->tx_probs.p32x32[i][TX_SIZES - 2];
289 cur_last_prob = MAX_PROB - cur_fc->tx_probs.p32x32[i][TX_SIZES - 2];
291 diff += (int)counts->tx.p32x32[i][TX_SIZES - 1] *
292 (pre_last_prob - cur_last_prob);
295 for (j = 0; j < TX_SIZES - 2; ++j) {
296 diff += (int)counts->tx.p16x16[i][j] *
297 (pre_fc->tx_probs.p16x16[i][j] - cur_fc->tx_probs.p16x16[i][j]);
299 pre_last_prob = MAX_PROB - pre_fc->tx_probs.p16x16[i][TX_SIZES - 3];
300 cur_last_prob = MAX_PROB - cur_fc->tx_probs.p16x16[i][TX_SIZES - 3];
302 diff += (int)counts->tx.p16x16[i][TX_SIZES - 2] *
303 (pre_last_prob - cur_last_prob);
306 for (j = 0; j < TX_SIZES - 3; ++j) {
307 diff += (int)counts->tx.p8x8[i][j] *
308 (pre_fc->tx_probs.p8x8[i][j] - cur_fc->tx_probs.p8x8[i][j]);
310 pre_last_prob = MAX_PROB - pre_fc->tx_probs.p8x8[i][TX_SIZES - 4];
311 cur_last_prob = MAX_PROB - cur_fc->tx_probs.p8x8[i][TX_SIZES - 4];
314 (int)counts->tx.p8x8[i][TX_SIZES - 3] * (pre_last_prob - cur_last_prob);
318 for (i = 0; i < SKIP_CONTEXTS; ++i) {
319 diff += (int)counts->skip[i][0] *
320 (pre_fc->skip_probs[i] - cur_fc->skip_probs[i]);
322 pre_last_prob = MAX_PROB - pre_fc->skip_probs[i];
323 cur_last_prob = MAX_PROB - cur_fc->skip_probs[i];
325 diff += (int)counts->skip[i][1] * (pre_last_prob - cur_last_prob);
329 for (i = 0; i < MV_JOINTS - 1; ++i) {
330 diff += (int)counts->mv.joints[i] *
331 (pre_fc->nmvc.joints[i] - cur_fc->nmvc.joints[i]);
333 pre_last_prob = MAX_PROB - pre_fc->nmvc.joints[MV_JOINTS - 2];
334 cur_last_prob = MAX_PROB - cur_fc->nmvc.joints[MV_JOINTS - 2];
337 (int)counts->mv.joints[MV_JOINTS - 1] * (pre_last_prob - cur_last_prob);
339 for (i = 0; i < 2; ++i) {
340 const nmv_component_counts *nmv_count = &counts->mv.comps[i];
341 const nmv_component *pre_nmv_prob = &pre_fc->nmvc.comps[i];
342 const nmv_component *cur_nmv_prob = &cur_fc->nmvc.comps[i];
345 diff += (int)nmv_count->sign[0] * (pre_nmv_prob->sign - cur_nmv_prob->sign);
347 pre_last_prob = MAX_PROB - pre_nmv_prob->sign;
348 cur_last_prob = MAX_PROB - cur_nmv_prob->sign;
350 diff += (int)nmv_count->sign[1] * (pre_last_prob - cur_last_prob);
353 for (j = 0; j < MV_CLASSES - 1; ++j) {
354 diff += (int)nmv_count->classes[j] *
355 (pre_nmv_prob->classes[j] - cur_nmv_prob->classes[j]);
357 pre_last_prob = MAX_PROB - pre_nmv_prob->classes[MV_CLASSES - 2];
358 cur_last_prob = MAX_PROB - cur_nmv_prob->classes[MV_CLASSES - 2];
360 diff += (int)nmv_count->classes[MV_CLASSES - 1] *
361 (pre_last_prob - cur_last_prob);
364 for (j = 0; j < CLASS0_SIZE - 1; ++j) {
365 diff += (int)nmv_count->class0[j] *
366 (pre_nmv_prob->class0[j] - cur_nmv_prob->class0[j]);
368 pre_last_prob = MAX_PROB - pre_nmv_prob->class0[CLASS0_SIZE - 2];
369 cur_last_prob = MAX_PROB - cur_nmv_prob->class0[CLASS0_SIZE - 2];
371 diff += (int)nmv_count->class0[CLASS0_SIZE - 1] *
372 (pre_last_prob - cur_last_prob);
375 for (j = 0; j < MV_OFFSET_BITS; ++j) {
376 diff += (int)nmv_count->bits[j][0] *
377 (pre_nmv_prob->bits[j] - cur_nmv_prob->bits[j]);
379 pre_last_prob = MAX_PROB - pre_nmv_prob->bits[j];
380 cur_last_prob = MAX_PROB - cur_nmv_prob->bits[j];
382 diff += (int)nmv_count->bits[j][1] * (pre_last_prob - cur_last_prob);
386 for (j = 0; j < CLASS0_SIZE; ++j) {
387 for (k = 0; k < MV_FP_SIZE - 1; ++k) {
388 diff += (int)nmv_count->class0_fp[j][k] *
389 (pre_nmv_prob->class0_fp[j][k] - cur_nmv_prob->class0_fp[j][k]);
391 pre_last_prob = MAX_PROB - pre_nmv_prob->class0_fp[j][MV_FP_SIZE - 2];
392 cur_last_prob = MAX_PROB - cur_nmv_prob->class0_fp[j][MV_FP_SIZE - 2];
394 diff += (int)nmv_count->class0_fp[j][MV_FP_SIZE - 1] *
395 (pre_last_prob - cur_last_prob);
399 for (j = 0; j < MV_FP_SIZE - 1; ++j) {
401 (int)nmv_count->fp[j] * (pre_nmv_prob->fp[j] - cur_nmv_prob->fp[j]);
403 pre_last_prob = MAX_PROB - pre_nmv_prob->fp[MV_FP_SIZE - 2];
404 cur_last_prob = MAX_PROB - cur_nmv_prob->fp[MV_FP_SIZE - 2];
407 (int)nmv_count->fp[MV_FP_SIZE - 1] * (pre_last_prob - cur_last_prob);
410 diff += (int)nmv_count->class0_hp[0] *
411 (pre_nmv_prob->class0_hp - cur_nmv_prob->class0_hp);
413 pre_last_prob = MAX_PROB - pre_nmv_prob->class0_hp;
414 cur_last_prob = MAX_PROB - cur_nmv_prob->class0_hp;
416 diff += (int)nmv_count->class0_hp[1] * (pre_last_prob - cur_last_prob);
419 diff += (int)nmv_count->hp[0] * (pre_nmv_prob->hp - cur_nmv_prob->hp);
421 pre_last_prob = MAX_PROB - pre_nmv_prob->hp;
422 cur_last_prob = MAX_PROB - cur_nmv_prob->hp;
424 diff += (int)nmv_count->hp[1] * (pre_last_prob - cur_last_prob);
430 // Test for whether to calculate metrics for the frame.
431 static int is_psnr_calc_enabled(VP9_COMP *cpi) {
432 VP9_COMMON *const cm = &cpi->common;
433 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
435 return cpi->b_calculate_psnr && (oxcf->pass != 1) && cm->show_frame;
438 /* clang-format off */
439 const Vp9LevelSpec vp9_level_defs[VP9_LEVELS] = {
440 { LEVEL_1, 829440, 36864, 200, 400, 2, 1, 4, 8 },
441 { LEVEL_1_1, 2764800, 73728, 800, 1000, 2, 1, 4, 8 },
442 { LEVEL_2, 4608000, 122880, 1800, 1500, 2, 1, 4, 8 },
443 { LEVEL_2_1, 9216000, 245760, 3600, 2800, 2, 2, 4, 8 },
444 { LEVEL_3, 20736000, 552960, 7200, 6000, 2, 4, 4, 8 },
445 { LEVEL_3_1, 36864000, 983040, 12000, 10000, 2, 4, 4, 8 },
446 { LEVEL_4, 83558400, 2228224, 18000, 16000, 4, 4, 4, 8 },
447 { LEVEL_4_1, 160432128, 2228224, 30000, 18000, 4, 4, 5, 6 },
448 { LEVEL_5, 311951360, 8912896, 60000, 36000, 6, 8, 6, 4 },
449 { LEVEL_5_1, 588251136, 8912896, 120000, 46000, 8, 8, 10, 4 },
450 // TODO(huisu): update max_cpb_size for level 5_2 ~ 6_2 when
451 // they are finalized (currently tentative).
452 { LEVEL_5_2, 1176502272, 8912896, 180000, 90000, 8, 8, 10, 4 },
453 { LEVEL_6, 1176502272, 35651584, 180000, 90000, 8, 16, 10, 4 },
454 { LEVEL_6_1, 2353004544u, 35651584, 240000, 180000, 8, 16, 10, 4 },
455 { LEVEL_6_2, 4706009088u, 35651584, 480000, 360000, 8, 16, 10, 4 },
457 /* clang-format on */
459 static const char *level_fail_messages[TARGET_LEVEL_FAIL_IDS] =
460 { "The average bit-rate is too high.",
461 "The picture size is too large.",
462 "The luma sample rate is too large.",
463 "The CPB size is too large.",
464 "The compression ratio is too small",
465 "Too many column tiles are used.",
466 "The alt-ref distance is too small.",
467 "Too many reference buffers are used." };
469 static INLINE void Scale2Ratio(VPX_SCALING mode, int *hr, int *hs) {
495 // Mark all inactive blocks as active. Other segmentation features may be set
496 // so memset cannot be used, instead only inactive blocks should be reset.
497 static void suppress_active_map(VP9_COMP *cpi) {
498 unsigned char *const seg_map = cpi->segmentation_map;
500 if (cpi->active_map.enabled || cpi->active_map.update) {
501 const int rows = cpi->common.mi_rows;
502 const int cols = cpi->common.mi_cols;
505 for (i = 0; i < rows * cols; ++i)
506 if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
507 seg_map[i] = AM_SEGMENT_ID_ACTIVE;
511 static void apply_active_map(VP9_COMP *cpi) {
512 struct segmentation *const seg = &cpi->common.seg;
513 unsigned char *const seg_map = cpi->segmentation_map;
514 const unsigned char *const active_map = cpi->active_map.map;
517 assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);
519 if (frame_is_intra_only(&cpi->common)) {
520 cpi->active_map.enabled = 0;
521 cpi->active_map.update = 1;
524 if (cpi->active_map.update) {
525 if (cpi->active_map.enabled) {
526 for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
527 if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
528 vp9_enable_segmentation(seg);
529 vp9_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
530 vp9_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF);
531 // Setting the data to -MAX_LOOP_FILTER will result in the computed loop
532 // filter level being zero regardless of the value of seg->abs_delta.
533 vp9_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF,
536 vp9_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
537 vp9_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF);
539 seg->update_data = 1;
543 cpi->active_map.update = 0;
547 static void init_level_info(Vp9LevelInfo *level_info) {
548 Vp9LevelStats *const level_stats = &level_info->level_stats;
549 Vp9LevelSpec *const level_spec = &level_info->level_spec;
551 memset(level_stats, 0, sizeof(*level_stats));
552 memset(level_spec, 0, sizeof(*level_spec));
553 level_spec->level = LEVEL_UNKNOWN;
554 level_spec->min_altref_distance = INT_MAX;
557 VP9_LEVEL vp9_get_level(const Vp9LevelSpec *const level_spec) {
559 const Vp9LevelSpec *this_level;
561 vpx_clear_system_state();
563 for (i = 0; i < VP9_LEVELS; ++i) {
564 this_level = &vp9_level_defs[i];
565 if ((double)level_spec->max_luma_sample_rate >
566 (double)this_level->max_luma_sample_rate *
567 (1 + SAMPLE_RATE_GRACE_P) ||
568 level_spec->max_luma_picture_size > this_level->max_luma_picture_size ||
569 level_spec->average_bitrate > this_level->average_bitrate ||
570 level_spec->max_cpb_size > this_level->max_cpb_size ||
571 level_spec->compression_ratio < this_level->compression_ratio ||
572 level_spec->max_col_tiles > this_level->max_col_tiles ||
573 level_spec->min_altref_distance < this_level->min_altref_distance ||
574 level_spec->max_ref_frame_buffers > this_level->max_ref_frame_buffers)
578 return (i == VP9_LEVELS) ? LEVEL_UNKNOWN : vp9_level_defs[i].level;
581 int vp9_set_active_map(VP9_COMP *cpi, unsigned char *new_map_16x16, int rows,
583 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
584 unsigned char *const active_map_8x8 = cpi->active_map.map;
585 const int mi_rows = cpi->common.mi_rows;
586 const int mi_cols = cpi->common.mi_cols;
587 cpi->active_map.update = 1;
590 for (r = 0; r < mi_rows; ++r) {
591 for (c = 0; c < mi_cols; ++c) {
592 active_map_8x8[r * mi_cols + c] =
593 new_map_16x16[(r >> 1) * cols + (c >> 1)]
594 ? AM_SEGMENT_ID_ACTIVE
595 : AM_SEGMENT_ID_INACTIVE;
598 cpi->active_map.enabled = 1;
600 cpi->active_map.enabled = 0;
608 int vp9_get_active_map(VP9_COMP *cpi, unsigned char *new_map_16x16, int rows,
610 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols &&
612 unsigned char *const seg_map_8x8 = cpi->segmentation_map;
613 const int mi_rows = cpi->common.mi_rows;
614 const int mi_cols = cpi->common.mi_cols;
615 memset(new_map_16x16, !cpi->active_map.enabled, rows * cols);
616 if (cpi->active_map.enabled) {
618 for (r = 0; r < mi_rows; ++r) {
619 for (c = 0; c < mi_cols; ++c) {
620 // Cyclic refresh segments are considered active despite not having
621 // AM_SEGMENT_ID_ACTIVE
622 new_map_16x16[(r >> 1) * cols + (c >> 1)] |=
623 seg_map_8x8[r * mi_cols + c] != AM_SEGMENT_ID_INACTIVE;
633 void vp9_set_high_precision_mv(VP9_COMP *cpi, int allow_high_precision_mv) {
634 MACROBLOCK *const mb = &cpi->td.mb;
635 cpi->common.allow_high_precision_mv = allow_high_precision_mv;
636 if (cpi->common.allow_high_precision_mv) {
637 mb->mvcost = mb->nmvcost_hp;
638 mb->mvsadcost = mb->nmvsadcost_hp;
640 mb->mvcost = mb->nmvcost;
641 mb->mvsadcost = mb->nmvsadcost;
645 static void setup_frame(VP9_COMP *cpi) {
646 VP9_COMMON *const cm = &cpi->common;
647 // Set up entropy context depending on frame type. The decoder mandates
648 // the use of the default context, index 0, for keyframes and inter
649 // frames where the error_resilient_mode or intra_only flag is set. For
650 // other inter-frames the encoder currently uses only two contexts;
651 // context 1 for ALTREF frames and context 0 for the others.
652 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
653 vp9_setup_past_independence(cm);
655 if (!cpi->use_svc) cm->frame_context_idx = cpi->refresh_alt_ref_frame;
658 if (cm->frame_type == KEY_FRAME) {
659 if (!is_two_pass_svc(cpi)) cpi->refresh_golden_frame = 1;
660 cpi->refresh_alt_ref_frame = 1;
661 vp9_zero(cpi->interp_filter_selected);
663 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
664 vp9_zero(cpi->interp_filter_selected[0]);
668 static void vp9_enc_setup_mi(VP9_COMMON *cm) {
670 cm->mi = cm->mip + cm->mi_stride + 1;
671 memset(cm->mip, 0, cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mip));
672 cm->prev_mi = cm->prev_mip + cm->mi_stride + 1;
673 // Clear top border row
674 memset(cm->prev_mip, 0, sizeof(*cm->prev_mip) * cm->mi_stride);
675 // Clear left border column
676 for (i = 1; i < cm->mi_rows + 1; ++i)
677 memset(&cm->prev_mip[i * cm->mi_stride], 0, sizeof(*cm->prev_mip));
679 cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1;
680 cm->prev_mi_grid_visible = cm->prev_mi_grid_base + cm->mi_stride + 1;
682 memset(cm->mi_grid_base, 0,
683 cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mi_grid_base));
686 static int vp9_enc_alloc_mi(VP9_COMMON *cm, int mi_size) {
687 cm->mip = vpx_calloc(mi_size, sizeof(*cm->mip));
688 if (!cm->mip) return 1;
689 cm->prev_mip = vpx_calloc(mi_size, sizeof(*cm->prev_mip));
690 if (!cm->prev_mip) return 1;
691 cm->mi_alloc_size = mi_size;
693 cm->mi_grid_base = (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO *));
694 if (!cm->mi_grid_base) return 1;
695 cm->prev_mi_grid_base =
696 (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO *));
697 if (!cm->prev_mi_grid_base) return 1;
702 static void vp9_enc_free_mi(VP9_COMMON *cm) {
705 vpx_free(cm->prev_mip);
707 vpx_free(cm->mi_grid_base);
708 cm->mi_grid_base = NULL;
709 vpx_free(cm->prev_mi_grid_base);
710 cm->prev_mi_grid_base = NULL;
713 static void vp9_swap_mi_and_prev_mi(VP9_COMMON *cm) {
714 // Current mip will be the prev_mip for the next frame.
715 MODE_INFO **temp_base = cm->prev_mi_grid_base;
716 MODE_INFO *temp = cm->prev_mip;
717 cm->prev_mip = cm->mip;
720 // Update the upper left visible macroblock ptrs.
721 cm->mi = cm->mip + cm->mi_stride + 1;
722 cm->prev_mi = cm->prev_mip + cm->mi_stride + 1;
724 cm->prev_mi_grid_base = cm->mi_grid_base;
725 cm->mi_grid_base = temp_base;
726 cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1;
727 cm->prev_mi_grid_visible = cm->prev_mi_grid_base + cm->mi_stride + 1;
// One-time global encoder initialization: run-time CPU dispatch tables and
// static lookup tables. Safe to call more than once; only the first call
// does work. NOTE(review): guard is a plain volatile flag, not atomic —
// concurrent first calls are presumably serialized by the caller.
void vp9_initialize_enc(void) {
  static volatile int init_done = 0;

  if (!init_done) {
    vp9_rtcd();
    vpx_dsp_rtcd();
    vpx_scale_rtcd();
    vp9_init_intra_predictors();
    vp9_init_me_luts();
    vp9_rc_init_minq_luts();
    vp9_entropy_mv_init();
#if !CONFIG_REALTIME_ONLY
    vp9_temporal_filter_init();
#endif
    init_done = 1;
  }
}
748 static void dealloc_compressor_data(VP9_COMP *cpi) {
749 VP9_COMMON *const cm = &cpi->common;
752 vpx_free(cpi->mbmi_ext_base);
753 cpi->mbmi_ext_base = NULL;
755 vpx_free(cpi->tile_data);
756 cpi->tile_data = NULL;
758 vpx_free(cpi->segmentation_map);
759 cpi->segmentation_map = NULL;
760 vpx_free(cpi->coding_context.last_frame_seg_map_copy);
761 cpi->coding_context.last_frame_seg_map_copy = NULL;
763 vpx_free(cpi->nmvcosts[0]);
764 vpx_free(cpi->nmvcosts[1]);
765 cpi->nmvcosts[0] = NULL;
766 cpi->nmvcosts[1] = NULL;
768 vpx_free(cpi->nmvcosts_hp[0]);
769 vpx_free(cpi->nmvcosts_hp[1]);
770 cpi->nmvcosts_hp[0] = NULL;
771 cpi->nmvcosts_hp[1] = NULL;
773 vpx_free(cpi->nmvsadcosts[0]);
774 vpx_free(cpi->nmvsadcosts[1]);
775 cpi->nmvsadcosts[0] = NULL;
776 cpi->nmvsadcosts[1] = NULL;
778 vpx_free(cpi->nmvsadcosts_hp[0]);
779 vpx_free(cpi->nmvsadcosts_hp[1]);
780 cpi->nmvsadcosts_hp[0] = NULL;
781 cpi->nmvsadcosts_hp[1] = NULL;
783 vpx_free(cpi->skin_map);
784 cpi->skin_map = NULL;
786 vpx_free(cpi->prev_partition);
787 cpi->prev_partition = NULL;
789 vpx_free(cpi->prev_segment_id);
790 cpi->prev_segment_id = NULL;
792 vpx_free(cpi->prev_variance_low);
793 cpi->prev_variance_low = NULL;
795 vpx_free(cpi->copied_frame_cnt);
796 cpi->copied_frame_cnt = NULL;
798 vpx_free(cpi->content_state_sb_fd);
799 cpi->content_state_sb_fd = NULL;
801 vpx_free(cpi->count_arf_frame_usage);
802 cpi->count_arf_frame_usage = NULL;
803 vpx_free(cpi->count_lastgolden_frame_usage);
804 cpi->count_lastgolden_frame_usage = NULL;
806 vp9_cyclic_refresh_free(cpi->cyclic_refresh);
807 cpi->cyclic_refresh = NULL;
809 vpx_free(cpi->active_map.map);
810 cpi->active_map.map = NULL;
812 vpx_free(cpi->consec_zero_mv);
813 cpi->consec_zero_mv = NULL;
815 vp9_free_ref_frame_buffers(cm->buffer_pool);
816 #if CONFIG_VP9_POSTPROC
817 vp9_free_postproc_buffers(cm);
819 vp9_free_context_buffers(cm);
821 vpx_free_frame_buffer(&cpi->last_frame_uf);
822 vpx_free_frame_buffer(&cpi->scaled_source);
823 vpx_free_frame_buffer(&cpi->scaled_last_source);
824 vpx_free_frame_buffer(&cpi->alt_ref_buffer);
825 #ifdef ENABLE_KF_DENOISE
826 vpx_free_frame_buffer(&cpi->raw_unscaled_source);
827 vpx_free_frame_buffer(&cpi->raw_scaled_source);
830 vp9_lookahead_destroy(cpi->lookahead);
832 vpx_free(cpi->tile_tok[0][0]);
833 cpi->tile_tok[0][0] = 0;
835 vpx_free(cpi->tplist[0][0]);
836 cpi->tplist[0][0] = NULL;
838 vp9_free_pc_tree(&cpi->td);
840 for (i = 0; i < cpi->svc.number_spatial_layers; ++i) {
841 LAYER_CONTEXT *const lc = &cpi->svc.layer_context[i];
842 vpx_free(lc->rc_twopass_stats_in.buf);
843 lc->rc_twopass_stats_in.buf = NULL;
844 lc->rc_twopass_stats_in.sz = 0;
847 if (cpi->source_diff_var != NULL) {
848 vpx_free(cpi->source_diff_var);
849 cpi->source_diff_var = NULL;
852 for (i = 0; i < MAX_LAG_BUFFERS; ++i) {
853 vpx_free_frame_buffer(&cpi->svc.scaled_frames[i]);
855 memset(&cpi->svc.scaled_frames[0], 0,
856 MAX_LAG_BUFFERS * sizeof(cpi->svc.scaled_frames[0]));
858 vpx_free_frame_buffer(&cpi->svc.scaled_temp);
859 memset(&cpi->svc.scaled_temp, 0, sizeof(cpi->svc.scaled_temp));
861 vpx_free_frame_buffer(&cpi->svc.empty_frame.img);
862 memset(&cpi->svc.empty_frame, 0, sizeof(cpi->svc.empty_frame));
864 vp9_free_svc_cyclic_refresh(cpi);
867 static void save_coding_context(VP9_COMP *cpi) {
868 CODING_CONTEXT *const cc = &cpi->coding_context;
869 VP9_COMMON *cm = &cpi->common;
871 // Stores a snapshot of key state variables which can subsequently be
872 // restored with a call to vp9_restore_coding_context. These functions are
873 // intended for use in a re-code loop in vp9_compress_frame where the
874 // quantizer value is adjusted between loop iterations.
875 vp9_copy(cc->nmvjointcost, cpi->td.mb.nmvjointcost);
877 memcpy(cc->nmvcosts[0], cpi->nmvcosts[0],
878 MV_VALS * sizeof(*cpi->nmvcosts[0]));
879 memcpy(cc->nmvcosts[1], cpi->nmvcosts[1],
880 MV_VALS * sizeof(*cpi->nmvcosts[1]));
881 memcpy(cc->nmvcosts_hp[0], cpi->nmvcosts_hp[0],
882 MV_VALS * sizeof(*cpi->nmvcosts_hp[0]));
883 memcpy(cc->nmvcosts_hp[1], cpi->nmvcosts_hp[1],
884 MV_VALS * sizeof(*cpi->nmvcosts_hp[1]));
886 vp9_copy(cc->segment_pred_probs, cm->seg.pred_probs);
888 memcpy(cpi->coding_context.last_frame_seg_map_copy, cm->last_frame_seg_map,
889 (cm->mi_rows * cm->mi_cols));
891 vp9_copy(cc->last_ref_lf_deltas, cm->lf.last_ref_deltas);
892 vp9_copy(cc->last_mode_lf_deltas, cm->lf.last_mode_deltas);
897 static void restore_coding_context(VP9_COMP *cpi) {
898 CODING_CONTEXT *const cc = &cpi->coding_context;
899 VP9_COMMON *cm = &cpi->common;
901 // Restore key state variables to the snapshot state stored in the
902 // previous call to vp9_save_coding_context.
903 vp9_copy(cpi->td.mb.nmvjointcost, cc->nmvjointcost);
905 memcpy(cpi->nmvcosts[0], cc->nmvcosts[0], MV_VALS * sizeof(*cc->nmvcosts[0]));
906 memcpy(cpi->nmvcosts[1], cc->nmvcosts[1], MV_VALS * sizeof(*cc->nmvcosts[1]));
907 memcpy(cpi->nmvcosts_hp[0], cc->nmvcosts_hp[0],
908 MV_VALS * sizeof(*cc->nmvcosts_hp[0]));
909 memcpy(cpi->nmvcosts_hp[1], cc->nmvcosts_hp[1],
910 MV_VALS * sizeof(*cc->nmvcosts_hp[1]));
912 vp9_copy(cm->seg.pred_probs, cc->segment_pred_probs);
914 memcpy(cm->last_frame_seg_map, cpi->coding_context.last_frame_seg_map_copy,
915 (cm->mi_rows * cm->mi_cols));
917 vp9_copy(cm->lf.last_ref_deltas, cc->last_ref_lf_deltas);
918 vp9_copy(cm->lf.last_mode_deltas, cc->last_mode_lf_deltas);
923 #if !CONFIG_REALTIME_ONLY
924 static void configure_static_seg_features(VP9_COMP *cpi) {
925 VP9_COMMON *const cm = &cpi->common;
926 const RATE_CONTROL *const rc = &cpi->rc;
927 struct segmentation *const seg = &cm->seg;
929 int high_q = (int)(rc->avg_q > 48.0);
932 // Disable and clear down for KF
933 if (cm->frame_type == KEY_FRAME) {
934 // Clear down the global segmentation map
935 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
937 seg->update_data = 0;
938 cpi->static_mb_pct = 0;
940 // Disable segmentation
941 vp9_disable_segmentation(seg);
943 // Clear down the segment features.
944 vp9_clearall_segfeatures(seg);
945 } else if (cpi->refresh_alt_ref_frame) {
946 // If this is an alt ref frame
947 // Clear down the global segmentation map
948 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
950 seg->update_data = 0;
951 cpi->static_mb_pct = 0;
953 // Disable segmentation and individual segment features by default
954 vp9_disable_segmentation(seg);
955 vp9_clearall_segfeatures(seg);
957 // Scan frames from current to arf frame.
958 // This function re-enables segmentation if appropriate.
959 vp9_update_mbgraph_stats(cpi);
961 // If segmentation was enabled set those features needed for the
965 seg->update_data = 1;
968 vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875, cm->bit_depth);
969 vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
970 vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
972 vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
973 vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
975 // Where relevant assume segment data is delta data
976 seg->abs_delta = SEGMENT_DELTADATA;
978 } else if (seg->enabled) {
979 // All other frames if segmentation has been enabled
981 // First normal frame in a valid gf or alt ref group
982 if (rc->frames_since_golden == 0) {
983 // Set up segment features for normal frames in an arf group
984 if (rc->source_alt_ref_active) {
986 seg->update_data = 1;
987 seg->abs_delta = SEGMENT_DELTADATA;
990 vp9_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125, cm->bit_depth);
991 vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
992 vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
994 vp9_set_segdata(seg, 1, SEG_LVL_ALT_LF, -2);
995 vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_LF);
997 // Segment coding disabled for compred testing
998 if (high_q || (cpi->static_mb_pct == 100)) {
999 vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
1000 vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
1001 vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
1004 // Disable segmentation and clear down features if alt ref
1005 // is not active for this group
1007 vp9_disable_segmentation(seg);
1009 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
1011 seg->update_map = 0;
1012 seg->update_data = 0;
1014 vp9_clearall_segfeatures(seg);
1016 } else if (rc->is_src_frame_alt_ref) {
1017 // Special case where we are coding over the top of a previous
1019 // Segment coding disabled for compred testing
1021 // Enable ref frame features for segment 0 as well
1022 vp9_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
1023 vp9_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
1025 // All mbs should use ALTREF_FRAME
1026 vp9_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
1027 vp9_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
1028 vp9_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
1029 vp9_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
1031 // Skip all MBs if high Q (0,0 mv and skip coeffs)
1033 vp9_enable_segfeature(seg, 0, SEG_LVL_SKIP);
1034 vp9_enable_segfeature(seg, 1, SEG_LVL_SKIP);
1036 // Enable data update
1037 seg->update_data = 1;
1039 // All other frames.
1041 // No updates.. leave things as they are.
1042 seg->update_map = 0;
1043 seg->update_data = 0;
1047 #endif // !CONFIG_REALTIME_ONLY
1049 static void update_reference_segmentation_map(VP9_COMP *cpi) {
1050 VP9_COMMON *const cm = &cpi->common;
1051 MODE_INFO **mi_8x8_ptr = cm->mi_grid_visible;
1052 uint8_t *cache_ptr = cm->last_frame_seg_map;
1055 for (row = 0; row < cm->mi_rows; row++) {
1056 MODE_INFO **mi_8x8 = mi_8x8_ptr;
1057 uint8_t *cache = cache_ptr;
1058 for (col = 0; col < cm->mi_cols; col++, mi_8x8++, cache++)
1059 cache[0] = mi_8x8[0]->segment_id;
1060 mi_8x8_ptr += cm->mi_stride;
1061 cache_ptr += cm->mi_cols;
1065 static void alloc_raw_frame_buffers(VP9_COMP *cpi) {
1066 VP9_COMMON *cm = &cpi->common;
1067 const VP9EncoderConfig *oxcf = &cpi->oxcf;
1069 if (!cpi->lookahead)
1070 cpi->lookahead = vp9_lookahead_init(oxcf->width, oxcf->height,
1071 cm->subsampling_x, cm->subsampling_y,
1072 #if CONFIG_VP9_HIGHBITDEPTH
1073 cm->use_highbitdepth,
1075 oxcf->lag_in_frames);
1076 if (!cpi->lookahead)
1077 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1078 "Failed to allocate lag buffers");
1080 // TODO(agrange) Check if ARF is enabled and skip allocation if not.
1081 if (vpx_realloc_frame_buffer(&cpi->alt_ref_buffer, oxcf->width, oxcf->height,
1082 cm->subsampling_x, cm->subsampling_y,
1083 #if CONFIG_VP9_HIGHBITDEPTH
1084 cm->use_highbitdepth,
1086 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1088 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1089 "Failed to allocate altref buffer");
1092 static void alloc_util_frame_buffers(VP9_COMP *cpi) {
1093 VP9_COMMON *const cm = &cpi->common;
1094 if (vpx_realloc_frame_buffer(&cpi->last_frame_uf, cm->width, cm->height,
1095 cm->subsampling_x, cm->subsampling_y,
1096 #if CONFIG_VP9_HIGHBITDEPTH
1097 cm->use_highbitdepth,
1099 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1101 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1102 "Failed to allocate last frame buffer");
1104 if (vpx_realloc_frame_buffer(&cpi->scaled_source, cm->width, cm->height,
1105 cm->subsampling_x, cm->subsampling_y,
1106 #if CONFIG_VP9_HIGHBITDEPTH
1107 cm->use_highbitdepth,
1109 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1111 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1112 "Failed to allocate scaled source buffer");
1114 // For 1 pass cbr: allocate scaled_frame that may be used as an intermediate
1115 // buffer for a 2 stage down-sampling: two stages of 1:2 down-sampling for a
1116 // target of 1/4x1/4.
1117 if (is_one_pass_cbr_svc(cpi) && !cpi->svc.scaled_temp_is_alloc) {
1118 cpi->svc.scaled_temp_is_alloc = 1;
1119 if (vpx_realloc_frame_buffer(
1120 &cpi->svc.scaled_temp, cm->width >> 1, cm->height >> 1,
1121 cm->subsampling_x, cm->subsampling_y,
1122 #if CONFIG_VP9_HIGHBITDEPTH
1123 cm->use_highbitdepth,
1125 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment, NULL, NULL, NULL))
1126 vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
1127 "Failed to allocate scaled_frame for svc ");
1130 if (vpx_realloc_frame_buffer(&cpi->scaled_last_source, cm->width, cm->height,
1131 cm->subsampling_x, cm->subsampling_y,
1132 #if CONFIG_VP9_HIGHBITDEPTH
1133 cm->use_highbitdepth,
1135 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1137 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1138 "Failed to allocate scaled last source buffer");
1139 #ifdef ENABLE_KF_DENOISE
1140 if (vpx_realloc_frame_buffer(&cpi->raw_unscaled_source, cm->width, cm->height,
1141 cm->subsampling_x, cm->subsampling_y,
1142 #if CONFIG_VP9_HIGHBITDEPTH
1143 cm->use_highbitdepth,
1145 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1147 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1148 "Failed to allocate unscaled raw source frame buffer");
1150 if (vpx_realloc_frame_buffer(&cpi->raw_scaled_source, cm->width, cm->height,
1151 cm->subsampling_x, cm->subsampling_y,
1152 #if CONFIG_VP9_HIGHBITDEPTH
1153 cm->use_highbitdepth,
1155 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1157 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1158 "Failed to allocate scaled raw source frame buffer");
1162 static int alloc_context_buffers_ext(VP9_COMP *cpi) {
1163 VP9_COMMON *cm = &cpi->common;
1164 int mi_size = cm->mi_cols * cm->mi_rows;
1166 cpi->mbmi_ext_base = vpx_calloc(mi_size, sizeof(*cpi->mbmi_ext_base));
1167 if (!cpi->mbmi_ext_base) return 1;
1172 static void alloc_compressor_data(VP9_COMP *cpi) {
1173 VP9_COMMON *cm = &cpi->common;
1176 vp9_alloc_context_buffers(cm, cm->width, cm->height);
1178 alloc_context_buffers_ext(cpi);
1180 vpx_free(cpi->tile_tok[0][0]);
1183 unsigned int tokens = get_token_alloc(cm->mb_rows, cm->mb_cols);
1184 CHECK_MEM_ERROR(cm, cpi->tile_tok[0][0],
1185 vpx_calloc(tokens, sizeof(*cpi->tile_tok[0][0])));
1188 sb_rows = mi_cols_aligned_to_sb(cm->mi_rows) >> MI_BLOCK_SIZE_LOG2;
1189 vpx_free(cpi->tplist[0][0]);
1191 cm, cpi->tplist[0][0],
1192 vpx_calloc(sb_rows * 4 * (1 << 6), sizeof(*cpi->tplist[0][0])));
1194 vp9_setup_pc_tree(&cpi->common, &cpi->td);
1197 void vp9_new_framerate(VP9_COMP *cpi, double framerate) {
1198 cpi->framerate = framerate < 0.1 ? 30 : framerate;
1199 vp9_rc_update_framerate(cpi);
// Choose the frame's tile geometry. Tile columns come from the encoder
// config, clamped to the [min, max] log2 range the bitstream permits for
// this frame width; two-pass SVC empty-frame encoding or multiple spatial
// layers force tiling off. Under LEVEL_AUTO, tile columns are further
// capped by the level limit implied by the picture size.
// NOTE(review): this extraction is missing lines in this function (at
// minimum an `} else {` before the clamp below and the closing braces) —
// verify against the upstream file before building.
1202 static void set_tile_limits(VP9_COMP *cpi) {
1203 VP9_COMMON *const cm = &cpi->common;
1205 int min_log2_tile_cols, max_log2_tile_cols;
1206 vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
// SVC overlay/multi-layer case: a single tile for the whole frame.
1208 if (is_two_pass_svc(cpi) && (cpi->svc.encode_empty_frame_state == ENCODING ||
1209 cpi->svc.number_spatial_layers > 1)) {
1210 cm->log2_tile_cols = 0;
1211 cm->log2_tile_rows = 0;
1213 cm->log2_tile_cols =
1214 clamp(cpi->oxcf.tile_columns, min_log2_tile_cols, max_log2_tile_cols);
1215 cm->log2_tile_rows = cpi->oxcf.tile_rows;
// LEVEL_AUTO: respect the tile-column bound implied by the picture size.
1218 if (cpi->oxcf.target_level == LEVEL_AUTO) {
1219 const uint32_t pic_size = cpi->common.width * cpi->common.height;
1220 const int level_tile_cols = log_tile_cols_from_picsize_level(pic_size);
1221 if (cm->log2_tile_cols > level_tile_cols) {
1222 cm->log2_tile_cols = VPXMAX(level_tile_cols, min_log2_tile_cols);
1227 static void update_frame_size(VP9_COMP *cpi) {
1228 VP9_COMMON *const cm = &cpi->common;
1229 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1231 vp9_set_mb_mi(cm, cm->width, cm->height);
1232 vp9_init_context_buffers(cm);
1233 vp9_init_macroblockd(cm, xd, NULL);
1234 cpi->td.mb.mbmi_ext_base = cpi->mbmi_ext_base;
1235 memset(cpi->mbmi_ext_base, 0,
1236 cm->mi_rows * cm->mi_cols * sizeof(*cpi->mbmi_ext_base));
1238 set_tile_limits(cpi);
1240 if (is_two_pass_svc(cpi)) {
1241 if (vpx_realloc_frame_buffer(&cpi->alt_ref_buffer, cm->width, cm->height,
1242 cm->subsampling_x, cm->subsampling_y,
1243 #if CONFIG_VP9_HIGHBITDEPTH
1244 cm->use_highbitdepth,
1246 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
1248 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
1249 "Failed to reallocate alt_ref_buffer");
1253 static void init_buffer_indices(VP9_COMP *cpi) {
1254 cpi->lst_fb_idx = 0;
1255 cpi->gld_fb_idx = 1;
1256 cpi->alt_fb_idx = 2;
1259 static void init_level_constraint(LevelConstraint *lc) {
1260 lc->level_index = -1;
1261 lc->max_cpb_size = INT_MAX;
1262 lc->max_frame_size = INT_MAX;
1263 lc->rc_config_updated = 0;
1267 static void set_level_constraint(LevelConstraint *ls, int8_t level_index) {
1268 vpx_clear_system_state();
1269 ls->level_index = level_index;
1270 if (level_index >= 0) {
1271 ls->max_cpb_size = vp9_level_defs[level_index].max_cpb_size * (double)1000;
// One-time configuration at compressor creation: copies stream parameters
// from the user config into the common state, sizes the compressor
// buffers, wires up SVC layer state, then defers to vp9_change_config()
// for everything that can also change mid-stream.
// NOTE(review): this extraction has dropped lines in this function (e.g.
// upstream assigns `cpi->oxcf = *oxcf;` near the top and closes the
// CONFIG_VP9_HIGHBITDEPTH block with #endif) — verify before building.
1275 static void init_config(struct VP9_COMP *cpi, VP9EncoderConfig *oxcf) {
1276 VP9_COMMON *const cm = &cpi->common;
1279 cpi->framerate = oxcf->init_framerate;
1280 cm->profile = oxcf->profile;
1281 cm->bit_depth = oxcf->bit_depth;
1282 #if CONFIG_VP9_HIGHBITDEPTH
1283 cm->use_highbitdepth = oxcf->use_highbitdepth;
1285 cm->color_space = oxcf->color_space;
1286 cm->color_range = oxcf->color_range;
// Level targeting: keep per-level stats unless level checking is disabled.
1288 cpi->target_level = oxcf->target_level;
1289 cpi->keep_level_stats = oxcf->target_level != LEVEL_MAX;
1290 set_level_constraint(&cpi->level_constraint,
1291 get_level_index(cpi->target_level));
1293 cm->width = oxcf->width;
1294 cm->height = oxcf->height;
1295 alloc_compressor_data(cpi);
1297 cpi->svc.temporal_layering_mode = oxcf->temporal_layering_mode;
1299 // Single thread case: use counts in common.
1300 cpi->td.counts = &cm->counts;
1302 // Spatial scalability.
1303 cpi->svc.number_spatial_layers = oxcf->ss_number_layers;
1304 // Temporal scalability.
1305 cpi->svc.number_temporal_layers = oxcf->ts_number_layers;
// Layer contexts are needed for temporal-layer CBR, or for any
// multi-layer encode outside of first pass.
1307 if ((cpi->svc.number_temporal_layers > 1 && cpi->oxcf.rc_mode == VPX_CBR) ||
1308 ((cpi->svc.number_temporal_layers > 1 ||
1309 cpi->svc.number_spatial_layers > 1) &&
1310 cpi->oxcf.pass != 1)) {
1311 vp9_init_layer_context(cpi);
1314 // change includes all joint functionality
1315 vp9_change_config(cpi, oxcf);
1317 cpi->static_mb_pct = 0;
1318 cpi->ref_frame_flags = 0;
1320 init_buffer_indices(cpi);
1322 vp9_noise_estimate_init(&cpi->noise_estimate, cm->width, cm->height);
1325 static void set_rc_buffer_sizes(RATE_CONTROL *rc,
1326 const VP9EncoderConfig *oxcf) {
1327 const int64_t bandwidth = oxcf->target_bandwidth;
1328 const int64_t starting = oxcf->starting_buffer_level_ms;
1329 const int64_t optimal = oxcf->optimal_buffer_level_ms;
1330 const int64_t maximum = oxcf->maximum_buffer_size_ms;
1332 rc->starting_buffer_level = starting * bandwidth / 1000;
1333 rc->optimal_buffer_level =
1334 (optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000;
1335 rc->maximum_buffer_size =
1336 (maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000;
1339 #if CONFIG_VP9_HIGHBITDEPTH
// Populate one cpi->fn_ptr entry for block size BT: SAD (sdf), averaging
// SAD (sdaf), variance (vf), sub-pixel variance (svf), sub-pixel average
// variance (svaf) and 4-candidate SAD (sdx4df). Deliberately a bare
// statement sequence — call sites in highbd_set_var_fns invoke it with no
// trailing semicolon, so do not wrap it in do { } while (0).
1340 #define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF) \
1341 cpi->fn_ptr[BT].sdf = SDF; \
1342 cpi->fn_ptr[BT].sdaf = SDAF; \
1343 cpi->fn_ptr[BT].vf = VF; \
1344 cpi->fn_ptr[BT].svf = SVF; \
1345 cpi->fn_ptr[BT].svaf = SVAF; \
1346 cpi->fn_ptr[BT].sdx4df = SDX4DF;
// Generate _bits8/_bits10/_bits12 wrappers around a high-bit-depth SAD
// kernel. The 10- and 12-bit variants shift the raw SAD down by 2 and 4
// respectively so all depths report on an 8-bit-equivalent scale.
// Restores the closing-brace continuation lines dropped by the extraction.
#define MAKE_BFP_SAD_WRAPPER(fnname)                                           \
  static unsigned int fnname##_bits8(const uint8_t *src_ptr,                   \
                                     int source_stride,                        \
                                     const uint8_t *ref_ptr, int ref_stride) { \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride);                \
  }                                                                            \
  static unsigned int fnname##_bits10(                                         \
      const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,       \
      int ref_stride) {                                                        \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 2;           \
  }                                                                            \
  static unsigned int fnname##_bits12(                                         \
      const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,       \
      int ref_stride) {                                                        \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 4;           \
  }
// Generate _bits8/_bits10/_bits12 wrappers around a high-bit-depth
// averaging-SAD kernel (SAD against the average of ref and second_pred).
// The 10-/12-bit variants shift by 2/4 to an 8-bit-equivalent scale,
// mirroring MAKE_BFP_SAD_WRAPPER. Restores the shift-continuation and
// closing-brace lines dropped by the extraction.
#define MAKE_BFP_SADAVG_WRAPPER(fnname)                                        \
  static unsigned int fnname##_bits8(                                          \
      const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,       \
      int ref_stride, const uint8_t *second_pred) {                            \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred);   \
  }                                                                            \
  static unsigned int fnname##_bits10(                                         \
      const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,       \
      int ref_stride, const uint8_t *second_pred) {                            \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
           2;                                                                  \
  }                                                                            \
  static unsigned int fnname##_bits12(                                         \
      const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,       \
      int ref_stride, const uint8_t *second_pred) {                            \
    return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
           4;                                                                  \
  }
// Generate _bits8/_bits10/_bits12 wrappers around a high-bit-depth
// 4-candidate SAD kernel; the 10-/12-bit variants shift each of the four
// results by 2/4 to an 8-bit-equivalent scale. Restores the `int i;`
// declarations (forced by the visible for-loops) and closing-brace lines
// dropped by the extraction.
#define MAKE_BFP_SAD4D_WRAPPER(fnname)                                        \
  static void fnname##_bits8(const uint8_t *src_ptr, int source_stride,       \
                             const uint8_t *const ref_ptr[], int ref_stride,  \
                             unsigned int *sad_array) {                       \
    fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array);           \
  }                                                                           \
  static void fnname##_bits10(const uint8_t *src_ptr, int source_stride,      \
                              const uint8_t *const ref_ptr[], int ref_stride, \
                              unsigned int *sad_array) {                      \
    int i;                                                                    \
    fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array);           \
    for (i = 0; i < 4; i++) sad_array[i] >>= 2;                               \
  }                                                                           \
  static void fnname##_bits12(const uint8_t *src_ptr, int source_stride,      \
                              const uint8_t *const ref_ptr[], int ref_stride, \
                              unsigned int *sad_array) {                      \
    int i;                                                                    \
    fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array);           \
    for (i = 0; i < 4; i++) sad_array[i] >>= 4;                               \
  }
// Instantiate the bit-depth-scaling SAD / averaging-SAD / 4-candidate-SAD
// wrappers for every block size used by the VP9 partition tree (64x64 down
// to 4x4, including rectangular sizes). These generated *_bits{8,10,12}
// functions are installed into cpi->fn_ptr by highbd_set_var_fns() below.
1405 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x16)
1406 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x16_avg)
1407 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x16x4d)
1408 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x32)
1409 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x32_avg)
1410 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x32x4d)
1411 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad64x32)
1412 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad64x32_avg)
1413 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad64x32x4d)
1414 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x64)
1415 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x64_avg)
1416 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x64x4d)
1417 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad32x32)
1418 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad32x32_avg)
1419 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad32x32x4d)
1420 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad64x64)
1421 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad64x64_avg)
1422 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad64x64x4d)
1423 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x16)
1424 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x16_avg)
1425 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x16x4d)
1426 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad16x8)
1427 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad16x8_avg)
1428 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad16x8x4d)
1429 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x16)
1430 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x16_avg)
1431 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x16x4d)
1432 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x8)
1433 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x8_avg)
1434 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x8x4d)
1435 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad8x4)
1436 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad8x4_avg)
1437 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad8x4x4d)
1438 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad4x8)
1439 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad4x8_avg)
1440 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad4x8x4d)
1441 MAKE_BFP_SAD_WRAPPER(vpx_highbd_sad4x4)
1442 MAKE_BFP_SADAVG_WRAPPER(vpx_highbd_sad4x4_avg)
1443 MAKE_BFP_SAD4D_WRAPPER(vpx_highbd_sad4x4x4d)
1445 static void highbd_set_var_fns(VP9_COMP *const cpi) {
1446 VP9_COMMON *const cm = &cpi->common;
1447 if (cm->use_highbitdepth) {
1448 switch (cm->bit_depth) {
1450 HIGHBD_BFP(BLOCK_32X16, vpx_highbd_sad32x16_bits8,
1451 vpx_highbd_sad32x16_avg_bits8, vpx_highbd_8_variance32x16,
1452 vpx_highbd_8_sub_pixel_variance32x16,
1453 vpx_highbd_8_sub_pixel_avg_variance32x16,
1454 vpx_highbd_sad32x16x4d_bits8)
1456 HIGHBD_BFP(BLOCK_16X32, vpx_highbd_sad16x32_bits8,
1457 vpx_highbd_sad16x32_avg_bits8, vpx_highbd_8_variance16x32,
1458 vpx_highbd_8_sub_pixel_variance16x32,
1459 vpx_highbd_8_sub_pixel_avg_variance16x32,
1460 vpx_highbd_sad16x32x4d_bits8)
1462 HIGHBD_BFP(BLOCK_64X32, vpx_highbd_sad64x32_bits8,
1463 vpx_highbd_sad64x32_avg_bits8, vpx_highbd_8_variance64x32,
1464 vpx_highbd_8_sub_pixel_variance64x32,
1465 vpx_highbd_8_sub_pixel_avg_variance64x32,
1466 vpx_highbd_sad64x32x4d_bits8)
1468 HIGHBD_BFP(BLOCK_32X64, vpx_highbd_sad32x64_bits8,
1469 vpx_highbd_sad32x64_avg_bits8, vpx_highbd_8_variance32x64,
1470 vpx_highbd_8_sub_pixel_variance32x64,
1471 vpx_highbd_8_sub_pixel_avg_variance32x64,
1472 vpx_highbd_sad32x64x4d_bits8)
1474 HIGHBD_BFP(BLOCK_32X32, vpx_highbd_sad32x32_bits8,
1475 vpx_highbd_sad32x32_avg_bits8, vpx_highbd_8_variance32x32,
1476 vpx_highbd_8_sub_pixel_variance32x32,
1477 vpx_highbd_8_sub_pixel_avg_variance32x32,
1478 vpx_highbd_sad32x32x4d_bits8)
1480 HIGHBD_BFP(BLOCK_64X64, vpx_highbd_sad64x64_bits8,
1481 vpx_highbd_sad64x64_avg_bits8, vpx_highbd_8_variance64x64,
1482 vpx_highbd_8_sub_pixel_variance64x64,
1483 vpx_highbd_8_sub_pixel_avg_variance64x64,
1484 vpx_highbd_sad64x64x4d_bits8)
1486 HIGHBD_BFP(BLOCK_16X16, vpx_highbd_sad16x16_bits8,
1487 vpx_highbd_sad16x16_avg_bits8, vpx_highbd_8_variance16x16,
1488 vpx_highbd_8_sub_pixel_variance16x16,
1489 vpx_highbd_8_sub_pixel_avg_variance16x16,
1490 vpx_highbd_sad16x16x4d_bits8)
1492 HIGHBD_BFP(BLOCK_16X8, vpx_highbd_sad16x8_bits8,
1493 vpx_highbd_sad16x8_avg_bits8, vpx_highbd_8_variance16x8,
1494 vpx_highbd_8_sub_pixel_variance16x8,
1495 vpx_highbd_8_sub_pixel_avg_variance16x8,
1496 vpx_highbd_sad16x8x4d_bits8)
1498 HIGHBD_BFP(BLOCK_8X16, vpx_highbd_sad8x16_bits8,
1499 vpx_highbd_sad8x16_avg_bits8, vpx_highbd_8_variance8x16,
1500 vpx_highbd_8_sub_pixel_variance8x16,
1501 vpx_highbd_8_sub_pixel_avg_variance8x16,
1502 vpx_highbd_sad8x16x4d_bits8)
1505 BLOCK_8X8, vpx_highbd_sad8x8_bits8, vpx_highbd_sad8x8_avg_bits8,
1506 vpx_highbd_8_variance8x8, vpx_highbd_8_sub_pixel_variance8x8,
1507 vpx_highbd_8_sub_pixel_avg_variance8x8, vpx_highbd_sad8x8x4d_bits8)
1510 BLOCK_8X4, vpx_highbd_sad8x4_bits8, vpx_highbd_sad8x4_avg_bits8,
1511 vpx_highbd_8_variance8x4, vpx_highbd_8_sub_pixel_variance8x4,
1512 vpx_highbd_8_sub_pixel_avg_variance8x4, vpx_highbd_sad8x4x4d_bits8)
1515 BLOCK_4X8, vpx_highbd_sad4x8_bits8, vpx_highbd_sad4x8_avg_bits8,
1516 vpx_highbd_8_variance4x8, vpx_highbd_8_sub_pixel_variance4x8,
1517 vpx_highbd_8_sub_pixel_avg_variance4x8, vpx_highbd_sad4x8x4d_bits8)
1520 BLOCK_4X4, vpx_highbd_sad4x4_bits8, vpx_highbd_sad4x4_avg_bits8,
1521 vpx_highbd_8_variance4x4, vpx_highbd_8_sub_pixel_variance4x4,
1522 vpx_highbd_8_sub_pixel_avg_variance4x4, vpx_highbd_sad4x4x4d_bits8)
1526 HIGHBD_BFP(BLOCK_32X16, vpx_highbd_sad32x16_bits10,
1527 vpx_highbd_sad32x16_avg_bits10, vpx_highbd_10_variance32x16,
1528 vpx_highbd_10_sub_pixel_variance32x16,
1529 vpx_highbd_10_sub_pixel_avg_variance32x16,
1530 vpx_highbd_sad32x16x4d_bits10)
1532 HIGHBD_BFP(BLOCK_16X32, vpx_highbd_sad16x32_bits10,
1533 vpx_highbd_sad16x32_avg_bits10, vpx_highbd_10_variance16x32,
1534 vpx_highbd_10_sub_pixel_variance16x32,
1535 vpx_highbd_10_sub_pixel_avg_variance16x32,
1536 vpx_highbd_sad16x32x4d_bits10)
1538 HIGHBD_BFP(BLOCK_64X32, vpx_highbd_sad64x32_bits10,
1539 vpx_highbd_sad64x32_avg_bits10, vpx_highbd_10_variance64x32,
1540 vpx_highbd_10_sub_pixel_variance64x32,
1541 vpx_highbd_10_sub_pixel_avg_variance64x32,
1542 vpx_highbd_sad64x32x4d_bits10)
1544 HIGHBD_BFP(BLOCK_32X64, vpx_highbd_sad32x64_bits10,
1545 vpx_highbd_sad32x64_avg_bits10, vpx_highbd_10_variance32x64,
1546 vpx_highbd_10_sub_pixel_variance32x64,
1547 vpx_highbd_10_sub_pixel_avg_variance32x64,
1548 vpx_highbd_sad32x64x4d_bits10)
1550 HIGHBD_BFP(BLOCK_32X32, vpx_highbd_sad32x32_bits10,
1551 vpx_highbd_sad32x32_avg_bits10, vpx_highbd_10_variance32x32,
1552 vpx_highbd_10_sub_pixel_variance32x32,
1553 vpx_highbd_10_sub_pixel_avg_variance32x32,
1554 vpx_highbd_sad32x32x4d_bits10)
1556 HIGHBD_BFP(BLOCK_64X64, vpx_highbd_sad64x64_bits10,
1557 vpx_highbd_sad64x64_avg_bits10, vpx_highbd_10_variance64x64,
1558 vpx_highbd_10_sub_pixel_variance64x64,
1559 vpx_highbd_10_sub_pixel_avg_variance64x64,
1560 vpx_highbd_sad64x64x4d_bits10)
1562 HIGHBD_BFP(BLOCK_16X16, vpx_highbd_sad16x16_bits10,
1563 vpx_highbd_sad16x16_avg_bits10, vpx_highbd_10_variance16x16,
1564 vpx_highbd_10_sub_pixel_variance16x16,
1565 vpx_highbd_10_sub_pixel_avg_variance16x16,
1566 vpx_highbd_sad16x16x4d_bits10)
1568 HIGHBD_BFP(BLOCK_16X8, vpx_highbd_sad16x8_bits10,
1569 vpx_highbd_sad16x8_avg_bits10, vpx_highbd_10_variance16x8,
1570 vpx_highbd_10_sub_pixel_variance16x8,
1571 vpx_highbd_10_sub_pixel_avg_variance16x8,
1572 vpx_highbd_sad16x8x4d_bits10)
1574 HIGHBD_BFP(BLOCK_8X16, vpx_highbd_sad8x16_bits10,
1575 vpx_highbd_sad8x16_avg_bits10, vpx_highbd_10_variance8x16,
1576 vpx_highbd_10_sub_pixel_variance8x16,
1577 vpx_highbd_10_sub_pixel_avg_variance8x16,
1578 vpx_highbd_sad8x16x4d_bits10)
1580 HIGHBD_BFP(BLOCK_8X8, vpx_highbd_sad8x8_bits10,
1581 vpx_highbd_sad8x8_avg_bits10, vpx_highbd_10_variance8x8,
1582 vpx_highbd_10_sub_pixel_variance8x8,
1583 vpx_highbd_10_sub_pixel_avg_variance8x8,
1584 vpx_highbd_sad8x8x4d_bits10)
1586 HIGHBD_BFP(BLOCK_8X4, vpx_highbd_sad8x4_bits10,
1587 vpx_highbd_sad8x4_avg_bits10, vpx_highbd_10_variance8x4,
1588 vpx_highbd_10_sub_pixel_variance8x4,
1589 vpx_highbd_10_sub_pixel_avg_variance8x4,
1590 vpx_highbd_sad8x4x4d_bits10)
1592 HIGHBD_BFP(BLOCK_4X8, vpx_highbd_sad4x8_bits10,
1593 vpx_highbd_sad4x8_avg_bits10, vpx_highbd_10_variance4x8,
1594 vpx_highbd_10_sub_pixel_variance4x8,
1595 vpx_highbd_10_sub_pixel_avg_variance4x8,
1596 vpx_highbd_sad4x8x4d_bits10)
1598 HIGHBD_BFP(BLOCK_4X4, vpx_highbd_sad4x4_bits10,
1599 vpx_highbd_sad4x4_avg_bits10, vpx_highbd_10_variance4x4,
1600 vpx_highbd_10_sub_pixel_variance4x4,
1601 vpx_highbd_10_sub_pixel_avg_variance4x4,
1602 vpx_highbd_sad4x4x4d_bits10)
1606 HIGHBD_BFP(BLOCK_32X16, vpx_highbd_sad32x16_bits12,
1607 vpx_highbd_sad32x16_avg_bits12, vpx_highbd_12_variance32x16,
1608 vpx_highbd_12_sub_pixel_variance32x16,
1609 vpx_highbd_12_sub_pixel_avg_variance32x16,
1610 vpx_highbd_sad32x16x4d_bits12)
1612 HIGHBD_BFP(BLOCK_16X32, vpx_highbd_sad16x32_bits12,
1613 vpx_highbd_sad16x32_avg_bits12, vpx_highbd_12_variance16x32,
1614 vpx_highbd_12_sub_pixel_variance16x32,
1615 vpx_highbd_12_sub_pixel_avg_variance16x32,
1616 vpx_highbd_sad16x32x4d_bits12)
1618 HIGHBD_BFP(BLOCK_64X32, vpx_highbd_sad64x32_bits12,
1619 vpx_highbd_sad64x32_avg_bits12, vpx_highbd_12_variance64x32,
1620 vpx_highbd_12_sub_pixel_variance64x32,
1621 vpx_highbd_12_sub_pixel_avg_variance64x32,
1622 vpx_highbd_sad64x32x4d_bits12)
1624 HIGHBD_BFP(BLOCK_32X64, vpx_highbd_sad32x64_bits12,
1625 vpx_highbd_sad32x64_avg_bits12, vpx_highbd_12_variance32x64,
1626 vpx_highbd_12_sub_pixel_variance32x64,
1627 vpx_highbd_12_sub_pixel_avg_variance32x64,
1628 vpx_highbd_sad32x64x4d_bits12)
1630 HIGHBD_BFP(BLOCK_32X32, vpx_highbd_sad32x32_bits12,
1631 vpx_highbd_sad32x32_avg_bits12, vpx_highbd_12_variance32x32,
1632 vpx_highbd_12_sub_pixel_variance32x32,
1633 vpx_highbd_12_sub_pixel_avg_variance32x32,
1634 vpx_highbd_sad32x32x4d_bits12)
1636 HIGHBD_BFP(BLOCK_64X64, vpx_highbd_sad64x64_bits12,
1637 vpx_highbd_sad64x64_avg_bits12, vpx_highbd_12_variance64x64,
1638 vpx_highbd_12_sub_pixel_variance64x64,
1639 vpx_highbd_12_sub_pixel_avg_variance64x64,
1640 vpx_highbd_sad64x64x4d_bits12)
1642 HIGHBD_BFP(BLOCK_16X16, vpx_highbd_sad16x16_bits12,
1643 vpx_highbd_sad16x16_avg_bits12, vpx_highbd_12_variance16x16,
1644 vpx_highbd_12_sub_pixel_variance16x16,
1645 vpx_highbd_12_sub_pixel_avg_variance16x16,
1646 vpx_highbd_sad16x16x4d_bits12)
1648 HIGHBD_BFP(BLOCK_16X8, vpx_highbd_sad16x8_bits12,
1649 vpx_highbd_sad16x8_avg_bits12, vpx_highbd_12_variance16x8,
1650 vpx_highbd_12_sub_pixel_variance16x8,
1651 vpx_highbd_12_sub_pixel_avg_variance16x8,
1652 vpx_highbd_sad16x8x4d_bits12)
1654 HIGHBD_BFP(BLOCK_8X16, vpx_highbd_sad8x16_bits12,
1655 vpx_highbd_sad8x16_avg_bits12, vpx_highbd_12_variance8x16,
1656 vpx_highbd_12_sub_pixel_variance8x16,
1657 vpx_highbd_12_sub_pixel_avg_variance8x16,
1658 vpx_highbd_sad8x16x4d_bits12)
1660 HIGHBD_BFP(BLOCK_8X8, vpx_highbd_sad8x8_bits12,
1661 vpx_highbd_sad8x8_avg_bits12, vpx_highbd_12_variance8x8,
1662 vpx_highbd_12_sub_pixel_variance8x8,
1663 vpx_highbd_12_sub_pixel_avg_variance8x8,
1664 vpx_highbd_sad8x8x4d_bits12)
1666 HIGHBD_BFP(BLOCK_8X4, vpx_highbd_sad8x4_bits12,
1667 vpx_highbd_sad8x4_avg_bits12, vpx_highbd_12_variance8x4,
1668 vpx_highbd_12_sub_pixel_variance8x4,
1669 vpx_highbd_12_sub_pixel_avg_variance8x4,
1670 vpx_highbd_sad8x4x4d_bits12)
1672 HIGHBD_BFP(BLOCK_4X8, vpx_highbd_sad4x8_bits12,
1673 vpx_highbd_sad4x8_avg_bits12, vpx_highbd_12_variance4x8,
1674 vpx_highbd_12_sub_pixel_variance4x8,
1675 vpx_highbd_12_sub_pixel_avg_variance4x8,
1676 vpx_highbd_sad4x8x4d_bits12)
1678 HIGHBD_BFP(BLOCK_4X4, vpx_highbd_sad4x4_bits12,
1679 vpx_highbd_sad4x4_avg_bits12, vpx_highbd_12_variance4x4,
1680 vpx_highbd_12_sub_pixel_variance4x4,
1681 vpx_highbd_12_sub_pixel_avg_variance4x4,
1682 vpx_highbd_sad4x4x4d_bits12)
1687 "cm->bit_depth should be VPX_BITS_8, "
1688 "VPX_BITS_10 or VPX_BITS_12");
1692 #endif // CONFIG_VP9_HIGHBITDEPTH
1694 static void realloc_segmentation_maps(VP9_COMP *cpi) {
1695 VP9_COMMON *const cm = &cpi->common;
1697 // Create the encoder segmentation map and set all entries to 0
1698 vpx_free(cpi->segmentation_map);
1699 CHECK_MEM_ERROR(cm, cpi->segmentation_map,
1700 vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1702 // Create a map used for cyclic background refresh.
1703 if (cpi->cyclic_refresh) vp9_cyclic_refresh_free(cpi->cyclic_refresh);
1704 CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
1705 vp9_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
1707 // Create a map used to mark inactive areas.
1708 vpx_free(cpi->active_map.map);
1709 CHECK_MEM_ERROR(cm, cpi->active_map.map,
1710 vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1712 // And a place holder structure is the coding context
1713 // for use if we want to save and restore it
1714 vpx_free(cpi->coding_context.last_frame_seg_map_copy);
1715 CHECK_MEM_ERROR(cm, cpi->coding_context.last_frame_seg_map_copy,
1716 vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
1719 static void alloc_copy_partition_data(VP9_COMP *cpi) {
1720 VP9_COMMON *const cm = &cpi->common;
1721 if (cpi->prev_partition == NULL) {
1722 CHECK_MEM_ERROR(cm, cpi->prev_partition,
1723 (BLOCK_SIZE *)vpx_calloc(cm->mi_stride * cm->mi_rows,
1724 sizeof(*cpi->prev_partition)));
1726 if (cpi->prev_segment_id == NULL) {
1728 cm, cpi->prev_segment_id,
1729 (int8_t *)vpx_calloc((cm->mi_stride >> 3) * ((cm->mi_rows >> 3) + 1),
1730 sizeof(*cpi->prev_segment_id)));
1732 if (cpi->prev_variance_low == NULL) {
1733 CHECK_MEM_ERROR(cm, cpi->prev_variance_low,
1734 (uint8_t *)vpx_calloc(
1735 (cm->mi_stride >> 3) * ((cm->mi_rows >> 3) + 1) * 25,
1736 sizeof(*cpi->prev_variance_low)));
1738 if (cpi->copied_frame_cnt == NULL) {
1740 cm, cpi->copied_frame_cnt,
1741 (uint8_t *)vpx_calloc((cm->mi_stride >> 3) * ((cm->mi_rows >> 3) + 1),
1742 sizeof(*cpi->copied_frame_cnt)));
// Apply a (possibly mid-stream) configuration change: refresh profile /
// bit-depth / color metadata and level constraints, reset segmentation and
// refresh flags, rebuild rate-control buffer sizes and quality limits,
// update render and coded dimensions (reallocating context buffers when
// the mi grid must grow), refresh SVC layer contexts, and recompute tile
// limits and the high-bit-depth function pointers.
// NOTE(review): this extraction has dropped lines throughout this function
// (e.g. upstream's `cpi->oxcf = *oxcf;`, an `else` for the bit-depth
// assert, an inner scope with `int i;`, and several closing braces) —
// verify against the upstream file before building.
1746 void vp9_change_config(struct VP9_COMP *cpi, const VP9EncoderConfig *oxcf) {
1747 VP9_COMMON *const cm = &cpi->common;
1748 RATE_CONTROL *const rc = &cpi->rc;
// Remember the previous dimensions so resize work only runs on change.
1749 int last_w = cpi->oxcf.width;
1750 int last_h = cpi->oxcf.height;
1752 if (cm->profile != oxcf->profile) cm->profile = oxcf->profile;
1753 cm->bit_depth = oxcf->bit_depth;
1754 cm->color_space = oxcf->color_space;
1755 cm->color_range = oxcf->color_range;
1757 cpi->target_level = oxcf->target_level;
1758 cpi->keep_level_stats = oxcf->target_level != LEVEL_MAX;
1759 set_level_constraint(&cpi->level_constraint,
1760 get_level_index(cpi->target_level));
// Profiles 0/1 are 8-bit only; profiles 2/3 require >8-bit depth.
1762 if (cm->profile <= PROFILE_1)
1763 assert(cm->bit_depth == VPX_BITS_8);
1765 assert(cm->bit_depth > VPX_BITS_8);
1768 #if CONFIG_VP9_HIGHBITDEPTH
1769 cpi->td.mb.e_mbd.bd = (int)cm->bit_depth;
1770 #endif // CONFIG_VP9_HIGHBITDEPTH
// Fixed GF interval for one-pass constant-Q; midpoint otherwise.
1772 if ((oxcf->pass == 0) && (oxcf->rc_mode == VPX_Q)) {
1773 rc->baseline_gf_interval = FIXED_GF_INTERVAL;
1775 rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
1778 cpi->refresh_golden_frame = 0;
1779 cpi->refresh_last_frame = 1;
1780 cm->refresh_frame_context = 1;
1781 cm->reset_frame_context = 0;
1783 vp9_reset_segment_features(&cm->seg);
1784 vp9_set_high_precision_mv(cpi, 0);
1789 for (i = 0; i < MAX_SEGMENTS; i++)
1790 cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
1792 cpi->encode_breakout = cpi->oxcf.encode_breakout;
1794 set_rc_buffer_sizes(rc, &cpi->oxcf);
1796 // Under a configuration change, where maximum_buffer_size may change,
1797 // keep buffer level clipped to the maximum allowed buffer size.
1798 rc->bits_off_target = VPXMIN(rc->bits_off_target, rc->maximum_buffer_size);
1799 rc->buffer_level = VPXMIN(rc->buffer_level, rc->maximum_buffer_size);
1801 // Set up frame rate and related parameters rate control values.
1802 vp9_new_framerate(cpi, cpi->framerate);
1804 // Set absolute upper and lower quality limits
1805 rc->worst_quality = cpi->oxcf.worst_allowed_q;
1806 rc->best_quality = cpi->oxcf.best_allowed_q;
1808 cm->interp_filter = cpi->sf.default_interp_filter;
// Render size defaults to the coded size unless explicitly configured.
1810 if (cpi->oxcf.render_width > 0 && cpi->oxcf.render_height > 0) {
1811 cm->render_width = cpi->oxcf.render_width;
1812 cm->render_height = cpi->oxcf.render_height;
1814 cm->render_width = cpi->oxcf.width;
1815 cm->render_height = cpi->oxcf.height;
1817 if (last_w != cpi->oxcf.width || last_h != cpi->oxcf.height) {
1818 cm->width = cpi->oxcf.width;
1819 cm->height = cpi->oxcf.height;
1820 cpi->external_resize = 1;
// Grow (never shrink) the mi allocation if the new size needs more.
1823 if (cpi->initial_width) {
1824 int new_mi_size = 0;
1825 vp9_set_mb_mi(cm, cm->width, cm->height);
1826 new_mi_size = cm->mi_stride * calc_mi_size(cm->mi_rows);
1827 if (cm->mi_alloc_size < new_mi_size) {
1828 vp9_free_context_buffers(cm);
1829 alloc_compressor_data(cpi);
1830 realloc_segmentation_maps(cpi);
1831 cpi->initial_width = cpi->initial_height = 0;
1832 cpi->external_resize = 0;
1833 } else if (cm->mi_alloc_size == new_mi_size &&
1834 (cpi->oxcf.width > last_w || cpi->oxcf.height > last_h)) {
1835 vp9_alloc_loop_filter(cm);
1839 if (cm->current_video_frame == 0 || last_w != cpi->oxcf.width ||
1840 last_h != cpi->oxcf.height)
1841 update_frame_size(cpi);
1843 if (last_w != cpi->oxcf.width || last_h != cpi->oxcf.height) {
1844 memset(cpi->consec_zero_mv, 0,
1845 cm->mi_rows * cm->mi_cols * sizeof(*cpi->consec_zero_mv));
1846 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
1847 vp9_cyclic_refresh_reset_resize(cpi);
1850 if ((cpi->svc.number_temporal_layers > 1 && cpi->oxcf.rc_mode == VPX_CBR) ||
1851 ((cpi->svc.number_temporal_layers > 1 ||
1852 cpi->svc.number_spatial_layers > 1) &&
1853 cpi->oxcf.pass != 1)) {
1854 vp9_update_layer_context_change_config(cpi,
1855 (int)cpi->oxcf.target_bandwidth);
1858 cpi->alt_ref_source = NULL;
1859 rc->is_src_frame_alt_ref = 0;
1862 // Experimental RD Code
1863 cpi->frame_distortion = 0;
1864 cpi->last_frame_distortion = 0;
1867 set_tile_limits(cpi);
1869 cpi->ext_refresh_frame_flags_pending = 0;
1870 cpi->ext_refresh_frame_context_pending = 0;
1872 #if CONFIG_VP9_HIGHBITDEPTH
1873 highbd_set_var_fns(cpi);
1876 vp9_set_row_mt(cpi);
// Fallback base-2 log: M_LOG2_E here holds ln(2), so
// log2f(x) == ln(x) / ln(2).
// NOTE(review): upstream guards M_LOG2_E with #ifndef/#endif so it only
// defines these where the platform lacks them — this extraction appears to
// have dropped the guard lines; verify before building.
1880 #define M_LOG2_E 0.693147180559945309417
1882 #define log2f(x) (log(x) / (float)M_LOG2_E)
1884 /***********************************************************************
1885 * Read before modifying 'cal_nmvjointsadcost' or 'cal_nmvsadcosts' *
1886 ***********************************************************************
1887 * The following 2 functions ('cal_nmvjointsadcost' and *
1888 * 'cal_nmvsadcosts') are used to calculate cost lookup tables *
1889 * used by 'vp9_diamond_search_sad'. The C implementation of the *
1890 * function is generic, but the AVX intrinsics optimised version *
1891 * relies on the following properties of the computed tables: *
1892 * For cal_nmvjointsadcost: *
1893 * - mvjointsadcost[1] == mvjointsadcost[2] == mvjointsadcost[3] *
1894 * For cal_nmvsadcosts: *
1895 * - For all i: mvsadcost[0][i] == mvsadcost[1][i] *
1896 * (Equal costs for both components) *
1897 * - For all i: mvsadcost[0][i] == mvsadcost[0][-i] *
1898 * (Cost function is even) *
1899 * If these do not hold, then the AVX optimised version of the *
1900 * 'vp9_diamond_search_sad' function cannot be used as it is, in which *
1901 * case you can revert to using the C function instead. *
1902 ***********************************************************************/
// Fixed SAD costs for the four motion-vector joint types: the zero-zero
// joint (index 0) costs 600 and the three non-zero joints all cost 300.
// The property mvjointsadcost[1] == [2] == [3] is required by the AVX
// vp9_diamond_search_sad implementation (see the block comment above).
// Restores the closing brace dropped by the extraction.
static void cal_nmvjointsadcost(int *mvjointsadcost) {
  /*********************************************************************
   * Warning: Read the comments above before modifying this function   *
   *********************************************************************/
  mvjointsadcost[0] = 600;
  mvjointsadcost[1] = 300;
  mvjointsadcost[2] = 300;
  mvjointsadcost[3] = 300;
}
/* Build the per-component MV SAD cost tables (integer-pel precision).
 * mvsadcost[0]/[1] point at the center of MV_VALS-sized arrays so they
 * can be indexed from -MV_MAX to +MV_MAX.  Row and column components
 * get identical costs and the table is even in i -- properties the AVX
 * diamond-search implementation depends on (see warning block above).
 * NOTE(review): the counter 'i' declaration and the 'do {' opener are
 * on lines elided from this listing. */
1914 static void cal_nmvsadcosts(int *mvsadcost[2]) {
1915 /*********************************************************************
1916 * Warning: Read the comments above before modifying this function *
1917 *********************************************************************/
1920 mvsadcost[0][0] = 0;
1921 mvsadcost[1][0] = 0;
/* Cost grows logarithmically with magnitude: 512 * (log2(8*i) + 0.6). */
1924 double z = 256 * (2 * (log2f(8 * i) + .6));
1925 mvsadcost[0][i] = (int)z;
1926 mvsadcost[1][i] = (int)z;
1927 mvsadcost[0][-i] = (int)z;
1928 mvsadcost[1][-i] = (int)z;
1929 } while (++i <= MV_MAX);
/* High-precision (1/8-pel) variant of cal_nmvsadcosts: same symmetric,
 * component-equal logarithmic cost curve, filled into the _hp tables.
 * NOTE(review): the counter 'i' declaration and the 'do {' opener are
 * on lines elided from this listing. */
1932 static void cal_nmvsadcosts_hp(int *mvsadcost[2]) {
1935 mvsadcost[0][0] = 0;
1936 mvsadcost[1][0] = 0;
1939 double z = 256 * (2 * (log2f(8 * i) + .6));
1940 mvsadcost[0][i] = (int)z;
1941 mvsadcost[1][i] = (int)z;
1942 mvsadcost[0][-i] = (int)z;
1943 mvsadcost[1][-i] = (int)z;
1944 } while (++i <= MV_MAX);
// Allocate and fully initialize a VP9 encoder instance.
// Returns the new VP9_COMP on success or NULL on allocation failure;
// any internal error raised during setup longjmp()s back into this
// function, which tears the half-built encoder down.
// NOTE(review): this listing is elided -- closing braces, some local
// declarations and several statements fall on lines that are not shown.
1947 VP9_COMP *vp9_create_compressor(VP9EncoderConfig *oxcf,
1948 BufferPool *const pool) {
// 'volatile' so the values survive the longjmp() taken on error.
1950 VP9_COMP *volatile const cpi = vpx_memalign(32, sizeof(VP9_COMP));
1951 VP9_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
1953 if (!cm) return NULL;
// Error trampoline: vpx_internal_error() during setup jumps here and
// destroys the compressor (the NULL return is on an elided line).
1957 if (setjmp(cm->error.jmp)) {
1958 cm->error.setjmp = 0;
1959 vp9_remove_compressor(cpi);
1963 cm->error.setjmp = 1;
// Hook up the encoder-side mode-info allocators used by VP9_COMMON.
1964 cm->alloc_mi = vp9_enc_alloc_mi;
1965 cm->free_mi = vp9_enc_free_mi;
1966 cm->setup_mi = vp9_enc_setup_mi;
1968 CHECK_MEM_ERROR(cm, cm->fc, (FRAME_CONTEXT *)vpx_calloc(1, sizeof(*cm->fc)));
1970 cm, cm->frame_contexts,
1971 (FRAME_CONTEXT *)vpx_calloc(FRAME_CONTEXTS, sizeof(*cm->frame_contexts)));
// Dynamic-resize and skin-detection state starts disabled/at rest.
1974 cpi->resize_state = ORIG;
1975 cpi->external_resize = 0;
1976 cpi->resize_avg_qp = 0;
1977 cpi->resize_buffer_underflow = 0;
1978 cpi->use_skin_detection = 0;
1979 cpi->common.buffer_pool = pool;
1981 cpi->force_update_segmentation = 0;
1983 init_config(cpi, oxcf);
1984 vp9_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
1986 cm->current_video_frame = 0;
1987 cpi->partition_search_skippable_frame = 0;
1988 cpi->tile_data = NULL;
1990 realloc_segmentation_maps(cpi);
// Per-mi-unit maps sized to the current frame's mode-info grid.
1992 CHECK_MEM_ERROR(cm, cpi->skin_map, vpx_calloc(cm->mi_rows * cm->mi_cols,
1993 sizeof(cpi->skin_map[0])));
1995 CHECK_MEM_ERROR(cm, cpi->alt_ref_aq, vp9_alt_ref_aq_create());
1998 cm, cpi->consec_zero_mv,
1999 vpx_calloc(cm->mi_rows * cm->mi_cols, sizeof(*cpi->consec_zero_mv)));
// MV cost tables: one entry per possible component value (MV_VALS),
// in regular and high-precision ("_hp") variants.
2001 CHECK_MEM_ERROR(cm, cpi->nmvcosts[0],
2002 vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[0])));
2003 CHECK_MEM_ERROR(cm, cpi->nmvcosts[1],
2004 vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts[1])));
2005 CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[0],
2006 vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[0])));
2007 CHECK_MEM_ERROR(cm, cpi->nmvcosts_hp[1],
2008 vpx_calloc(MV_VALS, sizeof(*cpi->nmvcosts_hp[1])));
2009 CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[0],
2010 vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[0])));
2011 CHECK_MEM_ERROR(cm, cpi->nmvsadcosts[1],
2012 vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts[1])));
2013 CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[0],
2014 vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[0])));
2015 CHECK_MEM_ERROR(cm, cpi->nmvsadcosts_hp[1],
2016 vpx_calloc(MV_VALS, sizeof(*cpi->nmvsadcosts_hp[1])));
2018 for (i = 0; i < (sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]));
2021 cm, cpi->mbgraph_stats[i].mb_stats,
2022 vpx_calloc(cm->MBs * sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
2025 #if CONFIG_FP_MB_STATS
2026 cpi->use_fp_mb_stats = 0;
2027 if (cpi->use_fp_mb_stats) {
2028 // a place holder used to store the first pass mb stats in the first pass
2029 CHECK_MEM_ERROR(cm, cpi->twopass.frame_mb_stats_buf,
2030 vpx_calloc(cm->MBs * sizeof(uint8_t), 1));
2032 cpi->twopass.frame_mb_stats_buf = NULL;
2036 cpi->refresh_alt_ref_frame = 0;
2037 cpi->multi_arf_last_grp_enabled = 0;
2039 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
2041 init_level_info(&cpi->level_info);
2042 init_level_constraint(&cpi->level_constraint);
// Internal-statistics bookkeeping (PSNR/SSIM/blockiness/consistency).
// "worst" metrics start at their best possible value (100.0).
2044 #if CONFIG_INTERNAL_STATS
2045 cpi->b_calculate_blockiness = 1;
2046 cpi->b_calculate_consistency = 1;
2047 cpi->total_inconsistency = 0;
2048 cpi->psnr.worst = 100.0;
2049 cpi->worst_ssim = 100.0;
2054 if (cpi->b_calculate_psnr) {
2055 cpi->total_sq_error = 0;
2056 cpi->total_samples = 0;
2058 cpi->totalp_sq_error = 0;
2059 cpi->totalp_samples = 0;
2061 cpi->tot_recode_hits = 0;
2062 cpi->summed_quality = 0;
2063 cpi->summed_weights = 0;
2064 cpi->summedp_quality = 0;
2065 cpi->summedp_weights = 0;
2068 cpi->fastssim.worst = 100.0;
2070 cpi->psnrhvs.worst = 100.0;
2072 if (cpi->b_calculate_blockiness) {
2073 cpi->total_blockiness = 0;
2074 cpi->worst_blockiness = 0.0;
2077 if (cpi->b_calculate_consistency) {
2078 CHECK_MEM_ERROR(cm, cpi->ssim_vars,
2079 vpx_malloc(sizeof(*cpi->ssim_vars) * 4 *
2080 cpi->common.mi_rows * cpi->common.mi_cols));
2081 cpi->worst_consistency = 100.0;
2086 cpi->first_time_stamp_ever = INT64_MAX;
2088 /*********************************************************************
2089 * Warning: Read the comments around 'cal_nmvjointsadcost' and *
2090 * 'cal_nmvsadcosts' before modifying how these tables are computed. *
2091 *********************************************************************/
2092 cal_nmvjointsadcost(cpi->td.mb.nmvjointsadcost);
// Point the per-thread cost pointers at the center of each table so
// they can be indexed by signed MV components (-MV_MAX..MV_MAX).
2093 cpi->td.mb.nmvcost[0] = &cpi->nmvcosts[0][MV_MAX];
2094 cpi->td.mb.nmvcost[1] = &cpi->nmvcosts[1][MV_MAX];
2095 cpi->td.mb.nmvsadcost[0] = &cpi->nmvsadcosts[0][MV_MAX];
2096 cpi->td.mb.nmvsadcost[1] = &cpi->nmvsadcosts[1][MV_MAX];
2097 cal_nmvsadcosts(cpi->td.mb.nmvsadcost);
2099 cpi->td.mb.nmvcost_hp[0] = &cpi->nmvcosts_hp[0][MV_MAX];
2100 cpi->td.mb.nmvcost_hp[1] = &cpi->nmvcosts_hp[1][MV_MAX];
2101 cpi->td.mb.nmvsadcost_hp[0] = &cpi->nmvsadcosts_hp[0][MV_MAX];
2102 cpi->td.mb.nmvsadcost_hp[1] = &cpi->nmvsadcosts_hp[1][MV_MAX];
2103 cal_nmvsadcosts_hp(cpi->td.mb.nmvsadcost_hp);
// Optional debug dump files, compiled in on demand.
2105 #if CONFIG_VP9_TEMPORAL_DENOISING
2106 #ifdef OUTPUT_YUV_DENOISED
2107 yuv_denoised_file = fopen("denoised.yuv", "ab");
2110 #ifdef OUTPUT_YUV_SKINMAP
2111 yuv_skinmap_file = fopen("skinmap.yuv", "wb");
2113 #ifdef OUTPUT_YUV_REC
2114 yuv_rec_file = fopen("rec.yuv", "wb");
2118 framepsnr = fopen("framepsnr.stt", "a");
2119 kf_list = fopen("kf_list.stt", "w");
2122 cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
// Two-pass setup: pass 1 just initializes first-pass state; pass 2
// slices the externally supplied stats buffer -- per spatial layer
// when spatial/temporal SVC is in use.
2124 #if !CONFIG_REALTIME_ONLY
2125 if (oxcf->pass == 1) {
2126 vp9_init_first_pass(cpi);
2127 } else if (oxcf->pass == 2) {
2128 const size_t packet_sz = sizeof(FIRSTPASS_STATS);
2129 const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
2131 if (cpi->svc.number_spatial_layers > 1 ||
2132 cpi->svc.number_temporal_layers > 1) {
2133 FIRSTPASS_STATS *const stats = oxcf->two_pass_stats_in.buf;
2134 FIRSTPASS_STATS *stats_copy[VPX_SS_MAX_LAYERS] = { 0 };
// Size each layer's stats buffer from that layer's final packet.
2137 for (i = 0; i < oxcf->ss_number_layers; ++i) {
2138 FIRSTPASS_STATS *const last_packet_for_layer =
2139 &stats[packets - oxcf->ss_number_layers + i];
2140 const int layer_id = (int)last_packet_for_layer->spatial_layer_id;
2141 const int packets_in_layer = (int)last_packet_for_layer->count + 1;
2142 if (layer_id >= 0 && layer_id < oxcf->ss_number_layers) {
2143 LAYER_CONTEXT *const lc = &cpi->svc.layer_context[layer_id];
2145 vpx_free(lc->rc_twopass_stats_in.buf);
2147 lc->rc_twopass_stats_in.sz = packets_in_layer * packet_sz;
2148 CHECK_MEM_ERROR(cm, lc->rc_twopass_stats_in.buf,
2149 vpx_malloc(lc->rc_twopass_stats_in.sz));
2150 lc->twopass.stats_in_start = lc->rc_twopass_stats_in.buf;
2151 lc->twopass.stats_in = lc->twopass.stats_in_start;
2152 lc->twopass.stats_in_end =
2153 lc->twopass.stats_in_start + packets_in_layer - 1;
2154 stats_copy[layer_id] = lc->rc_twopass_stats_in.buf;
// Demultiplex the interleaved packets into the per-layer buffers.
2158 for (i = 0; i < packets; ++i) {
2159 const int layer_id = (int)stats[i].spatial_layer_id;
2160 if (layer_id >= 0 && layer_id < oxcf->ss_number_layers &&
2161 stats_copy[layer_id] != NULL) {
2162 *stats_copy[layer_id] = stats[i];
2163 ++stats_copy[layer_id];
2167 vp9_init_second_pass_spatial_svc(cpi);
2169 #if CONFIG_FP_MB_STATS
2170 if (cpi->use_fp_mb_stats) {
2171 const size_t psz = cpi->common.MBs * sizeof(uint8_t);
2172 const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
2174 cpi->twopass.firstpass_mb_stats.mb_stats_start =
2175 oxcf->firstpass_mb_stats_in.buf;
2176 cpi->twopass.firstpass_mb_stats.mb_stats_end =
2177 cpi->twopass.firstpass_mb_stats.mb_stats_start +
2178 (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
// Non-SVC: point the two-pass machinery directly at the input buffer.
2182 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
2183 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
2184 cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
2186 vp9_init_second_pass(cpi);
2189 #endif // !CONFIG_REALTIME_ONLY
2191 vp9_set_speed_features_framesize_independent(cpi);
2192 vp9_set_speed_features_framesize_dependent(cpi);
2194 // Allocate memory to store variances for a frame.
2195 CHECK_MEM_ERROR(cm, cpi->source_diff_var, vpx_calloc(cm->MBs, sizeof(diff)));
2196 cpi->source_var_thresh = 0;
2197 cpi->frames_till_next_var_check = 0;
// BFP fills one fn_ptr slot per block size with the matching SAD,
// SAD-with-second-pred, variance, sub-pixel variance, sub-pixel
// averaging variance, and 4-way SAD kernels.
2199 #define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF) \
2200 cpi->fn_ptr[BT].sdf = SDF; \
2201 cpi->fn_ptr[BT].sdaf = SDAF; \
2202 cpi->fn_ptr[BT].vf = VF; \
2203 cpi->fn_ptr[BT].svf = SVF; \
2204 cpi->fn_ptr[BT].svaf = SVAF; \
2205 cpi->fn_ptr[BT].sdx4df = SDX4DF;
2207 BFP(BLOCK_32X16, vpx_sad32x16, vpx_sad32x16_avg, vpx_variance32x16,
2208 vpx_sub_pixel_variance32x16, vpx_sub_pixel_avg_variance32x16,
2211 BFP(BLOCK_16X32, vpx_sad16x32, vpx_sad16x32_avg, vpx_variance16x32,
2212 vpx_sub_pixel_variance16x32, vpx_sub_pixel_avg_variance16x32,
2215 BFP(BLOCK_64X32, vpx_sad64x32, vpx_sad64x32_avg, vpx_variance64x32,
2216 vpx_sub_pixel_variance64x32, vpx_sub_pixel_avg_variance64x32,
2219 BFP(BLOCK_32X64, vpx_sad32x64, vpx_sad32x64_avg, vpx_variance32x64,
2220 vpx_sub_pixel_variance32x64, vpx_sub_pixel_avg_variance32x64,
2223 BFP(BLOCK_32X32, vpx_sad32x32, vpx_sad32x32_avg, vpx_variance32x32,
2224 vpx_sub_pixel_variance32x32, vpx_sub_pixel_avg_variance32x32,
2227 BFP(BLOCK_64X64, vpx_sad64x64, vpx_sad64x64_avg, vpx_variance64x64,
2228 vpx_sub_pixel_variance64x64, vpx_sub_pixel_avg_variance64x64,
2231 BFP(BLOCK_16X16, vpx_sad16x16, vpx_sad16x16_avg, vpx_variance16x16,
2232 vpx_sub_pixel_variance16x16, vpx_sub_pixel_avg_variance16x16,
2235 BFP(BLOCK_16X8, vpx_sad16x8, vpx_sad16x8_avg, vpx_variance16x8,
2236 vpx_sub_pixel_variance16x8, vpx_sub_pixel_avg_variance16x8,
2239 BFP(BLOCK_8X16, vpx_sad8x16, vpx_sad8x16_avg, vpx_variance8x16,
2240 vpx_sub_pixel_variance8x16, vpx_sub_pixel_avg_variance8x16,
2243 BFP(BLOCK_8X8, vpx_sad8x8, vpx_sad8x8_avg, vpx_variance8x8,
2244 vpx_sub_pixel_variance8x8, vpx_sub_pixel_avg_variance8x8, vpx_sad8x8x4d)
2246 BFP(BLOCK_8X4, vpx_sad8x4, vpx_sad8x4_avg, vpx_variance8x4,
2247 vpx_sub_pixel_variance8x4, vpx_sub_pixel_avg_variance8x4, vpx_sad8x4x4d)
2249 BFP(BLOCK_4X8, vpx_sad4x8, vpx_sad4x8_avg, vpx_variance4x8,
2250 vpx_sub_pixel_variance4x8, vpx_sub_pixel_avg_variance4x8, vpx_sad4x8x4d)
2252 BFP(BLOCK_4X4, vpx_sad4x4, vpx_sad4x4_avg, vpx_variance4x4,
2253 vpx_sub_pixel_variance4x4, vpx_sub_pixel_avg_variance4x4, vpx_sad4x4x4d)
2255 #if CONFIG_VP9_HIGHBITDEPTH
2256 highbd_set_var_fns(cpi);
2259 /* vp9_init_quantizer() is first called here. Add check in
2260 * vp9_frame_init_quantizer() so that vp9_init_quantizer is only
2261 * called later when needed. This will avoid unnecessary calls of
2262 * vp9_init_quantizer() for every frame.
2264 vp9_init_quantizer(cpi);
2266 vp9_loop_filter_init(cm);
// Setup complete: disarm the error trampoline ('return cpi;' elided).
2268 cm->error.setjmp = 0;
2273 #if CONFIG_INTERNAL_STATS
// Append formatted text to a fixed-size char buffer.  T is passed as
// the printf format string, so it must be a trusted literal containing
// no stray '%' (SNPRINT) or exactly the conversion V satisfies
// (SNPRINT2); never pass user-controlled text as T.
2274 #define SNPRINT(H, T) snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
2276 #define SNPRINT2(H, T, V) \
2277 snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T), (V))
2278 #endif // CONFIG_INTERNAL_STATS
// Tear down a compressor instance: dump end-of-run statistics (when
// internal stats are compiled in), stop worker threads, and release
// everything vp9_create_compressor() allocated.  It is also invoked on
// the error path during creation, so it must tolerate partial init.
// NOTE(review): this listing is elided -- the NULL check on 'cpi', the
// declarations of 'cm'/'i'/'t' and several closing braces fall on lines
// that are not shown.
2280 void vp9_remove_compressor(VP9_COMP *cpi) {
2288 if (cm->current_video_frame > 0) {
2289 #if CONFIG_INTERNAL_STATS
2290 vpx_clear_system_state();
2292 if (cpi->oxcf.pass != 1) {
2293 char headings[512] = { 0 };
2294 char results[512] = { 0 };
// NOTE(review): 'f' is not visibly NULL-checked before the fprintf()
// calls below, and no fclose(f) appears in this listing -- confirm the
// elided lines handle both.
2295 FILE *f = fopen("opsnr.stt", "a");
2296 double time_encoded =
2297 (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
2299 double total_encode_time =
2300 (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
2302 (double)cpi->bytes * (double)8 / (double)1000 / time_encoded;
2303 const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
2304 const double target_rate = (double)cpi->oxcf.target_bandwidth / 1000;
2305 const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
2307 if (cpi->b_calculate_psnr) {
// Global metrics aggregate squared error over the whole run; the
// "p"-suffixed variants are the second (post-filter) metric set.
2308 const double total_psnr = vpx_sse_to_psnr(
2309 (double)cpi->total_samples, peak, (double)cpi->total_sq_error);
2310 const double totalp_psnr = vpx_sse_to_psnr(
2311 (double)cpi->totalp_samples, peak, (double)cpi->totalp_sq_error);
2312 const double total_ssim =
2313 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
2314 const double totalp_ssim =
2315 100 * pow(cpi->summedp_quality / cpi->summedp_weights, 8.0);
2317 snprintf(headings, sizeof(headings),
2318 "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
2319 "VPXSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
2320 "WstPsnr\tWstSsim\tWstFast\tWstHVS");
2321 snprintf(results, sizeof(results),
2322 "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2323 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
2324 "%7.3f\t%7.3f\t%7.3f\t%7.3f",
2325 dr, cpi->psnr.stat[ALL] / cpi->count, total_psnr,
2326 cpi->psnrp.stat[ALL] / cpi->count, totalp_psnr, total_ssim,
2327 totalp_ssim, cpi->fastssim.stat[ALL] / cpi->count,
2328 cpi->psnrhvs.stat[ALL] / cpi->count, cpi->psnr.worst,
2329 cpi->worst_ssim, cpi->fastssim.worst, cpi->psnrhvs.worst);
2331 if (cpi->b_calculate_blockiness) {
2332 SNPRINT(headings, "\t Block\tWstBlck");
2333 SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count);
2334 SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
2337 if (cpi->b_calculate_consistency) {
2338 double consistency =
2339 vpx_sse_to_psnr((double)cpi->totalp_samples, peak,
2340 (double)cpi->total_inconsistency);
2342 SNPRINT(headings, "\tConsist\tWstCons");
2343 SNPRINT2(results, "\t%7.3f", consistency);
2344 SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
2347 fprintf(f, "%s\t Time\tRcErr\tAbsErr\n", headings);
2348 fprintf(f, "%s\t%8.0f\t%7.2f\t%7.2f\n", results, total_encode_time,
2349 rate_err, fabs(rate_err));
// Timing summary printed by a conditionally-compiled stats section
// whose #if guard is on an elided line.
2359 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2360 printf("\n_frames recive_data encod_mb_row compress_frame Total\n");
2361 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame,
2362 cpi->time_receive_data / 1000, cpi->time_encode_sb_row / 1000,
2363 cpi->time_compress_data / 1000,
2364 (cpi->time_receive_data + cpi->time_compress_data) / 1000);
2369 #if CONFIG_VP9_TEMPORAL_DENOISING
2370 vp9_denoiser_free(&(cpi->denoiser));
// Join and free the encoding worker threads.  The last worker shares
// the main thread's data, hence the (num_workers - 1) bound below.
2373 for (t = 0; t < cpi->num_workers; ++t) {
2374 VPxWorker *const worker = &cpi->workers[t];
2375 EncWorkerData *const thread_data = &cpi->tile_thr_data[t];
2377 // Deallocate allocated threads.
2378 vpx_get_worker_interface()->end(worker);
2380 // Deallocate allocated thread data.
2381 if (t < cpi->num_workers - 1) {
2382 vpx_free(thread_data->td->counts);
2383 vp9_free_pc_tree(thread_data->td);
2384 vpx_free(thread_data->td);
2387 vpx_free(cpi->tile_thr_data);
2388 vpx_free(cpi->workers);
2389 vp9_row_mt_mem_dealloc(cpi);
2391 if (cpi->num_workers > 1) {
2392 vp9_loop_filter_dealloc(&cpi->lf_row_sync);
2393 vp9_bitstream_encode_tiles_buffer_dealloc(cpi);
2396 vp9_alt_ref_aq_destroy(cpi->alt_ref_aq);
2398 dealloc_compressor_data(cpi);
2400 for (i = 0; i < sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]);
2402 vpx_free(cpi->mbgraph_stats[i].mb_stats);
2405 #if CONFIG_FP_MB_STATS
2406 if (cpi->use_fp_mb_stats) {
2407 vpx_free(cpi->twopass.frame_mb_stats_buf);
2408 cpi->twopass.frame_mb_stats_buf = NULL;
2412 vp9_remove_common(cm);
2413 vp9_free_ref_frame_buffers(cm->buffer_pool);
2414 #if CONFIG_VP9_POSTPROC
2415 vp9_free_postproc_buffers(cm);
// Close any debug dump files opened in vp9_create_compressor().
2419 #if CONFIG_VP9_TEMPORAL_DENOISING
2420 #ifdef OUTPUT_YUV_DENOISED
2421 fclose(yuv_denoised_file);
2424 #ifdef OUTPUT_YUV_SKINMAP
2425 fclose(yuv_skinmap_file);
2427 #ifdef OUTPUT_YUV_REC
2428 fclose(yuv_rec_file);
// Build a VPX_CODEC_PSNR_PKT for the frame just shown: compute PSNR of
// the reconstructed frame against the raw source and copy the per-plane
// samples/sse/psnr values into the packet.  For SVC the packet data is
// stashed in the layer context; otherwise the packet is appended to the
// output packet list.  NOTE(review): the 'psnr'/'i' declarations, the
// #else/#endif of the high-bit-depth branch and the SVC if/else are on
// elided lines.
2445 static void generate_psnr_packet(VP9_COMP *cpi) {
2446 struct vpx_codec_cx_pkt pkt;
2449 #if CONFIG_VP9_HIGHBITDEPTH
2450 vpx_calc_highbd_psnr(cpi->raw_source_frame, cpi->common.frame_to_show, &psnr,
2451 cpi->td.mb.e_mbd.bd, cpi->oxcf.input_bit_depth);
2453 vpx_calc_psnr(cpi->raw_source_frame, cpi->common.frame_to_show, &psnr);
// Copies all 4 entries -- presumably overall plus the Y/U/V planes per
// the vpx PSNR convention; confirm against vpx_dsp/psnr.h.
2456 for (i = 0; i < 4; ++i) {
2457 pkt.data.psnr.samples[i] = psnr.samples[i];
2458 pkt.data.psnr.sse[i] = psnr.sse[i];
2459 pkt.data.psnr.psnr[i] = psnr.psnr[i];
2461 pkt.kind = VPX_CODEC_PSNR_PKT;
2464 .layer_context[cpi->svc.spatial_layer_id *
2465 cpi->svc.number_temporal_layers]
2466 .psnr_pkt = pkt.data.psnr;
2468 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
// Restrict which reference frames the encoder may use.  ref_frame_flags
// is a bitmask over VP9_LAST_FLAG | VP9_GOLD_FLAG | VP9_ALT_FLAG; any
// value above 7 sets a bit outside that mask and is rejected with -1.
// (The success 'return 0;' is on an elided line.)
2471 int vp9_use_as_reference(VP9_COMP *cpi, int ref_frame_flags) {
2472 if (ref_frame_flags > 7) return -1;
2474 cpi->ref_frame_flags = ref_frame_flags;
// Record an application-requested reference refresh: translate the
// VP9_*_FLAG bits into the ext_refresh_* booleans and mark the request
// pending so the next encoded frame applies it.
2478 void vp9_update_reference(VP9_COMP *cpi, int ref_frame_flags) {
2479 cpi->ext_refresh_golden_frame = (ref_frame_flags & VP9_GOLD_FLAG) != 0;
2480 cpi->ext_refresh_alt_ref_frame = (ref_frame_flags & VP9_ALT_FLAG) != 0;
2481 cpi->ext_refresh_last_frame = (ref_frame_flags & VP9_LAST_FLAG) != 0;
2482 cpi->ext_refresh_frame_flags_pending = 1;
// Map a single VP9_REFFRAME flag to the corresponding reference frame
// buffer, or NULL when the flag names no valid reference.
2485 static YV12_BUFFER_CONFIG *get_vp9_ref_frame_buffer(
2486 VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag) {
2487 MV_REFERENCE_FRAME ref_frame = NONE;
2488 if (ref_frame_flag == VP9_LAST_FLAG)
2489 ref_frame = LAST_FRAME;
2490 else if (ref_frame_flag == VP9_GOLD_FLAG)
2491 ref_frame = GOLDEN_FRAME;
2492 else if (ref_frame_flag == VP9_ALT_FLAG)
2493 ref_frame = ALTREF_FRAME;
2495 return ref_frame == NONE ? NULL : get_ref_frame_buffer(cpi, ref_frame);
// Copy the selected reference frame out to caller-provided storage sd.
// NOTE(review): the NULL check on 'cfg' and the success/failure return
// statements are on elided lines -- per the libvpx control API this
// should return 0 on success and nonzero when the reference is
// unavailable; confirm against upstream.
2498 int vp9_copy_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
2499 YV12_BUFFER_CONFIG *sd) {
2500 YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
2502 vpx_yv12_copy_frame(cfg, sd);
// Overwrite the selected reference frame with caller-provided frame sd
// (the mirror of vp9_copy_reference_enc: note the swapped copy
// direction).  NOTE(review): the NULL check on 'cfg' and the return
// statements are on elided lines.
2509 int vp9_set_reference_enc(VP9_COMP *cpi, VP9_REFFRAME ref_frame_flag,
2510 YV12_BUFFER_CONFIG *sd) {
2511 YV12_BUFFER_CONFIG *cfg = get_vp9_ref_frame_buffer(cpi, ref_frame_flag);
2513 vpx_yv12_copy_frame(sd, cfg);
// External control: request that frame-context (entropy) refreshing be
// enabled/disabled, taking effect on the next encoded frame.
// (The trailing 'return 0;' is on an elided line.)
2520 int vp9_update_entropy(VP9_COMP *cpi, int update) {
2521 cpi->ext_refresh_frame_context = update;
2522 cpi->ext_refresh_frame_context_pending = 1;
2526 #ifdef OUTPUT_YUV_REC
// Debug helper: append the reconstructed frame's Y, U and V planes to
// the global yuv_rec_file as raw planar data.  High-bit-depth frames
// are written as 16-bit samples (element size 2), 8-bit frames as
// single bytes.  NOTE(review): the per-plane row loops
// ('do { ... } while (--h);'), the height/width locals and the early
// return ending the high-bit-depth branch are on elided lines.
2527 void vp9_write_yuv_rec_frame(VP9_COMMON *cm) {
2528 YV12_BUFFER_CONFIG *s = cm->frame_to_show;
2529 uint8_t *src = s->y_buffer;
2532 #if CONFIG_VP9_HIGHBITDEPTH
2533 if (s->flags & YV12_FLAG_HIGHBITDEPTH) {
2534 uint16_t *src16 = CONVERT_TO_SHORTPTR(s->y_buffer);
// Luma rows: y_width 16-bit samples each, advancing by the stride.
2537 fwrite(src16, s->y_width, 2, yuv_rec_file);
2538 src16 += s->y_stride;
2541 src16 = CONVERT_TO_SHORTPTR(s->u_buffer);
2545 fwrite(src16, s->uv_width, 2, yuv_rec_file);
2546 src16 += s->uv_stride;
2549 src16 = CONVERT_TO_SHORTPTR(s->v_buffer);
2553 fwrite(src16, s->uv_width, 2, yuv_rec_file);
2554 src16 += s->uv_stride;
2557 fflush(yuv_rec_file);
2560 #endif // CONFIG_VP9_HIGHBITDEPTH
// 8-bit path: same plane order, one byte per sample.
2563 fwrite(src, s->y_width, 1, yuv_rec_file);
2571 fwrite(src, s->uv_width, 1, yuv_rec_file);
2572 src += s->uv_stride;
2579 fwrite(src, s->uv_width, 1, yuv_rec_file);
2580 src += s->uv_stride;
2583 fflush(yuv_rec_file);
2587 #if CONFIG_VP9_HIGHBITDEPTH
// Non-normative (encoder-side only) frame rescale: resamples each of
// the three planes from src's crop dimensions to dst's with
// vp9_resize_plane (or the high-bit-depth variant), then regenerates
// dst's border pixels.  NOTE(review): the bit-depth parameter line of
// the high-bit-depth prototype and the loop variable 'i' declaration
// are on elided lines.
2588 static void scale_and_extend_frame_nonnormative(const YV12_BUFFER_CONFIG *src,
2589 YV12_BUFFER_CONFIG *dst,
2592 static void scale_and_extend_frame_nonnormative(const YV12_BUFFER_CONFIG *src,
2593 YV12_BUFFER_CONFIG *dst) {
2594 #endif // CONFIG_VP9_HIGHBITDEPTH
2595 // TODO(dkovalev): replace YV12_BUFFER_CONFIG with vpx_image_t
// Gather per-plane pointers/strides/crop sizes; index 0 is luma, 1 and
// 2 are the chroma planes sharing the uv geometry.
2597 const uint8_t *const srcs[3] = { src->y_buffer, src->u_buffer,
2599 const int src_strides[3] = { src->y_stride, src->uv_stride, src->uv_stride };
2600 const int src_widths[3] = { src->y_crop_width, src->uv_crop_width,
2601 src->uv_crop_width };
2602 const int src_heights[3] = { src->y_crop_height, src->uv_crop_height,
2603 src->uv_crop_height };
2604 uint8_t *const dsts[3] = { dst->y_buffer, dst->u_buffer, dst->v_buffer };
2605 const int dst_strides[3] = { dst->y_stride, dst->uv_stride, dst->uv_stride };
2606 const int dst_widths[3] = { dst->y_crop_width, dst->uv_crop_width,
2607 dst->uv_crop_width };
2608 const int dst_heights[3] = { dst->y_crop_height, dst->uv_crop_height,
2609 dst->uv_crop_height };
2611 for (i = 0; i < MAX_MB_PLANE; ++i) {
2612 #if CONFIG_VP9_HIGHBITDEPTH
2613 if (src->flags & YV12_FLAG_HIGHBITDEPTH) {
2614 vp9_highbd_resize_plane(srcs[i], src_heights[i], src_widths[i],
2615 src_strides[i], dsts[i], dst_heights[i],
2616 dst_widths[i], dst_strides[i], bd);
2618 vp9_resize_plane(srcs[i], src_heights[i], src_widths[i], src_strides[i],
2619 dsts[i], dst_heights[i], dst_widths[i], dst_strides[i]);
2622 vp9_resize_plane(srcs[i], src_heights[i], src_widths[i], src_strides[i],
2623 dsts[i], dst_heights[i], dst_widths[i], dst_strides[i]);
2624 #endif // CONFIG_VP9_HIGHBITDEPTH
2626 vpx_extend_frame_borders(dst);
2629 #if CONFIG_VP9_HIGHBITDEPTH
// Normative scaler (high-bit-depth build): rescales src into dst in
// 16x16 output tiles using the interpolation kernel selected by
// filter_type, tracking sub-pel source positions in q4 (1/16-pel)
// units.  NOTE(review): the 'phase_scaler' parameter line and the
// 'i'/'x'/'y' loop-variable declarations are on elided lines.
2630 static void scale_and_extend_frame(const YV12_BUFFER_CONFIG *src,
2631 YV12_BUFFER_CONFIG *dst, int bd,
2632 INTERP_FILTER filter_type,
2634 const int src_w = src->y_crop_width;
2635 const int src_h = src->y_crop_height;
2636 const int dst_w = dst->y_crop_width;
2637 const int dst_h = dst->y_crop_height;
2638 const uint8_t *const srcs[3] = { src->y_buffer, src->u_buffer,
2640 const int src_strides[3] = { src->y_stride, src->uv_stride, src->uv_stride };
2641 uint8_t *const dsts[3] = { dst->y_buffer, dst->u_buffer, dst->v_buffer };
2642 const int dst_strides[3] = { dst->y_stride, dst->uv_stride, dst->uv_stride };
2643 const InterpKernel *const kernel = vp9_filter_kernels[filter_type];
2646 for (i = 0; i < MAX_MB_PLANE; ++i) {
// Chroma planes are subsampled 2x in each dimension (factor == 2).
// NOTE(review): 'i == 3' can never hold when the loop bound is
// MAX_MB_PLANE (3); the condition looks vestigial -- confirm upstream.
2647 const int factor = (i == 0 || i == 3 ? 1 : 2);
2648 const int src_stride = src_strides[i];
2649 const int dst_stride = dst_strides[i];
2650 for (y = 0; y < dst_h; y += 16) {
2651 const int y_q4 = y * (16 / factor) * src_h / dst_h + phase_scaler;
2652 for (x = 0; x < dst_w; x += 16) {
2653 const int x_q4 = x * (16 / factor) * src_w / dst_w + phase_scaler;
2654 const uint8_t *src_ptr = srcs[i] +
2655 (y / factor) * src_h / dst_h * src_stride +
2656 (x / factor) * src_w / dst_w;
2657 uint8_t *dst_ptr = dsts[i] + (y / factor) * dst_stride + (x / factor);
2659 if (src->flags & YV12_FLAG_HIGHBITDEPTH) {
2660 vpx_highbd_convolve8(CONVERT_TO_SHORTPTR(src_ptr), src_stride,
2661 CONVERT_TO_SHORTPTR(dst_ptr), dst_stride, kernel,
2662 x_q4 & 0xf, 16 * src_w / dst_w, y_q4 & 0xf,
2663 16 * src_h / dst_h, 16 / factor, 16 / factor,
2666 vpx_scaled_2d(src_ptr, src_stride, dst_ptr, dst_stride, kernel,
2667 x_q4 & 0xf, 16 * src_w / dst_w, y_q4 & 0xf,
2668 16 * src_h / dst_h, 16 / factor, 16 / factor);
2674 vpx_extend_frame_borders(dst);
2676 #endif // CONFIG_VP9_HIGHBITDEPTH
// Decide whether a kf/gf/arf frame should trigger dynamic scale-down:
// only when the group is currently unscaled, the chosen q has reached
// the rate-level maximum, and the projected size exceeds a multiple of
// the larger of this frame's target and the average frame bandwidth.
// NOTE(review): the 'int scale = 0;' declaration and the trailing
// 'return scale;' are on elided lines.
2678 static int scale_down(VP9_COMP *cpi, int q) {
2679 RATE_CONTROL *const rc = &cpi->rc;
2680 GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2682 assert(frame_is_kf_gf_arf(cpi));
2684 if (rc->frame_size_selector == UNSCALED &&
2685 q >= rc->rf_level_maxq[gf_group->rf_level[gf_group->index]]) {
2686 const int max_size_thresh =
2687 (int)(rate_thresh_mult[SCALE_STEP1] *
2688 VPXMAX(rc->this_frame_target, rc->avg_frame_bandwidth));
2689 scale = rc->projected_frame_size > max_size_thresh ? 1 : 0;
// Upper "big rate miss" bound on the projected frame size: 1.5x the
// target for kf/gf/arf frames (whose targets are already enlarged),
// 2x the target otherwise.  NOTE(review): the 'big_miss_high'
// declaration and the 'else' keyword are on elided lines.
2694 static int big_rate_miss_high_threshold(VP9_COMP *cpi) {
2695 const RATE_CONTROL *const rc = &cpi->rc;
2698 if (frame_is_kf_gf_arf(cpi))
2699 big_miss_high = rc->this_frame_target * 3 / 2;
2701 big_miss_high = rc->this_frame_target * 2;
2703 return big_miss_high;
// Returns nonzero when the projected frame size badly misses the
// target in either direction: above the high threshold from
// big_rate_miss_high_threshold(), or below half the target.
// NOTE(review): the 'big_miss_high'/'big_miss_low' declarations and
// the early return taken for overlay (src-is-alt-ref) frames are on
// elided lines.
2706 static int big_rate_miss(VP9_COMP *cpi) {
2707 const RATE_CONTROL *const rc = &cpi->rc;
2711 // Ignore for overlay frames
2712 if (rc->is_src_frame_alt_ref) {
2715 big_miss_low = (rc->this_frame_target / 2);
2716 big_miss_high = big_rate_miss_high_threshold(cpi);
2718 return (rc->projected_frame_size > big_miss_high) ||
2719 (rc->projected_frame_size < big_miss_low);
2723 // test in two pass for the first
// Returns nonzero iff this is a two-pass (pass 2) encode and the
// GF group index sits at the group's first inter frame.  (The return
// statements and closing braces are on elided lines.)
2724 static int two_pass_first_group_inter(VP9_COMP *cpi) {
2725 TWO_PASS *const twopass = &cpi->twopass;
2726 GF_GROUP *const gf_group = &twopass->gf_group;
2727 if ((cpi->oxcf.pass == 2) &&
2728 (gf_group->index == gf_group->first_inter_index)) {
2735 // Function to test for conditions that indicate we should loop
2736 // back and recode a frame.
// Returns nonzero when the frame should be re-encoded: dynamic resize
// was just triggered, overshoot is extreme, or the projected size falls
// outside [low_limit, high_limit] with q-room left within [minq, maxq]
// (plus a CQ-mode undershoot case).  Gated on the recode_loop speed
// feature and the frame type.  NOTE(review): several 'return 1;'
// statements and closing braces are on elided lines.
2737 static int recode_loop_test(VP9_COMP *cpi, int high_limit, int low_limit, int q,
2738 int maxq, int minq) {
2739 const RATE_CONTROL *const rc = &cpi->rc;
2740 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
2741 const int frame_is_kfgfarf = frame_is_kf_gf_arf(cpi);
2742 int force_recode = 0;
2744 if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
2745 big_rate_miss(cpi) || (cpi->sf.recode_loop == ALLOW_RECODE) ||
2746 (two_pass_first_group_inter(cpi) &&
2747 (cpi->sf.recode_loop == ALLOW_RECODE_FIRST)) ||
2748 (frame_is_kfgfarf && (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF))) {
2749 if (frame_is_kfgfarf && (oxcf->resize_mode == RESIZE_DYNAMIC) &&
2750 scale_down(cpi, q)) {
2751 // Code this group at a lower resolution.
2752 cpi->resize_pending = 1;
2756 // Force recode for extreme overshoot.
2757 if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
2758 (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF &&
2759 rc->projected_frame_size >= big_rate_miss_high_threshold(cpi))) {
2763 // TODO(agrange) high_limit could be greater than the scale-down threshold.
2764 if ((rc->projected_frame_size > high_limit && q < maxq) ||
2765 (rc->projected_frame_size < low_limit && q > minq)) {
2767 } else if (cpi->oxcf.rc_mode == VPX_CQ) {
2768 // Deal with frame undershoot and whether or not we are
2769 // below the automatically set cq level.
2770 if (q > oxcf->cq_level &&
2771 rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
2776 return force_recode;
// After a frame is encoded, commit it into the reference slots
// (LAST/GOLDEN/ALTREF) whose refresh flags are set, maintaining buffer
// reference counts via ref_cnt_fb() and the per-reference
// interpolation-filter selection history.  Also refreshes denoiser
// buffers (temporal-denoising builds) and SVC reference bookkeeping.
// NOTE(review): several cm->new_fb_idx arguments, the 'int tmp'
// declaration and a few else/closing-brace lines are elided.
2779 void vp9_update_reference_frames(VP9_COMP *cpi) {
2780 VP9_COMMON *const cm = &cpi->common;
2781 BufferPool *const pool = cm->buffer_pool;
2783 // At this point the new frame has been encoded.
2784 // If any buffer copy / swapping is signaled it should be done here.
// Key frames refresh both the GOLDEN and ALTREF slots.
2785 if (cm->frame_type == KEY_FRAME) {
2786 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->gld_fb_idx],
2788 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->alt_fb_idx],
2790 } else if (vp9_preserve_existing_gf(cpi)) {
2791 // We have decided to preserve the previously existing golden frame as our
2792 // new ARF frame. However, in the short term in function
2793 // vp9_get_refresh_mask() we left it in the GF slot and, if
2794 // we're updating the GF with the current decoded frame, we save it to the
2795 // ARF slot instead.
2796 // We now have to update the ARF with the current frame and swap gld_fb_idx
2797 // and alt_fb_idx so that, overall, we've stored the old GF in the new ARF
2798 // slot and, if we're updating the GF, the current frame becomes the new GF.
2801 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->alt_fb_idx],
2804 tmp = cpi->alt_fb_idx;
2805 cpi->alt_fb_idx = cpi->gld_fb_idx;
2806 cpi->gld_fb_idx = tmp;
2808 if (is_two_pass_svc(cpi)) {
2809 cpi->svc.layer_context[0].gold_ref_idx = cpi->gld_fb_idx;
2810 cpi->svc.layer_context[0].alt_ref_idx = cpi->alt_fb_idx;
2812 } else { /* For non key/golden frames */
2813 if (cpi->refresh_alt_ref_frame) {
// Multi-ARF groups take the ARF slot from the gf_group schedule.
2814 int arf_idx = cpi->alt_fb_idx;
2815 if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
2816 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
2817 arf_idx = gf_group->arf_update_idx[gf_group->index];
2820 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[arf_idx], cm->new_fb_idx);
2821 memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
2822 cpi->interp_filter_selected[0],
2823 sizeof(cpi->interp_filter_selected[0]));
2826 if (cpi->refresh_golden_frame) {
2827 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->gld_fb_idx],
// For ARF-overlay frames GOLDEN inherits ALTREF's filter history
// rather than the current frame's (the 'else' keyword is elided).
2829 if (!cpi->rc.is_src_frame_alt_ref)
2830 memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
2831 cpi->interp_filter_selected[0],
2832 sizeof(cpi->interp_filter_selected[0]));
2834 memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
2835 cpi->interp_filter_selected[ALTREF_FRAME],
2836 sizeof(cpi->interp_filter_selected[ALTREF_FRAME]));
2840 if (cpi->refresh_last_frame) {
2841 ref_cnt_fb(pool->frame_bufs, &cm->ref_frame_map[cpi->lst_fb_idx],
2843 if (!cpi->rc.is_src_frame_alt_ref)
2844 memcpy(cpi->interp_filter_selected[LAST_FRAME],
2845 cpi->interp_filter_selected[0],
2846 sizeof(cpi->interp_filter_selected[0]));
2848 #if CONFIG_VP9_TEMPORAL_DENOISING
2849 if (cpi->oxcf.noise_sensitivity > 0 && denoise_svc(cpi) &&
2850 cpi->denoiser.denoising_level > kDenLowLow) {
2851 int svc_base_is_key = 0;
2853 int realloc_fail = 0;
2854 int layer = LAYER_IDS_TO_IDX(cpi->svc.spatial_layer_id,
2855 cpi->svc.temporal_layer_id,
2856 cpi->svc.number_temporal_layers);
2857 LAYER_CONTEXT *lc = &cpi->svc.layer_context[layer];
2858 svc_base_is_key = lc->is_key_frame;
2860 // Check if we need to allocate extra buffers in the denoiser for
2861 // refreshed frames.
2862 realloc_fail = vp9_denoiser_realloc_svc(
2863 cm, &cpi->denoiser, cpi->refresh_alt_ref_frame,
2864 cpi->refresh_golden_frame, cpi->refresh_last_frame, cpi->alt_fb_idx,
2865 cpi->gld_fb_idx, cpi->lst_fb_idx);
// (The 'if (realloc_fail)' guard line is elided.)
2867 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
2868 "Failed to re-allocate denoiser for SVC");
2870 vp9_denoiser_update_frame_info(
2871 &cpi->denoiser, *cpi->Source, cpi->common.frame_type,
2872 cpi->refresh_alt_ref_frame, cpi->refresh_golden_frame,
2873 cpi->refresh_last_frame, cpi->alt_fb_idx, cpi->gld_fb_idx,
2874 cpi->lst_fb_idx, cpi->resize_pending, svc_base_is_key);
2877 if (is_one_pass_cbr_svc(cpi)) {
2878 // Keep track of frame index for each reference frame.
2879 SVC *const svc = &cpi->svc;
2880 if (cm->frame_type == KEY_FRAME) {
2881 svc->ref_frame_index[cpi->lst_fb_idx] = svc->current_superframe;
2882 svc->ref_frame_index[cpi->gld_fb_idx] = svc->current_superframe;
2883 svc->ref_frame_index[cpi->alt_fb_idx] = svc->current_superframe;
2885 if (cpi->refresh_last_frame)
2886 svc->ref_frame_index[cpi->lst_fb_idx] = svc->current_superframe;
2887 if (cpi->refresh_golden_frame)
2888 svc->ref_frame_index[cpi->gld_fb_idx] = svc->current_superframe;
2889 if (cpi->refresh_alt_ref_frame)
2890 svc->ref_frame_index[cpi->alt_fb_idx] = svc->current_superframe;
// Pick a loop-filter strength for the just-encoded frame (timed into
// cpi->time_pick_lpf) and apply the deblocking filter -- multithreaded
// when workers are available -- then refresh the frame's inner borders.
// ARF-overlay frames get filter level 0; the filter is only run when
// the frame is a reference.  NOTE(review): the guard condition for the
// zeroing branch at the top and several else/closing-brace lines are
// elided from this listing.
2895 static void loopfilter_frame(VP9_COMP *cpi, VP9_COMMON *cm) {
2896 MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
2897 struct loopfilter *lf = &cm->lf;
2899 const int is_reference_frame =
2900 (cm->frame_type == KEY_FRAME || cpi->refresh_last_frame ||
2901 cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame);
2904 lf->filter_level = 0;
2905 lf->last_filt_level = 0;
2907 struct vpx_usec_timer timer;
2909 vpx_clear_system_state();
2911 vpx_usec_timer_start(&timer);
2913 if (!cpi->rc.is_src_frame_alt_ref) {
// Non-forced key frames restart the filter-level search from 0.
2914 if ((cpi->common.frame_type == KEY_FRAME) &&
2915 (!cpi->rc.this_key_frame_forced)) {
2916 lf->last_filt_level = 0;
2918 vp9_pick_filter_level(cpi->Source, cpi, cpi->sf.lpf_pick);
2919 lf->last_filt_level = lf->filter_level;
2921 lf->filter_level = 0;
2924 vpx_usec_timer_mark(&timer);
2925 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
2928 if (lf->filter_level > 0 && is_reference_frame) {
2929 vp9_build_mask_frame(cm, lf->filter_level, 0);
2931 if (cpi->num_workers > 1)
2932 vp9_loop_filter_frame_mt(cm->frame_to_show, cm, xd->plane,
2933 lf->filter_level, 0, 0, cpi->workers,
2934 cpi->num_workers, &cpi->lf_row_sync);
2936 vp9_loop_filter_frame(cm->frame_to_show, cm, xd, lf->filter_level, 0, 0);
2939 vpx_extend_frame_inner_borders(cm->frame_to_show);
// Ensure the frame buffer at buffer_idx carries an MV array sized for
// the current mi_rows x mi_cols grid (one MV_REF per mode-info unit),
// (re)allocating when it is absent or undersized.  The stale array is
// freed first; CHECK_MEM_ERROR aborts via cm->error on allocation
// failure.  (Closing braces are on elided lines.)
2942 static INLINE void alloc_frame_mvs(VP9_COMMON *const cm, int buffer_idx) {
2943 RefCntBuffer *const new_fb_ptr = &cm->buffer_pool->frame_bufs[buffer_idx];
2944 if (new_fb_ptr->mvs == NULL || new_fb_ptr->mi_rows < cm->mi_rows ||
2945 new_fb_ptr->mi_cols < cm->mi_cols) {
2946 vpx_free(new_fb_ptr->mvs);
2947 CHECK_MEM_ERROR(cm, new_fb_ptr->mvs,
2948 (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
2949 sizeof(*new_fb_ptr->mvs)));
2950 new_fb_ptr->mi_rows = cm->mi_rows;
2951 new_fb_ptr->mi_cols = cm->mi_cols;
// For every reference frame selected by cpi->ref_frame_flags, make a version
// of the reference available at the current coded frame size. If the
// reference's crop dimensions differ from cm->width/height it is scaled into
// a dedicated "scaled reference" buffer (tracked in cpi->scaled_ref_idx);
// otherwise the original reference buffer is reused directly. Unused slots
// are marked INVALID_IDX.
// NOTE(review): the excerpt is elided — several lines (e.g. NULL-reference
// handling, ref-count updates, brace closures) are missing from this view.
2955 void vp9_scale_references(VP9_COMP *cpi) {
2956 VP9_COMMON *cm = &cpi->common;
2957 MV_REFERENCE_FRAME ref_frame;
2958 const VP9_REFFRAME ref_mask[3] = { VP9_LAST_FLAG, VP9_GOLD_FLAG,
2961 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2962 // Need to convert from VP9_REFFRAME to index into ref_mask (subtract 1).
2963 if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
2964 BufferPool *const pool = cm->buffer_pool;
2965 const YV12_BUFFER_CONFIG *const ref =
2966 get_ref_frame_buffer(cpi, ref_frame);
2969 cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
// Two parallel scaling paths follow: the high-bit-depth build passes the
// bit depth to the scaler, the 8-bit build uses the EIGHTTAP scaler.
2973 #if CONFIG_VP9_HIGHBITDEPTH
2974 if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
2975 RefCntBuffer *new_fb_ptr = NULL;
2976 int force_scaling = 0;
// Reuse an existing scaled-reference buffer if one was assigned for this
// slot; otherwise grab a free buffer from the pool.
2977 int new_fb = cpi->scaled_ref_idx[ref_frame - 1];
2978 if (new_fb == INVALID_IDX) {
2979 new_fb = get_free_fb(cm);
2982 if (new_fb == INVALID_IDX) return;
2983 new_fb_ptr = &pool->frame_bufs[new_fb];
2984 if (force_scaling || new_fb_ptr->buf.y_crop_width != cm->width ||
2985 new_fb_ptr->buf.y_crop_height != cm->height) {
2986 if (vpx_realloc_frame_buffer(&new_fb_ptr->buf, cm->width, cm->height,
2987 cm->subsampling_x, cm->subsampling_y,
2988 cm->use_highbitdepth,
2989 VP9_ENC_BORDER_IN_PIXELS,
2990 cm->byte_alignment, NULL, NULL, NULL))
2991 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
2992 "Failed to allocate frame buffer");
2993 scale_and_extend_frame(ref, &new_fb_ptr->buf, (int)cm->bit_depth,
2995 cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
// Keep the scaled buffer's MV grid in step with the new size.
2996 alloc_frame_mvs(cm, new_fb);
// Low-bit-depth path: same structure as above, 8-tap scaler.
2999 if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
3000 RefCntBuffer *new_fb_ptr = NULL;
3001 int force_scaling = 0;
3002 int new_fb = cpi->scaled_ref_idx[ref_frame - 1];
3003 if (new_fb == INVALID_IDX) {
3004 new_fb = get_free_fb(cm);
3007 if (new_fb == INVALID_IDX) return;
3008 new_fb_ptr = &pool->frame_bufs[new_fb];
3009 if (force_scaling || new_fb_ptr->buf.y_crop_width != cm->width ||
3010 new_fb_ptr->buf.y_crop_height != cm->height) {
3011 if (vpx_realloc_frame_buffer(&new_fb_ptr->buf, cm->width, cm->height,
3012 cm->subsampling_x, cm->subsampling_y,
3013 VP9_ENC_BORDER_IN_PIXELS,
3014 cm->byte_alignment, NULL, NULL, NULL))
3015 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
3016 "Failed to allocate frame buffer");
3017 vp9_scale_and_extend_frame(ref, &new_fb_ptr->buf, EIGHTTAP, 0);
3018 cpi->scaled_ref_idx[ref_frame - 1] = new_fb;
3019 alloc_frame_mvs(cm, new_fb);
3021 #endif // CONFIG_VP9_HIGHBITDEPTH
// Reference already matches the coded size — no scaling needed. In
// 1-pass non-SVC mode, first release any previously held scaled
// reference for this slot, then point the slot at the real reference.
3024 RefCntBuffer *buf = NULL;
3025 if (cpi->oxcf.pass == 0 && !cpi->use_svc) {
3026 // Check for release of scaled reference.
3027 buf_idx = cpi->scaled_ref_idx[ref_frame - 1];
3028 buf = (buf_idx != INVALID_IDX) ? &pool->frame_bufs[buf_idx] : NULL;
3031 cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
3034 buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
3035 buf = &pool->frame_bufs[buf_idx];
3036 buf->buf.y_crop_width = ref->y_crop_width;
3037 buf->buf.y_crop_height = ref->y_crop_height;
3038 cpi->scaled_ref_idx[ref_frame - 1] = buf_idx;
// Slots for references not in ref_frame_flags stay invalid except in
// 1-pass non-SVC mode (handled above).
3042 if (cpi->oxcf.pass != 0 || cpi->use_svc)
3043 cpi->scaled_ref_idx[ref_frame - 1] = INVALID_IDX;
// Release the scaled-reference buffers held in cpi->scaled_ref_idx. In
// 1-pass non-SVC mode a scaled reference is released only when its slot will
// be refreshed this frame or when it already matches the reference's
// resolution; in all other modes every slot is released unconditionally.
// Released slots are reset to INVALID_IDX.
// NOTE(review): elided excerpt — declarations of |i|/|refresh| and the
// ref-count decrements are among the lines missing from this view.
3048 static void release_scaled_references(VP9_COMP *cpi) {
3049 VP9_COMMON *cm = &cpi->common;
3051 if (cpi->oxcf.pass == 0 && !cpi->use_svc) {
3052 // Only release scaled references under certain conditions:
3053 // if reference will be updated, or if scaled reference has same resolution.
3055 refresh[0] = (cpi->refresh_last_frame) ? 1 : 0;
3056 refresh[1] = (cpi->refresh_golden_frame) ? 1 : 0;
3057 refresh[2] = (cpi->refresh_alt_ref_frame) ? 1 : 0;
3058 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3059 const int idx = cpi->scaled_ref_idx[i - 1];
3060 RefCntBuffer *const buf =
3061 idx != INVALID_IDX ? &cm->buffer_pool->frame_bufs[idx] : NULL;
3062 const YV12_BUFFER_CONFIG *const ref = get_ref_frame_buffer(cpi, i);
3064 (refresh[i - 1] || (buf->buf.y_crop_width == ref->y_crop_width &&
3065 buf->buf.y_crop_height == ref->y_crop_height))) {
3067 cpi->scaled_ref_idx[i - 1] = INVALID_IDX;
// All other modes: drop every scaled reference slot.
3071 for (i = 0; i < MAX_REF_FRAMES; ++i) {
3072 const int idx = cpi->scaled_ref_idx[i];
3073 RefCntBuffer *const buf =
3074 idx != INVALID_IDX ? &cm->buffer_pool->frame_bufs[idx] : NULL;
3077 cpi->scaled_ref_idx[i] = INVALID_IDX;
// Collapse a full per-token coefficient count array into the smaller
// model-token domain: ZERO/ONE/TWO counts carry over directly, every token
// from THREE_TOKEN up to (but excluding) EOB_TOKEN is accumulated into the
// TWO_TOKEN bucket, and the EOB count maps to EOB_MODEL_TOKEN.
// NOTE(review): the declaration of loop variable |n| is elided from this view.
3083 static void full_to_model_count(unsigned int *model_count,
3084 unsigned int *full_count) {
3086 model_count[ZERO_TOKEN] = full_count[ZERO_TOKEN];
3087 model_count[ONE_TOKEN] = full_count[ONE_TOKEN];
3088 model_count[TWO_TOKEN] = full_count[TWO_TOKEN];
3089 for (n = THREE_TOKEN; n < EOB_TOKEN; ++n)
3090 model_count[TWO_TOKEN] += full_count[n];
3091 model_count[EOB_MODEL_TOKEN] = full_count[EOB_TOKEN];
// Apply full_to_model_count() across the whole coefficient-count tensor:
// every (plane type, reference type, band, context) cell is collapsed from
// the full token domain to the model domain. BAND_COEFF_CONTEXTS(k) bounds
// the context loop per band.
// NOTE(review): declarations of i/j/k/l are elided from this view.
3094 static void full_to_model_counts(vp9_coeff_count_model *model_count,
3095 vp9_coeff_count *full_count) {
3098 for (i = 0; i < PLANE_TYPES; ++i)
3099 for (j = 0; j < REF_TYPES; ++j)
3100 for (k = 0; k < COEF_BANDS; ++k)
3101 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
3102 full_to_model_count(model_count[i][j][k][l], full_count[i][j][k][l]);
// Developer-only per-frame statistics dump, permanently compiled out via
// "#if 0". When enabled it appends one CSV row per frame to "tmp.stt"
// (rate-control, quantizer, and error metrics) and, in a second section,
// per-mode hit counts to "Modes.stt". Not part of any shipping build path.
3105 #if 0 && CONFIG_INTERNAL_STATS
3106 static void output_frame_level_debug_stats(VP9_COMP *cpi) {
3107 VP9_COMMON *const cm = &cpi->common;
// Open in write mode only for the very first frame so the file restarts
// per encode; later frames append.
3108 FILE *const f = fopen("tmp.stt", cm->current_video_frame ? "a" : "w");
3111 vpx_clear_system_state();
// Luma reconstruction error vs the source, with a high-bit-depth variant.
3113 #if CONFIG_VP9_HIGHBITDEPTH
3114 if (cm->use_highbitdepth) {
3115 recon_err = vpx_highbd_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3117 recon_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3120 recon_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3121 #endif // CONFIG_VP9_HIGHBITDEPTH
3124 if (cpi->twopass.total_left_stats.coded_error != 0.0) {
// "devisor" [sic] scales the DC quantizer back to an 8-bit-equivalent q
// value for the given bit depth (4x per extra 2 bits of depth).
3125 double dc_quant_devisor;
3126 #if CONFIG_VP9_HIGHBITDEPTH
3127 switch (cm->bit_depth) {
3129 dc_quant_devisor = 4.0;
3132 dc_quant_devisor = 16.0;
3135 dc_quant_devisor = 64.0;
3138 assert(0 && "bit_depth must be VPX_BITS_8, VPX_BITS_10 or VPX_BITS_12");
3142 dc_quant_devisor = 4.0;
// Emit the CSV header once, on frame 0.
3145 if (!cm->current_video_frame) {
3146 fprintf(f, "frame, width, height, last ts, last end ts, "
3147 "source_alt_ref_pending, source_alt_ref_active, "
3148 "this_frame_target, projected_frame_size, "
3149 "projected_frame_size / MBs, "
3150 "projected_frame_size - this_frame_target, "
3151 "vbr_bits_off_target, vbr_bits_off_target_fast, "
3152 "twopass.extend_minq, twopass.extend_minq_fast, "
3153 "total_target_vs_actual, "
3154 "starting_buffer_level - bits_off_target, "
3155 "total_actual_bits, base_qindex, q for base_qindex, "
3156 "dc quant, q for active_worst_quality, avg_q, q for oxcf.cq_level, "
3157 "refresh_last_frame, refresh_golden_frame, refresh_alt_ref_frame, "
3158 "frame_type, gfu_boost, "
3159 "twopass.bits_left, "
3160 "twopass.total_left_stats.coded_error, "
3161 "twopass.bits_left / (1 + twopass.total_left_stats.coded_error), "
3162 "tot_recode_hits, recon_err, kf_boost, "
3163 "twopass.kf_zeromotion_pct, twopass.fr_content_type, "
3164 "filter_level, seg.aq_av_offset\n");
// One data row per frame; column order must match the header above.
3167 fprintf(f, "%10u, %d, %d, %10"PRId64", %10"PRId64", %d, %d, %10d, %10d, "
3168 "%10d, %10d, %10"PRId64", %10"PRId64", %5d, %5d, %10"PRId64", "
3169 "%10"PRId64", %10"PRId64", %10d, %7.2lf, %7.2lf, %7.2lf, %7.2lf, "
3170 "%7.2lf, %6d, %6d, %5d, %5d, %5d, %10"PRId64", %10.3lf, %10lf, %8u, "
3171 "%10"PRId64", %10d, %10d, %10d, %10d, %10d\n",
3172 cpi->common.current_video_frame,
3173 cm->width, cm->height,
3174 cpi->last_time_stamp_seen,
3175 cpi->last_end_time_stamp_seen,
3176 cpi->rc.source_alt_ref_pending,
3177 cpi->rc.source_alt_ref_active,
3178 cpi->rc.this_frame_target,
3179 cpi->rc.projected_frame_size,
3180 cpi->rc.projected_frame_size / cpi->common.MBs,
3181 (cpi->rc.projected_frame_size - cpi->rc.this_frame_target),
3182 cpi->rc.vbr_bits_off_target,
3183 cpi->rc.vbr_bits_off_target_fast,
3184 cpi->twopass.extend_minq,
3185 cpi->twopass.extend_minq_fast,
3186 cpi->rc.total_target_vs_actual,
3187 (cpi->rc.starting_buffer_level - cpi->rc.bits_off_target),
3188 cpi->rc.total_actual_bits, cm->base_qindex,
3189 vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth),
3190 (double)vp9_dc_quant(cm->base_qindex, 0, cm->bit_depth) /
3192 vp9_convert_qindex_to_q(cpi->twopass.active_worst_quality,
3195 vp9_convert_qindex_to_q(cpi->oxcf.cq_level, cm->bit_depth),
3196 cpi->refresh_last_frame, cpi->refresh_golden_frame,
3197 cpi->refresh_alt_ref_frame, cm->frame_type, cpi->rc.gfu_boost,
3198 cpi->twopass.bits_left,
3199 cpi->twopass.total_left_stats.coded_error,
3200 cpi->twopass.bits_left /
3201 (1 + cpi->twopass.total_left_stats.coded_error),
3202 cpi->tot_recode_hits, recon_err, cpi->rc.kf_boost,
3203 cpi->twopass.kf_zeromotion_pct,
3204 cpi->twopass.fr_content_type,
3205 cm->lf.filter_level,
3206 cm->seg.aq_av_offset);
// Second debug file: per-frame mode selection histogram.
3211 FILE *const fmodes = fopen("Modes.stt", "a");
3214 fprintf(fmodes, "%6d:%1d:%1d:%1d ", cpi->common.current_video_frame,
3215 cm->frame_type, cpi->refresh_golden_frame,
3216 cpi->refresh_alt_ref_frame);
3218 for (i = 0; i < MAX_MODES; ++i)
3219 fprintf(fmodes, "%5d ", cpi->mode_chosen_counts[i]);
3221 fprintf(fmodes, "\n");
// Set the motion-search step parameter for the frame. The default search
// range is derived from the smaller frame dimension; with the auto step-size
// speed feature, intra-only frames reset max_mv_magnitude while shown inter
// frames shrink the range to twice the largest MV seen in the previous frame.
3228 static void set_mv_search_params(VP9_COMP *cpi) {
3229 const VP9_COMMON *const cm = &cpi->common;
3230 const unsigned int max_mv_def = VPXMIN(cm->width, cm->height);
3232 // Default based on max resolution.
3233 cpi->mv_step_param = vp9_init_search_range(max_mv_def);
3235 if (cpi->sf.mv.auto_mv_step_size) {
3236 if (frame_is_intra_only(cm)) {
3237 // Initialize max_mv_magnitude for use in the first INTER frame
3238 // after a key/intra-only frame.
3239 cpi->max_mv_magnitude = max_mv_def;
3241 if (cm->show_frame) {
3242 // Allow mv_steps to correspond to twice the max mv magnitude found
3243 // in the previous frame, capped by the default max_mv_magnitude based
3245 cpi->mv_step_param = vp9_init_search_range(
3246 VPXMIN(max_mv_def, 2 * cpi->max_mv_magnitude));
// Reset the running magnitude so the next frame re-measures it.
3248 cpi->max_mv_magnitude = 0;
// Configure the encoder settings that do not depend on frame dimensions:
// frame-size-independent speed features, RD speed thresholds (regular and
// sub-8x8), and the default interpolation filter chosen by the speed
// features.
3253 static void set_size_independent_vars(VP9_COMP *cpi) {
3254 vp9_set_speed_features_framesize_independent(cpi);
3255 vp9_set_rd_speed_thresholds(cpi);
3256 vp9_set_rd_speed_thresholds_sub8x8(cpi);
3257 cpi->common.interp_filter = cpi->sf.default_interp_filter;
// Configure per-frame settings that depend on the frame dimensions: pick the
// size-dependent speed features, choose the frame quantizer |q| and its
// rate-control bounds (written through the out-params), enable high-precision
// MVs for low q, optionally set up static segmentation (2-pass only), and —
// in postproc builds without the temporal denoiser — run spatial denoising on
// the source with a strength derived from oxcf.noise_sensitivity.
3260 static void set_size_dependent_vars(VP9_COMP *cpi, int *q, int *bottom_index,
3262 VP9_COMMON *const cm = &cpi->common;
3264 // Setup variables that depend on the dimensions of the frame.
3265 vp9_set_speed_features_framesize_dependent(cpi);
3267 // Decide q and q bounds.
3268 *q = vp9_rc_pick_q_and_bounds(cpi, bottom_index, top_index);
3270 if (!frame_is_intra_only(cm)) {
3271 vp9_set_high_precision_mv(cpi, (*q) < HIGH_PRECISION_MV_QTHRESH);
3274 #if !CONFIG_REALTIME_ONLY
3275 // Configure experimental use of segmentation for enhanced coding of
3276 // static regions if indicated.
3277 // Only allowed in the second pass of a two pass encode, as it requires
3278 // lagged coding, and if the relevant speed feature flag is set.
3279 if (cpi->oxcf.pass == 2 && cpi->sf.static_segmentation)
3280 configure_static_seg_features(cpi);
3281 #endif // !CONFIG_REALTIME_ONLY
3283 #if CONFIG_VP9_POSTPROC && !(CONFIG_VP9_TEMPORAL_DENOISING)
3284 if (cpi->oxcf.noise_sensitivity > 0) {
// Map the sensitivity level to a denoise strength |l|.
// NOTE(review): the case-4 arm appears elided from this excerpt.
3286 switch (cpi->oxcf.noise_sensitivity) {
3287 case 1: l = 20; break;
3288 case 2: l = 40; break;
3289 case 3: l = 60; break;
3291 case 5: l = 100; break;
3292 case 6: l = 150; break;
// Lazily allocate the per-column postproc limits buffer.
3294 if (!cpi->common.postproc_state.limits) {
3295 cpi->common.postproc_state.limits =
3296 vpx_calloc(cpi->un_scaled_source->y_width,
3297 sizeof(*cpi->common.postproc_state.limits));
// In-place denoise of the source frame.
3299 vp9_denoise(cpi->Source, cpi->Source, l, cpi->common.postproc_state.limits);
3301 #endif // CONFIG_VP9_POSTPROC
3304 #if CONFIG_VP9_TEMPORAL_DENOISING
// Lazily allocate the temporal-denoiser frame buffers at the current frame
// size and subsampling. Runs only when noise sensitivity is enabled and the
// denoiser has not been initialized yet; allocation failure is fatal via
// vpx_internal_error.
3305 static void setup_denoiser_buffer(VP9_COMP *cpi) {
3306 VP9_COMMON *const cm = &cpi->common;
3307 if (cpi->oxcf.noise_sensitivity > 0 &&
3308 !cpi->denoiser.frame_buffer_initialized) {
3309 if (vp9_denoiser_alloc(cm, cpi->use_svc, &cpi->denoiser, cm->width,
3310 cm->height, cm->subsampling_x, cm->subsampling_y,
3311 #if CONFIG_VP9_HIGHBITDEPTH
3312 cm->use_highbitdepth,
3314 VP9_ENC_BORDER_IN_PIXELS))
3315 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
3316 "Failed to allocate denoiser");
// Initialize the motion-search site configuration for the stride of the
// scaled source: NSTEP uses the 3-step pattern, DIAMOND uses the diamond
// pattern. Other search methods need no precomputed configuration here.
3321 static void init_motion_estimation(VP9_COMP *cpi) {
3322 int y_stride = cpi->scaled_source.y_stride;
3324 if (cpi->sf.mv.search_method == NSTEP) {
3325 vp9_init3smotion_compensation(&cpi->ss_cfg, y_stride);
3326 } else if (cpi->sf.mv.search_method == DIAMOND) {
3327 vp9_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
// Establish the coded frame size for the current frame and (re)configure all
// state that depends on it: handle 2-pass VBR and 1-pass CBR dynamic/fixed
// resizing, reset MV-search params and the noise estimator, rebuild the
// denoiser on resize, reallocate the new frame buffer and MV grid, and set
// up per-reference scale factors.
// NOTE(review): elided excerpt — declarations (e.g. |ref_frame|), some
// #else arms, and brace closures are missing from this view.
3331 static void set_frame_size(VP9_COMP *cpi) {
3333 VP9_COMMON *const cm = &cpi->common;
3334 VP9EncoderConfig *const oxcf = &cpi->oxcf;
3335 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
// 2-pass VBR: recompute the coded size on the first frame (fixed resize)
// or when a dynamic resize is pending.
3337 #if !CONFIG_REALTIME_ONLY
3338 if (oxcf->pass == 2 && oxcf->rc_mode == VPX_VBR &&
3339 ((oxcf->resize_mode == RESIZE_FIXED && cm->current_video_frame == 0) ||
3340 (oxcf->resize_mode == RESIZE_DYNAMIC && cpi->resize_pending))) {
3341 calculate_coded_size(cpi, &oxcf->scaled_frame_width,
3342 &oxcf->scaled_frame_height);
3344 // There has been a change in frame size.
3345 vp9_set_size_literal(cpi, oxcf->scaled_frame_width,
3346 oxcf->scaled_frame_height);
3348 #endif // !CONFIG_REALTIME_ONLY
// 1-pass CBR dynamic resize (non-SVC): scale by resize_scale_num/den.
3350 if (oxcf->pass == 0 && oxcf->rc_mode == VPX_CBR && !cpi->use_svc &&
3351 oxcf->resize_mode == RESIZE_DYNAMIC && cpi->resize_pending != 0) {
3352 oxcf->scaled_frame_width =
3353 (oxcf->width * cpi->resize_scale_num) / cpi->resize_scale_den;
3354 oxcf->scaled_frame_height =
3355 (oxcf->height * cpi->resize_scale_num) / cpi->resize_scale_den;
3356 // There has been a change in frame size.
3357 vp9_set_size_literal(cpi, oxcf->scaled_frame_width,
3358 oxcf->scaled_frame_height);
3360 // TODO(agrange) Scale cpi->max_mv_magnitude if frame-size has changed.
3361 set_mv_search_params(cpi);
3363 vp9_noise_estimate_init(&cpi->noise_estimate, cm->width, cm->height);
3364 #if CONFIG_VP9_TEMPORAL_DENOISING
3365 // Reset the denoiser on the resized frame.
3366 if (cpi->oxcf.noise_sensitivity > 0) {
3367 vp9_denoiser_free(&(cpi->denoiser));
3368 setup_denoiser_buffer(cpi);
3369 // Dynamic resize is only triggered for non-SVC, so we can force
3370 // golden frame update here as temporary fix to denoiser.
3371 cpi->refresh_golden_frame = 1;
// 2-pass: refresh the per-frame target rate (skipping empty SVC frames).
3376 if ((oxcf->pass == 2) &&
3377 (!cpi->use_svc || (is_two_pass_svc(cpi) &&
3378 cpi->svc.encode_empty_frame_state != ENCODING))) {
3379 vp9_set_target_rate(cpi);
3382 alloc_frame_mvs(cm, cm->new_fb_idx);
3384 // Reset the frame pointers to the current frame size.
3385 if (vpx_realloc_frame_buffer(get_frame_new_buffer(cm), cm->width, cm->height,
3386 cm->subsampling_x, cm->subsampling_y,
3387 #if CONFIG_VP9_HIGHBITDEPTH
3388 cm->use_highbitdepth,
3390 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment,
3392 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
3393 "Failed to allocate frame buffer");
3395 alloc_util_frame_buffers(cpi);
3396 init_motion_estimation(cpi);
// Wire up scale factors for each active reference so the predictor knows
// how to map reference pixels to the (possibly different) coded size.
3398 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3399 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - 1];
3400 const int buf_idx = get_ref_frame_buf_idx(cpi, ref_frame);
3402 ref_buf->idx = buf_idx;
3404 if (buf_idx != INVALID_IDX) {
3405 YV12_BUFFER_CONFIG *const buf = &cm->buffer_pool->frame_bufs[buf_idx].buf;
3407 #if CONFIG_VP9_HIGHBITDEPTH
3408 vp9_setup_scale_factors_for_frame(
3409 &ref_buf->sf, buf->y_crop_width, buf->y_crop_height, cm->width,
3410 cm->height, (buf->flags & YV12_FLAG_HIGHBITDEPTH) ? 1 : 0);
3412 vp9_setup_scale_factors_for_frame(&ref_buf->sf, buf->y_crop_width,
3413 buf->y_crop_height, cm->width,
3415 #endif // CONFIG_VP9_HIGHBITDEPTH
// Scaled prediction reads outside the crop, so extend borders now.
3416 if (vp9_is_scaled(&ref_buf->sf)) vpx_extend_frame_borders(buf);
3418 ref_buf->buf = NULL;
3422 set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
// Single-shot (no recode loop) encode path used by real-time/1-pass modes:
// choose the frame size, scale the source (with SVC two-stage fast paths),
// optionally compute source SAD for scene detection, scale references, pick
// q, set up the active AQ mode, encode the frame, and — for CBR screen
// content — re-encode once at a higher q if the trial pack overshoots badly.
// NOTE(review): elided excerpt — many lines (else arms, brace closures,
// declarations such as |frame_size|) are missing from this view.
3425 static void encode_without_recode_loop(VP9_COMP *cpi, size_t *size,
3427 VP9_COMMON *const cm = &cpi->common;
3428 int q = 0, bottom_index = 0, top_index = 0; // Dummy variables.
// SVC layers can carry their own downsampling filter type/phase.
3429 const INTERP_FILTER filter_scaler =
3430 (is_one_pass_cbr_svc(cpi))
3431 ? cpi->svc.downsample_filter_type[cpi->svc.spatial_layer_id]
3433 const int phase_scaler =
3434 (is_one_pass_cbr_svc(cpi))
3435 ? cpi->svc.downsample_filter_phase[cpi->svc.spatial_layer_id]
3438 // Flag to check if its valid to compute the source sad (used for
3439 // scene detection and for superblock content state in CBR mode).
3440 // The flag may get reset below based on SVC or resizing state.
3441 cpi->compute_source_sad_onepass = cpi->oxcf.mode == REALTIME;
3443 vpx_clear_system_state();
3445 set_frame_size(cpi);
3447 if (is_one_pass_cbr_svc(cpi) &&
3448 cpi->un_scaled_source->y_width == cm->width << 2 &&
3449 cpi->un_scaled_source->y_height == cm->height << 2 &&
3450 cpi->svc.scaled_temp.y_width == cm->width << 1 &&
3451 cpi->svc.scaled_temp.y_height == cm->height << 1) {
3452 // For svc, if it is a 1/4x1/4 downscaling, do a two-stage scaling to take
3453 // advantage of the 1:2 optimized scaler. In the process, the 1/2x1/2
3454 // result will be saved in scaled_temp and might be used later.
3455 const INTERP_FILTER filter_scaler2 = cpi->svc.downsample_filter_type[1];
3456 const int phase_scaler2 = cpi->svc.downsample_filter_phase[1];
3457 cpi->Source = vp9_svc_twostage_scale(
3458 cm, cpi->un_scaled_source, &cpi->scaled_source, &cpi->svc.scaled_temp,
3459 filter_scaler, phase_scaler, filter_scaler2, phase_scaler2);
3460 cpi->svc.scaled_one_half = 1;
3461 } else if (is_one_pass_cbr_svc(cpi) &&
3462 cpi->un_scaled_source->y_width == cm->width << 1 &&
3463 cpi->un_scaled_source->y_height == cm->height << 1 &&
3464 cpi->svc.scaled_one_half) {
3465 // If the spatial layer is 1/2x1/2 and the scaling is already done in the
3466 // two-stage scaling, use the result directly.
3467 cpi->Source = &cpi->svc.scaled_temp;
3468 cpi->svc.scaled_one_half = 0;
// General case: scale only when the source size differs from coded size.
3470 cpi->Source = vp9_scale_if_required(
3471 cm, cpi->un_scaled_source, &cpi->scaled_source, (cpi->oxcf.pass == 0),
3472 filter_scaler, phase_scaler);
3474 // Unfiltered raw source used in metrics calculation if the source
3475 // has been filtered.
3476 if (is_psnr_calc_enabled(cpi)) {
3477 #ifdef ENABLE_KF_DENOISE
3478 if (is_spatial_denoise_enabled(cpi)) {
3479 cpi->raw_source_frame = vp9_scale_if_required(
3480 cm, &cpi->raw_unscaled_source, &cpi->raw_scaled_source,
3481 (cpi->oxcf.pass == 0), EIGHTTAP, phase_scaler);
3483 cpi->raw_source_frame = cpi->Source;
3486 cpi->raw_source_frame = cpi->Source;
// Source-SAD is unreliable across SVC layer changes or any resize, so
// disable it and wipe the cached per-superblock content state.
3490 if ((cpi->use_svc &&
3491 (cpi->svc.spatial_layer_id < cpi->svc.number_spatial_layers - 1 ||
3492 cpi->svc.temporal_layer_id < cpi->svc.number_temporal_layers - 1 ||
3493 cpi->svc.current_superframe < 1)) ||
3494 cpi->resize_pending || cpi->resize_state || cpi->external_resize ||
3495 cpi->resize_state != ORIG) {
3496 cpi->compute_source_sad_onepass = 0;
3497 if (cpi->content_state_sb_fd != NULL)
3498 memset(cpi->content_state_sb_fd, 0,
3499 (cm->mi_stride >> 3) * ((cm->mi_rows >> 3) + 1) *
3500 sizeof(*cpi->content_state_sb_fd));
3503 // Avoid scaling last_source unless its needed.
3504 // Last source is needed if avg_source_sad() is used, or if
3505 // partition_search_type == SOURCE_VAR_BASED_PARTITION, or if noise
3506 // estimation is enabled.
3507 if (cpi->unscaled_last_source != NULL &&
3508 (cpi->oxcf.content == VP9E_CONTENT_SCREEN ||
3509 (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == VPX_VBR &&
3510 cpi->oxcf.mode == REALTIME && cpi->oxcf.speed >= 5) ||
3511 cpi->sf.partition_search_type == SOURCE_VAR_BASED_PARTITION ||
3512 (cpi->noise_estimate.enabled && !cpi->oxcf.noise_sensitivity) ||
3513 cpi->compute_source_sad_onepass))
3514 cpi->Last_Source = vp9_scale_if_required(
3515 cm, cpi->unscaled_last_source, &cpi->scaled_last_source,
3516 (cpi->oxcf.pass == 0), EIGHTTAP, 0);
// SAD comparison needs matching dimensions between current and last source.
3518 if (cpi->Last_Source == NULL ||
3519 cpi->Last_Source->y_width != cpi->Source->y_width ||
3520 cpi->Last_Source->y_height != cpi->Source->y_height)
3521 cpi->compute_source_sad_onepass = 0;
3523 if (cm->frame_type == KEY_FRAME || cpi->resize_pending != 0) {
3524 memset(cpi->consec_zero_mv, 0,
3525 cm->mi_rows * cm->mi_cols * sizeof(*cpi->consec_zero_mv));
3528 vp9_update_noise_estimate(cpi);
3530 // Scene detection is always used for VBR mode or screen-content case.
3531 // For other cases (e.g., CBR mode) use it for 5 <= speed < 8 for now
3532 // (need to check encoding time cost for doing this for speed 8).
3533 cpi->rc.high_source_sad = 0;
3534 if (cpi->compute_source_sad_onepass && cm->show_frame &&
3535 (cpi->oxcf.rc_mode == VPX_VBR ||
3536 cpi->oxcf.content == VP9E_CONTENT_SCREEN ||
3537 (cpi->oxcf.speed >= 5 && cpi->oxcf.speed < 8 && !cpi->use_svc)))
3538 vp9_scene_detection_onepass(cpi);
3540 // For 1 pass CBR SVC, only ZEROMV is allowed for spatial reference frame
3541 // when svc->force_zero_mode_spatial_ref = 1. Under those conditions we can
3542 // avoid this frame-level upsampling (for non intra_only frames).
3543 if (frame_is_intra_only(cm) == 0 &&
3544 !(is_one_pass_cbr_svc(cpi) && cpi->svc.force_zero_mode_spatial_ref)) {
3545 vp9_scale_references(cpi);
3548 set_size_independent_vars(cpi);
3549 set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
3551 if (cpi->sf.copy_partition_flag) alloc_copy_partition_data(cpi);
// Skin detection only pays off in this specific CBR + cyclic-refresh config.
3553 if (cpi->oxcf.speed >= 5 && cpi->oxcf.pass == 0 &&
3554 cpi->oxcf.rc_mode == VPX_CBR &&
3555 cpi->oxcf.content != VP9E_CONTENT_SCREEN &&
3556 cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3557 cpi->use_skin_detection = 1;
3560 vp9_set_quantizer(cm, q);
3561 vp9_set_variance_partition_thresholds(cpi, q, 0);
3565 suppress_active_map(cpi);
3567 // Variance adaptive and in frame q adjustment experiments are mutually
// Set up whichever adaptive-quantization mode is active for this frame.
3569 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
3570 vp9_vaq_frame_setup(cpi);
3571 } else if (cpi->oxcf.aq_mode == EQUATOR360_AQ) {
3572 vp9_360aq_frame_setup(cpi);
3573 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
3574 vp9_setup_in_frame_q_adj(cpi);
3575 } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3576 vp9_cyclic_refresh_setup(cpi);
3577 } else if (cpi->oxcf.aq_mode == LOOKAHEAD_AQ) {
3578 // it may be pretty bad for rate-control,
3579 // and I should handle it somehow
3580 vp9_alt_ref_aq_setup_map(cpi->alt_ref_aq, cpi);
3583 apply_active_map(cpi);
3585 vp9_encode_frame(cpi);
3587 // Check if we should drop this frame because of high overshoot.
3588 // Only for frames where high temporal-source SAD is detected.
3589 if (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == VPX_CBR &&
3590 cpi->resize_state == ORIG && cm->frame_type != KEY_FRAME &&
3591 cpi->oxcf.content == VP9E_CONTENT_SCREEN &&
3592 cpi->rc.high_source_sad == 1) {
3594 // Get an estimate of the encoded frame size.
// Trial pack: save/restore the coding context so the dry run does not
// perturb entropy state.
3595 save_coding_context(cpi);
3596 vp9_pack_bitstream(cpi, dest, size);
3597 restore_coding_context(cpi);
3598 frame_size = (int)(*size) << 3;
3599 // Check if encoded frame will overshoot too much, and if so, set the q and
3600 // adjust some rate control parameters, and return to re-encode the frame.
3601 if (vp9_encodedframe_overshoot(cpi, frame_size, &q)) {
3602 vpx_clear_system_state();
3603 vp9_set_quantizer(cm, q);
3604 vp9_set_variance_partition_thresholds(cpi, q, 0);
3605 suppress_active_map(cpi);
3606 // Turn-off cyclic refresh for re-encoded frame.
3607 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
3608 unsigned char *const seg_map = cpi->segmentation_map;
3609 memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
3610 vp9_disable_segmentation(&cm->seg);
3612 apply_active_map(cpi);
3613 vp9_encode_frame(cpi);
3617 // Update some stats from cyclic refresh, and check for golden frame update.
3618 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled &&
3619 cm->frame_type != KEY_FRAME)
3620 vp9_cyclic_refresh_postencode(cpi);
3622 // Update the skip mb flag probabilities based on the distribution
3623 // seen in the last encoder iteration.
3624 // update_base_skip_probs(cpi);
3625 vpx_clear_system_state();
// Upper bound on the per-iteration q-index step used by the recode loop.
3628 #define MAX_QSTEP_ADJ 4
// Return a q-index step proportional to the relative rate miss
// (rate_excess / rate_limit, rounded to nearest), clamped to MAX_QSTEP_ADJ.
// A zero rate_limit yields INT_MAX before clamping, i.e. the maximum step.
// NOTE(review): the declaration line of the local (presumably |qstep|) is
// elided from this excerpt.
3629 static int get_qstep_adj(int rate_excess, int rate_limit) {
3631 rate_limit ? ((rate_excess + rate_limit / 2) / rate_limit) : INT_MAX;
3632 return VPXMIN(qstep, MAX_QSTEP_ADJ);
// Multi-pass (recode-loop) encode path: repeatedly encode the frame,
// trial-pack the bitstream, and adjust q between q_low/q_high via the
// rate-correction machinery until the projected size falls inside the
// under/overshoot limits (with special handling for forced key frames and
// overlay frames), then do the final encode/pack. Optionally skips the final
// recode when the context-model delta is below a threshold (enable_acl).
// NOTE(review): heavily elided excerpt — loop header (do/while), several
// declarations (loop, last_q, kf_err, retries, q_val_high, enable_acl) and
// many closing braces are missing from this view.
3635 static void encode_with_recode_loop(VP9_COMP *cpi, size_t *size,
3637 VP9_COMMON *const cm = &cpi->common;
3638 RATE_CONTROL *const rc = &cpi->rc;
3639 int bottom_index, top_index;
3641 int loop_at_this_size = 0;
3643 int overshoot_seen = 0;
3644 int undershoot_seen = 0;
3645 int frame_over_shoot_limit;
3646 int frame_under_shoot_limit;
3647 int q = 0, q_low = 0, q_high = 0;
3649 #ifdef AGGRESSIVE_VBR
3653 set_size_independent_vars(cpi);
// ACL (adaptive coding loop) recode is allowed only for key frames and
// non-shown frames when the speed feature permits it.
3655 enable_acl = cpi->sf.allow_acl
3656 ? (cm->frame_type == KEY_FRAME) || (cm->show_frame == 0)
3660 vpx_clear_system_state();
3662 set_frame_size(cpi);
3664 if (loop_count == 0 || cpi->resize_pending != 0) {
3665 set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
3667 #ifdef AGGRESSIVE_VBR
3668 if (two_pass_first_group_inter(cpi)) {
3669 // Adjustment limits for min and max q
3670 qrange_adj = VPXMAX(1, (top_index - bottom_index) / 2);
3673 VPXMAX(bottom_index - qrange_adj / 2, cpi->oxcf.best_allowed_q);
3675 VPXMIN(cpi->oxcf.worst_allowed_q, top_index + qrange_adj / 2);
3678 // TODO(agrange) Scale cpi->max_mv_magnitude if frame-size has changed.
3679 set_mv_search_params(cpi);
3681 // Reset the loop state for new frame size.
3683 undershoot_seen = 0;
3685 // Reconfiguration for change in frame size has concluded.
3686 cpi->resize_pending = 0;
3688 q_low = bottom_index;
3691 loop_at_this_size = 0;
3694 // Decide frame size bounds first time through.
3695 if (loop_count == 0) {
3696 vp9_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
3697 &frame_under_shoot_limit,
3698 &frame_over_shoot_limit);
3702 vp9_scale_if_required(cm, cpi->un_scaled_source, &cpi->scaled_source,
3703 (cpi->oxcf.pass == 0), EIGHTTAP, 0);
3705 // Unfiltered raw source used in metrics calculation if the source
3706 // has been filtered.
3707 if (is_psnr_calc_enabled(cpi)) {
3708 #ifdef ENABLE_KF_DENOISE
3709 if (is_spatial_denoise_enabled(cpi)) {
3710 cpi->raw_source_frame = vp9_scale_if_required(
3711 cm, &cpi->raw_unscaled_source, &cpi->raw_scaled_source,
3712 (cpi->oxcf.pass == 0), EIGHTTAP, 0);
3714 cpi->raw_source_frame = cpi->Source;
3717 cpi->raw_source_frame = cpi->Source;
3721 if (cpi->unscaled_last_source != NULL)
3722 cpi->Last_Source = vp9_scale_if_required(
3723 cm, cpi->unscaled_last_source, &cpi->scaled_last_source,
3724 (cpi->oxcf.pass == 0), EIGHTTAP, 0);
// References must be rescaled on each recode iteration after the first,
// since the previous iteration released them.
3726 if (frame_is_intra_only(cm) == 0) {
3727 if (loop_count > 0) {
3728 release_scaled_references(cpi);
3730 vp9_scale_references(cpi);
3733 vp9_set_quantizer(cm, q);
3735 if (loop_count == 0) setup_frame(cpi);
3737 // Variance adaptive and in frame q adjustment experiments are mutually
// Per-iteration AQ setup (same dispatch as the no-recode path).
3739 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
3740 vp9_vaq_frame_setup(cpi);
3741 } else if (cpi->oxcf.aq_mode == EQUATOR360_AQ) {
3742 vp9_360aq_frame_setup(cpi);
3743 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
3744 vp9_setup_in_frame_q_adj(cpi);
3745 } else if (cpi->oxcf.aq_mode == LOOKAHEAD_AQ) {
3746 vp9_alt_ref_aq_setup_map(cpi->alt_ref_aq, cpi);
3749 vp9_encode_frame(cpi);
3751 // Update the skip mb flag probabilities based on the distribution
3752 // seen in the last encoder iteration.
3753 // update_base_skip_probs(cpi);
3755 vpx_clear_system_state();
3757 // Dummy pack of the bitstream using up to date stats to get an
3758 // accurate estimate of output frame size to determine if we need
3760 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
3761 save_coding_context(cpi);
3762 if (!cpi->sf.use_nonrd_pick_mode) vp9_pack_bitstream(cpi, dest, size);
3764 rc->projected_frame_size = (int)(*size) << 3;
3766 if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
// Constant-Q mode never recodes for rate; forced key frames get a
// quality-targeted recode based on reconstruction error instead.
3769 if (cpi->oxcf.rc_mode == VPX_Q) {
3772 if ((cm->frame_type == KEY_FRAME) && rc->this_key_frame_forced &&
3773 (rc->projected_frame_size < rc->max_frame_bandwidth)) {
// Target the key-frame error to lie between half and all of the
// ambient error measured on surrounding frames.
3777 int64_t high_err_target = cpi->ambient_err;
3778 int64_t low_err_target = cpi->ambient_err >> 1;
3780 #if CONFIG_VP9_HIGHBITDEPTH
3781 if (cm->use_highbitdepth) {
3782 kf_err = vpx_highbd_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3784 kf_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3787 kf_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
3788 #endif // CONFIG_VP9_HIGHBITDEPTH
3790 // Prevent possible divide by zero error below for perfect KF
3793 // The key frame is not good enough or we can afford
3794 // to make it better without undue risk of popping.
3795 if ((kf_err > high_err_target &&
3796 rc->projected_frame_size <= frame_over_shoot_limit) ||
3797 (kf_err > low_err_target &&
3798 rc->projected_frame_size <= frame_under_shoot_limit)) {
// Lower q (better quality) and narrow the search window downward.
3800 q_high = q > q_low ? q - 1 : q_low;
3803 q = (int)((q * high_err_target) / kf_err);
3804 q = VPXMIN(q, (q_high + q_low) >> 1);
3805 } else if (kf_err < low_err_target &&
3806 rc->projected_frame_size >= frame_under_shoot_limit) {
3807 // The key frame is much better than the previous frame
// Raise q (spend fewer bits) and narrow the window upward.
3809 q_low = q < q_high ? q + 1 : q_high;
3812 q = (int)((q * low_err_target) / kf_err);
3813 q = VPXMIN(q, (q_high + q_low + 1) >> 1);
3816 // Clamp Q to upper and lower limits:
3817 q = clamp(q, q_low, q_high);
3820 } else if (recode_loop_test(cpi, frame_over_shoot_limit,
3821 frame_under_shoot_limit, q,
3822 VPXMAX(q_high, top_index), bottom_index)) {
3823 // Is the projected frame size out of range and are we allowed
3824 // to attempt to recode.
3829 if (cpi->resize_pending == 1) {
3830 // Change in frame size so go back around the recode loop.
3831 cpi->rc.frame_size_selector =
3832 SCALE_STEP1 - cpi->rc.frame_size_selector;
3833 cpi->rc.next_frame_size_selector = cpi->rc.frame_size_selector;
3835 #if CONFIG_INTERNAL_STATS
3836 ++cpi->tot_recode_hits;
3843 // Frame size out of permitted range:
3844 // Update correction factor & compute new Q to try...
3846 // Frame is too large
3847 if (rc->projected_frame_size > rc->this_frame_target) {
3848 // Special case if the projected size is > the max allowed.
3849 if ((q == q_high) &&
3850 ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
3851 (rc->projected_frame_size >=
3852 big_rate_miss_high_threshold(cpi)))) {
// Extreme overshoot at the current ceiling: scale q_high in the
// real-q domain proportionally to the rate miss.
3853 int max_rate = VPXMAX(1, VPXMIN(rc->max_frame_bandwidth,
3854 big_rate_miss_high_threshold(cpi)));
3856 q_val_high = vp9_convert_qindex_to_q(q_high, cm->bit_depth);
3858 q_val_high * ((double)rc->projected_frame_size / max_rate);
3859 q_high = vp9_convert_q_to_qindex(q_val_high, cm->bit_depth);
3860 q_high = clamp(q_high, rc->best_quality, rc->worst_quality);
3863 // Raise Qlow as to at least the current value
3865 get_qstep_adj(rc->projected_frame_size, rc->this_frame_target);
3866 q_low = VPXMIN(q + qstep, q_high);
// Once both directions have been seen, bisect; otherwise let the
// rate-correction model regulate q, retrying if it undershoots q_low.
3868 if (undershoot_seen || loop_at_this_size > 1) {
3869 // Update rate_correction_factor unless
3870 vp9_rc_update_rate_correction_factors(cpi);
3872 q = (q_high + q_low + 1) / 2;
3874 // Update rate_correction_factor unless
3875 vp9_rc_update_rate_correction_factors(cpi);
3877 q = vp9_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
3878 VPXMAX(q_high, top_index));
3880 while (q < q_low && retries < 10) {
3881 vp9_rc_update_rate_correction_factors(cpi);
3882 q = vp9_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
3883 VPXMAX(q_high, top_index));
3890 // Frame is too small
3892 get_qstep_adj(rc->this_frame_target, rc->projected_frame_size);
3893 q_high = VPXMAX(q - qstep, q_low);
3895 if (overshoot_seen || loop_at_this_size > 1) {
3896 vp9_rc_update_rate_correction_factors(cpi);
3897 q = (q_high + q_low) / 2;
3899 vp9_rc_update_rate_correction_factors(cpi);
3900 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3901 VPXMIN(q_low, bottom_index), top_index);
3902 // Special case reset for qlow for constrained quality.
3903 // This should only trigger where there is very substantial
3904 // undershoot on a frame and the auto cq level is above
3905 // the user passsed in value.
3906 if (cpi->oxcf.rc_mode == VPX_CQ && q < q_low) {
3910 while (q > q_high && retries < 10) {
3911 vp9_rc_update_rate_correction_factors(cpi);
3912 q = vp9_rc_regulate_q(cpi, rc->this_frame_target,
3913 VPXMIN(q_low, bottom_index), top_index);
3917 undershoot_seen = 1;
3920 // Clamp Q to upper and lower limits:
3921 q = clamp(q, q_low, q_high);
// Loop again only if q actually moved.
3923 loop = (q != last_q);
3929 // Special case for overlay frame.
3930 if (rc->is_src_frame_alt_ref &&
3931 rc->projected_frame_size < rc->max_frame_bandwidth)
3936 ++loop_at_this_size;
3938 #if CONFIG_INTERNAL_STATS
3939 ++cpi->tot_recode_hits;
3943 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF)
3944 if (loop || !enable_acl) restore_coding_context(cpi);
3947 #ifdef AGGRESSIVE_VBR
3948 if (two_pass_first_group_inter(cpi)) {
// Undo the qrange_adj applied at the top of the loop.
3949 cpi->twopass.active_worst_quality =
3950 VPXMIN(q + qrange_adj, cpi->oxcf.worst_allowed_q);
3951 } else if (!frame_is_kf_gf_arf(cpi)) {
3953 if (!frame_is_kf_gf_arf(cpi)) {
3955 // Have we been forced to adapt Q outside the expected range by an extreme
3956 // rate miss. If so adjust the active maxQ for the subsequent frames.
3957 if (q > cpi->twopass.active_worst_quality) {
3958 cpi->twopass.active_worst_quality = q;
3963 // Skip recoding, if model diff is below threshold
3964 const int thresh = compute_context_model_thresh(cpi);
3965 const int diff = compute_context_model_diff(cm);
3966 if (diff < thresh) {
3967 vpx_clear_system_state();
3968 restore_coding_context(cpi);
// Final ACL recode + pack with the settled context.
3972 vp9_encode_frame(cpi);
3973 vpx_clear_system_state();
3974 restore_coding_context(cpi);
3975 vp9_pack_bitstream(cpi, dest, size);
3977 vp9_encode_frame(cpi);
3978 vpx_clear_system_state();
3980 restore_coding_context(cpi);
3984 static int get_ref_frame_flags(const VP9_COMP *cpi) {
3985 const int *const map = cpi->common.ref_frame_map;
3986 const int gold_is_last = map[cpi->gld_fb_idx] == map[cpi->lst_fb_idx];
3987 const int alt_is_last = map[cpi->alt_fb_idx] == map[cpi->lst_fb_idx];
3988 const int gold_is_alt = map[cpi->gld_fb_idx] == map[cpi->alt_fb_idx];
3989 int flags = VP9_ALT_FLAG | VP9_GOLD_FLAG | VP9_LAST_FLAG;
3991 if (gold_is_last) flags &= ~VP9_GOLD_FLAG;
3993 if (cpi->rc.frames_till_gf_update_due == INT_MAX &&
3994 (cpi->svc.number_temporal_layers == 1 &&
3995 cpi->svc.number_spatial_layers == 1))
3996 flags &= ~VP9_GOLD_FLAG;
3998 if (alt_is_last) flags &= ~VP9_ALT_FLAG;
4000 if (gold_is_alt) flags &= ~VP9_ALT_FLAG;
4005 static void set_ext_overrides(VP9_COMP *cpi) {
4006 // Overrides the defaults with the externally supplied values with
4007 // vp9_update_reference() and vp9_update_entropy() calls
4008 // Note: The overrides are valid only for the next frame passed
4009 // to encode_frame_to_data_rate() function
4010 if (cpi->ext_refresh_frame_context_pending) {
4011 cpi->common.refresh_frame_context = cpi->ext_refresh_frame_context;
4012 cpi->ext_refresh_frame_context_pending = 0;
4014 if (cpi->ext_refresh_frame_flags_pending) {
4015 cpi->refresh_last_frame = cpi->ext_refresh_last_frame;
4016 cpi->refresh_golden_frame = cpi->ext_refresh_golden_frame;
4017 cpi->refresh_alt_ref_frame = cpi->ext_refresh_alt_ref_frame;
4021 YV12_BUFFER_CONFIG *vp9_svc_twostage_scale(
4022 VP9_COMMON *cm, YV12_BUFFER_CONFIG *unscaled, YV12_BUFFER_CONFIG *scaled,
4023 YV12_BUFFER_CONFIG *scaled_temp, INTERP_FILTER filter_type,
4024 int phase_scaler, INTERP_FILTER filter_type2, int phase_scaler2) {
4025 if (cm->mi_cols * MI_SIZE != unscaled->y_width ||
4026 cm->mi_rows * MI_SIZE != unscaled->y_height) {
4027 #if CONFIG_VP9_HIGHBITDEPTH
4028 if (cm->bit_depth == VPX_BITS_8) {
4029 vp9_scale_and_extend_frame(unscaled, scaled_temp, filter_type2,
4031 vp9_scale_and_extend_frame(scaled_temp, scaled, filter_type,
4034 scale_and_extend_frame(unscaled, scaled_temp, (int)cm->bit_depth,
4035 filter_type2, phase_scaler2);
4036 scale_and_extend_frame(scaled_temp, scaled, (int)cm->bit_depth,
4037 filter_type, phase_scaler);
4040 vp9_scale_and_extend_frame(unscaled, scaled_temp, filter_type2,
4042 vp9_scale_and_extend_frame(scaled_temp, scaled, filter_type, phase_scaler);
4043 #endif // CONFIG_VP9_HIGHBITDEPTH
4050 YV12_BUFFER_CONFIG *vp9_scale_if_required(
4051 VP9_COMMON *cm, YV12_BUFFER_CONFIG *unscaled, YV12_BUFFER_CONFIG *scaled,
4052 int use_normative_scaler, INTERP_FILTER filter_type, int phase_scaler) {
4053 if (cm->mi_cols * MI_SIZE != unscaled->y_width ||
4054 cm->mi_rows * MI_SIZE != unscaled->y_height) {
4055 #if CONFIG_VP9_HIGHBITDEPTH
4056 if (use_normative_scaler && unscaled->y_width <= (scaled->y_width << 1) &&
4057 unscaled->y_height <= (scaled->y_height << 1))
4058 if (cm->bit_depth == VPX_BITS_8)
4059 vp9_scale_and_extend_frame(unscaled, scaled, filter_type, phase_scaler);
4061 scale_and_extend_frame(unscaled, scaled, (int)cm->bit_depth,
4062 filter_type, phase_scaler);
4064 scale_and_extend_frame_nonnormative(unscaled, scaled, (int)cm->bit_depth);
4066 if (use_normative_scaler && unscaled->y_width <= (scaled->y_width << 1) &&
4067 unscaled->y_height <= (scaled->y_height << 1))
4068 vp9_scale_and_extend_frame(unscaled, scaled, filter_type, phase_scaler);
4070 scale_and_extend_frame_nonnormative(unscaled, scaled);
4071 #endif // CONFIG_VP9_HIGHBITDEPTH
4078 static void set_arf_sign_bias(VP9_COMP *cpi) {
4079 VP9_COMMON *const cm = &cpi->common;
4082 if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
4083 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4084 arf_sign_bias = cpi->rc.source_alt_ref_active &&
4085 (!cpi->refresh_alt_ref_frame ||
4086 (gf_group->rf_level[gf_group->index] == GF_ARF_LOW));
4089 (cpi->rc.source_alt_ref_active && !cpi->refresh_alt_ref_frame);
4091 cm->ref_frame_sign_bias[ALTREF_FRAME] = arf_sign_bias;
4094 static int setup_interp_filter_search_mask(VP9_COMP *cpi) {
4095 INTERP_FILTER ifilter;
4096 int ref_total[MAX_REF_FRAMES] = { 0 };
4097 MV_REFERENCE_FRAME ref;
4099 if (cpi->common.last_frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame)
4101 for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
4102 for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter)
4103 ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
4105 for (ifilter = EIGHTTAP; ifilter <= EIGHTTAP_SHARP; ++ifilter) {
4106 if ((ref_total[LAST_FRAME] &&
4107 cpi->interp_filter_selected[LAST_FRAME][ifilter] == 0) &&
4108 (ref_total[GOLDEN_FRAME] == 0 ||
4109 cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 50 <
4110 ref_total[GOLDEN_FRAME]) &&
4111 (ref_total[ALTREF_FRAME] == 0 ||
4112 cpi->interp_filter_selected[ALTREF_FRAME][ifilter] * 50 <
4113 ref_total[ALTREF_FRAME]))
4114 mask |= 1 << ifilter;
4119 #ifdef ENABLE_KF_DENOISE
4120 // Baseline Kernal weights for denoise
// Centre-weighted 3x3 and 5x5 kernel weight tables (row-major) used by the
// key-frame spatial denoiser below.
4121 static uint8_t dn_kernal_3[9] = { 1, 2, 1, 2, 4, 2, 1, 2, 1 };
4122 static uint8_t dn_kernal_5[25] = { 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 4,
4123 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1 };
// Fold one neighbour sample into the weighted denoise accumulators, but
// only when it lies within `thresh` of the centre sample. Outliers are
// excluded so that real edges are not blurred away.
static void add_denoise_point(int centre_val, int data_val, int thresh,
                              uint8_t point_weight, int *sum_val,
                              int *sum_weight) {
  const int deviation = abs(centre_val - data_val);
  if (deviation <= thresh) {
    *sum_weight += point_weight;
    *sum_val += data_val * (int)point_weight;
  }
}
4134 static void spatial_denoise_point(uint8_t *src_ptr, const int stride,
4135 const int strength) {
4138 int thresh = strength;
4139 int kernal_size = 5;
4140 int half_k_size = 2;
4144 uint8_t *kernal_ptr;
4146 // Find the maximum deviation from the source point in the locale.
4147 tmp_ptr = src_ptr - (stride * (half_k_size + 1)) - (half_k_size + 1);
4148 for (i = 0; i < kernal_size + 2; ++i) {
4149 for (j = 0; j < kernal_size + 2; ++j) {
4150 max_diff = VPXMAX(max_diff, abs((int)*src_ptr - (int)tmp_ptr[j]));
4155 // Select the kernal size.
4156 if (max_diff > (strength + (strength >> 1))) {
4159 thresh = thresh >> 1;
4161 kernal_ptr = (kernal_size == 3) ? dn_kernal_3 : dn_kernal_5;
4164 tmp_ptr = src_ptr - (stride * half_k_size) - half_k_size;
4165 for (i = 0; i < kernal_size; ++i) {
4166 for (j = 0; j < kernal_size; ++j) {
4167 add_denoise_point((int)*src_ptr, (int)tmp_ptr[j], thresh, *kernal_ptr,
4168 &sum_val, &sum_weight);
4174 // Update the source value with the new filtered value
4175 *src_ptr = (uint8_t)((sum_val + (sum_weight >> 1)) / sum_weight);
#if CONFIG_VP9_HIGHBITDEPTH
// High bit-depth variant of spatial_denoise_point(): denoise one 16-bit
// sample in place with a thresholded, centre-weighted kernel average.
// A high local deviation shrinks the kernel to 3x3 and halves the
// threshold, preserving edges.
static void highbd_spatial_denoise_point(uint16_t *src_ptr, const int stride,
                                         const int strength) {
  int sum_weight = 0;
  int sum_val = 0;
  int thresh = strength;
  int kernal_size = 5;
  int half_k_size = 2;
  int i, j;
  int max_diff = 0;
  uint16_t *tmp_ptr;
  uint8_t *kernal_ptr;

  // Find the maximum deviation from the source point in the locale.
  tmp_ptr = src_ptr - (stride * (half_k_size + 1)) - (half_k_size + 1);
  for (i = 0; i < kernal_size + 2; ++i) {
    for (j = 0; j < kernal_size + 2; ++j) {
      // Bug fix: the original computed abs((int)src_ptr - ...), comparing
      // the POINTER value with the neighbouring sample, so max_diff was
      // meaningless. Compare the centre SAMPLE value (*src_ptr) instead,
      // matching the 8-bit spatial_denoise_point() above.
      max_diff = VPXMAX(max_diff, abs((int)*src_ptr - (int)tmp_ptr[j]));
    }
    tmp_ptr += stride;
  }

  // Select the kernal size.
  if (max_diff > (strength + (strength >> 1))) {
    kernal_size = 3;
    half_k_size = 1;
    thresh = thresh >> 1;
  }
  kernal_ptr = (kernal_size == 3) ? dn_kernal_3 : dn_kernal_5;

  // Apply the kernel, accumulating only points within the threshold.
  tmp_ptr = src_ptr - (stride * half_k_size) - half_k_size;
  for (i = 0; i < kernal_size; ++i) {
    for (j = 0; j < kernal_size; ++j) {
      add_denoise_point((int)*src_ptr, (int)tmp_ptr[j], thresh, *kernal_ptr,
                        &sum_val, &sum_weight);
      ++kernal_ptr;
    }
    tmp_ptr += stride;
  }

  // Update the source value with the new filtered value (rounded).
  *src_ptr = (uint16_t)((sum_val + (sum_weight >> 1)) / sum_weight);
}
#endif  // CONFIG_VP9_HIGHBITDEPTH
4224 // Apply thresholded spatial noise supression to a given buffer.
4225 static void spatial_denoise_buffer(VP9_COMP *cpi, uint8_t *buffer,
4226 const int stride, const int width,
4227 const int height, const int strength) {
4228 VP9_COMMON *const cm = &cpi->common;
4229 uint8_t *src_ptr = buffer;
4233 for (row = 0; row < height; ++row) {
4234 for (col = 0; col < width; ++col) {
4235 #if CONFIG_VP9_HIGHBITDEPTH
4236 if (cm->use_highbitdepth)
4237 highbd_spatial_denoise_point(CONVERT_TO_SHORTPTR(&src_ptr[col]), stride,
4240 spatial_denoise_point(&src_ptr[col], stride, strength);
4242 spatial_denoise_point(&src_ptr[col], stride, strength);
4243 #endif // CONFIG_VP9_HIGHBITDEPTH
4249 // Apply thresholded spatial noise supression to source.
4250 static void spatial_denoise_frame(VP9_COMP *cpi) {
4251 YV12_BUFFER_CONFIG *src = cpi->Source;
4252 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
4253 TWO_PASS *const twopass = &cpi->twopass;
4254 VP9_COMMON *const cm = &cpi->common;
4256 // Base the filter strength on the current active max Q.
4257 const int q = (int)(vp9_convert_qindex_to_q(twopass->active_worst_quality,
4260 VPXMAX(oxcf->arnr_strength >> 2, VPXMIN(oxcf->arnr_strength, (q >> 4)));
4262 // Denoise each of Y,U and V buffers.
4263 spatial_denoise_buffer(cpi, src->y_buffer, src->y_stride, src->y_width,
4264 src->y_height, strength);
4266 strength += (strength >> 1);
4267 spatial_denoise_buffer(cpi, src->u_buffer, src->uv_stride, src->uv_width,
4268 src->uv_height, strength << 1);
4270 spatial_denoise_buffer(cpi, src->v_buffer, src->uv_stride, src->uv_width,
4271 src->uv_height, strength << 1);
4273 #endif // ENABLE_KF_DENOISE
4275 static void vp9_try_disable_lookahead_aq(VP9_COMP *cpi, size_t *size,
4277 if (cpi->common.seg.enabled)
4278 if (ALT_REF_AQ_PROTECT_GAIN) {
4279 size_t nsize = *size;
4282 // TODO(yuryg): optimize this, as
4283 // we don't really need to repack
4285 save_coding_context(cpi);
4286 vp9_disable_segmentation(&cpi->common.seg);
4287 vp9_pack_bitstream(cpi, dest, &nsize);
4288 restore_coding_context(cpi);
4290 overhead = (int)*size - (int)nsize;
4292 if (vp9_alt_ref_aq_disable_if(cpi->alt_ref_aq, overhead, (int)*size))
4293 vp9_encode_frame(cpi);
4295 vp9_enable_segmentation(&cpi->common.seg);
// Top-level per-frame encode: applies external API overrides, configures
// frame-type / error-resilience / SVC state, optionally drops the frame
// (1-pass CBR), runs the encode loop (with or without recode), picks the
// loop-filter level, packs the bitstream and updates rate control,
// entropy contexts and frame counters.
// NOTE(review): this chunk was extracted with original line numbers fused
// into the text and some structural lines dropped; code tokens are kept
// verbatim here, only comments are added.
4299 static void encode_frame_to_data_rate(VP9_COMP *cpi, size_t *size,
4301 unsigned int *frame_flags) {
4302 VP9_COMMON *const cm = &cpi->common;
4303 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
4304 struct segmentation *const seg = &cm->seg;
4307 set_ext_overrides(cpi);
4308 vpx_clear_system_state();
4310 #ifdef ENABLE_KF_DENOISE
4311 // Spatial denoise of key frame.
4312 if (is_spatial_denoise_enabled(cpi)) spatial_denoise_frame(cpi);
4315 // Set the arf sign bias for this frame.
4316 set_arf_sign_bias(cpi);
4318 // Set default state for segment based loop filter update flags.
4319 cm->lf.mode_ref_delta_update = 0;
4321 if (cpi->oxcf.pass == 2 && cpi->sf.adaptive_interp_filter_search)
4322 cpi->sf.interp_filter_search_mask = setup_interp_filter_search_mask(cpi);
4324 // Set various flags etc to special state if it is a key frame.
4325 if (frame_is_intra_only(cm)) {
4326 // Reset the loop filter deltas and segmentation map.
4327 vp9_reset_segment_features(&cm->seg);
4329 // If segmentation is enabled force a map update for key frames.
4331 seg->update_map = 1;
4332 seg->update_data = 1;
4335 // The alternate reference frame cannot be active for a key frame.
4336 cpi->rc.source_alt_ref_active = 0;
4338 cm->error_resilient_mode = oxcf->error_resilient_mode;
4339 cm->frame_parallel_decoding_mode = oxcf->frame_parallel_decoding_mode;
4341 // By default, encoder assumes decoder can use prev_mi.
4342 if (cm->error_resilient_mode) {
4343 cm->frame_parallel_decoding_mode = 1;
4344 cm->reset_frame_context = 0;
4345 cm->refresh_frame_context = 0;
4346 } else if (cm->intra_only) {
4347 // Only reset the current context.
4348 cm->reset_frame_context = 2;
// SVC: choose which of the FRAME_CONTEXTS entropy contexts this layer uses.
4351 if (is_two_pass_svc(cpi) && cm->error_resilient_mode == 0) {
4352 // Use context 0 for intra only empty frame, but the last frame context
4353 // for other empty frames.
4354 if (cpi->svc.encode_empty_frame_state == ENCODING) {
4355 if (cpi->svc.encode_intra_empty_frame != 0)
4356 cm->frame_context_idx = 0;
4358 cm->frame_context_idx = FRAME_CONTEXTS - 1;
4360 cm->frame_context_idx =
4361 cpi->svc.spatial_layer_id * cpi->svc.number_temporal_layers +
4362 cpi->svc.temporal_layer_id;
4365 cm->frame_parallel_decoding_mode = oxcf->frame_parallel_decoding_mode;
4367 // The probs will be updated based on the frame type of its previous
4368 // frame if frame_parallel_decoding_mode is 0. The type may vary for
4369 // the frame after a key frame in base layer since we may drop enhancement
4370 // layers. So set frame_parallel_decoding_mode to 1 in this case.
4371 if (cm->frame_parallel_decoding_mode == 0) {
4372 if (cpi->svc.number_temporal_layers == 1) {
4373 if (cpi->svc.spatial_layer_id == 0 &&
4374 cpi->svc.layer_context[0].last_frame_type == KEY_FRAME)
4375 cm->frame_parallel_decoding_mode = 1;
4376 } else if (cpi->svc.spatial_layer_id == 0) {
4377 // Find the 2nd frame in temporal base layer and 1st frame in temporal
4378 // enhancement layers from the key frame.
4380 for (i = 0; i < cpi->svc.number_temporal_layers; ++i) {
4381 if (cpi->svc.layer_context[0].frames_from_key_frame == 1 << i) {
4382 cm->frame_parallel_decoding_mode = 1;
4390 // For 1 pass CBR, check if we are dropping this frame.
4391 // For spatial layers, for now only check for frame-dropping on first spatial
4392 // layer, and if decision is to drop, we drop whole super-frame.
4393 if (oxcf->pass == 0 && oxcf->rc_mode == VPX_CBR &&
4394 cm->frame_type != KEY_FRAME) {
4395 if (vp9_rc_drop_frame(cpi) ||
4396 (is_one_pass_cbr_svc(cpi) && cpi->svc.rc_drop_superframe == 1)) {
4397 vp9_rc_postencode_update_drop_frame(cpi);
4398 ++cm->current_video_frame;
4399 cpi->ext_refresh_frame_flags_pending = 0;
4400 cpi->svc.rc_drop_superframe = 1;
4401 cpi->last_frame_dropped = 1;
4402 // TODO(marpan): Advancing the svc counters on dropped frames can break
4403 // the referencing scheme for the fixed svc patterns defined in
4404 // vp9_one_pass_cbr_svc_start_layer(). Look into fixing this issue, but
4405 // for now, don't advance the svc frame counters on dropped frame.
4406 // if (cpi->use_svc)
4407 // vp9_inc_frame_in_layer(cpi);
4413 vpx_clear_system_state();
4415 #if CONFIG_INTERNAL_STATS
4416 memset(cpi->mode_chosen_counts, 0,
4417 MAX_MODES * sizeof(*cpi->mode_chosen_counts));
// Run the actual encode; the recode loop may re-encode at new q values.
4420 if (cpi->sf.recode_loop == DISALLOW_RECODE) {
4421 encode_without_recode_loop(cpi, size, dest);
4423 encode_with_recode_loop(cpi, size, dest);
4426 cpi->last_frame_dropped = 0;
4428 // Disable segmentation if it decrease rate/distortion ratio
4429 if (cpi->oxcf.aq_mode == LOOKAHEAD_AQ)
4430 vp9_try_disable_lookahead_aq(cpi, size, dest);
4432 #if CONFIG_VP9_TEMPORAL_DENOISING
4433 #ifdef OUTPUT_YUV_DENOISED
4434 if (oxcf->noise_sensitivity > 0 && denoise_svc(cpi)) {
4435 vpx_write_yuv_frame(yuv_denoised_file,
4436 &cpi->denoiser.running_avg_y[INTRA_FRAME]);
4440 #ifdef OUTPUT_YUV_SKINMAP
4441 if (cpi->common.current_video_frame > 1) {
4442 vp9_output_skin_map(cpi, yuv_skinmap_file);
4446 // Special case code to reduce pulsing when key frames are forced at a
4447 // fixed interval. Note the reconstruction error if it is the frame before
4448 // the force key frame
4449 if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
4450 #if CONFIG_VP9_HIGHBITDEPTH
4451 if (cm->use_highbitdepth) {
4453 vpx_highbd_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
4455 cpi->ambient_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
4458 cpi->ambient_err = vpx_get_y_sse(cpi->Source, get_frame_new_buffer(cm));
4459 #endif // CONFIG_VP9_HIGHBITDEPTH
4462 // If the encoder forced a KEY_FRAME decision
4463 if (cm->frame_type == KEY_FRAME) cpi->refresh_last_frame = 1;
4465 cm->frame_to_show = get_frame_new_buffer(cm);
4466 cm->frame_to_show->color_space = cm->color_space;
4467 cm->frame_to_show->color_range = cm->color_range;
4468 cm->frame_to_show->render_width = cm->render_width;
4469 cm->frame_to_show->render_height = cm->render_height;
4471 // Pick the loop filter level for the frame.
4472 loopfilter_frame(cpi, cm);
4474 // build the bitstream
4475 vp9_pack_bitstream(cpi, dest, size);
4477 if (cm->seg.update_map) update_reference_segmentation_map(cpi);
4479 if (frame_is_intra_only(cm) == 0) {
4480 release_scaled_references(cpi);
4482 vp9_update_reference_frames(cpi);
// Adapt entropy probabilities for the next frame, unless forbidden by
// error resilience or frame-parallel decoding mode.
4484 for (t = TX_4X4; t <= TX_32X32; t++)
4485 full_to_model_counts(cpi->td.counts->coef[t],
4486 cpi->td.rd_counts.coef_counts[t]);
4488 if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode)
4489 vp9_adapt_coef_probs(cm);
4491 if (!frame_is_intra_only(cm)) {
4492 if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode) {
4493 vp9_adapt_mode_probs(cm);
4494 vp9_adapt_mv_probs(cm, cm->allow_high_precision_mv);
4498 cpi->ext_refresh_frame_flags_pending = 0;
// Report which reference buffers were refreshed back to the caller.
4500 if (cpi->refresh_golden_frame == 1)
4501 cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
4503 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
4505 if (cpi->refresh_alt_ref_frame == 1)
4506 cpi->frame_flags |= FRAMEFLAGS_ALTREF;
4508 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
4510 cpi->ref_frame_flags = get_ref_frame_flags(cpi);
4512 cm->last_frame_type = cm->frame_type;
4514 if (!(is_two_pass_svc(cpi) && cpi->svc.encode_empty_frame_state == ENCODING))
4515 vp9_rc_postencode_update(cpi, *size);
4518 output_frame_level_debug_stats(cpi);
4521 if (cm->frame_type == KEY_FRAME) {
4522 // Tell the caller that the frame was coded as a key frame
4523 *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
4525 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
4528 // Clear the one shot update flags for segmentation map and mode/ref loop
4530 cm->seg.update_map = 0;
4531 cm->seg.update_data = 0;
4532 cm->lf.mode_ref_delta_update = 0;
4534 // keep track of the last coded dimensions
4535 cm->last_width = cm->width;
4536 cm->last_height = cm->height;
4538 // reset to normal state now that we are done.
4539 if (!cm->show_existing_frame) cm->last_show_frame = cm->show_frame;
4541 if (cm->show_frame) {
4542 vp9_swap_mi_and_prev_mi(cm);
4543 // Don't increment frame counters if this was an altref buffer
4544 // update not a real frame
4545 ++cm->current_video_frame;
4546 if (cpi->use_svc) vp9_inc_frame_in_layer(cpi);
4548 cm->prev_frame = cm->cur_frame;
// Record this frame's type in the active SVC layer context.
4552 .layer_context[cpi->svc.spatial_layer_id *
4553 cpi->svc.number_temporal_layers +
4554 cpi->svc.temporal_layer_id]
4555 .last_frame_type = cm->frame_type;
4557 cpi->force_update_segmentation = 0;
4559 if (cpi->oxcf.aq_mode == LOOKAHEAD_AQ)
4560 vp9_alt_ref_aq_unset_all(cpi->alt_ref_aq, cpi);
4563 static void SvcEncode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
4564 unsigned int *frame_flags) {
4565 vp9_rc_get_svc_params(cpi);
4566 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
4569 static void Pass0Encode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
4570 unsigned int *frame_flags) {
4571 if (cpi->oxcf.rc_mode == VPX_CBR) {
4572 vp9_rc_get_one_pass_cbr_params(cpi);
4574 vp9_rc_get_one_pass_vbr_params(cpi);
4576 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
4579 #if !CONFIG_REALTIME_ONLY
4580 static void Pass2Encode(VP9_COMP *cpi, size_t *size, uint8_t *dest,
4581 unsigned int *frame_flags) {
4582 cpi->allow_encode_breakout = ENCODE_BREAKOUT_ENABLED;
4583 encode_frame_to_data_rate(cpi, size, dest, frame_flags);
4585 if (!(is_two_pass_svc(cpi) && cpi->svc.encode_empty_frame_state == ENCODING))
4586 vp9_twopass_postencode_update(cpi);
4588 #endif // !CONFIG_REALTIME_ONLY
4590 static void init_ref_frame_bufs(VP9_COMMON *cm) {
4592 BufferPool *const pool = cm->buffer_pool;
4593 cm->new_fb_idx = INVALID_IDX;
4594 for (i = 0; i < REF_FRAMES; ++i) {
4595 cm->ref_frame_map[i] = INVALID_IDX;
4596 pool->frame_bufs[i].ref_count = 0;
4600 static void check_initial_width(VP9_COMP *cpi,
4601 #if CONFIG_VP9_HIGHBITDEPTH
4602 int use_highbitdepth,
4604 int subsampling_x, int subsampling_y) {
4605 VP9_COMMON *const cm = &cpi->common;
4607 if (!cpi->initial_width ||
4608 #if CONFIG_VP9_HIGHBITDEPTH
4609 cm->use_highbitdepth != use_highbitdepth ||
4611 cm->subsampling_x != subsampling_x ||
4612 cm->subsampling_y != subsampling_y) {
4613 cm->subsampling_x = subsampling_x;
4614 cm->subsampling_y = subsampling_y;
4615 #if CONFIG_VP9_HIGHBITDEPTH
4616 cm->use_highbitdepth = use_highbitdepth;
4619 alloc_raw_frame_buffers(cpi);
4620 init_ref_frame_bufs(cm);
4621 alloc_util_frame_buffers(cpi);
4623 init_motion_estimation(cpi); // TODO(agrange) This can be removed.
4625 cpi->initial_width = cm->width;
4626 cpi->initial_height = cm->height;
4627 cpi->initial_mbs = cm->MBs;
4631 int vp9_receive_raw_frame(VP9_COMP *cpi, vpx_enc_frame_flags_t frame_flags,
4632 YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
4634 VP9_COMMON *const cm = &cpi->common;
4635 struct vpx_usec_timer timer;
4637 const int subsampling_x = sd->subsampling_x;
4638 const int subsampling_y = sd->subsampling_y;
4639 #if CONFIG_VP9_HIGHBITDEPTH
4640 const int use_highbitdepth = (sd->flags & YV12_FLAG_HIGHBITDEPTH) != 0;
4643 #if CONFIG_VP9_HIGHBITDEPTH
4644 check_initial_width(cpi, use_highbitdepth, subsampling_x, subsampling_y);
4646 check_initial_width(cpi, subsampling_x, subsampling_y);
4647 #endif // CONFIG_VP9_HIGHBITDEPTH
4649 #if CONFIG_VP9_TEMPORAL_DENOISING
4650 setup_denoiser_buffer(cpi);
4652 vpx_usec_timer_start(&timer);
4654 if (vp9_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
4655 #if CONFIG_VP9_HIGHBITDEPTH
4657 #endif // CONFIG_VP9_HIGHBITDEPTH
4660 vpx_usec_timer_mark(&timer);
4661 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
4663 if ((cm->profile == PROFILE_0 || cm->profile == PROFILE_2) &&
4664 (subsampling_x != 1 || subsampling_y != 1)) {
4665 vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
4666 "Non-4:2:0 color format requires profile 1 or 3");
4669 if ((cm->profile == PROFILE_1 || cm->profile == PROFILE_3) &&
4670 (subsampling_x == 1 && subsampling_y == 1)) {
4671 vpx_internal_error(&cm->error, VPX_CODEC_INVALID_PARAM,
4672 "4:2:0 color format requires profile 0 or 2");
4679 static int frame_is_reference(const VP9_COMP *cpi) {
4680 const VP9_COMMON *cm = &cpi->common;
4682 return cm->frame_type == KEY_FRAME || cpi->refresh_last_frame ||
4683 cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame ||
4684 cm->refresh_frame_context || cm->lf.mode_ref_delta_update ||
4685 cm->seg.update_map || cm->seg.update_data;
4688 static void adjust_frame_rate(VP9_COMP *cpi,
4689 const struct lookahead_entry *source) {
4690 int64_t this_duration;
4693 if (source->ts_start == cpi->first_time_stamp_ever) {
4694 this_duration = source->ts_end - source->ts_start;
4697 int64_t last_duration =
4698 cpi->last_end_time_stamp_seen - cpi->last_time_stamp_seen;
4700 this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
4702 // do a step update if the duration changes by 10%
4704 step = (int)((this_duration - last_duration) * 10 / last_duration);
4707 if (this_duration) {
4709 vp9_new_framerate(cpi, 10000000.0 / this_duration);
4711 // Average this frame's rate into the last second's average
4712 // frame rate. If we haven't seen 1 second yet, then average
4713 // over the whole interval seen.
4714 const double interval = VPXMIN(
4715 (double)(source->ts_end - cpi->first_time_stamp_ever), 10000000.0);
4716 double avg_duration = 10000000.0 / cpi->framerate;
4717 avg_duration *= (interval - avg_duration + this_duration);
4718 avg_duration /= interval;
4720 vp9_new_framerate(cpi, 10000000.0 / avg_duration);
4723 cpi->last_time_stamp_seen = source->ts_start;
4724 cpi->last_end_time_stamp_seen = source->ts_end;
4727 // Returns 0 if this is not an alt ref else the offset of the source frame
4728 // used as the arf midpoint.
4729 static int get_arf_src_index(VP9_COMP *cpi) {
4730 RATE_CONTROL *const rc = &cpi->rc;
4731 int arf_src_index = 0;
4732 if (is_altref_enabled(cpi)) {
4733 if (cpi->oxcf.pass == 2) {
4734 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4735 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
4736 arf_src_index = gf_group->arf_src_offset[gf_group->index];
4738 } else if (rc->source_alt_ref_pending) {
4739 arf_src_index = rc->frames_till_gf_update_due;
4742 return arf_src_index;
4745 static void check_src_altref(VP9_COMP *cpi,
4746 const struct lookahead_entry *source) {
4747 RATE_CONTROL *const rc = &cpi->rc;
4749 if (cpi->oxcf.pass == 2) {
4750 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4751 rc->is_src_frame_alt_ref =
4752 (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
4754 rc->is_src_frame_alt_ref =
4755 cpi->alt_ref_source && (source == cpi->alt_ref_source);
4758 if (rc->is_src_frame_alt_ref) {
4759 // Current frame is an ARF overlay frame.
4760 cpi->alt_ref_source = NULL;
4762 // Don't refresh the last buffer for an ARF overlay frame. It will
4763 // become the GF so preserve last as an alternative prediction option.
4764 cpi->refresh_last_frame = 0;
4768 #if CONFIG_INTERNAL_STATS
4769 extern double vp9_get_blockiness(const uint8_t *img1, int img1_pitch,
4770 const uint8_t *img2, int img2_pitch, int width,
4773 static void adjust_image_stat(double y, double u, double v, double all,
4778 s->stat[ALL] += all;
4779 s->worst = VPXMIN(s->worst, all);
4781 #endif // CONFIG_INTERNAL_STATS
4783 // Adjust the maximum allowable frame size for the target level.
4784 static void level_rc_framerate(VP9_COMP *cpi, int arf_src_index) {
4785 RATE_CONTROL *const rc = &cpi->rc;
4786 LevelConstraint *const ls = &cpi->level_constraint;
4787 VP9_COMMON *const cm = &cpi->common;
4788 const double max_cpb_size = ls->max_cpb_size;
4789 vpx_clear_system_state();
4790 rc->max_frame_bandwidth = VPXMIN(rc->max_frame_bandwidth, ls->max_frame_size);
4791 if (frame_is_intra_only(cm)) {
4792 rc->max_frame_bandwidth =
4793 VPXMIN(rc->max_frame_bandwidth, (int)(max_cpb_size * 0.5));
4794 } else if (arf_src_index > 0) {
4795 rc->max_frame_bandwidth =
4796 VPXMIN(rc->max_frame_bandwidth, (int)(max_cpb_size * 0.4));
4798 rc->max_frame_bandwidth =
4799 VPXMIN(rc->max_frame_bandwidth, (int)(max_cpb_size * 0.2));
4803 static void update_level_info(VP9_COMP *cpi, size_t *size, int arf_src_index) {
4804 VP9_COMMON *const cm = &cpi->common;
4805 Vp9LevelInfo *const level_info = &cpi->level_info;
4806 Vp9LevelSpec *const level_spec = &level_info->level_spec;
4807 Vp9LevelStats *const level_stats = &level_info->level_stats;
4809 uint64_t luma_samples, dur_end;
4810 const uint32_t luma_pic_size = cm->width * cm->height;
4811 LevelConstraint *const level_constraint = &cpi->level_constraint;
4812 const int8_t level_index = level_constraint->level_index;
4813 double cpb_data_size;
4815 vpx_clear_system_state();
4817 // update level_stats
4818 level_stats->total_compressed_size += *size;
4819 if (cm->show_frame) {
4820 level_stats->total_uncompressed_size +=
4822 2 * (luma_pic_size >> (cm->subsampling_x + cm->subsampling_y));
4823 level_stats->time_encoded =
4824 (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
4825 (double)TICKS_PER_SEC;
4828 if (arf_src_index > 0) {
4829 if (!level_stats->seen_first_altref) {
4830 level_stats->seen_first_altref = 1;
4831 } else if (level_stats->frames_since_last_altref <
4832 level_spec->min_altref_distance) {
4833 level_spec->min_altref_distance = level_stats->frames_since_last_altref;
4835 level_stats->frames_since_last_altref = 0;
4837 ++level_stats->frames_since_last_altref;
4840 if (level_stats->frame_window_buffer.len < FRAME_WINDOW_SIZE - 1) {
4841 idx = (level_stats->frame_window_buffer.start +
4842 level_stats->frame_window_buffer.len++) %
4845 idx = level_stats->frame_window_buffer.start;
4846 level_stats->frame_window_buffer.start = (idx + 1) % FRAME_WINDOW_SIZE;
4848 level_stats->frame_window_buffer.buf[idx].ts = cpi->last_time_stamp_seen;
4849 level_stats->frame_window_buffer.buf[idx].size = (uint32_t)(*size);
4850 level_stats->frame_window_buffer.buf[idx].luma_samples = luma_pic_size;
4852 if (cm->frame_type == KEY_FRAME) {
4853 level_stats->ref_refresh_map = 0;
4856 level_stats->ref_refresh_map |= vp9_get_refresh_mask(cpi);
4857 // Also need to consider the case where the encoder refers to a buffer
4858 // that has been implicitly refreshed after encoding a keyframe.
4859 if (!cm->intra_only) {
4860 level_stats->ref_refresh_map |= (1 << cpi->lst_fb_idx);
4861 level_stats->ref_refresh_map |= (1 << cpi->gld_fb_idx);
4862 level_stats->ref_refresh_map |= (1 << cpi->alt_fb_idx);
4864 for (i = 0; i < REF_FRAMES; ++i) {
4865 count += (level_stats->ref_refresh_map >> i) & 1;
4867 if (count > level_spec->max_ref_frame_buffers) {
4868 level_spec->max_ref_frame_buffers = count;
4872 // update average_bitrate
4873 level_spec->average_bitrate = (double)level_stats->total_compressed_size /
4874 125.0 / level_stats->time_encoded;
4876 // update max_luma_sample_rate
4878 for (i = 0; i < level_stats->frame_window_buffer.len; ++i) {
4879 idx = (level_stats->frame_window_buffer.start +
4880 level_stats->frame_window_buffer.len - 1 - i) %
// NOTE(review): tail of a level-statistics updater (the enclosing function's
// signature is above this chunk; in libvpx this code lives in
// update_level_info()). The embedded numbers are original-file line numbers
// from a lossy extraction — interior lines are missing; code kept verbatim.
// Visible behavior: maintain running maxima in level_spec (luma sample rate,
// CPB size, picture size, tile columns), then validate them against the
// target level in vp9_level_defs[], raising vpx_internal_error on violation,
// and finally derive an upper bound for the next frame's size.
4883 dur_end = level_stats->frame_window_buffer.buf[idx].ts;
4885 if (dur_end - level_stats->frame_window_buffer.buf[idx].ts >=
4889 luma_samples += level_stats->frame_window_buffer.buf[idx].luma_samples;
// Track the worst-case luma sample rate seen so far.
4891 if (luma_samples > level_spec->max_luma_sample_rate) {
4892 level_spec->max_luma_sample_rate = luma_samples;
4895 // update max_cpb_size
// Sum the sizes of the most recent frames in the circular window buffer
// (index walks backwards from the newest entry, modulo the buffer length).
4897 for (i = 0; i < CPB_WINDOW_SIZE; ++i) {
4898 if (i >= level_stats->frame_window_buffer.len) break;
4899 idx = (level_stats->frame_window_buffer.start +
4900 level_stats->frame_window_buffer.len - 1 - i) %
4902 cpb_data_size += level_stats->frame_window_buffer.buf[idx].size;
// Convert bytes to kilobits (x * 8 / 1000 == x / 125).
4904 cpb_data_size = cpb_data_size / 125.0;
4905 if (cpb_data_size > level_spec->max_cpb_size) {
4906 level_spec->max_cpb_size = cpb_data_size;
4909 // update max_luma_picture_size
4910 if (luma_pic_size > level_spec->max_luma_picture_size) {
4911 level_spec->max_luma_picture_size = luma_pic_size;
4914 // update compression_ratio
// Ratio of uncompressed to compressed totals; the "/ 8.0" converts the
// compressed byte count — presumably to bits-based units, TODO confirm.
4915 level_spec->compression_ratio = (double)level_stats->total_uncompressed_size *
4917 level_stats->total_compressed_size / 8.0;
4919 // update max_col_tiles
4920 if (level_spec->max_col_tiles < (1 << cm->log2_tile_cols)) {
4921 level_spec->max_col_tiles = (1 << cm->log2_tile_cols);
// Level-constraint enforcement: each violated limit sets a distinct bit in
// fail_flag and reports a fatal error naming the failed target level.
4924 if (level_index >= 0 && level_constraint->fail_flag == 0) {
4925 if (level_spec->max_luma_picture_size >
4926 vp9_level_defs[level_index].max_luma_picture_size) {
4927 level_constraint->fail_flag |= (1 << LUMA_PIC_SIZE_TOO_LARGE);
4928 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4929 "Failed to encode to the target level %d. %s",
4930 vp9_level_defs[level_index].level,
4931 level_fail_messages[LUMA_PIC_SIZE_TOO_LARGE]);
// Sample rate gets a grace margin (SAMPLE_RATE_GRACE_P) before failing.
4934 if ((double)level_spec->max_luma_sample_rate >
4935 (double)vp9_level_defs[level_index].max_luma_sample_rate *
4936 (1 + SAMPLE_RATE_GRACE_P)) {
4937 level_constraint->fail_flag |= (1 << LUMA_SAMPLE_RATE_TOO_LARGE);
4938 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4939 "Failed to encode to the target level %d. %s",
4940 vp9_level_defs[level_index].level,
4941 level_fail_messages[LUMA_SAMPLE_RATE_TOO_LARGE]);
4944 if (level_spec->max_col_tiles > vp9_level_defs[level_index].max_col_tiles) {
4945 level_constraint->fail_flag |= (1 << TOO_MANY_COLUMN_TILE);
4946 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4947 "Failed to encode to the target level %d. %s",
4948 vp9_level_defs[level_index].level,
4949 level_fail_messages[TOO_MANY_COLUMN_TILE]);
4952 if (level_spec->min_altref_distance <
4953 vp9_level_defs[level_index].min_altref_distance) {
4954 level_constraint->fail_flag |= (1 << ALTREF_DIST_TOO_SMALL);
4955 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4956 "Failed to encode to the target level %d. %s",
4957 vp9_level_defs[level_index].level,
4958 level_fail_messages[ALTREF_DIST_TOO_SMALL]);
4961 if (level_spec->max_ref_frame_buffers >
4962 vp9_level_defs[level_index].max_ref_frame_buffers) {
4963 level_constraint->fail_flag |= (1 << TOO_MANY_REF_BUFFER);
4964 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4965 "Failed to encode to the target level %d. %s",
4966 vp9_level_defs[level_index].level,
4967 level_fail_messages[TOO_MANY_REF_BUFFER]);
4970 if (level_spec->max_cpb_size > vp9_level_defs[level_index].max_cpb_size) {
4971 level_constraint->fail_flag |= (1 << CPB_TOO_LARGE);
4972 vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
4973 "Failed to encode to the target level %d. %s",
4974 vp9_level_defs[level_index].level,
4975 level_fail_messages[CPB_TOO_LARGE]);
4978 // Set an upper bound for the next frame size. It will be used in
4979 // level_rc_framerate() before encoding the next frame.
// Re-sum the newest CPB_WINDOW_SIZE-1 frame sizes; the remaining CPB budget
// (in the same kilobit units as max_cpb_size) bounds the next frame.
4981 for (i = 0; i < CPB_WINDOW_SIZE - 1; ++i) {
4982 if (i >= level_stats->frame_window_buffer.len) break;
4983 idx = (level_stats->frame_window_buffer.start +
4984 level_stats->frame_window_buffer.len - 1 - i) %
4986 cpb_data_size += level_stats->frame_window_buffer.buf[idx].size;
4988 cpb_data_size = cpb_data_size / 125.0;
4989 level_constraint->max_frame_size =
4990 (int)((vp9_level_defs[level_index].max_cpb_size - cpb_data_size) *
// Halve the bound while the window is not yet full (warm-up conservatism).
4992 if (level_stats->frame_window_buffer.len < CPB_WINDOW_SIZE - 1)
4993 level_constraint->max_frame_size >>= 1;
// vp9_get_compressed_data: top-level per-frame encode entry point.
// Pulls the next source frame from the lookahead, optionally synthesizes a
// filtered alt-ref frame, dispatches to the pass-specific encoder
// (first pass / Pass2Encode / SvcEncode / Pass0Encode), then updates timing,
// level statistics and (optionally) internal quality metrics.
//   size/dest   - output: compressed frame length and buffer.
//   time_stamp/time_end - output: presentation interval of the frame.
//   flush       - nonzero to drain the lookahead at end of stream.
// Returns 0 on success (visible here: -1 when no frame buffer is free).
// NOTE(review): this is a lossy extraction — the embedded original line
// numbers jump, so interior lines are missing. Code kept verbatim; comments
// describe only what the visible lines show.
4997 int vp9_get_compressed_data(VP9_COMP *cpi, unsigned int *frame_flags,
4998 size_t *size, uint8_t *dest, int64_t *time_stamp,
4999 int64_t *time_end, int flush) {
5000 const VP9EncoderConfig *const oxcf = &cpi->oxcf;
5001 VP9_COMMON *const cm = &cpi->common;
5002 BufferPool *const pool = cm->buffer_pool;
5003 RATE_CONTROL *const rc = &cpi->rc;
5004 struct vpx_usec_timer cmptimer;
5005 YV12_BUFFER_CONFIG *force_src_buffer = NULL;
5006 struct lookahead_entry *last_source = NULL;
5007 struct lookahead_entry *source = NULL;
// --- SVC per-frame setup: two-pass spatial SVC may substitute a small
// empty frame; one-pass CBR SVC starts the next layer. ---
5011 if (is_two_pass_svc(cpi)) {
5012 #if CONFIG_SPATIAL_SVC
5013 vp9_svc_start_frame(cpi);
5014 // Use a small empty frame instead of a real frame
5015 if (cpi->svc.encode_empty_frame_state == ENCODING)
5016 source = &cpi->svc.empty_frame;
5018 if (oxcf->pass == 2) vp9_restore_layer_context(cpi);
5019 } else if (is_one_pass_cbr_svc(cpi)) {
5020 vp9_one_pass_cbr_svc_start_layer(cpi);
// Time the whole compress call; elapsed time accumulated at the end.
5023 vpx_usec_timer_start(&cmptimer);
5025 vp9_set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV);
5027 // Is multi-arf enabled.
5028 // Note that at the moment multi_arf is only configured for 2 pass VBR and
5029 // will not work properly with svc.
5030 if ((oxcf->pass == 2) && !cpi->use_svc && (cpi->oxcf.enable_auto_arf > 1))
5031 cpi->multi_arf_allowed = 1;
5033 cpi->multi_arf_allowed = 0;
// Default reference-refresh policy for this frame (SVC sets its own).
5036 cm->reset_frame_context = 0;
5037 cm->refresh_frame_context = 1;
5038 if (!is_one_pass_cbr_svc(cpi)) {
5039 cpi->refresh_last_frame = 1;
5040 cpi->refresh_golden_frame = 0;
5041 cpi->refresh_alt_ref_frame = 0;
5044 // Should we encode an arf frame.
5045 arf_src_index = get_arf_src_index(cpi);
5047 // Skip alt frame if we encode the empty frame
5048 if (is_two_pass_svc(cpi) && source != NULL) arf_src_index = 0;
5050 if (arf_src_index) {
5051 for (i = 0; i <= arf_src_index; ++i) {
5052 struct lookahead_entry *e = vp9_lookahead_peek(cpi->lookahead, i);
5053 // Avoid creating an alt-ref if there's a forced keyframe pending.
5056 } else if (e->flags == VPX_EFLAG_FORCE_KF) {
// --- Alt-ref branch: peek (not pop) the future source that anchors the
// ARF, temporally filter it, and encode the filtered buffer instead. ---
5064 if (arf_src_index) {
5065 assert(arf_src_index <= rc->frames_to_key);
5067 if ((source = vp9_lookahead_peek(cpi->lookahead, arf_src_index)) != NULL) {
5068 cpi->alt_ref_source = source;
5070 #if CONFIG_SPATIAL_SVC
5071 if (is_two_pass_svc(cpi) && cpi->svc.spatial_layer_id > 0) {
5073 // Reference a hidden frame from a lower layer
5074 for (i = cpi->svc.spatial_layer_id - 1; i >= 0; --i) {
5075 if (oxcf->ss_enable_auto_arf[i]) {
5076 cpi->gld_fb_idx = cpi->svc.layer_context[i].alt_ref_idx;
5081 cpi->svc.layer_context[cpi->svc.spatial_layer_id].has_alt_frame = 1;
5083 #if !CONFIG_REALTIME_ONLY
5084 if ((oxcf->mode != REALTIME) && (oxcf->arnr_max_frames > 0) &&
5085 (oxcf->arnr_strength > 0)) {
5086 int bitrate = cpi->rc.avg_frame_bandwidth / 40;
5087 int not_low_bitrate = bitrate > ALT_REF_AQ_LOW_BITRATE_BOUNDARY;
5089 int not_last_frame = (cpi->lookahead->sz - arf_src_index > 1);
5090 not_last_frame |= ALT_REF_AQ_APPLY_TO_LAST_FRAME;
5092 // Produce the filtered ARF frame.
5093 vp9_temporal_filter(cpi, arf_src_index);
5094 vpx_extend_frame_borders(&cpi->alt_ref_buffer);
5096 // for small bitrates segmentation overhead usually
5097 // eats all bitrate gain from enabling delta quantizers
5098 if (cpi->oxcf.alt_ref_aq != 0 && not_low_bitrate && not_last_frame)
5099 vp9_alt_ref_aq_setup_mode(cpi->alt_ref_aq, cpi);
5101 force_src_buffer = &cpi->alt_ref_buffer;
// ARF frames refresh only the alt-ref slot and are not shown.
5106 cpi->refresh_alt_ref_frame = 1;
5107 cpi->refresh_golden_frame = 0;
5108 cpi->refresh_last_frame = 0;
5109 rc->is_src_frame_alt_ref = 0;
5110 rc->source_alt_ref_pending = 0;
5112 rc->source_alt_ref_pending = 0;
// --- Normal path: pop the next real source frame from the lookahead. ---
5117 // Get last frame source.
5118 if (cm->current_video_frame > 0) {
5119 if ((last_source = vp9_lookahead_peek(cpi->lookahead, -1)) == NULL)
5123 // Read in the source frame.
5125 source = vp9_svc_lookahead_pop(cpi, cpi->lookahead, flush);
5127 source = vp9_lookahead_pop(cpi->lookahead, flush);
5129 if (source != NULL) {
5132 // if the flags indicate intra frame, but if the current picture is for
5133 // non-zero spatial layer, it should not be an intra picture.
5134 // TODO(Won Kap): this needs to change if per-layer intra frame is
5136 if ((source->flags & VPX_EFLAG_FORCE_KF) &&
5137 cpi->svc.spatial_layer_id > cpi->svc.first_spatial_layer_to_encode) {
5138 source->flags &= ~(unsigned int)(VPX_EFLAG_FORCE_KF);
5141 // Check to see if the frame should be encoded as an arf overlay.
5142 check_src_altref(cpi, source);
// Source to encode: the filtered ARF buffer when present, else raw input.
5147 cpi->un_scaled_source = cpi->Source =
5148 force_src_buffer ? force_src_buffer : &source->img;
5150 #ifdef ENABLE_KF_DENOISE
5151 // Copy of raw source for metrics calculation.
5152 if (is_psnr_calc_enabled(cpi))
5153 vp9_copy_and_extend_frame(cpi->Source, &cpi->raw_unscaled_source);
5156 cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
// Report timing and the keyframe flag back to the caller.
5158 *time_stamp = source->ts_start;
5159 *time_end = source->ts_end;
5160 *frame_flags = (source->flags & VPX_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
5164 #if !CONFIG_REALTIME_ONLY
5165 if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
5166 vp9_end_first_pass(cpi); /* get last stats packet */
5167 cpi->twopass.first_pass_done = 1;
5169 #endif // !CONFIG_REALTIME_ONLY
5173 if (source->ts_start < cpi->first_time_stamp_ever) {
5174 cpi->first_time_stamp_ever = source->ts_start;
5175 cpi->last_end_time_stamp_seen = source->ts_start;
5178 // Clear down mmx registers
5179 vpx_clear_system_state();
5181 // adjust frame rates based on timestamps given
5182 if (cm->show_frame) {
5183 adjust_frame_rate(cpi, source);
5186 if (is_one_pass_cbr_svc(cpi)) {
5187 vp9_update_temporal_layer_framerate(cpi);
5188 vp9_restore_layer_context(cpi);
5191 // Find a free buffer for the new frame, releasing the reference previously
5193 if (cm->new_fb_idx != INVALID_IDX) {
5194 --pool->frame_bufs[cm->new_fb_idx].ref_count;
5196 cm->new_fb_idx = get_free_fb(cm);
// No free frame buffer available: abort this call.
5198 if (cm->new_fb_idx == INVALID_IDX) return -1;
5200 cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];
5202 if (!cpi->use_svc && cpi->multi_arf_allowed) {
5203 if (cm->frame_type == KEY_FRAME) {
5204 init_buffer_indices(cpi);
5205 } else if (oxcf->pass == 2) {
5206 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5207 cpi->alt_fb_idx = gf_group->arf_ref_idx[gf_group->index];
5211 // Start with a 0 size frame.
5214 cpi->frame_flags = *frame_flags;
5216 #if !CONFIG_REALTIME_ONLY
5217 if ((oxcf->pass == 2) &&
5218 (!cpi->use_svc || (is_two_pass_svc(cpi) &&
5219 cpi->svc.encode_empty_frame_state != ENCODING))) {
5220 vp9_rc_get_second_pass_params(cpi);
5221 } else if (oxcf->pass == 1) {
5222 set_frame_size(cpi);
5224 #endif // !CONFIG_REALTIME_ONLY
5226 if (oxcf->pass != 1 && cpi->level_constraint.level_index >= 0 &&
5227 cpi->level_constraint.fail_flag == 0)
5228 level_rc_framerate(cpi, arf_src_index);
5230 if (cpi->oxcf.pass != 0 || cpi->use_svc || frame_is_intra_only(cm) == 1) {
5231 for (i = 0; i < MAX_REF_FRAMES; ++i) cpi->scaled_ref_idx[i] = INVALID_IDX;
// --- Encode dispatch: first pass sets up (high-bitdepth aware) 4x4
// transforms then runs vp9_first_pass; otherwise Pass2/Svc/Pass0 encode. ---
5234 cpi->td.mb.fp_src_pred = 0;
5235 #if CONFIG_REALTIME_ONLY
5237 SvcEncode(cpi, size, dest, frame_flags);
5240 Pass0Encode(cpi, size, dest, frame_flags);
5242 #else // !CONFIG_REALTIME_ONLY
5243 if (oxcf->pass == 1 && (!cpi->use_svc || is_two_pass_svc(cpi))) {
5244 const int lossless = is_lossless_requested(oxcf);
5245 #if CONFIG_VP9_HIGHBITDEPTH
5246 if (cpi->oxcf.use_highbitdepth)
5247 cpi->td.mb.fwd_txfm4x4 =
5248 lossless ? vp9_highbd_fwht4x4 : vpx_highbd_fdct4x4;
5250 cpi->td.mb.fwd_txfm4x4 = lossless ? vp9_fwht4x4 : vpx_fdct4x4;
5251 cpi->td.mb.highbd_inv_txfm_add =
5252 lossless ? vp9_highbd_iwht4x4_add : vp9_highbd_idct4x4_add;
5254 cpi->td.mb.fwd_txfm4x4 = lossless ? vp9_fwht4x4 : vpx_fdct4x4;
5255 #endif // CONFIG_VP9_HIGHBITDEPTH
5256 cpi->td.mb.inv_txfm_add = lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
5257 vp9_first_pass(cpi, source);
5258 } else if (oxcf->pass == 2 && (!cpi->use_svc || is_two_pass_svc(cpi))) {
5259 Pass2Encode(cpi, size, dest, frame_flags);
5260 } else if (cpi->use_svc) {
5261 SvcEncode(cpi, size, dest, frame_flags);
5264 Pass0Encode(cpi, size, dest, frame_flags);
5266 #endif // CONFIG_REALTIME_ONLY
// Persist the updated entropy context for this frame-context slot.
5268 if (cm->refresh_frame_context)
5269 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
5271 // No frame encoded, or frame was dropped, release scaled references.
5272 if ((*size == 0) && (frame_is_intra_only(cm) == 0)) {
5273 release_scaled_references(cpi);
5277 cpi->droppable = !frame_is_reference(cpi);
5280 // Save layer specific state.
5281 if (is_one_pass_cbr_svc(cpi) || ((cpi->svc.number_temporal_layers > 1 ||
5282 cpi->svc.number_spatial_layers > 1) &&
5284 vp9_save_layer_context(cpi);
5287 vpx_usec_timer_mark(&cmptimer);
5288 cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
5290 // Should we calculate metrics for the frame.
5291 if (is_psnr_calc_enabled(cpi)) generate_psnr_packet(cpi);
5293 if (cpi->keep_level_stats && oxcf->pass != 1)
5294 update_level_info(cpi, size, arf_src_index);
// --- Optional internal statistics: PSNR (plus post-processed PSNR/SSIM),
// blockiness, consistency, fastssim and PSNR-HVS, accumulated per frame. ---
5296 #if CONFIG_INTERNAL_STATS
5298 if (oxcf->pass != 1) {
5299 double samples = 0.0;
5300 cpi->bytes += (int)(*size);
5302 if (cm->show_frame) {
5303 uint32_t bit_depth = 8;
5304 uint32_t in_bit_depth = 8;
5306 #if CONFIG_VP9_HIGHBITDEPTH
5307 if (cm->use_highbitdepth) {
5308 in_bit_depth = cpi->oxcf.input_bit_depth;
5309 bit_depth = cm->bit_depth;
5313 if (cpi->b_calculate_psnr) {
5314 YV12_BUFFER_CONFIG *orig = cpi->raw_source_frame;
5315 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
5316 YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
5318 #if CONFIG_VP9_HIGHBITDEPTH
5319 vpx_calc_highbd_psnr(orig, recon, &psnr, cpi->td.mb.e_mbd.bd,
5322 vpx_calc_psnr(orig, recon, &psnr);
5323 #endif // CONFIG_VP9_HIGHBITDEPTH
5325 adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3],
5326 psnr.psnr[0], &cpi->psnr);
5327 cpi->total_sq_error += psnr.sse[0];
5328 cpi->total_samples += psnr.samples[0];
5329 samples = psnr.samples[0];
5333 double frame_ssim2 = 0, weight = 0;
5334 #if CONFIG_VP9_POSTPROC
// Deblock the reconstruction into the post-proc buffer so metrics can be
// computed on the post-processed output as well.
5335 if (vpx_alloc_frame_buffer(
5336 pp, recon->y_crop_width, recon->y_crop_height,
5337 cm->subsampling_x, cm->subsampling_y,
5338 #if CONFIG_VP9_HIGHBITDEPTH
5339 cm->use_highbitdepth,
5341 VP9_ENC_BORDER_IN_PIXELS, cm->byte_alignment) < 0) {
5342 vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
5343 "Failed to allocate post processing buffer");
5346 vp9_ppflags_t ppflags;
5347 ppflags.post_proc_flag = VP9D_DEBLOCK;
5348 ppflags.deblocking_level = 0; // not used in vp9_post_proc_frame()
5349 ppflags.noise_level = 0; // not used in vp9_post_proc_frame()
5350 vp9_post_proc_frame(cm, pp, &ppflags);
5353 vpx_clear_system_state();
5355 #if CONFIG_VP9_HIGHBITDEPTH
5356 vpx_calc_highbd_psnr(orig, pp, &psnr2, cpi->td.mb.e_mbd.bd,
5357 cpi->oxcf.input_bit_depth);
5359 vpx_calc_psnr(orig, pp, &psnr2);
5360 #endif // CONFIG_VP9_HIGHBITDEPTH
5362 cpi->totalp_sq_error += psnr2.sse[0];
5363 cpi->totalp_samples += psnr2.samples[0];
5364 adjust_image_stat(psnr2.psnr[1], psnr2.psnr[2], psnr2.psnr[3],
5365 psnr2.psnr[0], &cpi->psnrp);
5367 #if CONFIG_VP9_HIGHBITDEPTH
5368 if (cm->use_highbitdepth) {
5369 frame_ssim2 = vpx_highbd_calc_ssim(orig, recon, &weight, bit_depth,
5372 frame_ssim2 = vpx_calc_ssim(orig, recon, &weight);
5375 frame_ssim2 = vpx_calc_ssim(orig, recon, &weight);
5376 #endif // CONFIG_VP9_HIGHBITDEPTH
5378 cpi->worst_ssim = VPXMIN(cpi->worst_ssim, frame_ssim2);
5379 cpi->summed_quality += frame_ssim2 * weight;
5380 cpi->summed_weights += weight;
5382 #if CONFIG_VP9_HIGHBITDEPTH
5383 if (cm->use_highbitdepth) {
5384 frame_ssim2 = vpx_highbd_calc_ssim(orig, pp, &weight, bit_depth,
5387 frame_ssim2 = vpx_calc_ssim(orig, pp, &weight);
5390 frame_ssim2 = vpx_calc_ssim(orig, pp, &weight);
5391 #endif // CONFIG_VP9_HIGHBITDEPTH
5393 cpi->summedp_quality += frame_ssim2 * weight;
5394 cpi->summedp_weights += weight;
// Debug dump (presumably under an #if 0 / debug guard removed by
// extraction — TODO confirm): appends per-frame stats to q_used.stt.
5397 FILE *f = fopen("q_used.stt", "a");
5398 fprintf(f, "%5d : Y%f7.3:U%f7.3:V%f7.3:F%f7.3:S%7.3f\n",
5399 cpi->common.current_video_frame, y2, u2, v2,
5400 frame_psnr2, frame_ssim2);
5406 if (cpi->b_calculate_blockiness) {
5407 #if CONFIG_VP9_HIGHBITDEPTH
5408 if (!cm->use_highbitdepth)
5411 double frame_blockiness = vp9_get_blockiness(
5412 cpi->Source->y_buffer, cpi->Source->y_stride,
5413 cm->frame_to_show->y_buffer, cm->frame_to_show->y_stride,
5414 cpi->Source->y_width, cpi->Source->y_height);
5415 cpi->worst_blockiness =
5416 VPXMAX(cpi->worst_blockiness, frame_blockiness);
5417 cpi->total_blockiness += frame_blockiness;
5421 if (cpi->b_calculate_consistency) {
5422 #if CONFIG_VP9_HIGHBITDEPTH
5423 if (!cm->use_highbitdepth)
5426 double this_inconsistency = vpx_get_ssim_metrics(
5427 cpi->Source->y_buffer, cpi->Source->y_stride,
5428 cm->frame_to_show->y_buffer, cm->frame_to_show->y_stride,
5429 cpi->Source->y_width, cpi->Source->y_height, cpi->ssim_vars,
5432 const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
5433 double consistency =
5434 vpx_sse_to_psnr(samples, peak, (double)cpi->total_inconsistency);
5435 if (consistency > 0.0)
5436 cpi->worst_consistency =
5437 VPXMIN(cpi->worst_consistency, consistency);
5438 cpi->total_inconsistency += this_inconsistency;
5443 double y, u, v, frame_all;
5444 frame_all = vpx_calc_fastssim(cpi->Source, cm->frame_to_show, &y, &u,
5445 &v, bit_depth, in_bit_depth);
5446 adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
5449 double y, u, v, frame_all;
5450 frame_all = vpx_psnrhvs(cpi->Source, cm->frame_to_show, &y, &u, &v,
5451 bit_depth, in_bit_depth);
5452 adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
// --- End-of-frame SVC bookkeeping: advance the spatial layer to encode
// (wrapping at number_spatial_layers) and schedule empty frames. ---
5459 if (is_two_pass_svc(cpi)) {
5460 if (cpi->svc.encode_empty_frame_state == ENCODING) {
5461 cpi->svc.encode_empty_frame_state = ENCODED;
5462 cpi->svc.encode_intra_empty_frame = 0;
5465 if (cm->show_frame) {
5466 ++cpi->svc.spatial_layer_to_encode;
5467 if (cpi->svc.spatial_layer_to_encode >= cpi->svc.number_spatial_layers)
5468 cpi->svc.spatial_layer_to_encode = 0;
5470 // May need the empty frame after an visible frame.
5471 cpi->svc.encode_empty_frame_state = NEED_TO_ENCODE;
5473 } else if (is_one_pass_cbr_svc(cpi)) {
5474 if (cm->show_frame) {
5475 ++cpi->svc.spatial_layer_to_encode;
5476 if (cpi->svc.spatial_layer_to_encode >= cpi->svc.number_spatial_layers)
5477 cpi->svc.spatial_layer_to_encode = 0;
5481 vpx_clear_system_state();
// vp9_get_preview_raw_frame: hand the caller the most recent shown frame.
// With CONFIG_VP9_POSTPROC the frame is run through vp9_post_proc_frame
// into *dest; otherwise *dest aliases cm->frame_to_show (shallow struct
// copy, no pixel copy) with display dimensions patched in.
//   flags - post-processing options (used only in the postproc build).
// Returns an int status (the return statements fall in extraction gaps —
// the embedded line numbers jump, so some lines of this function are
// missing here; code kept verbatim).
5485 int vp9_get_preview_raw_frame(VP9_COMP *cpi, YV12_BUFFER_CONFIG *dest,
5486 vp9_ppflags_t *flags) {
5487 VP9_COMMON *cm = &cpi->common;
5488 #if !CONFIG_VP9_POSTPROC
// No preview available until a frame is flagged for display.
5492 if (!cm->show_frame) {
5496 #if CONFIG_VP9_POSTPROC
5497 ret = vp9_post_proc_frame(cm, dest, flags);
5499 if (cm->frame_to_show) {
// Shallow copy of the buffer descriptor; then override the luma/chroma
// display sizes with the current coded dimensions.
5500 *dest = *cm->frame_to_show;
5501 dest->y_width = cm->width;
5502 dest->y_height = cm->height;
5503 dest->uv_width = cm->width >> cm->subsampling_x;
5504 dest->uv_height = cm->height >> cm->subsampling_y;
5509 #endif // !CONFIG_VP9_POSTPROC
5510 vpx_clear_system_state();
// vp9_set_internal_size: apply a fractional internal coding resolution.
// horiz_mode/vert_mode select a ratio (via Scale2Ratio) that scales the
// configured oxcf width/height; results are rounded up to whole pixels.
// Returns -1 for an out-of-range mode (anything beyond ONETWO); the
// success return falls in an extraction gap — lines of this function are
// missing here (embedded line numbers jump); code kept verbatim.
5515 int vp9_set_internal_size(VP9_COMP *cpi, VPX_SCALING horiz_mode,
5516 VPX_SCALING vert_mode) {
5517 VP9_COMMON *cm = &cpi->common;
5518 int hr = 0, hs = 0, vr = 0, vs = 0;
// Reject unsupported scaling modes up front.
5520 if (horiz_mode > ONETWO || vert_mode > ONETWO) return -1;
5522 Scale2Ratio(horiz_mode, &hr, &hs);
5523 Scale2Ratio(vert_mode, &vr, &vs);
5525 // always go to the next whole number
// Ceiling division: (num * ratio + denom - 1) / denom.
5526 cm->width = (hs - 1 + cpi->oxcf.width * hr) / hs;
5527 cm->height = (vs - 1 + cpi->oxcf.height * vr) / vs;
// After the first frame the size may only shrink relative to the initial
// allocation (asserted, not recovered).
5528 if (cm->current_video_frame) {
5529 assert(cm->width <= cpi->initial_width);
5530 assert(cm->height <= cpi->initial_height);
5533 update_frame_size(cpi);
// vp9_set_size_literal: set an explicit coding width/height in pixels.
// Values larger than the initially allocated dimensions are clamped to
// initial_width/initial_height with a console warning, then frame buffers
// are resized via update_frame_size().
// NOTE(review): extraction gaps — e.g. the `cm->width = width;` assignment
// expected before the width clamp, and the return, are among the missing
// lines (embedded line numbers jump); code kept verbatim.
5538 int vp9_set_size_literal(VP9_COMP *cpi, unsigned int width,
5539 unsigned int height) {
5540 VP9_COMMON *cm = &cpi->common;
5541 #if CONFIG_VP9_HIGHBITDEPTH
5542 check_initial_width(cpi, cm->use_highbitdepth, 1, 1);
5544 check_initial_width(cpi, 1, 1);
5545 #endif // CONFIG_VP9_HIGHBITDEPTH
5547 #if CONFIG_VP9_TEMPORAL_DENOISING
5548 setup_denoiser_buffer(cpi);
// Clamp the requested width to the initial allocation.
5553 if (cm->width > cpi->initial_width) {
5554 cm->width = cpi->initial_width;
5555 printf("Warning: Desired width too large, changed to %d\n", cm->width);
// Clamp the requested height likewise.
5560 cm->height = height;
5561 if (cm->height > cpi->initial_height) {
5562 cm->height = cpi->initial_height;
5563 printf("Warning: Desired height too large, changed to %d\n", cm->height);
5566 assert(cm->width <= cpi->initial_width);
5567 assert(cm->height <= cpi->initial_height);
5569 update_frame_size(cpi);
// vp9_set_svc: enable or disable scalable (SVC) encoding on this encoder
// instance. Only the flag assignment is visible here; the rest of the
// function body falls in an extraction gap (embedded line numbers jump).
5574 void vp9_set_svc(VP9_COMP *cpi, int use_svc) {
5575 cpi->use_svc = use_svc;
5579 int vp9_get_quantizer(VP9_COMP *cpi) { return cpi->common.base_qindex; }
// vp9_apply_encoding_flags: translate public VP8_EFLAG_* frame flags into
// the encoder's reference-use, reference-update, and entropy-update state.
// Visible pattern: start from a full mask and XOR out the reference bits
// the caller disabled, then push the result via vp9_use_as_reference /
// vp9_update_reference. NOTE(review): extraction gaps — the `int ref = …`
// and `int upd = …` initializers and several braces are among the missing
// lines (embedded line numbers jump); code kept verbatim.
5581 void vp9_apply_encoding_flags(VP9_COMP *cpi, vpx_enc_frame_flags_t flags) {
5583 (VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF)) {
// Clear each reference-frame bit the caller asked not to reference.
5586 if (flags & VP8_EFLAG_NO_REF_LAST) ref ^= VP9_LAST_FLAG;
5588 if (flags & VP8_EFLAG_NO_REF_GF) ref ^= VP9_GOLD_FLAG;
5590 if (flags & VP8_EFLAG_NO_REF_ARF) ref ^= VP9_ALT_FLAG;
5592 vp9_use_as_reference(cpi, ref);
5596 (VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
5597 VP8_EFLAG_FORCE_GF | VP8_EFLAG_FORCE_ARF)) {
// Same scheme for which reference slots this frame may refresh.
5600 if (flags & VP8_EFLAG_NO_UPD_LAST) upd ^= VP9_LAST_FLAG;
5602 if (flags & VP8_EFLAG_NO_UPD_GF) upd ^= VP9_GOLD_FLAG;
5604 if (flags & VP8_EFLAG_NO_UPD_ARF) upd ^= VP9_ALT_FLAG;
5606 vp9_update_reference(cpi, upd);
// Optionally freeze entropy-context updates for this frame.
5609 if (flags & VP8_EFLAG_NO_UPD_ENTROPY) {
5610 vp9_update_entropy(cpi, 0);
// vp9_set_row_mt: decide whether row-based multi-threading is enabled for
// the configured mode/speed/pass combination, and whether the row-mt path
// must remain bit-exact with the single-thread path.
// NOTE(review): extraction gaps — the `cpi->row_mt = 1/0;` assignments the
// visible conditions guard are among the missing lines (embedded line
// numbers jump); code kept verbatim.
5614 void vp9_set_row_mt(VP9_COMP *cpi) {
5615 // Enable row based multi-threading for supported modes of encoding
// First-pass (pass == 1) GOOD/BEST at speed < 5, non-SVC.
5617 if (((cpi->oxcf.mode == GOOD || cpi->oxcf.mode == BEST) &&
5618 cpi->oxcf.speed < 5 && cpi->oxcf.pass == 1) &&
5619 cpi->oxcf.row_mt && !cpi->use_svc)
// One-pass or second-pass GOOD at speed < 5.
5622 if (cpi->oxcf.mode == GOOD && cpi->oxcf.speed < 5 &&
5623 (cpi->oxcf.pass == 0 || cpi->oxcf.pass == 2) && cpi->oxcf.row_mt &&
5627 // In realtime mode, enable row based multi-threading for all the speed levels
5628 // where non-rd path is used.
5629 if (cpi->oxcf.mode == REALTIME && cpi->oxcf.speed >= 5 && cpi->oxcf.row_mt) {
// Bit-exactness requirement depends on the branch taken above.
5634 cpi->row_mt_bit_exact = 1;
5636 cpi->row_mt_bit_exact = 0;