blob: 5267f57676647240e172ce0c13841638a8c9f440 [file] [log] [blame]
David Turner0fa8c492019-02-06 16:38:13 +00001/*
2 * Copyright (c) 2019, Alliance for Open Media. All rights reserved
3 *
4 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
10 */
11
bohanlid165b192020-06-10 21:46:29 -070012/*!\defgroup gf_group_algo Golden Frame Group
13 * \ingroup high_level_algo
14 * Algorithms regarding determining the length of GF groups and defining GF
15 * group structures.
16 * @{
17 */
18/*! @} - end defgroup gf_group_algo */
19
David Turner0fa8c492019-02-06 16:38:13 +000020#include <stdint.h>
21
22#include "config/aom_config.h"
23#include "config/aom_scale_rtcd.h"
24
25#include "aom/aom_codec.h"
26#include "aom/aom_encoder.h"
27
28#include "aom_ports/system_state.h"
29
Wan-Teh Changf2d15ee2020-03-10 09:24:43 -070030#include "av1/common/av1_common_int.h"
David Turner0fa8c492019-02-06 16:38:13 +000031
32#include "av1/encoder/encoder.h"
33#include "av1/encoder/firstpass.h"
34#include "av1/encoder/gop_structure.h"
Jingning Han2eedec52020-02-05 10:33:43 -080035#include "av1/encoder/pass2_strategy.h"
Sarah Parker1215e232019-04-15 11:10:00 -070036#include "av1/encoder/ratectrl.h"
Satish Kumar Suman47c06892020-06-10 12:45:25 +053037#include "av1/encoder/rc_utils.h"
Jingning Hanfa2978a2020-09-09 18:17:13 -070038#include "av1/encoder/temporal_filter.h"
Jingning Han491198d2020-02-13 21:53:41 -080039#include "av1/encoder/tpl_model.h"
Urvang Joshi38b7e842019-05-01 11:27:47 -070040#include "av1/encoder/use_flat_gop_model_params.h"
Aasaipriya9bc1dcb2020-03-13 17:46:07 +053041#include "av1/encoder/encode_strategy.h"
David Turner0fa8c492019-02-06 16:38:13 +000042
Sarah Parker7ae814b2019-06-26 10:39:30 -070043#define DEFAULT_KF_BOOST 2300
Sarah Parker97803fc2019-05-17 14:15:37 -070044#define DEFAULT_GF_BOOST 2000
bohanli04652962020-03-18 17:02:50 -070045#define GROUP_ADAPTIVE_MAXQ 1
46static void init_gf_stats(GF_GROUP_STATS *gf_stats);
Sarah Parker97803fc2019-05-17 14:15:37 -070047
David Turner0fa8c492019-02-06 16:38:13 +000048// Calculate an active area of the image that discounts formatting
49// bars and partially discounts other 0 energy areas.
50#define MIN_ACTIVE_AREA 0.5
51#define MAX_ACTIVE_AREA 1.0
Jingning Han17af7742019-09-17 16:58:03 -070052static double calculate_active_area(const FRAME_INFO *frame_info,
Yaowu Xubedbf4f2019-05-01 17:54:36 -070053 const FIRSTPASS_STATS *this_frame) {
54 const double active_pct =
David Turner0fa8c492019-02-06 16:38:13 +000055 1.0 -
56 ((this_frame->intra_skip_pct / 2) +
Jingning Han17af7742019-09-17 16:58:03 -070057 ((this_frame->inactive_zone_rows * 2) / (double)frame_info->mb_rows));
David Turner0fa8c492019-02-06 16:38:13 +000058 return fclamp(active_pct, MIN_ACTIVE_AREA, MAX_ACTIVE_AREA);
59}
60
// Calculate a modified Error used in distributing bits between easier and
// harder frames.
#define ACT_AREA_CORRECTION 0.5
// Returns this frame's coded error re-weighted relative to the clip average
// (raised to the VBR bias power), corrected for inactive picture area, and
// clamped to the configured min/max modified-error range. Returns 0 when no
// aggregate stats are available.
static double calculate_modified_err(const FRAME_INFO *frame_info,
                                     const TWO_PASS *twopass,
                                     const AV1EncoderConfig *oxcf,
                                     const FIRSTPASS_STATS *this_frame) {
  const FIRSTPASS_STATS *const stats = twopass->stats_buf_ctx->total_stats;
  // Aggregate stats may be absent (e.g. limited look-ahead) — bail out.
  if (stats == NULL) {
    return 0;
  }
  const double av_weight = stats->weight / stats->count;
  const double av_err = (stats->coded_error * av_weight) / stats->count;
  // vbrbias is a percentage; the exponent ranges over [0, 1] for 0-100.
  double modified_error =
      av_err * pow(this_frame->coded_error * this_frame->weight /
                       DOUBLE_DIVIDE_CHECK(av_err),
                   oxcf->rc_cfg.vbrbias / 100.0);

  // Correction for active area. Frames with a reduced active area
  // (eg due to formatting bars) have a higher error per mb for the
  // remaining active MBs. The correction here assumes that coding
  // 0.5N blocks of complexity 2X is a little easier than coding N
  // blocks of complexity X.
  modified_error *=
      pow(calculate_active_area(frame_info, this_frame), ACT_AREA_CORRECTION);

  return fclamp(modified_error, twopass->modified_error_min,
                twopass->modified_error_max);
}
90
// Resets the first pass file to the given position using a relative seek from
// the current position. The "file" is an in-memory stats array; this simply
// repoints the read cursor.
static void reset_fpf_position(TWO_PASS *p, const FIRSTPASS_STATS *position) {
  p->stats_in = position;
}
96
97static int input_stats(TWO_PASS *p, FIRSTPASS_STATS *fps) {
Akshata Jadhava49be172019-12-18 00:03:53 +053098 if (p->stats_in >= p->stats_buf_ctx->stats_in_end) return EOF;
David Turner0fa8c492019-02-06 16:38:13 +000099
100 *fps = *p->stats_in;
101 ++p->stats_in;
102 return 1;
103}
104
// Look-ahead (LAP) variant of input_stats(): consume the oldest stats entry,
// then slide the remaining entries down one slot so the window can admit the
// next incoming frame's stats. Returns 1 on success or EOF when empty.
static int input_stats_lap(TWO_PASS *p, FIRSTPASS_STATS *fps) {
  if (p->stats_in >= p->stats_buf_ctx->stats_in_end) return EOF;

  *fps = *p->stats_in;
  /* Move old stats[0] out to accommodate for next frame stats */
  // memmove (not memcpy) because source and destination overlap.
  memmove(p->frame_stats_arr[0], p->frame_stats_arr[1],
          (p->stats_buf_ctx->stats_in_end - p->stats_in - 1) *
              sizeof(FIRSTPASS_STATS));
  // The window shrank by one entry until new stats arrive.
  p->stats_buf_ctx->stats_in_end--;
  return 1;
}
116
David Turner0fa8c492019-02-06 16:38:13 +0000117// Read frame stats at an offset from the current position.
118static const FIRSTPASS_STATS *read_frame_stats(const TWO_PASS *p, int offset) {
Akshata Jadhava49be172019-12-18 00:03:53 +0530119 if ((offset >= 0 && p->stats_in + offset >= p->stats_buf_ctx->stats_in_end) ||
120 (offset < 0 && p->stats_in + offset < p->stats_buf_ctx->stats_in_start)) {
David Turner0fa8c492019-02-06 16:38:13 +0000121 return NULL;
122 }
123
124 return &p->stats_in[offset];
125}
126
// Element-wise subtraction of one frame's stats packet from a running
// section total (the inverse of accumulating a frame into the section).
static void subtract_stats(FIRSTPASS_STATS *section,
                           const FIRSTPASS_STATS *frame) {
  section->frame -= frame->frame;
  section->weight -= frame->weight;
  section->intra_error -= frame->intra_error;
  section->frame_avg_wavelet_energy -= frame->frame_avg_wavelet_energy;
  section->coded_error -= frame->coded_error;
  section->sr_coded_error -= frame->sr_coded_error;
  section->pcnt_inter -= frame->pcnt_inter;
  section->pcnt_motion -= frame->pcnt_motion;
  section->pcnt_second_ref -= frame->pcnt_second_ref;
  section->pcnt_neutral -= frame->pcnt_neutral;
  section->intra_skip_pct -= frame->intra_skip_pct;
  section->inactive_zone_rows -= frame->inactive_zone_rows;
  section->inactive_zone_cols -= frame->inactive_zone_cols;
  section->MVr -= frame->MVr;
  section->mvr_abs -= frame->mvr_abs;
  section->MVc -= frame->MVc;
  section->mvc_abs -= frame->mvc_abs;
  section->MVrv -= frame->MVrv;
  section->MVcv -= frame->MVcv;
  section->mv_in_out_count -= frame->mv_in_out_count;
  section->new_mv_count -= frame->new_mv_count;
  section->count -= frame->count;
  section->duration -= frame->duration;
}
153
David Turner0fa8c492019-02-06 16:38:13 +0000154// This function returns the maximum target rate per frame.
155static int frame_max_bits(const RATE_CONTROL *rc,
156 const AV1EncoderConfig *oxcf) {
157 int64_t max_bits = ((int64_t)rc->avg_frame_bandwidth *
Debargha Mukherjeec6a81202020-07-22 16:35:20 -0700158 (int64_t)oxcf->rc_cfg.vbrmax_section) /
David Turner0fa8c492019-02-06 16:38:13 +0000159 100;
160 if (max_bits < 0)
161 max_bits = 0;
162 else if (max_bits > rc->max_frame_bandwidth)
163 max_bits = rc->max_frame_bandwidth;
164
165 return (int)max_bits;
166}
167
// Power-term table indexed by (qindex >> 5); the final entry exists only so
// the interpolation below can safely read index + 1.
static const double q_pow_term[(QINDEX_RANGE >> 5) + 1] = { 0.65, 0.70, 0.75,
                                                            0.80, 0.85, 0.90,
                                                            0.95, 0.95, 0.95 };
#define ERR_DIVISOR 96.0
// Correction factor for predicted bits per MB, based on the first-pass error
// per MB and the quantizer index. Result is clamped to [0.05, 5.0].
static double calc_correction_factor(double err_per_mb, int q) {
  const double error_term = err_per_mb / ERR_DIVISOR;
  const int index = q >> 5;
  // Adjustment to power term based on qindex: linear interpolation between
  // the two nearest table entries using the low 5 bits of q.
  const double power_term =
      q_pow_term[index] +
      (((q_pow_term[index + 1] - q_pow_term[index]) * (q % 32)) / 32.0);
  assert(error_term >= 0.0);
  return fclamp(pow(error_term, power_term), 0.05, 5.0);
}
182
// Based on history adjust expectations of bits per macroblock.
// Feedback loop: nudges twopass->bpm_factor toward correcting the observed
// rate error, but only when the error estimate agrees that the trend is
// getting worse. rate_err_tol sets both damping and adjustment limits.
static void twopass_update_bpm_factor(AV1_COMP *cpi, int rate_err_tol) {
  TWO_PASS *twopass = &cpi->twopass;
  const RATE_CONTROL *const rc = &cpi->rc;
  int err_estimate = rc->rate_error_estimate;

  // Based on recent history adjust expectations of bits per macroblock.
  double damp_fac = AOMMAX(5.0, rate_err_tol / 10.0);
  double rate_err_factor = 1.0;
  // Tighter tolerance => wider permitted adjustment range (floor of +/-20%).
  const double adj_limit = AOMMAX(0.20, (double)(100 - rate_err_tol) / 200.0);
  const double min_fac = 1.0 - adj_limit;
  const double max_fac = 1.0 + adj_limit;

  if (rc->vbr_bits_off_target && rc->total_actual_bits > 0) {
    if (cpi->lap_enabled) {
      // Look-ahead mode: judge rate error from the recent ARF group only.
      rate_err_factor =
          (double)twopass->rolling_arf_group_actual_bits /
          DOUBLE_DIVIDE_CHECK((double)twopass->rolling_arf_group_target_bits);
    } else {
      // Two pass: judge from clip-wide deviation versus bits remaining.
      rate_err_factor =
          1.0 - ((double)(rc->vbr_bits_off_target) /
                 AOMMAX(rc->total_actual_bits, cpi->twopass.bits_left));
    }

    rate_err_factor = AOMMAX(min_fac, AOMMIN(max_fac, rate_err_factor));

    // Adjustment is damped if this is 1 pass with look ahead processing
    // (as there are only ever a few frames of data) and for all but the first
    // GOP in normal two pass.
    if ((twopass->bpm_factor != 1.0) || cpi->lap_enabled) {
      rate_err_factor = 1.0 + ((rate_err_factor - 1.0) / damp_fac);
    }
  }

  // Is the rate control trending in the right direction. Only make
  // an adjustment if things are getting worse.
  if ((rate_err_factor < 1.0 && err_estimate > 0) ||
      (rate_err_factor > 1.0 && err_estimate < 0)) {
    twopass->bpm_factor *= rate_err_factor;
    twopass->bpm_factor = AOMMAX(min_fac, AOMMIN(max_fac, twopass->bpm_factor));
  }
}
225
// Enumerator used when converting Q to bits per MB: ramps linearly from
// 1.2M up to 1.5M as the rate error tolerance rises from 25 to 100.
static int qbpm_enumerator(int rate_err_tol) {
  const int ramp = AOMMIN(75, AOMMAX(rate_err_tol - 25, 0));
  return 1200000 + (300000 * ramp) / 75;
}
229
// Similar to find_qindex_by_rate() function in ratectrl.c, but includes
// calculation of a correction_factor.
// Binary search over [best_qindex, worst_qindex] for the lowest qindex whose
// corrected bits-per-MB estimate does not exceed desired_bits_per_mb
// (bits per MB is monotonically non-increasing in qindex).
static int find_qindex_by_rate_with_correction(
    int desired_bits_per_mb, aom_bit_depth_t bit_depth, double error_per_mb,
    double group_weight_factor, int rate_err_tol, int best_qindex,
    int worst_qindex) {
  assert(best_qindex <= worst_qindex);
  int low = best_qindex;
  int high = worst_qindex;

  while (low < high) {
    const int mid = (low + high) >> 1;
    const double mid_factor = calc_correction_factor(error_per_mb, mid);
    const double q = av1_convert_qindex_to_q(mid, bit_depth);
    const int enumerator = qbpm_enumerator(rate_err_tol);
    const int mid_bits_per_mb =
        (int)((enumerator * mid_factor * group_weight_factor) / q);

    // Midpoint still spends more than desired: answer lies at a higher Q.
    if (mid_bits_per_mb > desired_bits_per_mb) {
      low = mid + 1;
    } else {
      high = mid;
    }
  }
  return low;
}
256
/*!\brief Choose a target maximum Q for a group of frames
 *
 * \ingroup rate_control
 *
 * This function is used to estimate a suitable maximum Q for a
 * group of frames. Inititally it is called to get a crude estimate
 * for the whole clip. It is then called for each ARF/GF group to get
 * a revised estimate for that group.
 *
 * \param[in]    cpi                 Top-level encoder structure
 * \param[in]    av_frame_err        The average per frame coded error score
 *                                   for frames making up this section/group.
 * \param[in]    inactive_zone       Used to mask off /ignore part of the
 *                                   frame. The most common use case is where
 *                                   a wide format video (e.g. 16:9) is
 *                                   letter-boxed into a more square format.
 *                                   Here we want to ignore the bands at the
 *                                   top and bottom.
 * \param[in]    av_target_bandwidth The target bits per frame
 *
 * \return The maximum Q for frames in the group.
 */
static int get_twopass_worst_quality(AV1_COMP *cpi, const double av_frame_err,
                                     double inactive_zone,
                                     int av_target_bandwidth) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
  inactive_zone = fclamp(inactive_zone, 0.0, 1.0);

  if (av_target_bandwidth <= 0) {
    return rc->worst_quality;  // Highest value allowed
  } else {
    // MB count may differ from the frame's if resizing is active.
    const int num_mbs = (oxcf->resize_cfg.resize_mode != RESIZE_NONE)
                            ? cpi->initial_mbs
                            : cpi->common.mi_params.MBs;
    const int active_mbs = AOMMAX(1, num_mbs - (int)(num_mbs * inactive_zone));
    const double av_err_per_mb = av_frame_err / active_mbs;
    const int target_norm_bits_per_mb =
        (int)((uint64_t)av_target_bandwidth << BPER_MB_NORMBITS) / active_mbs;
    int rate_err_tol = AOMMIN(rc_cfg->under_shoot_pct, rc_cfg->over_shoot_pct);

    // Update bpm correction factor based on previous GOP rate error.
    twopass_update_bpm_factor(cpi, rate_err_tol);

    // Try and pick a max Q that will be high enough to encode the
    // content at the given rate.
    int q = find_qindex_by_rate_with_correction(
        target_norm_bits_per_mb, cpi->common.seq_params.bit_depth,
        av_err_per_mb, cpi->twopass.bpm_factor, rate_err_tol, rc->best_quality,
        rc->worst_quality);

    // Restriction on active max q for constrained quality mode.
    if (rc_cfg->mode == AOM_CQ) q = AOMMAX(q, rc_cfg->cq_level);
    return q;
  }
}
314
#define SR_DIFF_PART 0.0015
#define MOTION_AMP_PART 0.003
#define INTRA_PART 0.005
#define DEFAULT_DECAY_LIMIT 0.75
#define LOW_SR_DIFF_TRHESH 0.1
#define SR_DIFF_MAX 128.0
#define NCOUNT_FRAME_II_THRESH 5.0

// Estimate how quickly prediction quality decays, based on how much worse
// the second reference predicts this frame than the most recent reference,
// with penalties for motion amplitude and (effective) intra coding.
static double get_sr_decay_rate(const FRAME_INFO *frame_info,
                                const FIRSTPASS_STATS *frame) {
  const int num_mbs = frame_info->num_mbs;
  double sr_diff = (frame->sr_coded_error - frame->coded_error) / num_mbs;
  double sr_decay = 1.0;
  double modified_pct_inter;
  double modified_pcnt_intra;
  const double motion_amplitude_factor =
      frame->pcnt_motion * ((frame->mvc_abs + frame->mvr_abs) / 2);

  modified_pct_inter = frame->pcnt_inter;
  // Where intra error is not clearly larger than coded error, discount
  // "neutral" blocks from the inter percentage.
  if ((frame->intra_error / DOUBLE_DIVIDE_CHECK(frame->coded_error)) <
      (double)NCOUNT_FRAME_II_THRESH) {
    modified_pct_inter = frame->pcnt_inter - frame->pcnt_neutral;
  }
  modified_pcnt_intra = 100 * (1.0 - modified_pct_inter);

  if ((sr_diff > LOW_SR_DIFF_TRHESH)) {
    sr_diff = AOMMIN(sr_diff, SR_DIFF_MAX);
    sr_decay = 1.0 - (SR_DIFF_PART * sr_diff) -
               (MOTION_AMP_PART * motion_amplitude_factor) -
               (INTRA_PART * modified_pcnt_intra);
  }
  // Never decay below the default limit or the usable inter percentage.
  return AOMMAX(sr_decay, AOMMIN(DEFAULT_DECAY_LIMIT, modified_pct_inter));
}
348
349// This function gives an estimate of how badly we believe the prediction
350// quality is decaying from frame to frame.
Jingning Hanc7a52172019-09-17 15:28:46 -0700351static double get_zero_motion_factor(const FRAME_INFO *frame_info,
David Turner0fa8c492019-02-06 16:38:13 +0000352 const FIRSTPASS_STATS *frame) {
353 const double zero_motion_pct = frame->pcnt_inter - frame->pcnt_motion;
Jingning Hanc7a52172019-09-17 15:28:46 -0700354 double sr_decay = get_sr_decay_rate(frame_info, frame);
David Turner0fa8c492019-02-06 16:38:13 +0000355 return AOMMIN(sr_decay, zero_motion_pct);
356}
357
358#define ZM_POWER_FACTOR 0.75
359
Jingning Hanc7a52172019-09-17 15:28:46 -0700360static double get_prediction_decay_rate(const FRAME_INFO *frame_info,
David Turner0fa8c492019-02-06 16:38:13 +0000361 const FIRSTPASS_STATS *next_frame) {
Jingning Hanc7a52172019-09-17 15:28:46 -0700362 const double sr_decay_rate = get_sr_decay_rate(frame_info, next_frame);
David Turner0fa8c492019-02-06 16:38:13 +0000363 const double zero_motion_factor =
364 (0.95 * pow((next_frame->pcnt_inter - next_frame->pcnt_motion),
365 ZM_POWER_FACTOR));
366
367 return AOMMAX(zero_motion_factor,
368 (sr_decay_rate + ((1.0 - sr_decay_rate) * zero_motion_factor)));
369}
370
// Function to test for a condition where a complex transition is followed
// by a static section. For example in slide shows where there is a fade
// between slides. This is to help with more optimal kf and gf positioning.
// Returns 1 only when high decay is followed by `still_interval` near-static
// frames; returns 0 otherwise.
static int detect_transition_to_still(TWO_PASS *const twopass,
                                      const int min_gf_interval,
                                      const int frame_interval,
                                      const int still_interval,
                                      const double loop_decay_rate,
                                      const double last_decay_rate) {
  // Break clause to detect very still sections after motion
  // For example a static image after a fade or other transition
  // instead of a clean scene cut.
  if (frame_interval > min_gf_interval && loop_decay_rate >= 0.999 &&
      last_decay_rate < 0.9) {
    int j;
    // Look ahead a few frames to see if static condition persists...
    for (j = 0; j < still_interval; ++j) {
      const FIRSTPASS_STATS *stats = &twopass->stats_in[j];
      // Stop at the end of the available stats window.
      if (stats >= twopass->stats_buf_ctx->stats_in_end) break;

      if (stats->pcnt_inter - stats->pcnt_motion < 0.999) break;
    }
    // Only if it does do we signal a transition to still.
    return j == still_interval;
  }
  return 0;
}
398
399// This function detects a flash through the high relative pcnt_second_ref
400// score in the frame following a flash frame. The offset passed in should
401// reflect this.
bohanli04652962020-03-18 17:02:50 -0700402static int detect_flash(const TWO_PASS *twopass, const int offset) {
David Turner0fa8c492019-02-06 16:38:13 +0000403 const FIRSTPASS_STATS *const next_frame = read_frame_stats(twopass, offset);
404
405 // What we are looking for here is a situation where there is a
406 // brief break in prediction (such as a flash) but subsequent frames
407 // are reasonably well predicted by an earlier (pre flash) frame.
408 // The recovery after a flash is indicated by a high pcnt_second_ref
409 // compared to pcnt_inter.
410 return next_frame != NULL &&
411 next_frame->pcnt_second_ref > next_frame->pcnt_inter &&
412 next_frame->pcnt_second_ref >= 0.5;
413}
414
// Update the motion related elements to the GF arf boost calculation.
static void accumulate_frame_motion_stats(const FIRSTPASS_STATS *stats,
                                          GF_GROUP_STATS *gf_stats) {
  const double pct = stats->pcnt_motion;

  // Accumulate Motion In/Out of frame stats.
  gf_stats->this_frame_mv_in_out = stats->mv_in_out_count * pct;
  gf_stats->mv_in_out_accumulator += gf_stats->this_frame_mv_in_out;
  gf_stats->abs_mv_in_out_accumulator += fabs(gf_stats->this_frame_mv_in_out);

  // Accumulate a measure of how uniform (or conversely how random) the motion
  // field is (a ratio of abs(mv) / mv).
  // Only meaningful when a non-trivial fraction of blocks have motion.
  if (pct > 0.05) {
    const double mvr_ratio =
        fabs(stats->mvr_abs) / DOUBLE_DIVIDE_CHECK(fabs(stats->MVr));
    const double mvc_ratio =
        fabs(stats->mvc_abs) / DOUBLE_DIVIDE_CHECK(fabs(stats->MVc));

    // Use the smaller of ratio vs raw magnitude per axis so a few large
    // vectors cannot dominate the accumulator.
    gf_stats->mv_ratio_accumulator +=
        pct * (mvr_ratio < stats->mvr_abs ? mvr_ratio : stats->mvr_abs);
    gf_stats->mv_ratio_accumulator +=
        pct * (mvc_ratio < stats->mvc_abs ? mvc_ratio : stats->mvc_abs);
  }
}
439
bohanli04652962020-03-18 17:02:50 -0700440static void accumulate_this_frame_stats(const FIRSTPASS_STATS *stats,
441 const double mod_frame_err,
442 GF_GROUP_STATS *gf_stats) {
443 gf_stats->gf_group_err += mod_frame_err;
444#if GROUP_ADAPTIVE_MAXQ
445 gf_stats->gf_group_raw_error += stats->coded_error;
446#endif
447 gf_stats->gf_group_skip_pct += stats->intra_skip_pct;
448 gf_stats->gf_group_inactive_zone_rows += stats->inactive_zone_rows;
449}
450
// Accumulate motion, error and decay statistics for one more frame of a
// prospective GF group. cur_idx is the frame's index within the group.
static void accumulate_next_frame_stats(const FIRSTPASS_STATS *stats,
                                        const FRAME_INFO *frame_info,
                                        const int flash_detected,
                                        const int frames_since_key,
                                        const int cur_idx,
                                        GF_GROUP_STATS *gf_stats) {
  accumulate_frame_motion_stats(stats, gf_stats);
  // sum up the metric values of current gf group
  gf_stats->avg_sr_coded_error += stats->sr_coded_error;
  gf_stats->avg_tr_coded_error += stats->tr_coded_error;
  gf_stats->avg_pcnt_second_ref += stats->pcnt_second_ref;
  gf_stats->avg_pcnt_third_ref += stats->pcnt_third_ref;
  gf_stats->avg_new_mv_count += stats->new_mv_count;
  gf_stats->avg_wavelet_energy += stats->frame_avg_wavelet_energy;
  // Count only frames with a meaningfully non-zero stdev so the later
  // average is taken over contributing frames.
  if (fabs(stats->raw_error_stdev) > 0.000001) {
    gf_stats->non_zero_stdev_count++;
    gf_stats->avg_raw_err_stdev += stats->raw_error_stdev;
  }

  // Accumulate the effect of prediction quality decay
  // (skipped for flash frames, which give a misleading decay signal).
  if (!flash_detected) {
    gf_stats->last_loop_decay_rate = gf_stats->loop_decay_rate;
    gf_stats->loop_decay_rate = get_prediction_decay_rate(frame_info, stats);

    gf_stats->decay_accumulator =
        gf_stats->decay_accumulator * gf_stats->loop_decay_rate;

    // Monitor for static sections.
    if ((frames_since_key + cur_idx - 1) > 1) {
      gf_stats->zero_motion_accumulator =
          AOMMIN(gf_stats->zero_motion_accumulator,
                 get_zero_motion_factor(frame_info, stats));
    }
  }
}
486
// Convert accumulated GF group sums into per-frame averages. last_stat is
// the stats of the group's final frame; its third-ref contribution is
// excluded when computing avg_pcnt_third_ref_nolast.
static void average_gf_stats(const int total_frame,
                             const FIRSTPASS_STATS *last_stat,
                             GF_GROUP_STATS *gf_stats) {
  if (total_frame) {
    gf_stats->avg_sr_coded_error /= total_frame;
    gf_stats->avg_tr_coded_error /= total_frame;
    gf_stats->avg_pcnt_second_ref /= total_frame;
    if (total_frame - 1) {
      gf_stats->avg_pcnt_third_ref_nolast =
          (gf_stats->avg_pcnt_third_ref - last_stat->pcnt_third_ref) /
          (total_frame - 1);
    } else {
      // Single-frame group: nothing to exclude.
      gf_stats->avg_pcnt_third_ref_nolast =
          gf_stats->avg_pcnt_third_ref / total_frame;
    }
    gf_stats->avg_pcnt_third_ref /= total_frame;
    gf_stats->avg_new_mv_count /= total_frame;
    gf_stats->avg_wavelet_energy /= total_frame;
  }

  // Average only over frames that contributed a non-zero stdev.
  if (gf_stats->non_zero_stdev_count)
    gf_stats->avg_raw_err_stdev /= gf_stats->non_zero_stdev_count;
}
510
// Pack GF group statistics into a flat feature vector (21 floats).
// NOTE(review): the write order appears to be significant — presumably it
// must match the feature order expected by the consuming model; verify
// against the caller before reordering.
static void get_features_from_gf_stats(const GF_GROUP_STATS *gf_stats,
                                       const GF_FRAME_STATS *first_frame,
                                       const GF_FRAME_STATS *last_frame,
                                       const int num_mbs,
                                       const int constrained_gf_group,
                                       const int kf_zeromotion_pct,
                                       const int num_frames, float *features) {
  *features++ = (float)gf_stats->abs_mv_in_out_accumulator;
  *features++ = (float)(gf_stats->avg_new_mv_count / num_mbs);
  *features++ = (float)gf_stats->avg_pcnt_second_ref;
  *features++ = (float)gf_stats->avg_pcnt_third_ref;
  *features++ = (float)gf_stats->avg_pcnt_third_ref_nolast;
  *features++ = (float)(gf_stats->avg_sr_coded_error / num_mbs);
  *features++ = (float)(gf_stats->avg_tr_coded_error / num_mbs);
  *features++ = (float)(gf_stats->avg_wavelet_energy / num_mbs);
  *features++ = (float)(constrained_gf_group);
  *features++ = (float)gf_stats->decay_accumulator;
  *features++ = (float)(first_frame->frame_coded_error / num_mbs);
  *features++ = (float)(first_frame->frame_sr_coded_error / num_mbs);
  *features++ = (float)(first_frame->frame_tr_coded_error / num_mbs);
  *features++ = (float)(first_frame->frame_err / num_mbs);
  *features++ = (float)(kf_zeromotion_pct);
  *features++ = (float)(last_frame->frame_coded_error / num_mbs);
  *features++ = (float)(last_frame->frame_sr_coded_error / num_mbs);
  *features++ = (float)(last_frame->frame_tr_coded_error / num_mbs);
  *features++ = (float)num_frames;
  *features++ = (float)gf_stats->mv_ratio_accumulator;
  *features++ = (float)gf_stats->non_zero_stdev_count;
}
540
David Turner0fa8c492019-02-06 16:38:13 +0000541#define BOOST_FACTOR 12.5
Jingning Han17af7742019-09-17 16:58:03 -0700542static double baseline_err_per_mb(const FRAME_INFO *frame_info) {
543 unsigned int screen_area = frame_info->frame_height * frame_info->frame_width;
Paul Wilkins366fe022019-08-27 15:10:56 +0100544
545 // Use a different error per mb factor for calculating boost for
546 // different formats.
547 if (screen_area <= 640 * 360) {
548 return 500.0;
549 } else {
550 return 1000.0;
551 }
552}
David Turner0fa8c492019-02-06 16:38:13 +0000553
// Compute a boost score for one frame of an ARF/GF group, based on the
// intra/coded error ratio, corrected for inactive area, average Q and net
// motion into or out of the frame. Result is capped at max_boost (after Q
// correction).
static double calc_frame_boost(const RATE_CONTROL *rc,
                               const FRAME_INFO *frame_info,
                               const FIRSTPASS_STATS *this_frame,
                               double this_frame_mv_in_out, double max_boost) {
  double frame_boost;
  const double lq = av1_convert_qindex_to_q(rc->avg_frame_qindex[INTER_FRAME],
                                            frame_info->bit_depth);
  const double boost_q_correction = AOMMIN((0.5 + (lq * 0.015)), 1.5);
  const double active_area = calculate_active_area(frame_info, this_frame);
  int num_mbs = frame_info->num_mbs;

  // Correct for any inactive region in the image
  num_mbs = (int)AOMMAX(1, num_mbs * active_area);

  // Underlying boost factor is based on inter error ratio.
  frame_boost = AOMMAX(baseline_err_per_mb(frame_info) * num_mbs,
                       this_frame->intra_error * active_area) /
                DOUBLE_DIVIDE_CHECK(this_frame->coded_error);
  frame_boost = frame_boost * BOOST_FACTOR * boost_q_correction;

  // Increase boost for frames where new data coming into frame (e.g. zoom out).
  // Slightly reduce boost if there is a net balance of motion out of the frame
  // (zoom in). The range for this_frame_mv_in_out is -1.0 to +1.0.
  if (this_frame_mv_in_out > 0.0)
    frame_boost += frame_boost * (this_frame_mv_in_out * 2.0);
  // In the extreme case the boost is halved.
  else
    frame_boost += frame_boost * (this_frame_mv_in_out / 2.0);

  return AOMMIN(frame_boost, max_boost * boost_q_correction);
}
585
// Keyframe variant of the per-frame boost calculation. *sr_accumulator
// carries the growing second-reference error difference across frames and is
// updated in place (clamped at >= 0).
static double calc_kf_frame_boost(const RATE_CONTROL *rc,
                                  const FRAME_INFO *frame_info,
                                  const FIRSTPASS_STATS *this_frame,
                                  double *sr_accumulator, double max_boost) {
  double frame_boost;
  const double lq = av1_convert_qindex_to_q(rc->avg_frame_qindex[INTER_FRAME],
                                            frame_info->bit_depth);
  const double boost_q_correction = AOMMIN((0.50 + (lq * 0.015)), 2.00);
  const double active_area = calculate_active_area(frame_info, this_frame);
  int num_mbs = frame_info->num_mbs;

  // Correct for any inactive region in the image
  num_mbs = (int)AOMMAX(1, num_mbs * active_area);

  // Underlying boost factor is based on inter error ratio.
  frame_boost = AOMMAX(baseline_err_per_mb(frame_info) * num_mbs,
                       this_frame->intra_error * active_area) /
                DOUBLE_DIVIDE_CHECK(
                    (this_frame->coded_error + *sr_accumulator) * active_area);

  // Update the accumulator for second ref error difference.
  // This is intended to give an indication of how much the coded error is
  // increasing over time.
  *sr_accumulator += (this_frame->sr_coded_error - this_frame->coded_error);
  *sr_accumulator = AOMMAX(0.0, *sr_accumulator);

  // Q correction and scaling
  // The 40.0 value here is an experimentally derived baseline minimum.
  // This value is in line with the minimum per frame boost in the alt_ref
  // boost calculation.
  frame_boost = ((frame_boost + 40.0) * boost_q_correction);

  return AOMMIN(frame_boost, max_boost * boost_q_correction);
}
620
// Rescale a gfu_boost computed from a limited number of stats so it is
// comparable to a boost computed over the full projection length.
static int get_projected_gfu_boost(const RATE_CONTROL *rc, int gfu_boost,
                                   int frames_to_project,
                                   int num_stats_used_for_gfu_boost) {
  /*
   * If frames_to_project is equal to num_stats_used_for_gfu_boost,
   * it means that gfu_boost was calculated over frames_to_project to
   * begin with(ie; all stats required were available), hence return
   * the original boost.
   */
  if (num_stats_used_for_gfu_boost >= frames_to_project) return gfu_boost;

  double min_boost_factor = sqrt(rc->baseline_gf_interval);
  // Get the current tpl factor (number of frames = frames_to_project).
  double tpl_factor = av1_get_gfu_boost_projection_factor(
      min_boost_factor, MAX_GFUBOOST_FACTOR, frames_to_project);
  // Get the tpl factor when number of frames = num_stats_used_for_prior_boost.
  double tpl_factor_num_stats = av1_get_gfu_boost_projection_factor(
      min_boost_factor, MAX_GFUBOOST_FACTOR, num_stats_used_for_gfu_boost);
  // Scale the boost by the ratio of the two projection factors.
  int projected_gfu_boost =
      (int)rint((tpl_factor * gfu_boost) / tpl_factor_num_stats);
  return projected_gfu_boost;
}
643
#define GF_MAX_BOOST 90.0
#define MIN_DECAY_FACTOR 0.01
// Compute the boost for an ARF placed `offset` frames into the first pass
// stats. Contributions are accumulated over up to f_frames looking forward
// and b_frames looking backward from that position, each attenuated by the
// accumulated prediction-quality decay (floored at MIN_DECAY_FACTOR) and
// suppressed around detected flash frames. If non-NULL, *num_fpstats_used
// receives the number of stats actually read and *num_fpstats_required the
// ideal count (f_frames + b_frames); when both are provided the boost is
// projected up to compensate for missing stats. The result is floored at
// 50 per frame considered.
int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
                       FRAME_INFO *frame_info, int offset, int f_frames,
                       int b_frames, int *num_fpstats_used,
                       int *num_fpstats_required) {
  int i;
  GF_GROUP_STATS gf_stats;
  init_gf_stats(&gf_stats);
  double boost_score = (double)NORMAL_BOOST;
  int arf_boost;
  int flash_detected = 0;
  if (num_fpstats_used) *num_fpstats_used = 0;

  // Search forward from the proposed arf/next gf position.
  for (i = 0; i < f_frames; ++i) {
    const FIRSTPASS_STATS *this_frame = read_frame_stats(twopass, i + offset);
    // Ran out of first pass stats; stop early.
    if (this_frame == NULL) break;

    // Update the motion related elements to the boost calculation.
    accumulate_frame_motion_stats(this_frame, &gf_stats);

    // We want to discount the flash frame itself and the recovery
    // frame that follows as both will have poor scores.
    flash_detected = detect_flash(twopass, i + offset) ||
                     detect_flash(twopass, i + offset + 1);

    // Accumulate the effect of prediction quality decay, floored at
    // MIN_DECAY_FACTOR so distant frames still contribute a little.
    if (!flash_detected) {
      gf_stats.decay_accumulator *=
          get_prediction_decay_rate(frame_info, this_frame);
      gf_stats.decay_accumulator = gf_stats.decay_accumulator < MIN_DECAY_FACTOR
                                       ? MIN_DECAY_FACTOR
                                       : gf_stats.decay_accumulator;
    }

    boost_score +=
        gf_stats.decay_accumulator *
        calc_frame_boost(rc, frame_info, this_frame,
                         gf_stats.this_frame_mv_in_out, GF_MAX_BOOST);
    if (num_fpstats_used) (*num_fpstats_used)++;
  }

  arf_boost = (int)boost_score;

  // Reset for backward looking loop.
  boost_score = 0.0;
  init_gf_stats(&gf_stats);
  // Search backward towards last gf position.
  for (i = -1; i >= -b_frames; --i) {
    const FIRSTPASS_STATS *this_frame = read_frame_stats(twopass, i + offset);
    if (this_frame == NULL) break;

    // Update the motion related elements to the boost calculation.
    accumulate_frame_motion_stats(this_frame, &gf_stats);

    // We want to discount the flash frame itself and the recovery
    // frame that follows as both will have poor scores.
    flash_detected = detect_flash(twopass, i + offset) ||
                     detect_flash(twopass, i + offset + 1);

    // Cumulative effect of prediction quality decay.
    if (!flash_detected) {
      gf_stats.decay_accumulator *=
          get_prediction_decay_rate(frame_info, this_frame);
      gf_stats.decay_accumulator = gf_stats.decay_accumulator < MIN_DECAY_FACTOR
                                       ? MIN_DECAY_FACTOR
                                       : gf_stats.decay_accumulator;
    }

    boost_score +=
        gf_stats.decay_accumulator *
        calc_frame_boost(rc, frame_info, this_frame,
                         gf_stats.this_frame_mv_in_out, GF_MAX_BOOST);
    if (num_fpstats_used) (*num_fpstats_used)++;
  }
  arf_boost += (int)boost_score;

  // If some stats were unavailable, project the boost up to what it would
  // have been over the full window.
  if (num_fpstats_required) {
    *num_fpstats_required = f_frames + b_frames;
    if (num_fpstats_used) {
      arf_boost = get_projected_gfu_boost(rc, arf_boost, *num_fpstats_required,
                                          *num_fpstats_used);
    }
  }

  // Enforce a minimum boost of 50 per frame in the window.
  if (arf_boost < ((b_frames + f_frames) * 50))
    arf_boost = ((b_frames + f_frames) * 50);

  return arf_boost;
}
735
736// Calculate a section intra ratio used in setting max loop filter.
737static int calculate_section_intra_ratio(const FIRSTPASS_STATS *begin,
738 const FIRSTPASS_STATS *end,
739 int section_length) {
740 const FIRSTPASS_STATS *s = begin;
741 double intra_error = 0.0;
742 double coded_error = 0.0;
743 int i = 0;
744
745 while (s < end && i < section_length) {
746 intra_error += s->intra_error;
747 coded_error += s->coded_error;
748 ++s;
749 ++i;
750 }
751
752 return (int)(intra_error / DOUBLE_DIVIDE_CHECK(coded_error));
753}
754
Paul Wilkinsbe20bc22020-07-16 14:46:57 +0100755/*!\brief Calculates the bit target for this GF/ARF group
756 *
757 * \ingroup rate_control
758 *
759 * Calculates the total bits to allocate in this GF/ARF group.
760 *
761 * \param[in] cpi Top-level encoder structure
762 * \param[in] gf_group_err Cumulative coded error score for the
763 * frames making up this group.
764 *
765 * \return The target total number of bits for this GF/ARF group.
766 */
David Turner0fa8c492019-02-06 16:38:13 +0000767static int64_t calculate_total_gf_group_bits(AV1_COMP *cpi,
768 double gf_group_err) {
769 const RATE_CONTROL *const rc = &cpi->rc;
770 const TWO_PASS *const twopass = &cpi->twopass;
771 const int max_bits = frame_max_bits(rc, &cpi->oxcf);
772 int64_t total_group_bits;
773
774 // Calculate the bits to be allocated to the group as a whole.
775 if ((twopass->kf_group_bits > 0) && (twopass->kf_group_error_left > 0)) {
776 total_group_bits = (int64_t)(twopass->kf_group_bits *
777 (gf_group_err / twopass->kf_group_error_left));
778 } else {
779 total_group_bits = 0;
780 }
781
782 // Clamp odd edge cases.
783 total_group_bits = (total_group_bits < 0)
784 ? 0
785 : (total_group_bits > twopass->kf_group_bits)
786 ? twopass->kf_group_bits
787 : total_group_bits;
788
789 // Clip based on user supplied data rate variability limit.
790 if (total_group_bits > (int64_t)max_bits * rc->baseline_gf_interval)
791 total_group_bits = (int64_t)max_bits * rc->baseline_gf_interval;
792
793 return total_group_bits;
794}
795
// Calculate the number of bits to assign to boosted frames in a group.
// The boosted frames share `boost` allocation chunks against 100 chunks for
// each of the `frame_count` normal frames.
static int calculate_boost_bits(int frame_count, int boost,
                                int64_t total_group_bits) {
  // Return 0 for invalid inputs (could arise e.g. through rounding errors).
  if (!boost || total_group_bits <= 0) return 0;

  // With no normal frames the boosted frame takes the whole group budget.
  if (frame_count <= 0)
    return (int)(total_group_bits < INT_MAX ? total_group_bits : INT_MAX);

  int allocation_chunks = frame_count * 100 + boost;

  // Prevent overflow by rescaling large boost values.
  if (boost > 1023) {
    const int divisor = boost >> 10;
    boost /= divisor;
    allocation_chunks /= divisor;
  }

  // Number of extra bits for use in the boosted frame or frames.
  const int extra_bits =
      (int)((int64_t)boost * total_group_bits / allocation_chunks);
  return extra_bits > 0 ? extra_bits : 0;
}
819
// Calculate the boost factor based on the number of bits assigned, i.e. the
// inverse of calculate_boost_bits().
// NOTE(review): divides by (total_group_bits - bits); assumes bits is
// strictly less than total_group_bits — confirm at call sites.
static int calculate_boost_factor(int frame_count, int bits,
                                  int64_t total_group_bits) {
  aom_clear_system_state();
  return (int)(100.0 * frame_count * bits / (total_group_bits - bits));
}
827
// Reduce the number of bits assigned to keyframe or arf if necessary, to
// prevent bitrate spikes that may break level constraints.
// frame_type: 0: keyframe; 1: arf.
// Returns the (possibly reduced) bits_assigned; also updates rc->kf_boost or
// rc->gfu_boost to stay consistent with the reduced allocation.
static int adjust_boost_bits_for_target_level(const AV1_COMP *const cpi,
                                              RATE_CONTROL *const rc,
                                              int bits_assigned,
                                              int64_t group_bits,
                                              int frame_type) {
  const AV1_COMMON *const cm = &cpi->common;
  const SequenceHeader *const seq_params = &cm->seq_params;
  const int temporal_layer_id = cm->temporal_layer_id;
  const int spatial_layer_id = cm->spatial_layer_id;
  // Check every operating point that covers the current layers.
  for (int index = 0; index < seq_params->operating_points_cnt_minus_1 + 1;
       ++index) {
    if (!is_in_operating_point(seq_params->operating_point_idc[index],
                               temporal_layer_id, spatial_layer_id)) {
      continue;
    }

    const AV1_LEVEL target_level =
        cpi->level_params.target_seq_level_idx[index];
    // Skip operating points with no level target set.
    if (target_level >= SEQ_LEVELS) continue;

    assert(is_valid_seq_level_idx(target_level));

    // Per-frame bit budget implied by the level's maximum bitrate.
    const double level_bitrate_limit = av1_get_max_bitrate_for_level(
        target_level, seq_params->tier[0], seq_params->profile);
    const int target_bits_per_frame =
        (int)(level_bitrate_limit / cpi->framerate);
    if (frame_type == 0) {
      // Maximum bits for keyframe is 8 times the target_bits_per_frame.
      const int level_enforced_max_kf_bits = target_bits_per_frame * 8;
      if (bits_assigned > level_enforced_max_kf_bits) {
        // Recompute the boost to match the capped allocation.
        const int frames = rc->frames_to_key - 1;
        rc->kf_boost = calculate_boost_factor(
            frames, level_enforced_max_kf_bits, group_bits);
        bits_assigned = calculate_boost_bits(frames, rc->kf_boost, group_bits);
      }
    } else if (frame_type == 1) {
      // Maximum bits for arf is 4 times the target_bits_per_frame.
      const int level_enforced_max_arf_bits = target_bits_per_frame * 4;
      if (bits_assigned > level_enforced_max_arf_bits) {
        rc->gfu_boost = calculate_boost_factor(
            rc->baseline_gf_interval, level_enforced_max_arf_bits, group_bits);
        bits_assigned = calculate_boost_bits(rc->baseline_gf_interval,
                                             rc->gfu_boost, group_bits);
      }
    } else {
      // Only key frame (0) and arf (1) are supported.
      assert(0);
    }
  }

  return bits_assigned;
}
882
// Allocate bits to each frame in a GF / ARF group
// Fraction of the remaining ARF extra bits handed to each pyramid layer;
// the deepest ARF layer always takes everything that is left.
double layer_fraction[MAX_ARF_LAYERS + 1] = { 1.0, 0.70, 0.55, 0.60,
                                              0.60, 1.0, 1.0 };
// Distribute gf_group_bits across the frames of the group: every frame gets
// an equal base share, ARF/internal-ARF frames additionally get a per-layer
// share of gf_arf_bits, and overlay frames get 0.
static void allocate_gf_group_bits(GF_GROUP *gf_group, RATE_CONTROL *const rc,
                                   int64_t gf_group_bits, int gf_arf_bits,
                                   int key_frame, int use_arf) {
  int64_t total_group_bits = gf_group_bits;
  int base_frame_bits;
  const int gf_group_size = gf_group->size;
  int layer_frames[MAX_ARF_LAYERS + 1] = { 0 };

  // For key frames the frame target rate is already set and it
  // is also the golden frame.
  // === [frame_index == 0] ===
  int frame_index = !!key_frame;

  // Subtract the extra bits set aside for ARF frames from the Group Total
  if (use_arf) total_group_bits -= gf_arf_bits;

  // Base share per frame; the first frame after a key frame is excluded.
  int num_frames =
      AOMMAX(1, rc->baseline_gf_interval - (rc->frames_since_key == 0));
  base_frame_bits = (int)(total_group_bits / num_frames);

  // Check the number of frames in each layer in case we have a
  // non standard group length.
  int max_arf_layer = gf_group->max_layer_depth - 1;
  for (int idx = frame_index; idx < gf_group_size; ++idx) {
    if ((gf_group->update_type[idx] == ARF_UPDATE) ||
        (gf_group->update_type[idx] == INTNL_ARF_UPDATE)) {
      layer_frames[gf_group->layer_depth[idx]]++;
    }
  }

  // Allocate extra bits to each ARF layer; each layer takes its fraction of
  // what remains, split evenly between that layer's frames.
  int i;
  int layer_extra_bits[MAX_ARF_LAYERS + 1] = { 0 };
  for (i = 1; i <= max_arf_layer; ++i) {
    double fraction = (i == max_arf_layer) ? 1.0 : layer_fraction[i];
    layer_extra_bits[i] =
        (int)((gf_arf_bits * fraction) / AOMMAX(1, layer_frames[i]));
    gf_arf_bits -= (int)(gf_arf_bits * fraction);
  }

  // Now combine ARF layer and baseline bits to give total bits for each frame.
  int arf_extra_bits;
  for (int idx = frame_index; idx < gf_group_size; ++idx) {
    switch (gf_group->update_type[idx]) {
      case ARF_UPDATE:
      case INTNL_ARF_UPDATE:
        arf_extra_bits = layer_extra_bits[gf_group->layer_depth[idx]];
        gf_group->bit_allocation[idx] = base_frame_bits + arf_extra_bits;
        break;
      case INTNL_OVERLAY_UPDATE:
      case OVERLAY_UPDATE: gf_group->bit_allocation[idx] = 0; break;
      default: gf_group->bit_allocation[idx] = base_frame_bits; break;
    }
  }

  // Set the frame following the current GOP to 0 bit allocation. For ARF
  // groups, this next frame will be overlay frame, which is the first frame
  // in the next GOP. For GF group, next GOP will overwrite the rate allocation.
  // Setting this frame to use 0 bit (of out the current GOP budget) will
  // simplify logics in reference frame management.
  if (gf_group_size < MAX_STATIC_GF_GROUP_LENGTH)
    gf_group->bit_allocation[gf_group_size] = 0;
}
David Turner0fa8c492019-02-06 16:38:13 +0000949
950// Returns true if KF group and GF group both are almost completely static.
Aasaipriya46700182020-05-01 17:27:31 +0530951static INLINE int is_almost_static(double gf_zero_motion, int kf_zero_motion,
952 int is_lap_enabled) {
953 if (is_lap_enabled) {
954 /*
955 * when LAP enabled kf_zero_motion is not reliable, so use strict
956 * constraint on gf_zero_motion.
957 */
958 return (gf_zero_motion >= 0.999);
959 } else {
960 return (gf_zero_motion >= 0.995) &&
961 (kf_zero_motion >= STATIC_KF_GROUP_THRESH);
962 }
David Turner0fa8c492019-02-06 16:38:13 +0000963}
964
#define ARF_ABS_ZOOM_THRESH 4.4
// Decide whether the current GF group should be cut at frame_index.
// Returns 1 to cut, 0 to keep extending the group. A cut is signalled when:
// (a) the section transitions to a very still state after motion, (b) the
// minimum interval has passed and the motion accumulators exceed their
// thresholds, or (c) the maximum interval is exceeded and the content is not
// almost completely static.
static INLINE int detect_gf_cut(AV1_COMP *cpi, int frame_index, int cur_start,
                                int flash_detected, int active_max_gf_interval,
                                int active_min_gf_interval,
                                GF_GROUP_STATS *gf_stats) {
  RATE_CONTROL *const rc = &cpi->rc;
  TWO_PASS *const twopass = &cpi->twopass;
  InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
  // Motion breakout threshold for loop below depends on image size.
  const double mv_ratio_accumulator_thresh =
      (initial_dimensions->height + initial_dimensions->width) / 4.0;

  if (!flash_detected) {
    // Break clause to detect very still sections after motion. For example,
    // a static image after a fade or other transition.
    if (detect_transition_to_still(
            twopass, rc->min_gf_interval, frame_index - cur_start, 5,
            gf_stats->loop_decay_rate, gf_stats->last_loop_decay_rate)) {
      return 1;
    }
  }

  // Some conditions to breakout after min interval.
  if (frame_index - cur_start >= active_min_gf_interval &&
      // If possible don't break very close to a kf
      (rc->frames_to_key - frame_index >= rc->min_gf_interval) &&
      // Only cut at an odd offset from the group start.
      ((frame_index - cur_start) & 0x01) && !flash_detected &&
      (gf_stats->mv_ratio_accumulator > mv_ratio_accumulator_thresh ||
       gf_stats->abs_mv_in_out_accumulator > ARF_ABS_ZOOM_THRESH)) {
    return 1;
  }

  // If almost totally static, we will not use the max GF length later,
  // so we can continue for more frames.
  if (((frame_index - cur_start) >= active_max_gf_interval + 1) &&
      !is_almost_static(gf_stats->zero_motion_accumulator,
                        twopass->kf_zeromotion_pct, cpi->lap_enabled)) {
    return 1;
  }
  return 0;
}
1006
#define MIN_FWD_KF_INTERVAL 8
#define MIN_SHRINK_LEN 6  // the minimum length of gf if we are shrinking
// Length of the gaussian smoothing filter applied to first pass stats.
#define SMOOTH_FILT_LEN 7
#define HALF_FILT_LEN (SMOOTH_FILT_LEN / 2)
// Size of the neighborhood window used in scenecut / stability analysis.
#define WINDOW_SIZE 7
#define HALF_WIN (WINDOW_SIZE / 2)
// A 7-tap gaussian smooth filter (weights sum to ~1.0).
const double smooth_filt[SMOOTH_FILT_LEN] = { 0.006, 0.061, 0.242, 0.383,
                                              0.242, 0.061, 0.006 };
1016
// Smooth filter intra_error and coded_error in firstpass stats.
// If ignore[i]==1, the ith element should not be used in the filtering.
// Indices outside [start_idx, last_idx] are clamped to the range edges.
// NOTE(review): results are accumulated into filt_intra_err/filt_coded_err
// with +=, so the caller must zero-initialize both output arrays — confirm
// at call sites.
static void smooth_filter_stats(const FIRSTPASS_STATS *stats, const int *ignore,
                                int start_idx, int last_idx,
                                double *filt_intra_err,
                                double *filt_coded_err) {
  int i, j;
  // Pass 1: filter intra_error, skipping ignored frames and renormalizing
  // by the weight actually used.
  for (i = start_idx; i <= last_idx; i++) {
    double total_wt = 0;
    for (j = -HALF_FILT_LEN; j <= HALF_FILT_LEN; j++) {
      int idx = AOMMIN(AOMMAX(i + j, start_idx), last_idx);
      if (ignore[idx]) continue;

      filt_intra_err[i] +=
          smooth_filt[j + HALF_FILT_LEN] * stats[idx].intra_error;
      total_wt += smooth_filt[j + HALF_FILT_LEN];
    }
    if (total_wt > 0.01) {
      filt_intra_err[i] /= total_wt;
    } else {
      // Almost nothing usable in the window; fall back to the raw value.
      filt_intra_err[i] = stats[i].intra_error;
    }
  }
  // Pass 2: filter coded_error. A frame's coded error depends on both it and
  // its predecessor, so skip when either is ignored.
  for (i = start_idx; i <= last_idx; i++) {
    double total_wt = 0;
    for (j = -HALF_FILT_LEN; j <= HALF_FILT_LEN; j++) {
      int idx = AOMMIN(AOMMAX(i + j, start_idx), last_idx);
      // Coded error involves idx and idx - 1.
      if (ignore[idx] || (idx > 0 && ignore[idx - 1])) continue;

      filt_coded_err[i] +=
          smooth_filt[j + HALF_FILT_LEN] * stats[idx].coded_error;
      total_wt += smooth_filt[j + HALF_FILT_LEN];
    }
    if (total_wt > 0.01) {
      filt_coded_err[i] /= total_wt;
    } else {
      filt_coded_err[i] = stats[i].coded_error;
    }
  }
}
1058
// Compute a centered finite-difference gradient of `values` over the index
// range [start, last], writing results into `grad`. Endpoints degrade to
// one-sided differences; a single-point range yields a gradient of 0.
static void get_gradient(const double *values, int start, int last,
                         double *grad) {
  if (start == last) {
    grad[start] = 0;
    return;
  }
  for (int idx = start; idx <= last; idx++) {
    const int lo = (idx - 1 > start) ? idx - 1 : start;
    const int hi = (idx + 1 < last) ? idx + 1 : last;
    grad[idx] = (values[hi] - values[lo]) / (hi - lo);
  }
}
1072
// Scan [first, last] for the next likely scenecut and return its index, or
// -1 if none is found. A frame is reported when its coded/intra error ratio
// and its coded error both stand out (by a factor of 2) against the maxima
// in its +/- HALF_WIN neighborhood, and its second-ref error is not large
// relative to its coded error (which would suggest a flash rather than a
// cut). Frames flagged in `ignore` (and frames right after one) are skipped.
static int find_next_scenecut(const FIRSTPASS_STATS *const stats_start,
                              int first, int last, int *ignore) {
  // Identify unstable areas caused by scenecuts.
  // Compare each frame's coded/intra error ratio against the maximum ratio
  // found in the surrounding window. If only one frame yields a huge coded
  // error, it is likely a scenecut.
  double this_ratio, max_prev_ratio, max_next_ratio, max_prev_coded,
      max_next_coded;

  // Nothing to compare against in a single-frame range.
  if (last - first == 0) return -1;

  for (int i = first; i <= last; i++) {
    if (ignore[i] || (i > 0 && ignore[i - 1])) continue;
    double temp_intra = AOMMAX(stats_start[i].intra_error, 0.01);
    this_ratio = stats_start[i].coded_error / temp_intra;
    // Find the max coded/intra ratio and max coded error in the preceding
    // neighborhood.
    max_prev_ratio = 0;
    max_prev_coded = 0;
    for (int j = AOMMAX(first, i - HALF_WIN); j < i; j++) {
      if (ignore[j] || (j > 0 && ignore[j - 1])) continue;
      temp_intra = AOMMAX(stats_start[j].intra_error, 0.01);
      double temp_ratio = stats_start[j].coded_error / temp_intra;
      if (temp_ratio > max_prev_ratio) {
        max_prev_ratio = temp_ratio;
      }
      if (stats_start[j].coded_error > max_prev_coded) {
        max_prev_coded = stats_start[j].coded_error;
      }
    }
    // Find the max coded/intra ratio and max coded error in the following
    // neighborhood.
    max_next_ratio = 0;
    max_next_coded = 0;
    for (int j = i + 1; j <= AOMMIN(i + HALF_WIN, last); j++) {
      if (ignore[j] || (j > 0 && ignore[j - 1])) continue;
      temp_intra = AOMMAX(stats_start[j].intra_error, 0.01);
      double temp_ratio = stats_start[j].coded_error / temp_intra;
      if (temp_ratio > max_next_ratio) {
        max_next_ratio = temp_ratio;
      }
      if (stats_start[j].coded_error > max_next_coded) {
        max_next_coded = stats_start[j].coded_error;
      }
    }

    if (max_prev_ratio < 0.001 && max_next_ratio < 0.001) {
      // the ratios are very small, only check a small fixed threshold
      if (this_ratio < 0.02) continue;
    } else {
      // check if this frame has a larger ratio than the neighborhood
      double max_sr = stats_start[i].sr_coded_error;
      if (i < last) max_sr = AOMMAX(max_sr, stats_start[i + 1].sr_coded_error);
      double max_sr_fr_ratio =
          max_sr / AOMMAX(stats_start[i].coded_error, 0.01);

      // Second reference predicts well relative to the coded error, so this
      // is more likely a flash than a scenecut.
      if (max_sr_fr_ratio > 1.2) continue;
      if (this_ratio < 2 * AOMMAX(max_prev_ratio, max_next_ratio) &&
          stats_start[i].coded_error <
              2 * AOMMAX(max_prev_coded, max_next_coded)) {
        continue;
      }
    }
    return i;
  }
  return -1;
}
1138
1139static void mark_flashes(const FIRSTPASS_STATS *stats, int start_idx,
1140 int last_idx, int *is_flash) {
1141 int i;
1142 for (i = start_idx; i < last_idx; i++) {
1143 if (stats[i + 1].pcnt_second_ref > stats[i + 1].pcnt_inter &&
1144 stats[i + 1].pcnt_second_ref >= 0.5) {
1145 // this is a new flash frame
1146 is_flash[i] = 1;
1147 continue;
1148 }
1149 }
1150}
1151
// Remove the region with index next_region.
// parameter merge: 0: merge with previous; 1: merge with next; 2:
// merge with both, take type from previous if possible
// After removing, next_region will be the index of the next region.
static void remove_region(int merge, REGIONS *regions, int *num_regions,
                          int *next_region) {
  int k = *next_region;
  assert(k < *num_regions);
  // Removing the only region empties the list.
  if (*num_regions == 1) {
    *num_regions = 0;
    return;
  }
  // At the edges there is only one neighbor to merge with, so override the
  // requested merge direction.
  if (k == 0) {
    merge = 1;
  } else if (k == *num_regions - 1) {
    merge = 0;
  }
  // merge == 2 removes this region and its successor (both absorbed into
  // the previous region); otherwise only this region is removed.
  int num_merge = (merge == 2) ? 2 : 1;
  switch (merge) {
    case 0:
      // Extend the previous region over this one.
      regions[k - 1].last = regions[k].last;
      *next_region = k;
      break;
    case 1:
      // Extend the next region backwards over this one.
      regions[k + 1].start = regions[k].start;
      *next_region = k + 1;
      break;
    case 2:
      // Previous region swallows this one and the next.
      regions[k - 1].last = regions[k + 1].last;
      *next_region = k;
      break;
    default: assert(0);
  }
  // Compact the array over the removed entries. When merging forward
  // (merge == 1) the surviving region was shifted down by one as well.
  *num_regions -= num_merge;
  for (k = *next_region - (merge == 1); k < *num_regions; k++) {
    regions[k] = regions[k + num_merge];
  }
}
1190
// Insert a region in the cur_region_idx. The start and last should both be in
// the current region. After insertion, the cur_region_idx will point to the
// last region that was splitted from the original region.
// The original region may be split into up to three parts: a prefix keeping
// the original type, the inserted [start, last] span with the new type, and
// a suffix keeping the original type.
static void insert_region(int start, int last, REGION_TYPES type,
                          REGIONS *regions, int *num_regions,
                          int *cur_region_idx) {
  int k = *cur_region_idx;
  REGION_TYPES this_region_type = regions[k].type;
  int this_region_last = regions[k].last;
  // One extra region is needed for each side of [start, last] that does not
  // coincide with the original region boundary.
  int num_add = (start != regions[k].start) + (last != regions[k].last);
  // move the following regions further to the back
  for (int r = *num_regions - 1; r > k; r--) {
    regions[r + num_add] = regions[r];
  }
  *num_regions += num_add;
  if (start > regions[k].start) {
    // Keep a prefix of the original region before the inserted span.
    regions[k].last = start - 1;
    k++;
    regions[k].start = start;
  }
  regions[k].type = type;
  if (last < this_region_last) {
    // Close the inserted span and re-create the original type as a suffix.
    regions[k].last = last;
    k++;
    regions[k].start = last + 1;
    regions[k].last = this_region_last;
    regions[k].type = this_region_type;
  } else {
    regions[k].last = this_region_last;
  }
  *cur_region_idx = k;
}
1223
// Estimate the noise variance of a region from the first pass stats, using
// triples of consecutive frames; triples touching a flash frame are skipped.
// NOTE(review): despite the field name, avg_noise_var is set to the MINIMUM
// estimate observed over the region (0 if no valid triple exists).
static void estimate_region_noise(const FIRSTPASS_STATS *stats,
                                  const int *is_flash, REGIONS *region) {
  double C1, C2, C3, noise;
  int count = 0;
  // -1 marks "no estimate yet".
  region->avg_noise_var = -1;
  for (int i = region->start + 2; i <= region->last; i++) {
    if (is_flash[i] || is_flash[i - 1] || is_flash[i - 2]) continue;

    // Cross terms relating intra energy to the energy removed by inter
    // coding over the last three frames.
    C1 = stats[i - 1].intra_error *
         (stats[i].intra_error - stats[i].coded_error);
    C2 = stats[i - 2].intra_error *
         (stats[i - 1].intra_error - stats[i - 1].coded_error);
    C3 = stats[i - 2].intra_error *
         (stats[i].intra_error - stats[i].sr_coded_error);
    // Only meaningful when coding actually removed energy in all terms.
    if (C1 <= 0 || C2 <= 0 || C3 <= 0) continue;
    C1 = sqrt(C1);
    C2 = sqrt(C2);
    C3 = sqrt(C3);

    // The residual intra energy not explained by the correlation terms is
    // attributed to noise, floored at 0.01.
    noise = stats[i - 1].intra_error - C1 * C2 / C3;
    noise = AOMMAX(noise, 0.01);
    region->avg_noise_var = (region->avg_noise_var == -1)
                                ? noise
                                : AOMMIN(noise, region->avg_noise_var);
    count++;
  }
  if (count == 0) {
    // No usable triple found; report zero noise.
    region->avg_noise_var = 0;
  }
}
1255
// Analyze the correlation coefficient of each frame with its previous frame in
// a region. Also get the average of stats inside a region.
// Before calling this function, the region's noise variance is needed.
// Per-frame coefficients are written to coeff[]; per-region averages
// (avg_cor_coeff, avg_sr_fr_ratio, avg_intra_err, avg_coded_err) are stored
// on the region.
// NOTE(review): reads stats[i - 1] at the region start; assumes the stats
// pointer is valid at index start - 1 — confirm at call sites.
static void analyze_region(const FIRSTPASS_STATS *stats, int region_idx,
                           REGIONS *regions, double *coeff) {
  double cor_coeff;

  int i, k = region_idx;
  regions[k].avg_cor_coeff = 0;
  regions[k].avg_sr_fr_ratio = 0;
  regions[k].avg_intra_err = 0;
  regions[k].avg_coded_err = 0;

  // For the first region, the second-ref ratio is skipped for its first
  // frame (no valid earlier second reference).
  int check_first_sr = (k != 0);

  for (i = regions[k].start; i <= regions[k].last; i++) {
    // Correlation with the previous frame, derived from how much intra
    // energy inter coding removed, discounting the region's noise floor.
    double C = sqrt(AOMMAX(stats[i - 1].intra_error *
                               (stats[i].intra_error - stats[i].coded_error),
                           0.001));
    cor_coeff =
        C / AOMMAX(stats[i - 1].intra_error - regions[k].avg_noise_var, 0.001);

    if (i > regions[k].start || check_first_sr) {
      double num_frames =
          (double)(regions[k].last - regions[k].start + check_first_sr);
      double max_coded_error =
          AOMMAX(stats[i].coded_error, stats[i - 1].coded_error);
      double this_ratio =
          stats[i].sr_coded_error / AOMMAX(max_coded_error, 0.001);
      regions[k].avg_sr_fr_ratio += this_ratio / num_frames;
    }

    regions[k].avg_intra_err +=
        stats[i].intra_error / (double)(regions[k].last - regions[k].start + 1);
    regions[k].avg_coded_err +=
        stats[i].coded_error / (double)(regions[k].last - regions[k].start + 1);

    // Rescale to account for the differing noise-adjusted intra energies of
    // the two frames.
    coeff[i] =
        cor_coeff *
        sqrt(
            AOMMAX(stats[i - 1].intra_error - regions[k].avg_noise_var, 0.001) /
            AOMMAX(stats[i].intra_error - regions[k].avg_noise_var, 0.001));
    // clip correlation coefficient.
    coeff[i] = AOMMIN(AOMMAX(coeff[i], 0), 1);

    regions[k].avg_cor_coeff +=
        coeff[i] / (double)(regions[k].last - regions[k].start + 1);
  }
}
1305
// Calculate the regions stats of every region. Uses the stable regions to
// estimate noise variance of other regions. Then call analyze_region for each.
static void get_region_stats(const FIRSTPASS_STATS *stats, const int *is_flash,
                             REGIONS *regions, double *coeff, int num_regions) {
  int k, count_stable = 0;
  // Analyze stable regions.
  for (k = 0; k < num_regions; k++) {
    if (regions[k].type == STABLE_REGION) {
      estimate_region_noise(stats, is_flash, regions + k);
      analyze_region(stats, k, regions, coeff);
      count_stable++;
    }
  }

  if (count_stable == 0) {
    // no stable region, just use the lowest noise variance estimated.
    double lowest_noise = -1;
    for (k = 0; k < num_regions; k++) {
      // Scenecut regions cannot give a meaningful noise estimate.
      if (regions[k].type == SCENECUT_REGION) continue;
      estimate_region_noise(stats, is_flash, regions + k);
      // Ignore near-zero estimates when searching for the minimum.
      if (regions[k].avg_noise_var < 0.01) continue;
      if (lowest_noise < 0 || lowest_noise > regions[k].avg_noise_var) {
        lowest_noise = regions[k].avg_noise_var;
      }
    }
    lowest_noise = AOMMAX(lowest_noise, 0);
    // Apply the single shared estimate to every region and analyze them all.
    for (k = 0; k < num_regions; k++) {
      regions[k].avg_noise_var = lowest_noise;
      analyze_region(stats, k, regions, coeff);
    }
    return;
  }

  // Analyze other regions
  for (k = 0; k < num_regions; k++) {
    if (regions[k].type != STABLE_REGION) {
      // use the average of the nearest previous and next stable regions
      int count = 0;
      regions[k].avg_noise_var = 0;
      for (int r = k - 1; r >= 0; r--) {
        if (regions[r].type == STABLE_REGION) {
          count++;
          regions[k].avg_noise_var += regions[r].avg_noise_var;
          break;
        }
      }
      for (int r = k + 1; r < num_regions; r++) {
        if (regions[r].type == STABLE_REGION) {
          count++;
          regions[k].avg_noise_var += regions[r].avg_noise_var;
          break;
        }
      }
      if (count) {
        regions[k].avg_noise_var /= (double)count;
      }
      analyze_region(stats, k, regions, coeff);
    }
  }
}
1366
// Find tentative stable regions
// Partition [this_start, this_last] into alternating STABLE_REGION /
// HIGH_VAR_REGION spans based on the mean/variance of intra and coded
// errors in a +/- HALF_WIN window around each frame. Returns the number of
// regions written to `regions`.
static int find_stable_regions(const FIRSTPASS_STATS *stats,
                               const double *grad_coded, const int *ignore,
                               int this_start, int this_last,
                               REGIONS *regions) {
  int i, j, k = 0;
  regions[k].start = this_start;
  for (i = this_start; i <= this_last; i++) {
    // Check mean and variance of stats in a window
    double mean_intra = 0.001, var_intra = 0.001;
    double mean_coded = 0.001, var_coded = 0.001;
    int count = 0;
    for (j = -HALF_WIN; j <= HALF_WIN; j++) {
      // Clamp window indices to the valid range.
      int idx = AOMMIN(AOMMAX(i + j, this_start), this_last);
      // Skip ignored frames, and frames right after one (their coded error
      // is unreliable).
      if (ignore[idx] || (idx > 0 && ignore[idx - 1])) continue;
      mean_intra += stats[idx].intra_error;
      var_intra += stats[idx].intra_error * stats[idx].intra_error;
      mean_coded += stats[idx].coded_error;
      var_coded += stats[idx].coded_error * stats[idx].coded_error;
      count++;
    }

    REGION_TYPES cur_type;
    if (count > 0) {
      mean_intra /= (double)count;
      var_intra /= (double)count;
      mean_coded /= (double)count;
      var_coded /= (double)count;
      // Stable: low relative variance of intra error, low relative variance
      // and gradient of coded error (or negligible coded error), and coded
      // error small compared to intra error.
      int is_intra_stable = (var_intra / (mean_intra * mean_intra) < 1.03);
      int is_coded_stable = (var_coded / (mean_coded * mean_coded) < 1.04 &&
                             fabs(grad_coded[i]) / mean_coded < 0.05) ||
                            mean_coded / mean_intra < 0.05;
      int is_coded_small = mean_coded < 0.5 * mean_intra;
      cur_type = (is_intra_stable && is_coded_stable && is_coded_small)
                     ? STABLE_REGION
                     : HIGH_VAR_REGION;
    } else {
      // Entire window ignored; treat as high-variance.
      cur_type = HIGH_VAR_REGION;
    }

    // mark a new region if type changes
    if (i == regions[k].start) {
      // first frame in the region
      regions[k].type = cur_type;
    } else if (cur_type != regions[k].type) {
      // Append a new region
      regions[k].last = i - 1;
      regions[k + 1].start = i;
      regions[k + 1].type = cur_type;
      k++;
    }
  }
  regions[k].last = this_last;
  return k + 1;
}
1422
1423// Clean up regions that should be removed or merged.
1424static void cleanup_regions(REGIONS *regions, int *num_regions) {
1425 int k = 0;
1426 while (k < *num_regions) {
bohanli00f0bfe2020-10-02 15:17:34 -07001427 if ((k > 0 && regions[k - 1].type == regions[k].type &&
1428 regions[k].type != SCENECUT_REGION) ||
bohanlifa84c6d2020-09-25 14:27:49 -07001429 regions[k].last < regions[k].start) {
1430 remove_region(0, regions, num_regions, &k);
1431 } else {
1432 k++;
1433 }
1434 }
1435}
1436
1437// Remove regions that are of type and shorter than length.
1438// Merge it with its neighboring regions.
1439static void remove_short_regions(REGIONS *regions, int *num_regions,
1440 REGION_TYPES type, int length) {
1441 int k = 0;
1442 while (k < *num_regions && (*num_regions) > 1) {
1443 if ((regions[k].last - regions[k].start + 1 < length &&
1444 regions[k].type == type)) {
1445 // merge current region with the previous and next regions
1446 remove_region(2, regions, num_regions, &k);
1447 } else {
1448 k++;
1449 }
1450 }
1451 cleanup_regions(regions, num_regions);
1452}
1453
1454static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
1455 const int *is_flash,
1456 const double *grad, REGIONS *regions,
1457 double *coeff, int *num_regions) {
1458 int i, j, k;
1459 // Remove regions that are too short. Likely noise.
1460 remove_short_regions(regions, num_regions, STABLE_REGION, HALF_WIN);
1461 remove_short_regions(regions, num_regions, HIGH_VAR_REGION, HALF_WIN);
1462
1463 get_region_stats(stats, is_flash, regions, coeff, *num_regions);
1464
1465 // Adjust region boundaries. The thresholds are empirically obtained, but
1466 // overall the performance is not very sensitive to small changes to them.
1467 for (k = 0; k < *num_regions; k++) {
1468 if (regions[k].type == STABLE_REGION) continue;
1469 if (k > 0) {
1470 // Adjust previous boundary.
1471 // First find the average intra/coded error in the previous
1472 // neighborhood.
1473 double avg_intra_err = 0, avg_coded_err = 0, avg_coeff = 0;
1474 int starti = AOMMAX(regions[k - 1].last - WINDOW_SIZE + 1,
1475 regions[k - 1].start + 1);
1476 int lasti = regions[k - 1].last;
1477 int counti = 0;
1478 for (i = starti; i <= lasti; i++) {
1479 avg_intra_err += stats[i].intra_error;
1480 avg_coded_err += stats[i].coded_error;
1481 avg_coeff += coeff[i];
1482 counti++;
1483 }
1484 if (counti > 0) {
1485 avg_intra_err = AOMMAX(avg_intra_err / (double)counti, 0.001);
1486 avg_coded_err /= AOMMAX(avg_coded_err / (double)counti, 0.001);
1487 avg_coeff /= AOMMIN(avg_intra_err / (double)counti, 0.99999);
1488 int count_coded = 0, count_grad = 0;
1489 for (j = lasti + 1; j <= regions[k].last; j++) {
1490 int intra_close =
1491 fabs(stats[j].intra_error - avg_intra_err) / avg_intra_err < 0.1;
1492 int coded_close =
1493 fabs(stats[j].coded_error - avg_coded_err) / avg_coded_err < 0.15;
1494 int grad_small = fabs(grad[j]) / avg_coded_err < 0.05;
1495 int coded_small = stats[j].coded_error / avg_intra_err < 0.03;
1496 int coeff_close =
1497 (1 - coeff[j]) / (1 - avg_coeff) < 1.5 || coeff[j] > 0.995;
1498 if (!coeff_close || (!coded_close && !coded_small)) count_coded--;
1499 if (!grad_small && !coded_small) count_grad--;
1500
1501 if (intra_close && count_coded >= 0 && count_grad >= 0) {
1502 // this frame probably belongs to the previous stable region
1503 regions[k - 1].last = j;
1504 regions[k].start = j + 1;
1505 } else {
1506 break;
1507 }
1508 }
1509 }
1510 } // if k > 0
1511 if (k < *num_regions - 1) {
1512 // Adjust next boundary.
1513 // First find the average intra/coded error in the next neighborhood.
1514 double avg_intra_err = 0, avg_coded_err = 0, avg_coeff = 0;
1515 int starti = regions[k + 1].start;
1516 int lasti = AOMMIN(regions[k + 1].last - 1,
1517 regions[k + 1].start + WINDOW_SIZE - 1);
1518 int counti = 0;
1519 for (i = starti; i <= lasti; i++) {
1520 avg_intra_err += stats[i].intra_error;
1521 avg_coded_err += stats[i + 1].coded_error;
1522 avg_coeff += coeff[i];
1523 counti++;
1524 }
1525 if (counti > 0) {
1526 avg_intra_err = AOMMAX(avg_intra_err / (double)counti, 0.001);
1527 avg_coded_err /= AOMMAX(avg_coded_err / (double)counti, 0.001);
1528 avg_coeff /= AOMMIN(avg_intra_err / (double)counti, 0.99999);
1529 // At the boundary, coded error is large, but still the frame is stable
1530 int count_coded = 1, count_grad = 1;
1531 for (j = starti - 1; j >= regions[k].start; j--) {
1532 int intra_close =
1533 fabs(stats[j].intra_error - avg_intra_err) / avg_intra_err < 0.1;
1534 int coded_close =
1535 fabs(stats[j + 1].coded_error - avg_coded_err) / avg_coded_err <
1536 0.15;
1537 int grad_small = fabs(grad[j + 1]) / avg_coded_err < 0.05;
1538 int coded_small = stats[j + 1].coded_error / avg_intra_err < 0.03;
1539 int coeff_close =
1540 (1 - coeff[j + 1]) / (1 - avg_coeff) < 1.5 || coeff[j] > 0.995;
1541 if (!coeff_close || (!coded_close && !coded_small)) count_coded--;
1542 if (!grad_small && !coded_small) count_grad--;
1543 if (intra_close && count_coded >= 0 && count_grad >= 0) {
1544 // this frame probably belongs to the next stable region
1545 regions[k + 1].start = j;
1546 regions[k].last = j - 1;
1547 } else {
1548 break;
1549 }
1550 }
1551 }
1552 } // if k < *num_regions - 1
1553 } // end of loop over all regions
1554
1555 cleanup_regions(regions, num_regions);
1556 remove_short_regions(regions, num_regions, HIGH_VAR_REGION, HALF_WIN);
1557 get_region_stats(stats, is_flash, regions, coeff, *num_regions);
1558
1559 // If a stable regions has higher error than neighboring high var regions,
1560 // or if the stable region has a lower average correlation,
1561 // then it should be merged with them
1562 k = 0;
1563 while (k < *num_regions && (*num_regions) > 1) {
1564 if (regions[k].type == STABLE_REGION &&
1565 ((k > 0 && // previous regions
1566 (regions[k].avg_coded_err > regions[k - 1].avg_coded_err ||
bohanli00f0bfe2020-10-02 15:17:34 -07001567 regions[k].avg_cor_coeff < regions[k - 1].avg_cor_coeff)) &&
bohanlifa84c6d2020-09-25 14:27:49 -07001568 (k < *num_regions - 1 && // next region
1569 (regions[k].avg_coded_err > regions[k + 1].avg_coded_err ||
1570 regions[k].avg_cor_coeff < regions[k + 1].avg_cor_coeff)))) {
1571 // merge current region with the previous and next regions
1572 remove_region(2, regions, num_regions, &k);
1573 analyze_region(stats, k - 1, regions, coeff);
bohanli00f0bfe2020-10-02 15:17:34 -07001574 } else if (regions[k].type == HIGH_VAR_REGION &&
1575 ((k > 0 && // previous regions
1576 (regions[k].avg_coded_err < regions[k - 1].avg_coded_err ||
1577 regions[k].avg_cor_coeff > regions[k - 1].avg_cor_coeff)) &&
1578 (k < *num_regions - 1 && // next region
1579 (regions[k].avg_coded_err < regions[k + 1].avg_coded_err ||
1580 regions[k].avg_cor_coeff > regions[k + 1].avg_cor_coeff)))) {
1581 // merge current region with the previous and next regions
1582 remove_region(2, regions, num_regions, &k);
1583 analyze_region(stats, k - 1, regions, coeff);
bohanlifa84c6d2020-09-25 14:27:49 -07001584 } else {
1585 k++;
1586 }
1587 }
1588
1589 remove_short_regions(regions, num_regions, STABLE_REGION, WINDOW_SIZE);
1590 remove_short_regions(regions, num_regions, HIGH_VAR_REGION, HALF_WIN);
1591}
1592
// Identify blending (cross-fade) regions inside the unstable regions.
// A blending region shows a sustained, consistent change in intra error
// (monotone ramp up or down). This function:
//   1. Scans each non-stable region for runs of consistent large intra-error
//      gradient and marks them BLENDING_REGION.
//   2. Demotes low-correlation or single-frame blendings to HIGH_VAR_REGION.
//   3. Stitches together blendings split by a "dip" (down-ramp followed by an
//      up-ramp), or separates genuinely distinct consecutive blendings.
static void find_blending_regions(const FIRSTPASS_STATS *stats,
                                  const int *is_flash, REGIONS *regions,
                                  int *num_regions, double *coeff) {
  int i, k = 0;
  // Blending regions will have large content change, therefore will have a
  // large consistent change in intra error.
  int count_stable = 0;
  while (k < *num_regions) {
    if (regions[k].type == STABLE_REGION) {
      k++;
      count_stable++;
      continue;
    }
    // dir tracks the sign of the current intra-error ramp: +1 rising,
    // -1 falling, 0 no active ramp.
    int dir = 0;
    int start = 0, last;
    for (i = regions[k].start; i <= regions[k].last; i++) {
      // First mark the frames that have a consistent large change of intra
      // error. Flash frames (and their successors) are skipped.
      // NOTE(review): when a region starts at frame 0 and frame 0 is not a
      // flash, stats[i - 1] below reads before the array — confirm callers
      // guarantee regions here never start at index 0, or guard this.
      if (is_flash[i] || (i > 0 && is_flash[i - 1])) continue;
      double grad = stats[i].intra_error - stats[i - 1].intra_error;
      int large_change = fabs(grad) / AOMMAX(stats[i].intra_error, 0.01) > 0.05;
      int this_dir = 0;
      if (large_change) {
        this_dir = (grad > 0) ? 1 : -1;
      }
      // the current trend continues
      if (dir == this_dir) continue;
      if (dir != 0) {
        // Mark the end of a new large change group and add it
        last = i - 1;
        insert_region(start, last, BLENDING_REGION, regions, num_regions, &k);
      }
      dir = this_dir;
      start = i;
    }
    // Close out a ramp still open at the end of the region.
    if (dir != 0) {
      last = regions[k].last;
      insert_region(start, last, BLENDING_REGION, regions, num_regions, &k);
    }
    k++;
  }

  // If the blending region has very low correlation, mark it as high variance
  // since we probably cannot benefit from it anyways.
  get_region_stats(stats, is_flash, regions, coeff, *num_regions);
  for (k = 0; k < *num_regions; k++) {
    if (regions[k].type != BLENDING_REGION) continue;
    if (regions[k].last == regions[k].start || regions[k].avg_cor_coeff < 0.6 ||
        count_stable == 0)
      regions[k].type = HIGH_VAR_REGION;
  }
  get_region_stats(stats, is_flash, regions, coeff, *num_regions);

  // It is possible for blending to result in a "dip" in intra error (first
  // decrease then increase). Therefore we need to find the dip and combine the
  // two regions.
  k = 1;
  while (k < *num_regions) {
    if (k < *num_regions - 1 && regions[k].type == HIGH_VAR_REGION) {
      // Check if this short high variance region is actually in the middle of
      // a blending region.
      if (regions[k - 1].type == BLENDING_REGION &&
          regions[k + 1].type == BLENDING_REGION &&
          regions[k].last - regions[k].start < 3) {
        // Direction of the intra-error ramp at the end of each neighbor.
        int prev_dir = (stats[regions[k - 1].last].intra_error -
                        stats[regions[k - 1].last - 1].intra_error) > 0
                           ? 1
                           : -1;
        int next_dir = (stats[regions[k + 1].last].intra_error -
                        stats[regions[k + 1].last - 1].intra_error) > 0
                           ? 1
                           : -1;
        if (prev_dir < 0 && next_dir > 0) {
          // This is possibly a mid region of blending. Check the ratios
          double ratio_thres = AOMMIN(regions[k - 1].avg_sr_fr_ratio,
                                      regions[k + 1].avg_sr_fr_ratio) *
                               0.95;
          if (regions[k].avg_sr_fr_ratio > ratio_thres) {
            regions[k].type = BLENDING_REGION;
            remove_region(2, regions, num_regions, &k);
            analyze_region(stats, k - 1, regions, coeff);
            continue;
          }
        }
      }
    }
    // Check if we have a pair of consecutive blending regions.
    if (regions[k - 1].type == BLENDING_REGION &&
        regions[k].type == BLENDING_REGION) {
      int prev_dir = (stats[regions[k - 1].last].intra_error -
                      stats[regions[k - 1].last - 1].intra_error) > 0
                         ? 1
                         : -1;
      int next_dir = (stats[regions[k].last].intra_error -
                      stats[regions[k].last - 1].intra_error) > 0
                         ? 1
                         : -1;

      // if both are too short, no need to check
      int total_length = regions[k].last - regions[k - 1].start + 1;
      if (total_length < 4) {
        regions[k - 1].type = HIGH_VAR_REGION;
        k++;
        continue;
      }

      int to_merge = 0;
      if (prev_dir < 0 && next_dir > 0) {
        // In this case we check the last frame in the previous region.
        double prev_length =
            (double)(regions[k - 1].last - regions[k - 1].start + 1);
        double last_ratio, ratio_thres;
        if (prev_length < 2.01) {
          // if the previous region is very short
          double max_coded_error =
              AOMMAX(stats[regions[k - 1].last].coded_error,
                     stats[regions[k - 1].last - 1].coded_error);
          last_ratio = stats[regions[k - 1].last].sr_coded_error /
                       AOMMAX(max_coded_error, 0.001);
          ratio_thres = regions[k].avg_sr_fr_ratio * 0.95;
        } else {
          double max_coded_error =
              AOMMAX(stats[regions[k - 1].last].coded_error,
                     stats[regions[k - 1].last - 1].coded_error);
          last_ratio = stats[regions[k - 1].last].sr_coded_error /
                       AOMMAX(max_coded_error, 0.001);
          // Average ratio of the previous region excluding its last frame.
          double prev_ratio =
              (regions[k - 1].avg_sr_fr_ratio * prev_length - last_ratio) /
              (prev_length - 1.0);
          ratio_thres = AOMMIN(prev_ratio, regions[k].avg_sr_fr_ratio) * 0.95;
        }
        if (last_ratio > ratio_thres) {
          to_merge = 1;
        }
      }

      if (to_merge) {
        remove_region(0, regions, num_regions, &k);
        analyze_region(stats, k - 1, regions, coeff);
        continue;
      } else {
        // These are possibly two separate blending regions. Mark the boundary
        // frame as HIGH_VAR_REGION to separate the two.
        int prev_k = k - 1;
        insert_region(regions[prev_k].last, regions[prev_k].last,
                      HIGH_VAR_REGION, regions, num_regions, &prev_k);
        analyze_region(stats, prev_k, regions, coeff);
        k = prev_k + 1;
        analyze_region(stats, k, regions, coeff);
      }
    }
    k++;
  }
  cleanup_regions(regions, num_regions);
}
1748
1749// Clean up decision for blendings. Remove blending regions that are too short.
1750// Also if a very short high var region is between a blending and a stable
1751// region, just merge it with one of them.
1752static void cleanup_blendings(REGIONS *regions, int *num_regions) {
1753 int k = 0;
1754 while (k<*num_regions && * num_regions> 1) {
1755 int is_short_blending = regions[k].type == BLENDING_REGION &&
1756 regions[k].last - regions[k].start + 1 < 5;
1757 int is_short_hv = regions[k].type == HIGH_VAR_REGION &&
1758 regions[k].last - regions[k].start + 1 < 5;
1759 int has_stable_neighbor =
1760 ((k > 0 && regions[k - 1].type == STABLE_REGION) ||
1761 (k < *num_regions - 1 && regions[k + 1].type == STABLE_REGION));
1762 int has_blend_neighbor =
1763 ((k > 0 && regions[k - 1].type == BLENDING_REGION) ||
1764 (k < *num_regions - 1 && regions[k + 1].type == BLENDING_REGION));
1765 int total_neighbors = (k > 0) + (k < *num_regions - 1);
1766
1767 if (is_short_blending ||
1768 (is_short_hv &&
1769 has_stable_neighbor + has_blend_neighbor >= total_neighbors)) {
1770 // Remove this region.Try to determine whether to combine it with the
1771 // previous or next region.
1772 int merge;
1773 double prev_diff =
1774 (k > 0)
1775 ? fabs(regions[k].avg_cor_coeff - regions[k - 1].avg_cor_coeff)
1776 : 1;
1777 double next_diff =
1778 (k < *num_regions - 1)
1779 ? fabs(regions[k].avg_cor_coeff - regions[k + 1].avg_cor_coeff)
1780 : 1;
1781 // merge == 0 means to merge with previous, 1 means to merge with next
1782 merge = prev_diff > next_diff;
1783 remove_region(merge, regions, num_regions, &k);
1784 } else {
1785 k++;
1786 }
1787 }
1788 cleanup_regions(regions, num_regions);
1789}
1790
// Identify stable and unstable regions from first pass stats.
// stats_start points to the first frame to analyze; total_frames is the
// number of frames available from there. offset is the distance from the
// current frame to the frame stats_start points to, and is added to the
// region boundaries at the end so they are expressed in the caller's frame
// indexing. Results go to regions / total_regions; per-frame correlation
// coefficients are written through cor_coeff.
static void identify_regions(const FIRSTPASS_STATS *const stats_start,
                             int total_frames, int offset, REGIONS *regions,
                             int *total_regions, double *cor_coeff) {
  int k;
  if (total_frames <= 1) return;

  // coeff is the per-frame correlation buffer aligned to stats_start.
  double *coeff = cor_coeff + offset;

  // store the initial decisions
  REGIONS temp_regions[MAX_FIRSTPASS_ANALYSIS_FRAMES];
  av1_zero_array(temp_regions, MAX_FIRSTPASS_ANALYSIS_FRAMES);
  int is_flash[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
  // buffers for filtered stats
  double filt_intra_err[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
  double filt_coded_err[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
  double grad_coded[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };

  int cur_region = 0, this_start = 0, this_last;

  // find possible flash frames
  mark_flashes(stats_start, 0, total_frames - 1, is_flash);

  // first get the obvious scenecuts
  int next_scenecut = -1;

  // Process the clip one scenecut-delimited segment at a time: classify
  // regions inside each segment, then append a one-frame SCENECUT_REGION
  // and continue after it.
  do {
    next_scenecut =
        find_next_scenecut(stats_start, this_start, total_frames - 1, is_flash);
    this_last = (next_scenecut >= 0) ? (next_scenecut - 1) : total_frames - 1;
    // low-pass filter the needed stats
    smooth_filter_stats(stats_start, is_flash, this_start, this_last,
                        filt_intra_err, filt_coded_err);
    get_gradient(filt_coded_err, this_start, this_last, grad_coded);

    // find tentative stable regions and unstable regions
    int num_regions = find_stable_regions(stats_start, grad_coded, is_flash,
                                          this_start, this_last, temp_regions);
    adjust_unstable_region_bounds(stats_start, is_flash, grad_coded,
                                  temp_regions, coeff, &num_regions);

    get_region_stats(stats_start, is_flash, temp_regions, coeff, num_regions);

    // Try to identify blending regions in the unstable regions
    find_blending_regions(stats_start, is_flash, temp_regions, &num_regions,
                          coeff);
    cleanup_blendings(temp_regions, &num_regions);

    // The flash points should all be considered high variance points
    k = 0;
    while (k < num_regions) {
      if (temp_regions[k].type != STABLE_REGION) {
        k++;
        continue;
      }
      int start = temp_regions[k].start;
      int last = temp_regions[k].last;
      // Carve each flash frame inside a stable region out as its own
      // one-frame HIGH_VAR_REGION.
      for (int i = start; i <= last; i++) {
        if (is_flash[i]) {
          insert_region(i, i, HIGH_VAR_REGION, temp_regions, &num_regions, &k);
        }
      }
      k++;
    }
    cleanup_regions(temp_regions, &num_regions);

    // copy the regions in the scenecut group
    for (k = 0; k < num_regions; k++) {
      regions[k + cur_region] = temp_regions[k];
    }
    cur_region += num_regions;

    // add the scenecut region
    if (next_scenecut > -1) {
      // add the scenecut region, and find the next scenecut
      regions[cur_region].type = SCENECUT_REGION;
      regions[cur_region].start = next_scenecut;
      regions[cur_region].last = next_scenecut;
      cur_region++;
      this_start = next_scenecut + 1;
    }
  } while (next_scenecut >= 0);

  *total_regions = cur_region;
  get_region_stats(stats_start, is_flash, regions, coeff, *total_regions);

  for (k = 0; k < *total_regions; k++) {
    // If scenecuts are very minor (high correlation across the cut), demote
    // them to high variance so they do not force a GF-group boundary.
    if (regions[k].type != SCENECUT_REGION || regions[k].avg_cor_coeff < 0.8) {
      continue;
    }
    regions[k].type = HIGH_VAR_REGION;
  }
  cleanup_regions(regions, total_regions);
  get_region_stats(stats_start, is_flash, regions, coeff, *total_regions);

  // Shift region boundaries into the caller's frame indexing.
  for (k = 0; k < *total_regions; k++) {
    regions[k].start += offset;
    regions[k].last += offset;
  }
}
1895
bohanli00f0bfe2020-10-02 15:17:34 -07001896static int find_regions_index(const REGIONS *regions, int num_regions,
1897 int frame_idx) {
1898 for (int k = 0; k < num_regions; k++) {
1899 if (regions[k].start <= frame_idx && regions[k].last >= frame_idx) {
1900 return k;
1901 }
1902 }
1903 return -1;
1904}
bohanliea3b2b12020-09-25 09:52:03 -07001905
bohanlid165b192020-06-10 21:46:29 -07001906/*!\brief Determine the length of future GF groups.
1907 *
1908 * \ingroup gf_group_algo
1909 * This function decides the gf group length of future frames in batch
1910 *
1911 * \param[in] cpi Top-level encoder structure
1912 * \param[in] max_gop_length Maximum length of the GF group
1913 * \param[in] max_intervals Maximum number of intervals to decide
1914 *
1915 * \return Nothing is returned. Instead, cpi->rc.gf_intervals is
1916 * changed to store the decided GF group lengths.
1917 */
bohanlie1515342020-02-25 11:58:11 -08001918static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
1919 int max_intervals) {
bohanlia964eea2020-02-13 11:48:00 -08001920 RATE_CONTROL *const rc = &cpi->rc;
bohanlia964eea2020-02-13 11:48:00 -08001921 TWO_PASS *const twopass = &cpi->twopass;
1922 FIRSTPASS_STATS next_frame;
1923 const FIRSTPASS_STATS *const start_pos = twopass->stats_in;
1924 FRAME_INFO *frame_info = &cpi->frame_info;
1925 int i;
1926
bohanlia964eea2020-02-13 11:48:00 -08001927 int flash_detected;
bohanlia964eea2020-02-13 11:48:00 -08001928
1929 aom_clear_system_state();
1930 av1_zero(next_frame);
1931
1932 if (has_no_stats_stage(cpi)) {
bohanlie1515342020-02-25 11:58:11 -08001933 for (i = 0; i < MAX_NUM_GF_INTERVALS; i++) {
Mufaddal Chakera65caa3d2020-03-17 14:16:42 +05301934 rc->gf_intervals[i] = AOMMIN(rc->max_gf_interval, max_gop_length);
bohanlia964eea2020-02-13 11:48:00 -08001935 }
1936 rc->cur_gf_index = 0;
bohanlie1515342020-02-25 11:58:11 -08001937 rc->intervals_till_gf_calculate_due = MAX_NUM_GF_INTERVALS;
bohanlia964eea2020-02-13 11:48:00 -08001938 return;
1939 }
1940
1941 // TODO(urvang): Try logic to vary min and max interval based on q.
1942 const int active_min_gf_interval = rc->min_gf_interval;
1943 const int active_max_gf_interval =
Jingning Han491198d2020-02-13 21:53:41 -08001944 AOMMIN(rc->max_gf_interval, max_gop_length);
Urvang Joshi262c5352020-07-07 23:41:59 -07001945 const int min_shrink_int = AOMMAX(MIN_SHRINK_LEN, active_min_gf_interval);
bohanlia964eea2020-02-13 11:48:00 -08001946
Jingning Han9af02fa2020-08-20 23:01:01 -07001947 i = (rc->frames_since_key == 0);
bohanlie1515342020-02-25 11:58:11 -08001948 max_intervals = cpi->lap_enabled ? 1 : max_intervals;
bohanlia964eea2020-02-13 11:48:00 -08001949 int count_cuts = 1;
bohanli00f0bfe2020-10-02 15:17:34 -07001950 // If cpi->gf_state.arf_gf_boost_lst is 0, we are starting with a KF or GF.
1951 int cur_start = -1 + !cpi->gf_state.arf_gf_boost_lst, cur_last;
Jingning Han9af02fa2020-08-20 23:01:01 -07001952 int cut_pos[MAX_NUM_GF_INTERVALS + 1] = { -1 };
bohanlia964eea2020-02-13 11:48:00 -08001953 int cut_here;
bohanli04652962020-03-18 17:02:50 -07001954 GF_GROUP_STATS gf_stats;
1955 init_gf_stats(&gf_stats);
bohanlia964eea2020-02-13 11:48:00 -08001956 while (count_cuts < max_intervals + 1) {
bohanlia964eea2020-02-13 11:48:00 -08001957 // reaches next key frame, break here
bohanli00f0bfe2020-10-02 15:17:34 -07001958 if (i >= rc->frames_to_key + rc->next_is_fwd_key) {
1959 cut_here = 2;
1960 } else if (i - cur_start >= rc->static_scene_max_gf_interval) {
1961 // reached maximum len, but nothing special yet (almost static)
1962 // let's look at the next interval
bohanlia964eea2020-02-13 11:48:00 -08001963 cut_here = 1;
bohanli00f0bfe2020-10-02 15:17:34 -07001964 } else if (EOF == input_stats(twopass, &next_frame)) {
bohanlia964eea2020-02-13 11:48:00 -08001965 // reaches last frame, break
bohanli00f0bfe2020-10-02 15:17:34 -07001966 cut_here = 2;
1967 } else {
bohanlia964eea2020-02-13 11:48:00 -08001968 // Test for the case where there is a brief flash but the prediction
1969 // quality back to an earlier frame is then restored.
1970 flash_detected = detect_flash(twopass, 0);
bohanli04652962020-03-18 17:02:50 -07001971 // TODO(bohanli): remove redundant accumulations here, or unify
1972 // this and the ones in define_gf_group
Cheng Chen1439bfc2020-04-03 13:52:28 -07001973 accumulate_next_frame_stats(&next_frame, frame_info, flash_detected,
1974 rc->frames_since_key, i, &gf_stats);
bohanlia964eea2020-02-13 11:48:00 -08001975
bohanli04652962020-03-18 17:02:50 -07001976 cut_here = detect_gf_cut(cpi, i, cur_start, flash_detected,
1977 active_max_gf_interval, active_min_gf_interval,
1978 &gf_stats);
bohanlia964eea2020-02-13 11:48:00 -08001979 }
1980 if (cut_here) {
1981 cur_last = i - 1; // the current last frame in the gf group
bohanli00f0bfe2020-10-02 15:17:34 -07001982 int ori_last = cur_last;
1983 // The region frame idx does not start from the same frame as cur_start
1984 // and cur_last. Need to offset them.
1985 int offset = rc->frames_since_key - rc->regions_offset;
1986 REGIONS *regions = rc->regions;
1987 int num_regions = rc->num_regions;
Vishesh7e9873d2020-06-08 15:41:33 +05301988 if (cpi->oxcf.kf_cfg.fwd_kf_enabled && rc->next_is_fwd_key) {
bohanli18ce7a02020-04-29 15:17:22 -07001989 const int frames_left = rc->frames_to_key - i;
1990 const int min_int = AOMMIN(MIN_FWD_KF_INTERVAL, active_min_gf_interval);
bohanli00f0bfe2020-10-02 15:17:34 -07001991 if (frames_left < min_int && frames_left > 0) {
bohanli18ce7a02020-04-29 15:17:22 -07001992 cur_last = rc->frames_to_key - min_int - 1;
1993 }
1994 }
Jingning Han9af02fa2020-08-20 23:01:01 -07001995
bohanli00f0bfe2020-10-02 15:17:34 -07001996 int scenecut_idx = -1;
bohanlid3e939d2020-02-13 11:53:08 -08001997 // only try shrinking if interval smaller than active_max_gf_interval
bohanli00f0bfe2020-10-02 15:17:34 -07001998 if (cur_last - cur_start <= active_max_gf_interval &&
1999 cur_last > cur_start) {
2000 // find the region indices of where the first and last frame belong.
2001 int k_start =
2002 find_regions_index(regions, num_regions, cur_start + offset);
2003 int k_last =
2004 find_regions_index(regions, num_regions, cur_last + offset);
2005 if (cur_start + offset == 0) k_start = 0;
bohanlia964eea2020-02-13 11:48:00 -08002006
bohanli00f0bfe2020-10-02 15:17:34 -07002007 // See if we have a scenecut in between
2008 for (int r = k_start + 1; r <= k_last; r++) {
bohanli2f331442020-10-15 15:35:37 -07002009 if (regions[r].type == SCENECUT_REGION &&
2010 regions[r].last - offset - cur_start > active_min_gf_interval) {
bohanli00f0bfe2020-10-02 15:17:34 -07002011 scenecut_idx = r;
bohanlic92d48a2020-03-02 10:42:05 -08002012 break;
2013 }
bohanlid3e939d2020-02-13 11:53:08 -08002014 }
bohanlid3e939d2020-02-13 11:53:08 -08002015
bohanli00f0bfe2020-10-02 15:17:34 -07002016 // if the found scenecut is very close to the end, ignore it.
2017 if (regions[num_regions - 1].last - regions[scenecut_idx].last < 4) {
2018 scenecut_idx = -1;
2019 }
2020
2021 if (scenecut_idx != -1) {
2022 // If we have a scenecut, then stop at it.
2023 // TODO(bohanli): add logic here to stop before the scenecut and for
2024 // the next gop start from the scenecut with GF
2025 int is_minor_sc = (regions[scenecut_idx].avg_cor_coeff > 0.6);
2026 cur_last = regions[scenecut_idx].last - offset - !is_minor_sc;
2027 } else {
2028 int is_last_analysed = (k_last == num_regions - 1) &&
2029 (cur_last + offset == regions[k_last].last);
2030 int not_enough_regions =
2031 k_last - k_start <=
2032 1 + (regions[k_start].type == SCENECUT_REGION);
2033 // if we are very close to the end, then do not shrink since it may
2034 // introduce intervals that are too short
2035 if (!(is_last_analysed && not_enough_regions)) {
2036 int found = 0;
2037 // first try to end at a stable area
2038 for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
2039 if (regions[find_regions_index(regions, num_regions, j + offset)]
2040 .type == STABLE_REGION) {
2041 cur_last = j;
2042 found = 1;
2043 break;
2044 }
2045 }
2046 if (!found) {
2047 // Could not find stable point,
2048 // try to find an OK point (high correlation, not blending)
2049 for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
2050 REGIONS *cur_region =
2051 regions +
2052 find_regions_index(regions, num_regions, j + offset);
2053 double avg_coeff = cur_region->avg_cor_coeff;
2054 if (rc->cor_coeff[j + offset] > avg_coeff &&
2055 cur_region->type != BLENDING_REGION) {
2056 cur_last = j;
2057 found = 1;
2058 break;
2059 }
2060 }
2061 }
2062 if (!found) {
2063 // Could not find a better point,
2064 // try not to cut in blending areas
2065 for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
2066 REGIONS *cur_region =
2067 regions +
2068 find_regions_index(regions, num_regions, j + offset);
2069 if (cur_region->type != BLENDING_REGION) {
2070 cur_last = j;
2071 break;
2072 }
2073 }
2074 }
2075 // if cannot find anything, just cut at the original place.
bohanlid3e939d2020-02-13 11:53:08 -08002076 }
2077 }
2078 }
bohanlia964eea2020-02-13 11:48:00 -08002079 cut_pos[count_cuts] = cur_last;
2080 count_cuts++;
2081
2082 // reset pointers to the shrinked location
2083 twopass->stats_in = start_pos + cur_last;
2084 cur_start = cur_last;
Jingning Hand853caa2021-03-19 14:34:05 -07002085 int cur_region_idx =
2086 find_regions_index(regions, num_regions, cur_start + 1 + offset);
2087 if (cur_region_idx >= 0)
2088 if (regions[cur_region_idx].type == SCENECUT_REGION) cur_start++;
2089
bohanlia964eea2020-02-13 11:48:00 -08002090 i = cur_last;
2091
bohanli00f0bfe2020-10-02 15:17:34 -07002092 if (cut_here > 1 && cur_last == ori_last) break;
2093
bohanlia964eea2020-02-13 11:48:00 -08002094 // reset accumulators
bohanli04652962020-03-18 17:02:50 -07002095 init_gf_stats(&gf_stats);
bohanlia964eea2020-02-13 11:48:00 -08002096 }
Jingning Han9af02fa2020-08-20 23:01:01 -07002097 ++i;
bohanlia964eea2020-02-13 11:48:00 -08002098 }
2099
2100 // save intervals
2101 rc->intervals_till_gf_calculate_due = count_cuts - 1;
2102 for (int n = 1; n < count_cuts; n++) {
Jingning Han9af02fa2020-08-20 23:01:01 -07002103 rc->gf_intervals[n - 1] = cut_pos[n] - cut_pos[n - 1];
bohanlia964eea2020-02-13 11:48:00 -08002104 }
2105 rc->cur_gf_index = 0;
2106 twopass->stats_in = start_pos;
2107}
David Turner0fa8c492019-02-06 16:38:13 +00002108
Mufaddal Chakera13ed3f12020-03-10 15:46:17 +05302109static void correct_frames_to_key(AV1_COMP *cpi) {
2110 int lookahead_size =
Jingning Hanba67fe82020-09-29 10:10:12 -07002111 (int)av1_lookahead_depth(cpi->lookahead, cpi->compressor_stage);
Mufaddal Chakera13ed3f12020-03-10 15:46:17 +05302112 if (lookahead_size <
2113 av1_lookahead_pop_sz(cpi->lookahead, cpi->compressor_stage)) {
Urvang Joshi93fedf52021-02-01 18:33:18 -08002114 assert(IMPLIES(cpi->oxcf.pass != 0 && cpi->frames_left > 0,
2115 lookahead_size == cpi->frames_left));
Mufaddal Chakera13ed3f12020-03-10 15:46:17 +05302116 cpi->rc.frames_to_key = AOMMIN(cpi->rc.frames_to_key, lookahead_size);
Mufaddal Chakera7a090f22020-07-09 15:09:51 +05302117 } else if (cpi->frames_left > 0) {
2118 // Correct frames to key based on limit
2119 cpi->rc.frames_to_key = AOMMIN(cpi->rc.frames_to_key, cpi->frames_left);
Mufaddal Chakera13ed3f12020-03-10 15:46:17 +05302120 }
2121}
2122
Paul Wilkinse8af1522020-07-09 15:05:01 +01002123/*!\brief Define a GF group in one pass mode when no look ahead stats are
2124 * available.
2125 *
2126 * \ingroup gf_group_algo
2127 * This function defines the structure of a GF group, along with various
2128 * parameters regarding bit-allocation and quality setup in the special
2129 * case of one pass encoding where no lookahead stats are avialable.
2130 *
2131 * \param[in] cpi Top-level encoder structure
Paul Wilkinse8af1522020-07-09 15:05:01 +01002132 *
2133 * \return Nothing is returned. Instead, cpi->gf_group is changed.
2134 */
Jingning Han9af02fa2020-08-20 23:01:01 -07002135static void define_gf_group_pass0(AV1_COMP *cpi) {
Marco Paniconifda31aa2019-07-16 22:24:38 +00002136 RATE_CONTROL *const rc = &cpi->rc;
2137 GF_GROUP *const gf_group = &cpi->gf_group;
Vishesh7e9873d2020-06-08 15:41:33 +05302138 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Vishesh5b50e6d2020-06-10 19:20:07 +05302139 const GFConfig *const gf_cfg = &oxcf->gf_cfg;
Marco Paniconifda31aa2019-07-16 22:24:38 +00002140 int target;
2141
Vishesh734eff92020-06-20 21:46:36 +05302142 if (oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
Marco Paniconifda31aa2019-07-16 22:24:38 +00002143 av1_cyclic_refresh_set_golden_update(cpi);
bohanlia964eea2020-02-13 11:48:00 -08002144 } else {
2145 rc->baseline_gf_interval = rc->gf_intervals[rc->cur_gf_index];
2146 rc->intervals_till_gf_calculate_due--;
2147 rc->cur_gf_index++;
2148 }
Marco Paniconifda31aa2019-07-16 22:24:38 +00002149
Mufaddal Chakera13ed3f12020-03-10 15:46:17 +05302150 // correct frames_to_key when lookahead queue is flushing
2151 correct_frames_to_key(cpi);
2152
Marco Paniconifda31aa2019-07-16 22:24:38 +00002153 if (rc->baseline_gf_interval > rc->frames_to_key)
2154 rc->baseline_gf_interval = rc->frames_to_key;
2155
2156 rc->gfu_boost = DEFAULT_GF_BOOST;
2157 rc->constrained_gf_group =
2158 (rc->baseline_gf_interval >= rc->frames_to_key) ? 1 : 0;
Marco Paniconifda31aa2019-07-16 22:24:38 +00002159
Vishesh5b50e6d2020-06-10 19:20:07 +05302160 gf_group->max_layer_depth_allowed = oxcf->gf_cfg.gf_max_pyr_height;
Jingning Han466fde92019-09-11 14:26:26 -07002161
2162 // Rare case when the look-ahead is less than the target GOP length, can't
2163 // generate ARF frame.
Vishesh5b50e6d2020-06-10 19:20:07 +05302164 if (rc->baseline_gf_interval > gf_cfg->lag_in_frames ||
2165 !is_altref_enabled(gf_cfg->lag_in_frames, gf_cfg->enable_auto_arf) ||
2166 rc->baseline_gf_interval < rc->min_gf_interval)
Jingning Han466fde92019-09-11 14:26:26 -07002167 gf_group->max_layer_depth_allowed = 0;
2168
Marco Paniconifda31aa2019-07-16 22:24:38 +00002169 // Set up the structure of this Group-Of-Pictures (same as GF_GROUP)
Jingning Han9af02fa2020-08-20 23:01:01 -07002170 av1_gop_setup_structure(cpi);
Marco Paniconifda31aa2019-07-16 22:24:38 +00002171
Marco Paniconifda31aa2019-07-16 22:24:38 +00002172 // Allocate bits to each of the frames in the GF group.
2173 // TODO(sarahparker) Extend this to work with pyramid structure.
2174 for (int cur_index = 0; cur_index < gf_group->size; ++cur_index) {
2175 const FRAME_UPDATE_TYPE cur_update_type = gf_group->update_type[cur_index];
Vishesh39e74092020-06-16 17:13:48 +05302176 if (oxcf->rc_cfg.mode == AOM_CBR) {
Elliott Karpilovskybd163432020-08-19 14:52:29 -07002177 if (cur_update_type == KF_UPDATE) {
Marco Paniconicea99e22019-07-16 18:36:31 -07002178 target = av1_calc_iframe_target_size_one_pass_cbr(cpi);
Marco Paniconifda31aa2019-07-16 22:24:38 +00002179 } else {
Marco Paniconicea99e22019-07-16 18:36:31 -07002180 target = av1_calc_pframe_target_size_one_pass_cbr(cpi, cur_update_type);
Marco Paniconifda31aa2019-07-16 22:24:38 +00002181 }
2182 } else {
Elliott Karpilovskybd163432020-08-19 14:52:29 -07002183 if (cur_update_type == KF_UPDATE) {
Marco Paniconicea99e22019-07-16 18:36:31 -07002184 target = av1_calc_iframe_target_size_one_pass_vbr(cpi);
Marco Paniconifda31aa2019-07-16 22:24:38 +00002185 } else {
Marco Paniconicea99e22019-07-16 18:36:31 -07002186 target = av1_calc_pframe_target_size_one_pass_vbr(cpi, cur_update_type);
Marco Paniconifda31aa2019-07-16 22:24:38 +00002187 }
2188 }
2189 gf_group->bit_allocation[cur_index] = target;
2190 }
2191}
2192
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302193static INLINE void set_baseline_gf_interval(AV1_COMP *cpi, int arf_position,
2194 int active_max_gf_interval,
2195 int use_alt_ref,
2196 int is_final_pass) {
2197 RATE_CONTROL *const rc = &cpi->rc;
2198 TWO_PASS *const twopass = &cpi->twopass;
2199 // Set the interval until the next gf.
2200 // If forward keyframes are enabled, ensure the final gf group obeys the
2201 // MIN_FWD_KF_INTERVAL.
bohanli6112e022020-04-29 15:16:27 -07002202 const int is_last_kf =
2203 (twopass->stats_in - arf_position + rc->frames_to_key) >=
2204 twopass->stats_buf_ctx->stats_in_end;
2205
Vishesh7e9873d2020-06-08 15:41:33 +05302206 if (cpi->oxcf.kf_cfg.fwd_kf_enabled && use_alt_ref && !is_last_kf &&
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302207 cpi->rc.next_is_fwd_key) {
bohanli2541b8a2020-10-14 17:33:13 -07002208 if (arf_position == rc->frames_to_key + 1) {
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302209 rc->baseline_gf_interval = arf_position;
2210 // if the last gf group will be smaller than MIN_FWD_KF_INTERVAL
bohanli2541b8a2020-10-14 17:33:13 -07002211 } else if (rc->frames_to_key + 1 - arf_position <
2212 AOMMAX(MIN_FWD_KF_INTERVAL, rc->min_gf_interval)) {
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302213 // if possible, merge the last two gf groups
bohanli2541b8a2020-10-14 17:33:13 -07002214 if (rc->frames_to_key + 1 <= active_max_gf_interval) {
2215 rc->baseline_gf_interval = rc->frames_to_key + 1;
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302216 if (is_final_pass) rc->intervals_till_gf_calculate_due = 0;
2217 // if merging the last two gf groups creates a group that is too long,
2218 // split them and force the last gf group to be the MIN_FWD_KF_INTERVAL
2219 } else {
bohanli2541b8a2020-10-14 17:33:13 -07002220 rc->baseline_gf_interval = rc->frames_to_key + 1 - MIN_FWD_KF_INTERVAL;
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302221 if (is_final_pass) rc->intervals_till_gf_calculate_due = 0;
2222 }
2223 } else {
Jingning Han184fdfe2020-09-27 15:04:35 -07002224 rc->baseline_gf_interval = arf_position;
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302225 }
2226 } else {
Jingning Han9af02fa2020-08-20 23:01:01 -07002227 rc->baseline_gf_interval = arf_position;
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302228 }
2229}
2230
bohanli04652962020-03-18 17:02:50 -07002231// initialize GF_GROUP_STATS
2232static void init_gf_stats(GF_GROUP_STATS *gf_stats) {
2233 gf_stats->gf_group_err = 0.0;
2234 gf_stats->gf_group_raw_error = 0.0;
2235 gf_stats->gf_group_skip_pct = 0.0;
2236 gf_stats->gf_group_inactive_zone_rows = 0.0;
2237
2238 gf_stats->mv_ratio_accumulator = 0.0;
2239 gf_stats->decay_accumulator = 1.0;
2240 gf_stats->zero_motion_accumulator = 1.0;
2241 gf_stats->loop_decay_rate = 1.0;
2242 gf_stats->last_loop_decay_rate = 1.0;
2243 gf_stats->this_frame_mv_in_out = 0.0;
2244 gf_stats->mv_in_out_accumulator = 0.0;
2245 gf_stats->abs_mv_in_out_accumulator = 0.0;
2246
2247 gf_stats->avg_sr_coded_error = 0.0;
2248 gf_stats->avg_tr_coded_error = 0.0;
2249 gf_stats->avg_pcnt_second_ref = 0.0;
2250 gf_stats->avg_pcnt_third_ref = 0.0;
2251 gf_stats->avg_pcnt_third_ref_nolast = 0.0;
2252 gf_stats->avg_new_mv_count = 0.0;
2253 gf_stats->avg_wavelet_energy = 0.0;
2254 gf_stats->avg_raw_err_stdev = 0.0;
2255 gf_stats->non_zero_stdev_count = 0;
bohanli04652962020-03-18 17:02:50 -07002256}
2257
// Analyse and define a gf/arf group.
#define MAX_GF_BOOST 5400
/*!\brief Define a GF group.
 *
 * \ingroup gf_group_algo
 * This function defines the structure of a GF group, along with various
 * parameters regarding bit-allocation and quality setup.
 *
 * \param[in]    cpi             Top-level encoder structure
 * \param[in]    this_frame      First pass statistics structure
 * \param[in]    frame_params    Structure with frame parameters
 * \param[in]    max_gop_length  Maximum length of the GF group
 * \param[in]    is_final_pass   Whether this is the final pass for the
 *                               GF group, or a trial (non-zero)
 *
 * \return Nothing is returned. Instead, cpi->gf_group is changed.
 */
static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
                            EncodeFrameParams *frame_params, int max_gop_length,
                            int is_final_pass) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  TWO_PASS *const twopass = &cpi->twopass;
  FIRSTPASS_STATS next_frame;
  // Remember the stats read position so it can be rewound after each scan.
  const FIRSTPASS_STATS *const start_pos = twopass->stats_in;
  GF_GROUP *gf_group = &cpi->gf_group;
  FRAME_INFO *frame_info = &cpi->frame_info;
  const GFConfig *const gf_cfg = &oxcf->gf_cfg;
  const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
  int i;
  int flash_detected;
  int64_t gf_group_bits;
  const int is_intra_only = rc->frames_since_key == 0;

  cpi->internal_altref_allowed = (gf_cfg->gf_max_pyr_height > 1);

  // Reset the GF group data structures unless this is a key
  // frame in which case it will already have been done.
  if (!is_intra_only) {
    av1_zero(cpi->gf_group);
  }

  aom_clear_system_state();
  av1_zero(next_frame);

  // One-pass mode with no stats: use the simplified group definition.
  if (has_no_stats_stage(cpi)) {
    define_gf_group_pass0(cpi);
    return;
  }

  // correct frames_to_key when lookahead queue is emptying
  if (cpi->lap_enabled) {
    correct_frames_to_key(cpi);
  }

  GF_GROUP_STATS gf_stats;
  init_gf_stats(&gf_stats);
  GF_FRAME_STATS first_frame_stats, last_frame_stats;

  // ARFs may be disabled entirely only when no minimum pyramid height is set.
  const int can_disable_arf = !gf_cfg->gf_min_pyr_height;

  // Load stats for the current frame.
  double mod_frame_err =
      calculate_modified_err(frame_info, twopass, oxcf, this_frame);

  // Note the error of the frame at the start of the group. This will be
  // the GF frame error if we code a normal gf.
  first_frame_stats.frame_err = mod_frame_err;
  first_frame_stats.frame_coded_error = this_frame->coded_error;
  first_frame_stats.frame_sr_coded_error = this_frame->sr_coded_error;
  first_frame_stats.frame_tr_coded_error = this_frame->tr_coded_error;

  // If this is a key frame or the overlay from a previous arf then
  // the error score / cost of this frame has already been accounted for.

  // TODO(urvang): Try logic to vary min and max interval based on q.
  const int active_min_gf_interval = rc->min_gf_interval;
  const int active_max_gf_interval =
      AOMMIN(rc->max_gf_interval, max_gop_length);

  // First scan over the chosen interval: accumulate the modified error of
  // each frame, and capture the stats of the first frame of the group.
  i = is_intra_only;
  // get the determined gf group length from rc->gf_intervals
  while (i < rc->gf_intervals[rc->cur_gf_index]) {
    // read in the next frame
    if (EOF == input_stats(twopass, &next_frame)) break;
    // Accumulate error score of frames in this gf group.
    mod_frame_err =
        calculate_modified_err(frame_info, twopass, oxcf, &next_frame);
    // accumulate stats for this frame
    accumulate_this_frame_stats(&next_frame, mod_frame_err, &gf_stats);

    if (i == 0) {
      first_frame_stats.frame_err = mod_frame_err;
      first_frame_stats.frame_coded_error = next_frame.coded_error;
      first_frame_stats.frame_sr_coded_error = next_frame.sr_coded_error;
      first_frame_stats.frame_tr_coded_error = next_frame.tr_coded_error;
    }

    ++i;
  }

  // Rewind and scan the same frames again, this time gathering motion and
  // flash statistics that look one frame ahead (hence the priming read).
  reset_fpf_position(twopass, start_pos);

  i = is_intra_only;
  input_stats(twopass, &next_frame);
  while (i < rc->gf_intervals[rc->cur_gf_index]) {
    // read in the next frame
    if (EOF == input_stats(twopass, &next_frame)) break;

    // Test for the case where there is a brief flash but the prediction
    // quality back to an earlier frame is then restored.
    flash_detected = detect_flash(twopass, 0);

    // accumulate stats for next frame
    accumulate_next_frame_stats(&next_frame, frame_info, flash_detected,
                                rc->frames_since_key, i, &gf_stats);

    ++i;
  }

  // From here on, i is the (possibly later adjusted) group length.
  i = rc->gf_intervals[rc->cur_gf_index];

  // save the errs for the last frame
  last_frame_stats.frame_coded_error = next_frame.coded_error;
  last_frame_stats.frame_sr_coded_error = next_frame.sr_coded_error;
  last_frame_stats.frame_tr_coded_error = next_frame.tr_coded_error;

  // Only commit the interval bookkeeping on the final (non-trial) pass.
  if (is_final_pass) {
    rc->intervals_till_gf_calculate_due--;
    rc->cur_gf_index++;
  }

  // Was the group length constrained by the requirement for a new KF?
  rc->constrained_gf_group = (i >= rc->frames_to_key) ? 1 : 0;

  const int num_mbs = (oxcf->resize_cfg.resize_mode != RESIZE_NONE)
                          ? cpi->initial_mbs
                          : cm->mi_params.MBs;
  assert(num_mbs > 0);

  average_gf_stats(i, &next_frame, &gf_stats);

  // Disable internal ARFs for "still" gf groups.
  // zero_motion_accumulator: minimum percentage of (0,0) motion;
  // avg_sr_coded_error: average of the SSE per pixel of each frame;
  // avg_raw_err_stdev: average of the standard deviation of (0,0)
  // motion error per block of each frame.
  const int can_disable_internal_arfs = gf_cfg->gf_min_pyr_height <= 1;
  if (can_disable_internal_arfs &&
      gf_stats.zero_motion_accumulator > MIN_ZERO_MOTION &&
      gf_stats.avg_sr_coded_error / num_mbs < MAX_SR_CODED_ERROR &&
      gf_stats.avg_raw_err_stdev < MAX_RAW_ERR_VAR) {
    cpi->internal_altref_allowed = 0;
  }

  // Decide whether this group should be anchored by an alt-ref frame.
  int use_alt_ref;
  if (can_disable_arf) {
    use_alt_ref =
        !is_almost_static(gf_stats.zero_motion_accumulator,
                          twopass->kf_zeromotion_pct, cpi->lap_enabled) &&
        rc->use_arf_in_this_kf_group && (i < gf_cfg->lag_in_frames) &&
        (i >= MIN_GF_INTERVAL);

    // TODO(urvang): Improve and use model for VBR, CQ etc as well.
    if (use_alt_ref && rc_cfg->mode == AOM_Q && rc_cfg->cq_level <= 200) {
      aom_clear_system_state();
      float features[21];
      get_features_from_gf_stats(
          &gf_stats, &first_frame_stats, &last_frame_stats, num_mbs,
          rc->constrained_gf_group, twopass->kf_zeromotion_pct, i, features);
      // Infer using ML model.
      float score;
      av1_nn_predict(features, &av1_use_flat_gop_nn_config, 1, &score);
      use_alt_ref = (score <= 0.0);
    }
  } else {
    use_alt_ref =
        rc->use_arf_in_this_kf_group && (i < gf_cfg->lag_in_frames) && (i > 2);
  }

#define REDUCE_GF_LENGTH_THRESH 4
#define REDUCE_GF_LENGTH_TO_KEY_THRESH 9
#define REDUCE_GF_LENGTH_BY 1
  int alt_offset = 0;
  // The length reduction strategy is tweaked for certain cases, and doesn't
  // work well for certain other cases.
  const int allow_gf_length_reduction =
      ((rc_cfg->mode == AOM_Q && rc_cfg->cq_level <= 128) ||
       !cpi->internal_altref_allowed) &&
      !is_lossless_requested(rc_cfg);

  if (allow_gf_length_reduction && use_alt_ref) {
    // adjust length of this gf group if one of the following condition met
    // 1: only one overlay frame left and this gf is too long
    // 2: next gf group is too short to have arf compared to the current gf

    // maximum length of next gf group
    const int next_gf_len = rc->frames_to_key - i;
    const int single_overlay_left =
        next_gf_len == 0 && i > REDUCE_GF_LENGTH_THRESH;
    // the next gf is probably going to have a ARF but it will be shorter than
    // this gf
    const int unbalanced_gf =
        i > REDUCE_GF_LENGTH_TO_KEY_THRESH &&
        next_gf_len + 1 < REDUCE_GF_LENGTH_TO_KEY_THRESH &&
        next_gf_len + 1 >= rc->min_gf_interval;

    if (single_overlay_left || unbalanced_gf) {
      const int roll_back = REDUCE_GF_LENGTH_BY;
      // Reduce length only if active_min_gf_interval will be respected later.
      if (i - roll_back >= active_min_gf_interval + 1) {
        alt_offset = -roll_back;
        i -= roll_back;
        if (is_final_pass) rc->intervals_till_gf_calculate_due = 0;
      }
    }
  }

  // Should we use the alternate reference frame.
  // ext_len excludes the leading key frame, if the group starts with one.
  int ext_len = i - is_intra_only;
  if (use_alt_ref) {
    gf_group->max_layer_depth_allowed = gf_cfg->gf_max_pyr_height;
    set_baseline_gf_interval(cpi, i, active_max_gf_interval, use_alt_ref,
                             is_final_pass);

    // Frames available beyond this group for boost calculation, capped at
    // the group length.
    const int forward_frames = (rc->frames_to_key - i >= ext_len)
                                   ? ext_len
                                   : AOMMAX(0, rc->frames_to_key - i);

    // Calculate the boost for alt ref.
    rc->gfu_boost = av1_calc_arf_boost(
        twopass, rc, frame_info, alt_offset, forward_frames, ext_len,
        cpi->lap_enabled ? &rc->num_stats_used_for_gfu_boost : NULL,
        cpi->lap_enabled ? &rc->num_stats_required_for_gfu_boost : NULL);
  } else {
    reset_fpf_position(twopass, start_pos);
    gf_group->max_layer_depth_allowed = 0;
    set_baseline_gf_interval(cpi, i, active_max_gf_interval, use_alt_ref,
                             is_final_pass);

    rc->gfu_boost = AOMMIN(
        MAX_GF_BOOST,
        av1_calc_arf_boost(
            twopass, rc, frame_info, alt_offset, ext_len, 0,
            cpi->lap_enabled ? &rc->num_stats_used_for_gfu_boost : NULL,
            cpi->lap_enabled ? &rc->num_stats_required_for_gfu_boost : NULL));
  }

#define LAST_ALR_BOOST_FACTOR 0.2f
  rc->arf_boost_factor = 1.0;
  if (use_alt_ref && !is_lossless_requested(rc_cfg)) {
    // Reduce the boost of altref in the last gf group
    if (rc->frames_to_key - ext_len == REDUCE_GF_LENGTH_BY ||
        rc->frames_to_key - ext_len == 0) {
      rc->arf_boost_factor = LAST_ALR_BOOST_FACTOR;
    }
  }

  rc->frames_till_gf_update_due = rc->baseline_gf_interval;

  // Reset the file position.
  reset_fpf_position(twopass, start_pos);

  if (cpi->lap_enabled) {
    // Since we don't have enough stats to know the actual error of the
    // gf group, we assume error of each frame to be equal to 1 and set
    // the error of the group as baseline_gf_interval.
    gf_stats.gf_group_err = rc->baseline_gf_interval;
  }
  // Calculate the bits to be allocated to the gf/arf group as a whole
  gf_group_bits = calculate_total_gf_group_bits(cpi, gf_stats.gf_group_err);
  rc->gf_group_bits = gf_group_bits;

#if GROUP_ADAPTIVE_MAXQ
  // Calculate an estimate of the maxq needed for the group.
  // We are more aggressive about correcting for sections
  // where there could be significant overshoot than for easier
  // sections where we do not wish to risk creating an overshoot
  // of the allocated bit budget.
  if ((rc_cfg->mode != AOM_Q) && (rc->baseline_gf_interval > 1) &&
      is_final_pass) {
    const int vbr_group_bits_per_frame =
        (int)(gf_group_bits / rc->baseline_gf_interval);
    const double group_av_err =
        gf_stats.gf_group_raw_error / rc->baseline_gf_interval;
    const double group_av_skip_pct =
        gf_stats.gf_group_skip_pct / rc->baseline_gf_interval;
    const double group_av_inactive_zone =
        ((gf_stats.gf_group_inactive_zone_rows * 2) /
         (rc->baseline_gf_interval * (double)cm->mi_params.mb_rows));

    int tmp_q;
    tmp_q = get_twopass_worst_quality(
        cpi, group_av_err, (group_av_skip_pct + group_av_inactive_zone),
        vbr_group_bits_per_frame);
    rc->active_worst_quality = AOMMAX(tmp_q, rc->active_worst_quality >> 1);
  }
#endif

  // Adjust KF group bits and error remaining.
  if (is_final_pass)
    twopass->kf_group_error_left -= (int64_t)gf_stats.gf_group_err;

  // Set up the structure of this Group-Of-Pictures (same as GF_GROUP)
  av1_gop_setup_structure(cpi);

  // Reset the file position.
  reset_fpf_position(twopass, start_pos);

  // Calculate a section intra ratio used in setting max loop filter.
  if (rc->frames_since_key != 0) {
    twopass->section_intra_rating = calculate_section_intra_ratio(
        start_pos, twopass->stats_buf_ctx->stats_in_end,
        rc->baseline_gf_interval);
  }

  av1_gop_bit_allocation(cpi, rc, gf_group, rc->frames_since_key == 0,
                         use_alt_ref, gf_group_bits);

  frame_params->frame_type =
      rc->frames_since_key == 0 ? KEY_FRAME : INTER_FRAME;
  frame_params->show_frame =
      !(gf_group->update_type[gf_group->index] == ARF_UPDATE ||
        gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE);

  // TODO(jingning): Generalize this condition.
  if (is_final_pass) {
    cpi->gf_state.arf_gf_boost_lst = use_alt_ref;

    // Reset rolling actual and target bits counters for ARF groups.
    twopass->rolling_arf_group_target_bits = 1;
    twopass->rolling_arf_group_actual_bits = 1;
  }
}
2593
// #define FIXED_ARF_BITS
#ifdef FIXED_ARF_BITS
#define ARF_BITS_FRACTION 0.75
#endif
// Distribute the bit budget of a GF group across its frames: first decide
// how many extra bits the boosted (ARF/GF) frame(s) receive, then hand the
// whole budget to allocate_gf_group_bits().
//   cpi:           top-level encoder context (read-only here).
//   rc:            rate control state (supplies gfu_boost / interval values).
//   gf_group:      group structure that receives the per-frame allocation.
//   is_key_frame:  non-zero if the group starts with a key frame.
//   use_arf:       non-zero if the group is anchored by an alt-ref frame.
//   gf_group_bits: total bit budget for the whole group.
void av1_gop_bit_allocation(const AV1_COMP *cpi, RATE_CONTROL *const rc,
                            GF_GROUP *gf_group, int is_key_frame, int use_arf,
                            int64_t gf_group_bits) {
  // Calculate the extra bits to be used for boosted frame(s)
#ifdef FIXED_ARF_BITS
  // Debug path: give the boosted frame a fixed fraction of the group budget.
  int gf_arf_bits = (int)(ARF_BITS_FRACTION * gf_group_bits);
#else
  int gf_arf_bits = calculate_boost_bits(
      rc->baseline_gf_interval - (rc->frames_since_key == 0), rc->gfu_boost,
      gf_group_bits);
#endif

  // Adjust the boost bits for the configured target level (see
  // adjust_boost_bits_for_target_level for the exact policy).
  gf_arf_bits = adjust_boost_bits_for_target_level(cpi, rc, gf_arf_bits,
                                                   gf_group_bits, 1);

  // Allocate bits to each of the frames in the GF group.
  allocate_gf_group_bits(gf_group, rc, gf_group_bits, gf_arf_bits, is_key_frame,
                         use_arf);
}
2617
David Turner0fa8c492019-02-06 16:38:13 +00002618// Minimum % intra coding observed in first pass (1.0 = 100%)
2619#define MIN_INTRA_LEVEL 0.25
2620// Minimum ratio between the % of intra coding and inter coding in the first
2621// pass after discounting neutral blocks (discounting neutral blocks in this
2622// way helps catch scene cuts in clips with very flat areas or letter box
2623// format clips with image padding.
2624#define INTRA_VS_INTER_THRESH 2.0
2625// Hard threshold where the first pass chooses intra for almost all blocks.
2626// In such a case even if the frame is not a scene cut coding a key frame
2627// may be a good option.
2628#define VERY_LOW_INTER_THRESH 0.05
2629// Maximum threshold for the relative ratio of intra error score vs best
2630// inter error score.
Cheng Chena5ac5012021-01-19 16:02:01 -08002631#define KF_II_ERR_THRESHOLD 1.9
David Turner0fa8c492019-02-06 16:38:13 +00002632// In real scene cuts there is almost always a sharp change in the intra
2633// or inter error score.
2634#define ERR_CHANGE_THRESHOLD 0.4
2635// For real scene cuts we expect an improvement in the intra inter error
2636// ratio in the next frame.
2637#define II_IMPROVEMENT_THRESHOLD 3.5
2638#define KF_II_MAX 128.0
Cheng Chena5ac5012021-01-19 16:02:01 -08002639// Intra / Inter threshold very low
2640#define VERY_LOW_II 1.5
2641// For clean slide transitions we expect a sharp single frame spike in error.
2642#define ERROR_SPIKE 5.0
2643
2644// Slide show transition detection.
2645// Tests for case where there is very low error either side of the current frame
2646// but much higher just for this frame. This can help detect key frames in
2647// slide shows even where the slides are pictures of different sizes.
2648// Also requires that intra and inter errors are very similar to help eliminate
2649// harmful false positives.
2650// It will not help if the transition is a fade or other multi-frame effect.
2651static int slide_transition(const FIRSTPASS_STATS *this_frame,
2652 const FIRSTPASS_STATS *last_frame,
2653 const FIRSTPASS_STATS *next_frame) {
2654 return (this_frame->intra_error < (this_frame->coded_error * VERY_LOW_II)) &&
2655 (this_frame->coded_error > (last_frame->coded_error * ERROR_SPIKE)) &&
2656 (this_frame->coded_error > (next_frame->coded_error * ERROR_SPIKE));
2657}
David Turner0fa8c492019-02-06 16:38:13 +00002658
// Threshold for use of the lagging second reference frame. High second ref
// usage may point to a transient event like a flash or occlusion rather than
// a real scene cut.
// The threshold ramps up linearly with the number of frames seen so far in
// this key-frame group, saturating once enough frames have been observed.
static double get_second_ref_usage_thresh(int frame_count_so_far) {
  enum { kAdaptUpto = 32 };
  const double kMinThresh = 0.085;
  const double kMaxDelta = 0.035;
  // Saturated region: use the maximum threshold.
  if (frame_count_so_far >= kAdaptUpto) return kMinThresh + kMaxDelta;
  // Ramp region: interpolate between min and min + delta.
  const double frac = (double)frame_count_so_far / (kAdaptUpto - 1);
  return kMinThresh + frac * kMaxDelta;
}
2675
// Tests whether the frame described by |this_frame| is a viable key frame
// candidate (i.e. a likely real scene cut rather than a transient event).
//
// The primary criteria check how poorly the frame is predicted from its past:
// low second-reference usage, plus either very low inter percentage, a slide
// transition, or a strong intra-vs-inter imbalance combined with a large
// error change. If those pass, up to |total_frames_to_test| future frames are
// scanned to confirm the candidate would predict the following frames well
// enough (boost_score) to justify placing a key frame here.
//
// Returns 1 if the frame is a viable key frame, 0 otherwise. The first-pass
// read position in |twopass| is restored before returning.
static int test_candidate_kf(TWO_PASS *twopass,
                             const FIRSTPASS_STATS *last_frame,
                             const FIRSTPASS_STATS *this_frame,
                             const FIRSTPASS_STATS *next_frame,
                             int frame_count_so_far, enum aom_rc_mode rc_mode,
                             int scenecut_mode) {
  int is_viable_kf = 0;
  double pcnt_intra = 1.0 - this_frame->pcnt_inter;
  double modified_pcnt_inter =
      this_frame->pcnt_inter - this_frame->pcnt_neutral;
  const double second_ref_usage_thresh =
      get_second_ref_usage_thresh(frame_count_so_far);
  int total_frames_to_test = SCENE_CUT_KEY_TEST_INTERVAL;
  int count_for_tolerable_prediction = 3;
  int num_future_frames = 0;
  FIRSTPASS_STATS curr_frame;

  // Scenecut mode 1: count how many future stats are actually available.
  // With fewer than 3 there is not enough evidence, so reject outright;
  // otherwise shorten the look-ahead test to match the limited stats.
  if (scenecut_mode == ENABLE_SCENECUT_MODE_1) {
    curr_frame = *this_frame;
    const FIRSTPASS_STATS *const start_position = twopass->stats_in;
    for (num_future_frames = 0; num_future_frames < SCENE_CUT_KEY_TEST_INTERVAL;
         num_future_frames++)
      if (EOF == input_stats(twopass, &curr_frame)) break;
    reset_fpf_position(twopass, start_position);
    if (num_future_frames < 3) {
      return 0;
    } else {
      total_frames_to_test = 3;
      count_for_tolerable_prediction = 1;
    }
  }

  // Does the frame satisfy the primary criteria of a key frame?
  // See above for an explanation of the test criteria.
  // If so, then examine how well it predicts subsequent frames.
  if (IMPLIES(rc_mode == AOM_Q, frame_count_so_far >= 3) &&
      (this_frame->pcnt_second_ref < second_ref_usage_thresh) &&
      (next_frame->pcnt_second_ref < second_ref_usage_thresh) &&
      ((this_frame->pcnt_inter < VERY_LOW_INTER_THRESH) ||
       slide_transition(this_frame, last_frame, next_frame) ||
       ((pcnt_intra > MIN_INTRA_LEVEL) &&
        (pcnt_intra > (INTRA_VS_INTER_THRESH * modified_pcnt_inter)) &&
        ((this_frame->intra_error /
          DOUBLE_DIVIDE_CHECK(this_frame->coded_error)) <
         KF_II_ERR_THRESHOLD) &&
        ((fabs(last_frame->coded_error - this_frame->coded_error) /
              DOUBLE_DIVIDE_CHECK(this_frame->coded_error) >
          ERR_CHANGE_THRESHOLD) ||
         (fabs(last_frame->intra_error - this_frame->intra_error) /
              DOUBLE_DIVIDE_CHECK(this_frame->intra_error) >
          ERR_CHANGE_THRESHOLD) ||
         ((next_frame->intra_error /
           DOUBLE_DIVIDE_CHECK(next_frame->coded_error)) >
          II_IMPROVEMENT_THRESHOLD))))) {
    int i;
    const FIRSTPASS_STATS *start_pos = twopass->stats_in;
    double boost_score = 0.0;
    double old_boost_score = 0.0;
    double decay_accumulator = 1.0;

    // Examine how well the key frame predicts subsequent frames.
    for (i = 0; i < total_frames_to_test; ++i) {
      // Get the next frame details
      FIRSTPASS_STATS local_next_frame;
      if (EOF == input_stats(twopass, &local_next_frame)) break;
      // Intra/coded error ratio: high values mean the frame is much easier
      // to predict than to intra-code, i.e. the candidate KF predicts well.
      double next_iiratio = (BOOST_FACTOR * local_next_frame.intra_error /
                             DOUBLE_DIVIDE_CHECK(local_next_frame.coded_error));

      if (next_iiratio > KF_II_MAX) next_iiratio = KF_II_MAX;

      // Cumulative effect of decay in prediction quality.
      if (local_next_frame.pcnt_inter > 0.85)
        decay_accumulator *= local_next_frame.pcnt_inter;
      else
        decay_accumulator *= (0.85 + local_next_frame.pcnt_inter) / 2.0;

      // Keep a running total.
      boost_score += (decay_accumulator * next_iiratio);

      // Test various breakout clauses.
      if ((local_next_frame.pcnt_inter < 0.05) || (next_iiratio < 1.5) ||
          (((local_next_frame.pcnt_inter - local_next_frame.pcnt_neutral) <
            0.20) &&
           (next_iiratio < 3.0)) ||
          ((boost_score - old_boost_score) < 3.0) ||
          (local_next_frame.intra_error < 200)) {
        break;
      }

      old_boost_score = boost_score;
    }

    // If there is tolerable prediction for at least the next 3 frames then
    // break out else discard this potential key frame and move on
    if (boost_score > 30.0 && (i > count_for_tolerable_prediction)) {
      is_viable_kf = 1;
    } else {
      is_viable_kf = 0;
    }

    // Reset the file position
    reset_fpf_position(twopass, start_pos);
  }
  return is_viable_kf;
}
2781
2782#define FRAMES_TO_CHECK_DECAY 8
2783#define KF_MIN_FRAME_BOOST 80.0
2784#define KF_MAX_FRAME_BOOST 128.0
Paul Wilkinsf305de32020-04-02 12:02:45 +01002785#define MIN_KF_BOOST 600 // Minimum boost for non-static KF interval
2786#define MAX_KF_BOOST 3200
David Turner0fa8c492019-02-06 16:38:13 +00002787#define MIN_STATIC_KF_BOOST 5400 // Minimum boost for static KF interval
2788
Aasaipriya9bc1dcb2020-03-13 17:46:07 +05302789static int detect_app_forced_key(AV1_COMP *cpi) {
Vishesh7e9873d2020-06-08 15:41:33 +05302790 if (cpi->oxcf.kf_cfg.fwd_kf_enabled) cpi->rc.next_is_fwd_key = 1;
Aasaipriya9bc1dcb2020-03-13 17:46:07 +05302791 int num_frames_to_app_forced_key = is_forced_keyframe_pending(
2792 cpi->lookahead, cpi->lookahead->max_sz, cpi->compressor_stage);
2793 if (num_frames_to_app_forced_key != -1) cpi->rc.next_is_fwd_key = 0;
2794 return num_frames_to_app_forced_key;
2795}
2796
Mufaddal Chakera0ec9bb62020-02-25 14:23:10 +05302797static int get_projected_kf_boost(AV1_COMP *cpi) {
2798 /*
2799 * If num_stats_used_for_kf_boost >= frames_to_key, then
2800 * all stats needed for prior boost calculation are available.
2801 * Hence projecting the prior boost is not needed in this cases.
2802 */
2803 if (cpi->rc.num_stats_used_for_kf_boost >= cpi->rc.frames_to_key)
2804 return cpi->rc.kf_boost;
2805
2806 // Get the current tpl factor (number of frames = frames_to_key).
2807 double tpl_factor = av1_get_kf_boost_projection_factor(cpi->rc.frames_to_key);
2808 // Get the tpl factor when number of frames = num_stats_used_for_kf_boost.
2809 double tpl_factor_num_stats =
2810 av1_get_kf_boost_projection_factor(cpi->rc.num_stats_used_for_kf_boost);
2811 int projected_kf_boost =
2812 (int)rint((tpl_factor * cpi->rc.kf_boost) / tpl_factor_num_stats);
2813 return projected_kf_boost;
2814}
2815
/*!\brief Determine the location of the next key frame
 *
 * \ingroup gf_group_algo
 * This function decides the placement of the next key frame when a
 * scenecut is detected or the maximum key frame distance is reached.
 *
 * \param[in]    cpi              Top-level encoder structure
 * \param[in]    this_frame       Pointer to first pass stats
 * \param[out]   kf_group_err     The total error in the KF group
 * \param[in]    num_frames_to_detect_scenecut Maximum lookahead frames.
 *
 * \return       Number of frames to the next key.
 */
static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
                              double *kf_group_err,
                              int num_frames_to_detect_scenecut) {
  TWO_PASS *const twopass = &cpi->twopass;
  RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const KeyFrameCfg *const kf_cfg = &oxcf->kf_cfg;
  double recent_loop_decay[FRAMES_TO_CHECK_DECAY];
  FIRSTPASS_STATS last_frame;
  double decay_accumulator = 1.0;
  int i = 0, j;
  int frames_to_key = 1;
  int frames_since_key = rc->frames_since_key + 1;
  FRAME_INFO *const frame_info = &cpi->frame_info;
  int num_stats_used_for_kf_boost = 1;
  int scenecut_detected = 0;

  // Distance to an application-forced key frame, or -1 if none pending.
  int num_frames_to_next_key = detect_app_forced_key(cpi);

  // Scenecut detection disabled: fall back to the forced-key distance if one
  // exists, otherwise keep the previously decided interval.
  if (num_frames_to_detect_scenecut == 0) {
    if (num_frames_to_next_key != -1)
      return num_frames_to_next_key;
    else
      return rc->frames_to_key;
  }

  // Never scan past an application-forced key frame.
  if (num_frames_to_next_key != -1)
    num_frames_to_detect_scenecut =
        AOMMIN(num_frames_to_detect_scenecut, num_frames_to_next_key);

  // Initialize the decay rates for the recent frames to check
  for (j = 0; j < FRAMES_TO_CHECK_DECAY; ++j) recent_loop_decay[j] = 1.0;

  i = 0;
  while (twopass->stats_in < twopass->stats_buf_ctx->stats_in_end &&
         frames_to_key < num_frames_to_detect_scenecut) {
    // Accumulate total number of stats available till next key frame
    num_stats_used_for_kf_boost++;

    // Accumulate kf group error.
    if (kf_group_err != NULL)
      *kf_group_err +=
          calculate_modified_err(frame_info, twopass, oxcf, this_frame);

    // Load the next frame's stats.
    last_frame = *this_frame;
    input_stats(twopass, this_frame);

    // Provided that we are not at the end of the file...
    if ((cpi->rc.enable_scenecut_detection > 0) && kf_cfg->auto_key &&
        twopass->stats_in < twopass->stats_buf_ctx->stats_in_end) {
      double loop_decay_rate;

      // Check for a scene cut.
      if (frames_since_key >= kf_cfg->key_freq_min &&
          test_candidate_kf(twopass, &last_frame, this_frame, twopass->stats_in,
                            frames_since_key, oxcf->rc_cfg.mode,
                            cpi->rc.enable_scenecut_detection)) {
        scenecut_detected = 1;
        break;
      }

      // How fast is the prediction quality decaying?
      loop_decay_rate =
          get_prediction_decay_rate(frame_info, twopass->stats_in);

      // We want to know something about the recent past... rather than
      // as used elsewhere where we are concerned with decay in prediction
      // quality since the last GF or KF.
      recent_loop_decay[i % FRAMES_TO_CHECK_DECAY] = loop_decay_rate;
      decay_accumulator = 1.0;
      for (j = 0; j < FRAMES_TO_CHECK_DECAY; ++j)
        decay_accumulator *= recent_loop_decay[j];

      // Special check for transition or high motion followed by a
      // static scene.
      if (frames_since_key >= kf_cfg->key_freq_min &&
          detect_transition_to_still(twopass, rc->min_gf_interval, i,
                                     kf_cfg->key_freq_max - i, loop_decay_rate,
                                     decay_accumulator)) {
        scenecut_detected = 1;
        // In the case of transition followed by a static scene, the key frame
        // could be a good predictor for the following frames, therefore we
        // do not use an arf.
        rc->use_arf_in_this_kf_group = 0;
        break;
      }

      // Step on to the next frame.
      ++frames_to_key;
      ++frames_since_key;

      // If we don't have a real key frame within the next two
      // key_freq_max intervals then break out of the loop.
      if (frames_to_key >= 2 * kf_cfg->key_freq_max) break;
    } else {
      ++frames_to_key;
      ++frames_since_key;
    }
    ++i;
  }

  // Record how many stats fed the boost computation (only when the caller
  // asked for the group error, i.e. the real two-pass path).
  if (kf_group_err != NULL)
    rc->num_stats_used_for_kf_boost = num_stats_used_for_kf_boost;

  // Under lookahead processing with no scene cut found, defer to the
  // app-forced key distance (may be -1; caller handles that case).
  if (cpi->lap_enabled && !scenecut_detected)
    frames_to_key = num_frames_to_next_key;

  // A forward keyframe only makes sense when fwd-kf is on, no scene cut was
  // found, and more stats remain beyond this group.
  if (!kf_cfg->fwd_kf_enabled || scenecut_detected ||
      twopass->stats_in >= twopass->stats_buf_ctx->stats_in_end)
    rc->next_is_fwd_key = 0;

  return frames_to_key;
}
Paul Wilkinsbe20bc22020-07-16 14:46:57 +01002943
Hamsalekha S3de77b42020-04-23 17:19:49 +05302944static double get_kf_group_avg_error(TWO_PASS *twopass,
2945 const FIRSTPASS_STATS *first_frame,
2946 const FIRSTPASS_STATS *start_position,
2947 int frames_to_key) {
2948 FIRSTPASS_STATS cur_frame = *first_frame;
2949 int num_frames, i;
2950 double kf_group_avg_error = 0.0;
Aasaipriyad1ef4602020-03-16 20:13:35 +05302951
Hamsalekha S3de77b42020-04-23 17:19:49 +05302952 reset_fpf_position(twopass, start_position);
2953
2954 for (i = 0; i < frames_to_key; ++i) {
2955 kf_group_avg_error += cur_frame.coded_error;
2956 if (EOF == input_stats(twopass, &cur_frame)) break;
2957 }
2958 num_frames = i + 1;
2959 num_frames = AOMMIN(num_frames, frames_to_key);
2960 kf_group_avg_error = kf_group_avg_error / num_frames;
2961
2962 return (kf_group_avg_error);
2963}
2964
2965static int64_t get_kf_group_bits(AV1_COMP *cpi, double kf_group_err,
2966 double kf_group_avg_error) {
Aasaipriya2cc3c812020-04-13 18:48:30 +05302967 RATE_CONTROL *const rc = &cpi->rc;
2968 TWO_PASS *const twopass = &cpi->twopass;
2969 int64_t kf_group_bits;
Hamsalekha S3de77b42020-04-23 17:19:49 +05302970 if (cpi->lap_enabled) {
Aasaipriya3d580a22020-04-28 18:09:06 +05302971 kf_group_bits = (int64_t)rc->frames_to_key * rc->avg_frame_bandwidth;
Vishesh073fc962020-07-01 17:39:16 +05302972 if (cpi->oxcf.rc_cfg.vbr_corpus_complexity_lap) {
Vishesh217bf912020-06-05 16:24:09 +05302973 const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
Hamsalekha S3de77b42020-04-23 17:19:49 +05302974 ? cpi->initial_mbs
2975 : cpi->common.mi_params.MBs;
2976
2977 double vbr_corpus_complexity_lap =
Vishesh073fc962020-07-01 17:39:16 +05302978 cpi->oxcf.rc_cfg.vbr_corpus_complexity_lap / 10.0;
Hamsalekha S3de77b42020-04-23 17:19:49 +05302979 /* Get the average corpus complexity of the frame */
2980 vbr_corpus_complexity_lap = vbr_corpus_complexity_lap * num_mbs;
2981 kf_group_bits = (int64_t)(
2982 kf_group_bits * (kf_group_avg_error / vbr_corpus_complexity_lap));
2983 }
2984 } else {
Aasaipriya2cc3c812020-04-13 18:48:30 +05302985 kf_group_bits = (int64_t)(twopass->bits_left *
2986 (kf_group_err / twopass->modified_error_left));
Hamsalekha S3de77b42020-04-23 17:19:49 +05302987 }
Aasaipriya2cc3c812020-04-13 18:48:30 +05302988
2989 return kf_group_bits;
2990}
2991
Aasaipriya240671e2020-04-16 16:50:21 +05302992static int calc_avg_stats(AV1_COMP *cpi, FIRSTPASS_STATS *avg_frame_stat) {
2993 RATE_CONTROL *const rc = &cpi->rc;
2994 TWO_PASS *const twopass = &cpi->twopass;
2995 FIRSTPASS_STATS cur_frame;
2996 av1_zero(cur_frame);
2997 int num_frames = 0;
2998 // Accumulate total stat using available number of stats.
2999 for (num_frames = 0; num_frames < (rc->frames_to_key - 1); ++num_frames) {
3000 if (EOF == input_stats(twopass, &cur_frame)) break;
3001 av1_accumulate_stats(avg_frame_stat, &cur_frame);
3002 }
3003
3004 if (num_frames < 2) {
3005 return num_frames;
3006 }
3007 // Average the total stat
3008 avg_frame_stat->weight = avg_frame_stat->weight / num_frames;
3009 avg_frame_stat->intra_error = avg_frame_stat->intra_error / num_frames;
3010 avg_frame_stat->frame_avg_wavelet_energy =
3011 avg_frame_stat->frame_avg_wavelet_energy / num_frames;
3012 avg_frame_stat->coded_error = avg_frame_stat->coded_error / num_frames;
3013 avg_frame_stat->sr_coded_error = avg_frame_stat->sr_coded_error / num_frames;
3014 avg_frame_stat->pcnt_inter = avg_frame_stat->pcnt_inter / num_frames;
3015 avg_frame_stat->pcnt_motion = avg_frame_stat->pcnt_motion / num_frames;
3016 avg_frame_stat->pcnt_second_ref =
3017 avg_frame_stat->pcnt_second_ref / num_frames;
3018 avg_frame_stat->pcnt_neutral = avg_frame_stat->pcnt_neutral / num_frames;
3019 avg_frame_stat->intra_skip_pct = avg_frame_stat->intra_skip_pct / num_frames;
3020 avg_frame_stat->inactive_zone_rows =
3021 avg_frame_stat->inactive_zone_rows / num_frames;
3022 avg_frame_stat->inactive_zone_cols =
3023 avg_frame_stat->inactive_zone_cols / num_frames;
3024 avg_frame_stat->MVr = avg_frame_stat->MVr / num_frames;
3025 avg_frame_stat->mvr_abs = avg_frame_stat->mvr_abs / num_frames;
3026 avg_frame_stat->MVc = avg_frame_stat->MVc / num_frames;
3027 avg_frame_stat->mvc_abs = avg_frame_stat->mvc_abs / num_frames;
3028 avg_frame_stat->MVrv = avg_frame_stat->MVrv / num_frames;
3029 avg_frame_stat->MVcv = avg_frame_stat->MVcv / num_frames;
3030 avg_frame_stat->mv_in_out_count =
3031 avg_frame_stat->mv_in_out_count / num_frames;
3032 avg_frame_stat->new_mv_count = avg_frame_stat->new_mv_count / num_frames;
3033 avg_frame_stat->count = avg_frame_stat->count / num_frames;
3034 avg_frame_stat->duration = avg_frame_stat->duration / num_frames;
3035
3036 return num_frames;
3037}
3038
// Computes the total boost score for the upcoming key frame by scanning the
// first-pass stats of the kf group. |zero_motion_accumulator| tracks the
// minimum zero-motion factor seen (how static the group is) and
// |sr_accumulator| accumulates second-reference error; both are in/out so a
// caller can chain multiple passes. When |use_avg_stat| is set, a single
// averaged stats record (from calc_avg_stats) is used for every iteration
// instead of reading per-frame stats from the stream.
static double get_kf_boost_score(AV1_COMP *cpi, double kf_raw_err,
                                 double *zero_motion_accumulator,
                                 double *sr_accumulator, int use_avg_stat) {
  RATE_CONTROL *const rc = &cpi->rc;
  TWO_PASS *const twopass = &cpi->twopass;
  FRAME_INFO *const frame_info = &cpi->frame_info;
  FIRSTPASS_STATS frame_stat;
  av1_zero(frame_stat);
  int i = 0, num_stat_used = 0;
  double boost_score = 0.0;
  // Under AOM_Q the per-frame boost cap scales with the group length (clamped
  // to [KF_MIN_FRAME_BOOST, KF_MAX_FRAME_BOOST]); otherwise a fixed cap.
  const double kf_max_boost =
      cpi->oxcf.rc_cfg.mode == AOM_Q
          ? AOMMIN(AOMMAX(rc->frames_to_key * 2.0, KF_MIN_FRAME_BOOST),
                   KF_MAX_FRAME_BOOST)
          : KF_MAX_FRAME_BOOST;

  // Calculate the average using available number of stats.
  if (use_avg_stat) num_stat_used = calc_avg_stats(cpi, &frame_stat);

  for (i = num_stat_used; i < (rc->frames_to_key - 1); ++i) {
    if (!use_avg_stat && EOF == input_stats(twopass, &frame_stat)) break;

    // Monitor for static sections.
    // For the first frame in kf group, the second ref indicator is invalid.
    if (i > 0) {
      *zero_motion_accumulator =
          AOMMIN(*zero_motion_accumulator,
                 get_zero_motion_factor(frame_info, &frame_stat));
    } else {
      *zero_motion_accumulator = frame_stat.pcnt_inter - frame_stat.pcnt_motion;
    }

    // Not all frames in the group are necessarily used in calculating boost.
    if ((*sr_accumulator < (kf_raw_err * 1.50)) &&
        (i <= rc->max_gf_interval * 2)) {
      double frame_boost;
      double zm_factor;

      // Factor 0.75-1.25 based on how much of frame is static.
      zm_factor = (0.75 + (*zero_motion_accumulator / 2.0));

      // The sr accumulator is only meaningful from the third frame on.
      if (i < 2) *sr_accumulator = 0.0;
      frame_boost = calc_kf_frame_boost(rc, frame_info, &frame_stat,
                                        sr_accumulator, kf_max_boost);
      boost_score += frame_boost * zm_factor;
    }
  }
  return boost_score;
}
3088
Mufaddal Chakerab14ef0e2020-07-07 14:23:43 +05303089/*!\brief Interval(in seconds) to clip key-frame distance to in LAP.
3090 */
3091#define MAX_KF_BITS_INTERVAL_SINGLE_PASS 5
3092
/*!\brief Determine the next key frame group
 *
 * \ingroup gf_group_algo
 * This function decides the placement of the next key frame, and
 * calculates the bit allocation of the KF group and the keyframe itself.
 *
 * \param[in]    cpi              Top-level encoder structure
 * \param[in]    this_frame       Pointer to first pass stats
 *
 * \return Nothing is returned.
 */
static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
  RATE_CONTROL *const rc = &cpi->rc;
  TWO_PASS *const twopass = &cpi->twopass;
  GF_GROUP *const gf_group = &cpi->gf_group;
  FRAME_INFO *const frame_info = &cpi->frame_info;
  AV1_COMMON *const cm = &cpi->common;
  CurrentFrame *const current_frame = &cm->current_frame;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const KeyFrameCfg *const kf_cfg = &oxcf->kf_cfg;
  const FIRSTPASS_STATS first_frame = *this_frame;
  FIRSTPASS_STATS next_frame;
  av1_zero(next_frame);

  rc->frames_since_key = 0;
  // Use arfs if possible.
  rc->use_arf_in_this_kf_group = is_altref_enabled(
      oxcf->gf_cfg.lag_in_frames, oxcf->gf_cfg.enable_auto_arf);

  // Reset the GF group data structures.
  av1_zero(*gf_group);

  // KF is always a GF so clear frames till next gf counter.
  rc->frames_till_gf_update_due = 0;

  rc->frames_to_key = 1;

  // No first-pass stats available (e.g. one-pass): place the key frame purely
  // from configuration and any app-forced keyframe, with a default boost.
  if (has_no_stats_stage(cpi)) {
    int num_frames_to_app_forced_key = detect_app_forced_key(cpi);
    rc->this_key_frame_forced =
        current_frame->frame_number != 0 && rc->frames_to_key == 0;
    if (num_frames_to_app_forced_key != -1)
      rc->frames_to_key = num_frames_to_app_forced_key;
    else
      rc->frames_to_key = AOMMAX(1, kf_cfg->key_freq_max);
    correct_frames_to_key(cpi);
    rc->kf_boost = DEFAULT_KF_BOOST;
    gf_group->update_type[0] = KF_UPDATE;
    return;
  }
  int i;
  const FIRSTPASS_STATS *const start_position = twopass->stats_in;
  int kf_bits = 0;
  double zero_motion_accumulator = 1.0;
  double boost_score = 0.0;
  double kf_raw_err = 0.0;
  double kf_mod_err = 0.0;
  double kf_group_err = 0.0;
  double sr_accumulator = 0.0;
  double kf_group_avg_error = 0.0;
  int frames_to_key, frames_to_key_clipped = INT_MAX;
  int64_t kf_group_bits_clipped = INT64_MAX;

  // Is this a forced key frame by interval.
  rc->this_key_frame_forced = rc->next_key_frame_forced;

  twopass->kf_group_bits = 0;        // Total bits available to kf group
  twopass->kf_group_error_left = 0;  // Group modified error score.

  kf_raw_err = this_frame->intra_error;
  kf_mod_err = calculate_modified_err(frame_info, twopass, oxcf, this_frame);

  // Scan ahead for a scene cut (or max interval); accumulates kf_group_err.
  frames_to_key =
      define_kf_interval(cpi, this_frame, &kf_group_err, kf_cfg->key_freq_max);

  if (frames_to_key != -1)
    rc->frames_to_key = AOMMIN(kf_cfg->key_freq_max, frames_to_key);
  else
    rc->frames_to_key = kf_cfg->key_freq_max;

  if (cpi->lap_enabled) correct_frames_to_key(cpi);

  // If there is a max kf interval set by the user we must obey it.
  // We already breakout of the loop above at 2x max.
  // This code centers the extra kf if the actual natural interval
  // is between 1x and 2x.
  if (kf_cfg->auto_key && rc->frames_to_key > kf_cfg->key_freq_max) {
    FIRSTPASS_STATS tmp_frame = first_frame;

    rc->frames_to_key /= 2;

    // Reset to the start of the group.
    reset_fpf_position(twopass, start_position);

    kf_group_err = 0.0;

    // Rescan to get the correct error data for the forced kf group.
    for (i = 0; i < rc->frames_to_key; ++i) {
      kf_group_err +=
          calculate_modified_err(frame_info, twopass, oxcf, &tmp_frame);
      if (EOF == input_stats(twopass, &tmp_frame)) break;
    }
    rc->next_key_frame_forced = 1;
  } else if ((twopass->stats_in == twopass->stats_buf_ctx->stats_in_end &&
              is_stat_consumption_stage_twopass(cpi)) ||
             rc->frames_to_key >= kf_cfg->key_freq_max) {
    rc->next_key_frame_forced = 1;
  } else {
    rc->next_key_frame_forced = 0;
  }

  if (kf_cfg->fwd_kf_enabled) rc->next_is_fwd_key |= rc->next_key_frame_forced;

  // Special case for the last key frame of the file.
  if (twopass->stats_in >= twopass->stats_buf_ctx->stats_in_end) {
    // Accumulate kf group error.
    kf_group_err +=
        calculate_modified_err(frame_info, twopass, oxcf, this_frame);
    rc->next_is_fwd_key = 0;
  }

  // Calculate the number of bits that should be assigned to the kf group.
  if ((twopass->bits_left > 0 && twopass->modified_error_left > 0.0) ||
      (cpi->lap_enabled && oxcf->rc_cfg.mode != AOM_Q)) {
    // Maximum number of bits for a single normal frame (not key frame).
    const int max_bits = frame_max_bits(rc, oxcf);

    // Maximum number of bits allocated to the key frame group.
    int64_t max_grp_bits;

    if (oxcf->rc_cfg.vbr_corpus_complexity_lap) {
      kf_group_avg_error = get_kf_group_avg_error(
          twopass, &first_frame, start_position, rc->frames_to_key);
    }

    // Default allocation based on bits left and relative
    // complexity of the section.
    twopass->kf_group_bits =
        get_kf_group_bits(cpi, kf_group_err, kf_group_avg_error);
    // Clip based on maximum per frame rate defined by the user.
    max_grp_bits = (int64_t)max_bits * (int64_t)rc->frames_to_key;
    if (twopass->kf_group_bits > max_grp_bits)
      twopass->kf_group_bits = max_grp_bits;
  } else {
    twopass->kf_group_bits = 0;
  }
  twopass->kf_group_bits = AOMMAX(0, twopass->kf_group_bits);

  if (cpi->lap_enabled) {
    // In the case of single pass based on LAP, frames to key may have an
    // inaccurate value, and hence should be clipped to an appropriate
    // interval.
    frames_to_key_clipped =
        (int)(MAX_KF_BITS_INTERVAL_SINGLE_PASS * cpi->framerate);

    // This variable calculates the bits allocated to kf_group with a clipped
    // frames_to_key.
    if (rc->frames_to_key > frames_to_key_clipped) {
      kf_group_bits_clipped =
          (int64_t)((double)twopass->kf_group_bits * frames_to_key_clipped /
                    rc->frames_to_key);
    }
  }

  // Reset the first pass file position.
  reset_fpf_position(twopass, start_position);

  // Scan through the kf group collating various stats used to determine
  // how many bits to spend on it.
  boost_score = get_kf_boost_score(cpi, kf_raw_err, &zero_motion_accumulator,
                                   &sr_accumulator, 0);
  reset_fpf_position(twopass, start_position);
  // Store the zero motion percentage
  twopass->kf_zeromotion_pct = (int)(zero_motion_accumulator * 100.0);

  // Calculate a section intra ratio used in setting max loop filter.
  twopass->section_intra_rating = calculate_section_intra_ratio(
      start_position, twopass->stats_buf_ctx->stats_in_end, rc->frames_to_key);

  rc->kf_boost = (int)boost_score;

  // Lookahead processing: either project the boost from the partial stats
  // (AOM_Q) or add a second boost pass computed from averaged stats.
  if (cpi->lap_enabled) {
    if (oxcf->rc_cfg.mode == AOM_Q) {
      rc->kf_boost = get_projected_kf_boost(cpi);
    } else {
      // TODO(any): Explore using average frame stats for AOM_Q as well.
      boost_score = get_kf_boost_score(
          cpi, kf_raw_err, &zero_motion_accumulator, &sr_accumulator, 1);
      reset_fpf_position(twopass, start_position);
      rc->kf_boost += (int)boost_score;
    }
  }

  // Special case for static / slide show content but don't apply
  // if the kf group is very short.
  if ((zero_motion_accumulator > STATIC_KF_GROUP_FLOAT_THRESH) &&
      (rc->frames_to_key > 8)) {
    rc->kf_boost = AOMMAX(rc->kf_boost, MIN_STATIC_KF_BOOST);
  } else {
    // Apply various clamps for min and max boost
    rc->kf_boost = AOMMAX(rc->kf_boost, (rc->frames_to_key * 3));
    rc->kf_boost = AOMMAX(rc->kf_boost, MIN_KF_BOOST);
#ifdef STRICT_RC
    rc->kf_boost = AOMMIN(rc->kf_boost, MAX_KF_BOOST);
#endif
  }

  // Work out how many bits to allocate for the key frame itself.
  // In case of LAP enabled for VBR, if the frames_to_key value is
  // very high, we calculate the bits based on a clipped value of
  // frames_to_key.
  kf_bits = calculate_boost_bits(
      AOMMIN(rc->frames_to_key, frames_to_key_clipped) - 1, rc->kf_boost,
      AOMMIN(twopass->kf_group_bits, kf_group_bits_clipped));
  // printf("kf boost = %d kf_bits = %d kf_zeromotion_pct = %d\n", rc->kf_boost,
  // kf_bits, twopass->kf_zeromotion_pct);
  kf_bits = adjust_boost_bits_for_target_level(cpi, rc, kf_bits,
                                               twopass->kf_group_bits, 0);

  twopass->kf_group_bits -= kf_bits;

  // Save the bits to spend on the key frame.
  gf_group->bit_allocation[0] = kf_bits;
  gf_group->update_type[0] = KF_UPDATE;

  // Note the total error score of the kf group minus the key frame itself.
  if (cpi->lap_enabled)
    // As we don't have enough stats to know the actual error of the group,
    // we assume the complexity of each frame to be equal to 1, and set the
    // error as the number of frames in the group(minus the keyframe).
    twopass->kf_group_error_left = (int)(rc->frames_to_key - 1);
  else
    twopass->kf_group_error_left = (int)(kf_group_err - kf_mod_err);

  // Adjust the count of total modified error left.
  // The count of bits left is adjusted elsewhere based on real coded frame
  // sizes.
  twopass->modified_error_left -= kf_group_err;
}
3332
3333static int is_skippable_frame(const AV1_COMP *cpi) {
Mufaddal Chakera3bcc72c2019-12-11 14:38:37 +05303334 if (has_no_stats_stage(cpi)) return 0;
David Turner0fa8c492019-02-06 16:38:13 +00003335 // If the current frame does not have non-zero motion vector detected in the
3336 // first pass, and so do its previous and forward frames, then this frame
3337 // can be skipped for partition check, and the partition size is assigned
3338 // according to the variance
3339 const TWO_PASS *const twopass = &cpi->twopass;
3340
3341 return (!frame_is_intra_only(&cpi->common) &&
Akshata Jadhava49be172019-12-18 00:03:53 +05303342 twopass->stats_in - 2 > twopass->stats_buf_ctx->stats_in_start &&
3343 twopass->stats_in < twopass->stats_buf_ctx->stats_in_end &&
David Turner0fa8c492019-02-06 16:38:13 +00003344 (twopass->stats_in - 1)->pcnt_inter -
3345 (twopass->stats_in - 1)->pcnt_motion ==
3346 1 &&
3347 (twopass->stats_in - 2)->pcnt_inter -
3348 (twopass->stats_in - 2)->pcnt_motion ==
3349 1 &&
3350 twopass->stats_in->pcnt_inter - twopass->stats_in->pcnt_motion == 1);
3351}
3352
3353#define ARF_STATS_OUTPUT 0
3354#if ARF_STATS_OUTPUT
3355unsigned int arf_count = 0;
3356#endif
David Turner0fa8c492019-02-06 16:38:13 +00003357
Aasaipriya2cc3c812020-04-13 18:48:30 +05303358static int get_section_target_bandwidth(AV1_COMP *cpi) {
3359 AV1_COMMON *const cm = &cpi->common;
3360 CurrentFrame *const current_frame = &cm->current_frame;
3361 RATE_CONTROL *const rc = &cpi->rc;
3362 TWO_PASS *const twopass = &cpi->twopass;
3363 int section_target_bandwidth;
3364 const int frames_left = (int)(twopass->stats_buf_ctx->total_stats->count -
3365 current_frame->frame_number);
3366 if (cpi->lap_enabled)
3367 section_target_bandwidth = (int)rc->avg_frame_bandwidth;
3368 else
3369 section_target_bandwidth = (int)(twopass->bits_left / frames_left);
3370 return section_target_bandwidth;
3371}
3372
// Reads the next first-pass stats packet into *this_frame and updates the
// derived two-pass state. On the very first frame of the clip it also
// estimates an initial active worst quality for the section; on every
// frame it refreshes the per-MB energy measures and the content type flag.
static void process_first_pass_stats(AV1_COMP *cpi,
                                     FIRSTPASS_STATS *this_frame) {
  AV1_COMMON *const cm = &cpi->common;
  CurrentFrame *const current_frame = &cm->current_frame;
  RATE_CONTROL *const rc = &cpi->rc;
  TWO_PASS *const twopass = &cpi->twopass;

  // One-time seeding of the rate-control quality state on the first frame.
  // Skipped in constant-quality (AOM_Q) mode.
  if (cpi->oxcf.rc_cfg.mode != AOM_Q && current_frame->frame_number == 0 &&
      cpi->gf_group.index == 0 && cpi->twopass.stats_buf_ctx->total_stats &&
      cpi->twopass.stats_buf_ctx->total_left_stats) {
    if (cpi->lap_enabled) {
      /*
       * Accumulate total_stats using available limited number of stats,
       * and assign it to total_left_stats.
       */
      *cpi->twopass.stats_buf_ctx->total_left_stats =
          *cpi->twopass.stats_buf_ctx->total_stats;
    }
    // Special case code for first frame.
    const int section_target_bandwidth = get_section_target_bandwidth(cpi);
    const double section_length =
        twopass->stats_buf_ctx->total_left_stats->count;
    // Per-frame averages of coded error, intra-skip and inactive zone for
    // the remaining section; inputs to the worst-quality estimate below.
    const double section_error =
        twopass->stats_buf_ctx->total_left_stats->coded_error / section_length;
    const double section_intra_skip =
        twopass->stats_buf_ctx->total_left_stats->intra_skip_pct /
        section_length;
    const double section_inactive_zone =
        (twopass->stats_buf_ctx->total_left_stats->inactive_zone_rows * 2) /
        ((double)cm->mi_params.mb_rows * section_length);
    const int tmp_q = get_twopass_worst_quality(
        cpi, section_error, section_intra_skip + section_inactive_zone,
        section_target_bandwidth);

    // Seed the quality trackers from the section estimate.
    rc->active_worst_quality = tmp_q;
    rc->ni_av_qi = tmp_q;
    rc->last_q[INTER_FRAME] = tmp_q;
    rc->avg_q = av1_convert_qindex_to_q(tmp_q, cm->seq_params.bit_depth);
    rc->avg_frame_qindex[INTER_FRAME] = tmp_q;
    rc->last_q[KEY_FRAME] = (tmp_q + cpi->oxcf.rc_cfg.best_allowed_q) / 2;
    rc->avg_frame_qindex[KEY_FRAME] = rc->last_q[KEY_FRAME];
  }

  // Pull the next stats packet; EOF means no more first-pass data.
  int err = 0;
  if (cpi->lap_enabled) {
    err = input_stats_lap(twopass, this_frame);
  } else {
    err = input_stats(twopass, this_frame);
  }
  if (err == EOF) return;

  {
    const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
                            ? cpi->initial_mbs
                            : cm->mi_params.MBs;
    // Per-MB average energy measures, taken in the log domain.
    // NOTE(review): a historical comment here said a multiplication by 256
    // reverses a (>> 8) scaling applied when combining MB error values; no
    // such factor appears in these expressions any more -- confirm against
    // the first-pass accumulation code.
    twopass->mb_av_energy = log((this_frame->intra_error / num_mbs) + 1.0);
    twopass->frame_avg_haar_energy =
        log((this_frame->frame_avg_wavelet_energy / num_mbs) + 1.0);
  }

  // Set the frame content type flag.
  if (this_frame->intra_skip_pct >= FC_ANIMATION_THRESH)
    twopass->fr_content_type = FC_GRAPHICS_ANIMATION;
  else
    twopass->fr_content_type = FC_NORMAL;
}
3441
Paul Wilkins46279802019-07-10 14:57:37 +01003442static void setup_target_rate(AV1_COMP *cpi) {
Sarah Parker97803fc2019-05-17 14:15:37 -07003443 RATE_CONTROL *const rc = &cpi->rc;
3444 GF_GROUP *const gf_group = &cpi->gf_group;
3445
3446 int target_rate = gf_group->bit_allocation[gf_group->index];
3447
Mufaddal Chakera3bcc72c2019-12-11 14:38:37 +05303448 if (has_no_stats_stage(cpi)) {
Marco Paniconifda31aa2019-07-16 22:24:38 +00003449 av1_rc_set_frame_target(cpi, target_rate, cpi->common.width,
3450 cpi->common.height);
3451 }
3452
Sarah Parker97803fc2019-05-17 14:15:37 -07003453 rc->base_frame_target = target_rate;
3454}
3455
/*!\brief Per-frame entry point for second-pass (and LAP) encode decisions.
 *
 * Consumes first-pass stats to determine the frame type, key-frame and
 * GF/ARF group boundaries, S-frame insertion, TPL-driven GOP length, and
 * the frame's target bit allocation. Results are written into
 * \p frame_params and the encoder's rate-control / GF group state.
 */
void av1_get_second_pass_params(AV1_COMP *cpi,
                                EncodeFrameParams *const frame_params,
                                const EncodeFrameInput *const frame_input,
                                unsigned int frame_flags) {
  RATE_CONTROL *const rc = &cpi->rc;
  TWO_PASS *const twopass = &cpi->twopass;
  GF_GROUP *const gf_group = &cpi->gf_group;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;

  // Remember the stats read position so it can be rewound for ARF frames.
  const FIRSTPASS_STATS *const start_pos = twopass->stats_in;

  if (is_stat_consumption_stage(cpi) && !twopass->stats_in) return;

  const int update_type = gf_group->update_type[gf_group->index];
  frame_params->frame_type = gf_group->frame_type[gf_group->index];

  // Mid-group, non-key frame: the group plan already covers this frame.
  if (gf_group->index < gf_group->size && !(frame_flags & FRAMEFLAGS_KEY)) {
    assert(gf_group->index < gf_group->size);

    setup_target_rate(cpi);

    // If this is an arf frame then we dont want to read the stats file or
    // advance the input pointer as we already have what we need.
    if (update_type == ARF_UPDATE || update_type == INTNL_ARF_UPDATE) {
      // Do the firstpass stats indicate that this frame is skippable for the
      // partition search?
      if (cpi->sf.part_sf.allow_partition_search_skip && oxcf->pass == 2) {
        cpi->partition_search_skippable_frame = is_skippable_frame(cpi);
      }
      return;
    }
  }

  aom_clear_system_state();

  if (oxcf->rc_cfg.mode == AOM_Q)
    rc->active_worst_quality = oxcf->rc_cfg.cq_level;
  FIRSTPASS_STATS this_frame;
  av1_zero(this_frame);
  // Read and process the next first-pass stats packet where applicable.
  if (is_stat_consumption_stage(cpi)) {
    if (gf_group->index < gf_group->size || rc->frames_to_key == 0)
      process_first_pass_stats(cpi, &this_frame);
  } else {
    rc->active_worst_quality = oxcf->rc_cfg.cq_level;
  }

  // Keyframe and section processing.
  FIRSTPASS_STATS this_frame_copy;
  this_frame_copy = this_frame;
  int is_overlay_forward_kf =
      rc->frames_to_key == 0 &&
      gf_group->update_type[gf_group->index] == OVERLAY_UPDATE;
  if (rc->frames_to_key <= 0 && !is_overlay_forward_kf) {
    assert(rc->frames_to_key >= -1);
    // Define next KF group and assign bits to it.
    int kf_offset = rc->frames_to_key;
    if (rc->frames_to_key < 0) {
      // frames_to_key == -1: step back to the previous stats entry rather
      // than forcing a key frame here.
      this_frame = *(twopass->stats_in - 1);
    } else {
      frame_params->frame_type = KEY_FRAME;
    }
    find_next_key_frame(cpi, &this_frame);
    // Re-apply the offset consumed above so the counters stay consistent.
    rc->frames_since_key -= kf_offset;
    rc->frames_to_key += kf_offset;
    this_frame = this_frame_copy;
  } else {
    // Not a key frame: decide whether an S-frame should be inserted.
    const int altref_enabled = is_altref_enabled(oxcf->gf_cfg.lag_in_frames,
                                                 oxcf->gf_cfg.enable_auto_arf);
    const int sframe_dist = oxcf->kf_cfg.sframe_dist;
    const int sframe_mode = oxcf->kf_cfg.sframe_mode;
    CurrentFrame *const current_frame = &cpi->common.current_frame;
    if (sframe_dist != 0) {
      if (altref_enabled) {
        if (sframe_mode == 1) {
          // sframe_mode == 1: insert sframe if it matches altref frame.
          if (current_frame->frame_number % sframe_dist == 0 &&
              current_frame->frame_number != 0 && update_type == ARF_UPDATE) {
            frame_params->frame_type = S_FRAME;
          }
        } else {
          // sframe_mode != 1: if sframe will be inserted at the next available
          // altref frame
          if (current_frame->frame_number % sframe_dist == 0 &&
              current_frame->frame_number != 0) {
            rc->sframe_due = 1;
          }
          if (rc->sframe_due && update_type == ARF_UPDATE) {
            frame_params->frame_type = S_FRAME;
            rc->sframe_due = 0;
          }
        }
      } else {
        if (current_frame->frame_number % sframe_dist == 0 &&
            current_frame->frame_number != 0) {
          frame_params->frame_type = S_FRAME;
        }
      }
    }
  }

  // Define a new GF/ARF group. (Should always enter here for key frames).
  if (gf_group->index == gf_group->size) {
    assert(cpi->common.current_frame.frame_number == 0 ||
           gf_group->index == gf_group->size);
    const FIRSTPASS_STATS *const start_position = twopass->stats_in;

    // For LAP, run scenecut detection over the look-ahead window and
    // possibly pull the next key frame closer.
    if (cpi->lap_enabled && cpi->rc.enable_scenecut_detection) {
      int num_frames_to_detect_scenecut, frames_to_key;
      num_frames_to_detect_scenecut = MAX_GF_LENGTH_LAP + 1;
      frames_to_key = define_kf_interval(cpi, &this_frame, NULL,
                                         num_frames_to_detect_scenecut);
      if (frames_to_key != -1)
        rc->frames_to_key = AOMMIN(rc->frames_to_key, frames_to_key);
    }

    reset_fpf_position(twopass, start_position);

    // Cap the GOP length by the look-ahead depth (minus temporal-filter
    // frames) in two-pass mode, else by the LAP maximum.
    int max_gop_length =
        (oxcf->gf_cfg.lag_in_frames >= 32 &&
         is_stat_consumption_stage_twopass(cpi))
            ? AOMMIN(MAX_GF_INTERVAL, oxcf->gf_cfg.lag_in_frames -
                                          oxcf->algo_cfg.arnr_max_frames / 2)
            : MAX_GF_LENGTH_LAP;

    // Identify regions if needed.
    if (rc->frames_since_key == 0 || rc->frames_since_key == 1 ||
        (rc->frames_till_regions_update - rc->frames_since_key <
             rc->frames_to_key &&
         rc->frames_till_regions_update - rc->frames_since_key <
             max_gop_length + 1)) {
      int is_first_stat =
          twopass->stats_in == twopass->stats_buf_ctx->stats_in_start;
      const FIRSTPASS_STATS *stats_start = twopass->stats_in + is_first_stat;
      // offset of stats_start from the current frame
      int offset = is_first_stat || (rc->frames_since_key == 0);
      // offset of the region indices from the previous key frame
      rc->regions_offset = rc->frames_since_key;
      // how many frames we can analyze from this frame
      int rest_frames = AOMMIN(rc->frames_to_key + rc->next_is_fwd_key,
                               MAX_FIRSTPASS_ANALYSIS_FRAMES);
      rest_frames =
          AOMMIN(rest_frames,
                 (int)(twopass->stats_buf_ctx->stats_in_end - stats_start + 1) +
                     offset);

      rc->frames_till_regions_update = rest_frames;

      identify_regions(stats_start, rest_frames - offset, offset, rc->regions,
                       &rc->num_regions, rc->cor_coeff);
    }

    int cur_region_idx =
        find_regions_index(rc->regions, rc->num_regions,
                           rc->frames_since_key - rc->regions_offset);
    if ((cur_region_idx >= 0 &&
         rc->regions[cur_region_idx].type == SCENECUT_REGION) ||
        rc->frames_since_key == 0) {
      // If we start from a scenecut, then the last GOP's arf boost is not
      // needed for this GOP.
      cpi->gf_state.arf_gf_boost_lst = 0;
    }

    // TODO(jingning): Resolve the redundant calls here.
    if (rc->intervals_till_gf_calculate_due == 0 || 1) {
      calculate_gf_length(cpi, max_gop_length, MAX_NUM_GF_INTERVALS);
    }

    // Optionally let the TPL model decide between a long and a short GOP.
    if (max_gop_length > 16 && oxcf->algo_cfg.enable_tpl_model &&
        !cpi->sf.tpl_sf.disable_gop_length_decision) {
      int this_idx = rc->frames_since_key + rc->gf_intervals[rc->cur_gf_index] -
                     rc->regions_offset - 1;
      int this_region =
          find_regions_index(rc->regions, rc->num_regions, this_idx);
      int next_region =
          find_regions_index(rc->regions, rc->num_regions, this_idx + 1);
      int is_last_scenecut =
          (rc->gf_intervals[rc->cur_gf_index] >= rc->frames_to_key ||
           rc->regions[this_region].type == SCENECUT_REGION ||
           rc->regions[next_region].type == SCENECUT_REGION);
      int ori_gf_int = rc->gf_intervals[rc->cur_gf_index];

      if (rc->gf_intervals[rc->cur_gf_index] > 16) {
        // The calculate_gf_length function is previously used with
        // max_gop_length = 32 with look-ahead gf intervals.
        define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 0);
        this_frame = this_frame_copy;
        int is_temporal_filter_enabled =
            (rc->frames_since_key > 0 && gf_group->arf_index > -1);
        if (is_temporal_filter_enabled) {
          // Temporally filter the ARF before TPL so its stats reflect the
          // filtered frame.
          int arf_src_index = gf_group->arf_src_offset[gf_group->arf_index];
          FRAME_UPDATE_TYPE arf_update_type =
              gf_group->update_type[gf_group->arf_index];
          int is_forward_keyframe = 0;
          av1_temporal_filter(cpi, arf_src_index, arf_update_type,
                              is_forward_keyframe, NULL);
          aom_extend_frame_borders(&cpi->alt_ref_buffer,
                                   av1_num_planes(&cpi->common));
        }
        if (!av1_tpl_setup_stats(cpi, 1, frame_params, frame_input)) {
          // Tpl decides that a shorter gf interval is better.
          // TODO(jingning): Remove redundant computations here.
          max_gop_length = 16;
          calculate_gf_length(cpi, max_gop_length, 1);
          if (is_last_scenecut &&
              (ori_gf_int - rc->gf_intervals[rc->cur_gf_index] < 4)) {
            rc->gf_intervals[rc->cur_gf_index] = ori_gf_int;
          }
        } else {
          // Tpl stats is reused only when the ARF frame is temporally filtered
          if (is_temporal_filter_enabled)
            cpi->tpl_data.skip_tpl_setup_stats = 1;
        }
      }
    }
    define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 0);

    if (gf_group->update_type[gf_group->index] != ARF_UPDATE &&
        rc->frames_since_key > 0)
      process_first_pass_stats(cpi, &this_frame);

    // Final pass over the group definition with the settled GOP length.
    define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 1);

    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    assert(gf_group->index == 0);
#if ARF_STATS_OUTPUT
    {
      FILE *fpfile;
      fpfile = fopen("arf.stt", "a");
      ++arf_count;
      fprintf(fpfile, "%10d %10d %10d %10d %10d\n",
              cpi->common.current_frame.frame_number,
              rc->frames_till_gf_update_due, rc->kf_boost, arf_count,
              rc->gfu_boost);

      fclose(fpfile);
    }
#endif
  }
  assert(gf_group->index < gf_group->size);

  // ARF frames re-read these stats later, so rewind; otherwise account for
  // the consumed frame in the remaining totals.
  if (gf_group->update_type[gf_group->index] == ARF_UPDATE ||
      gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE) {
    reset_fpf_position(twopass, start_pos);
  } else {
    // Update the total stats remaining structure.
    if (twopass->stats_buf_ctx->total_left_stats)
      subtract_stats(twopass->stats_buf_ctx->total_left_stats,
                     &this_frame_copy);
  }

  frame_params->frame_type = gf_group->frame_type[gf_group->index];

  // Do the firstpass stats indicate that this frame is skippable for the
  // partition search?
  if (cpi->sf.part_sf.allow_partition_search_skip && oxcf->pass == 2) {
    cpi->partition_search_skippable_frame = is_skippable_frame(cpi);
  }

  setup_target_rate(cpi);
}
3717
David Turner0fa8c492019-02-06 16:38:13 +00003718void av1_init_second_pass(AV1_COMP *cpi) {
3719 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
3720 TWO_PASS *const twopass = &cpi->twopass;
Jingning Han17af7742019-09-17 16:58:03 -07003721 FRAME_INFO *const frame_info = &cpi->frame_info;
David Turner0fa8c492019-02-06 16:38:13 +00003722 double frame_rate;
3723 FIRSTPASS_STATS *stats;
3724
Akshata Jadhava49be172019-12-18 00:03:53 +05303725 if (!twopass->stats_buf_ctx->stats_in_end) return;
David Turner0fa8c492019-02-06 16:38:13 +00003726
Aasaipriyaeb417c12020-04-07 12:08:24 +05303727 stats = twopass->stats_buf_ctx->total_stats;
David Turner0fa8c492019-02-06 16:38:13 +00003728
Akshata Jadhava49be172019-12-18 00:03:53 +05303729 *stats = *twopass->stats_buf_ctx->stats_in_end;
Aasaipriyaeb417c12020-04-07 12:08:24 +05303730 *twopass->stats_buf_ctx->total_left_stats = *stats;
David Turner0fa8c492019-02-06 16:38:13 +00003731
3732 frame_rate = 10000000.0 * stats->count / stats->duration;
3733 // Each frame can have a different duration, as the frame rate in the source
3734 // isn't guaranteed to be constant. The frame rate prior to the first frame
3735 // encoded in the second pass is a guess. However, the sum duration is not.
3736 // It is calculated based on the actual durations of all frames from the
3737 // first pass.
3738 av1_new_framerate(cpi, frame_rate);
3739 twopass->bits_left =
Vishesh073fc962020-07-01 17:39:16 +05303740 (int64_t)(stats->duration * oxcf->rc_cfg.target_bandwidth / 10000000.0);
David Turner0fa8c492019-02-06 16:38:13 +00003741
3742 // This variable monitors how far behind the second ref update is lagging.
3743 twopass->sr_update_lag = 1;
3744
3745 // Scan the first pass file and calculate a modified total error based upon
3746 // the bias/power function used to allocate bits.
3747 {
3748 const double avg_error =
3749 stats->coded_error / DOUBLE_DIVIDE_CHECK(stats->count);
3750 const FIRSTPASS_STATS *s = twopass->stats_in;
3751 double modified_error_total = 0.0;
3752 twopass->modified_error_min =
Debargha Mukherjeec6a81202020-07-22 16:35:20 -07003753 (avg_error * oxcf->rc_cfg.vbrmin_section) / 100;
David Turner0fa8c492019-02-06 16:38:13 +00003754 twopass->modified_error_max =
Debargha Mukherjeec6a81202020-07-22 16:35:20 -07003755 (avg_error * oxcf->rc_cfg.vbrmax_section) / 100;
Akshata Jadhava49be172019-12-18 00:03:53 +05303756 while (s < twopass->stats_buf_ctx->stats_in_end) {
Jingning Han17af7742019-09-17 16:58:03 -07003757 modified_error_total +=
3758 calculate_modified_err(frame_info, twopass, oxcf, s);
David Turner0fa8c492019-02-06 16:38:13 +00003759 ++s;
3760 }
3761 twopass->modified_error_left = modified_error_total;
3762 }
3763
3764 // Reset the vbr bits off target counters
3765 cpi->rc.vbr_bits_off_target = 0;
3766 cpi->rc.vbr_bits_off_target_fast = 0;
3767
3768 cpi->rc.rate_error_estimate = 0;
3769
3770 // Static sequence monitor variables.
3771 twopass->kf_zeromotion_pct = 100;
3772 twopass->last_kfgroup_zeromotion_pct = 100;
Paul Wilkins88fdf642019-07-31 12:29:48 +01003773
3774 // Initialize bits per macro_block estimate correction factor.
3775 twopass->bpm_factor = 1.0;
3776 // Initialize actual and target bits counters for ARF groups so that
3777 // at the start we have a neutral bpm adjustment.
3778 twopass->rolling_arf_group_target_bits = 1;
3779 twopass->rolling_arf_group_actual_bits = 1;
David Turner0fa8c492019-02-06 16:38:13 +00003780}
3781
Mufaddal Chakera74c9cbe2020-01-17 16:44:59 +05303782void av1_init_single_pass_lap(AV1_COMP *cpi) {
3783 TWO_PASS *const twopass = &cpi->twopass;
3784
Mufaddal Chakera74c9cbe2020-01-17 16:44:59 +05303785 if (!twopass->stats_buf_ctx->stats_in_end) return;
3786
3787 // This variable monitors how far behind the second ref update is lagging.
3788 twopass->sr_update_lag = 1;
3789
3790 twopass->bits_left = 0;
3791 twopass->modified_error_min = 0.0;
3792 twopass->modified_error_max = 0.0;
3793 twopass->modified_error_left = 0.0;
3794
3795 // Reset the vbr bits off target counters
3796 cpi->rc.vbr_bits_off_target = 0;
3797 cpi->rc.vbr_bits_off_target_fast = 0;
3798
3799 cpi->rc.rate_error_estimate = 0;
3800
3801 // Static sequence monitor variables.
3802 twopass->kf_zeromotion_pct = 100;
3803 twopass->last_kfgroup_zeromotion_pct = 100;
3804
3805 // Initialize bits per macro_block estimate correction factor.
3806 twopass->bpm_factor = 1.0;
3807 // Initialize actual and target bits counters for ARF groups so that
3808 // at the start we have a neutral bpm adjustment.
3809 twopass->rolling_arf_group_target_bits = 1;
3810 twopass->rolling_arf_group_actual_bits = 1;
3811}
3812
David Turner0fa8c492019-02-06 16:38:13 +00003813#define MINQ_ADJ_LIMIT 48
3814#define MINQ_ADJ_LIMIT_CQ 20
3815#define HIGH_UNDERSHOOT_RATIO 2
// Post-encode bookkeeping for two-pass rate control: accounts the coded
// frame size against its target, updates the VBR drift estimate and the
// active-best-quality pyramid, and adapts the extend_minq/extend_maxq
// corrections when the rate control is drifting off target.
void av1_twopass_postencode_update(AV1_COMP *cpi) {
  TWO_PASS *const twopass = &cpi->twopass;
  RATE_CONTROL *const rc = &cpi->rc;
  const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;

  // VBR correction is done through rc->vbr_bits_off_target. Based on the
  // sign of this value, a limited % adjustment is made to the target rate
  // of subsequent frames, to try and push it back towards 0. This method
  // is designed to prevent extreme behaviour at the end of a clip
  // or group of frames.
  rc->vbr_bits_off_target += rc->base_frame_target - rc->projected_frame_size;
  twopass->bits_left = AOMMAX(twopass->bits_left - rc->base_frame_target, 0);

  // Target vs actual bits for this arf group.
  twopass->rolling_arf_group_target_bits += rc->base_frame_target;
  twopass->rolling_arf_group_actual_bits += rc->projected_frame_size;

  // Calculate the pct rc error, clamped to [-100, 100].
  if (rc->total_actual_bits) {
    rc->rate_error_estimate =
        (int)((rc->vbr_bits_off_target * 100) / rc->total_actual_bits);
    rc->rate_error_estimate = clamp(rc->rate_error_estimate, -100, 100);
  } else {
    rc->rate_error_estimate = 0;
  }

  // Update the active best quality pyramid: record this frame's base q at
  // its pyramid level and every deeper level.
  if (!rc->is_src_frame_alt_ref) {
    const int pyramid_level = cpi->gf_group.layer_depth[cpi->gf_group.index];
    int i;
    for (i = pyramid_level; i <= MAX_ARF_LAYERS; ++i) {
      rc->active_best_quality[i] = cpi->common.quant_params.base_qindex;
#if CONFIG_TUNE_VMAF
      // For VMAF tuning modes, prefer the pre-adjustment qindex if set.
      if (cpi->vmaf_info.original_qindex != -1 &&
          (cpi->oxcf.tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
           cpi->oxcf.tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
        rc->active_best_quality[i] = cpi->vmaf_info.original_qindex;
      }
#endif
    }
  }

#if 0
  // Debug logging of per-frame rate control state (disabled).
  {
    AV1_COMMON *cm = &cpi->common;
    FILE *fpfile;
    fpfile = fopen("details.stt", "a");
    fprintf(fpfile,
            "%10d %10d %10d %10" PRId64 " %10" PRId64
            " %10d %10d %10d %10.4lf %10.4lf %10.4lf %10.4lf\n",
            cm->current_frame.frame_number, rc->base_frame_target,
            rc->projected_frame_size, rc->total_actual_bits,
            rc->vbr_bits_off_target, rc->rate_error_estimate,
            twopass->rolling_arf_group_target_bits,
            twopass->rolling_arf_group_actual_bits,
            (double)twopass->rolling_arf_group_actual_bits /
                (double)twopass->rolling_arf_group_target_bits,
            twopass->bpm_factor,
            av1_convert_qindex_to_q(cpi->common.quant_params.base_qindex,
                                    cm->seq_params.bit_depth),
            av1_convert_qindex_to_q(rc->active_worst_quality,
                                    cm->seq_params.bit_depth));
    fclose(fpfile);
  }
#endif

  // Non-key frames consume from the key frame group's bit budget.
  if (cpi->common.current_frame.frame_type != KEY_FRAME) {
    twopass->kf_group_bits -= rc->base_frame_target;
    twopass->last_kfgroup_zeromotion_pct = twopass->kf_zeromotion_pct;
  }
  twopass->kf_group_bits = AOMMAX(twopass->kf_group_bits, 0);

  // If the rate control is drifting consider adjustment to min or maxq.
  if ((rc_cfg->mode != AOM_Q) && !cpi->rc.is_src_frame_alt_ref) {
    const int maxq_adj_limit = rc->worst_quality - rc->active_worst_quality;
    const int minq_adj_limit =
        (rc_cfg->mode == AOM_CQ ? MINQ_ADJ_LIMIT_CQ : MINQ_ADJ_LIMIT);

    // Undershoot.
    if (rc->rate_error_estimate > rc_cfg->under_shoot_pct) {
      --twopass->extend_maxq;
      if (rc->rolling_target_bits >= rc->rolling_actual_bits)
        ++twopass->extend_minq;
      // Overshoot.
    } else if (rc->rate_error_estimate < -rc_cfg->over_shoot_pct) {
      --twopass->extend_minq;
      if (rc->rolling_target_bits < rc->rolling_actual_bits)
        ++twopass->extend_maxq;
    } else {
      // Adjustment for extreme local overshoot.
      if (rc->projected_frame_size > (2 * rc->base_frame_target) &&
          rc->projected_frame_size > (2 * rc->avg_frame_bandwidth))
        ++twopass->extend_maxq;

      // Unwind undershoot or overshoot adjustment.
      if (rc->rolling_target_bits < rc->rolling_actual_bits)
        --twopass->extend_minq;
      else if (rc->rolling_target_bits > rc->rolling_actual_bits)
        --twopass->extend_maxq;
    }

    twopass->extend_minq = clamp(twopass->extend_minq, 0, minq_adj_limit);
    twopass->extend_maxq = clamp(twopass->extend_maxq, 0, maxq_adj_limit);

    // If there is a big and unexpected undershoot then feed the extra
    // bits back in quickly. One situation where this may happen is if a
    // frame is unexpectedly almost perfectly predicted by the ARF or GF
    // but not very well predicted by the previous frame.
    if (!frame_is_kf_gf_arf(cpi) && !cpi->rc.is_src_frame_alt_ref) {
      int fast_extra_thresh = rc->base_frame_target / HIGH_UNDERSHOOT_RATIO;
      if (rc->projected_frame_size < fast_extra_thresh) {
        rc->vbr_bits_off_target_fast +=
            fast_extra_thresh - rc->projected_frame_size;
        rc->vbr_bits_off_target_fast =
            AOMMIN(rc->vbr_bits_off_target_fast, (4 * rc->avg_frame_bandwidth));

        // Fast adaptation of minQ if necessary to use up the extra bits.
        if (rc->avg_frame_bandwidth) {
          twopass->extend_minq_fast =
              (int)(rc->vbr_bits_off_target_fast * 8 / rc->avg_frame_bandwidth);
        }
        twopass->extend_minq_fast = AOMMIN(
            twopass->extend_minq_fast, minq_adj_limit - twopass->extend_minq);
      } else if (rc->vbr_bits_off_target_fast) {
        twopass->extend_minq_fast = AOMMIN(
            twopass->extend_minq_fast, minq_adj_limit - twopass->extend_minq);
      } else {
        twopass->extend_minq_fast = 0;
      }
    }
  }
}