/* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/codecs/vp8/screenshare_layers.h"

#include <stdlib.h>

#include <algorithm>
#include <memory>

#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {

static const int kOneSecond90Khz = 90000;
static const int kMinTimeBetweenSyncs = kOneSecond90Khz * 2;
static const int kMaxTimeBetweenSyncs = kOneSecond90Khz * 4;
static const int kQpDeltaThresholdForSync = 8;
static const int kMinBitrateKbpsForQpBoost = 500;

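// Bounds used when computing the codec-level target bitrate (see
// GetCodecTargetBitrateKbps()): the TL0 frame rate may be reduced to at most
// framerate / kMaxTL0FpsReduction, and the codec target is capped so that
// kAcceptableTargetOvershoot times the target does not exceed the TL1 rate.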
const double ScreenshareLayers::kMaxTL0FpsReduction = 2.5;
const double ScreenshareLayers::kAcceptableTargetOvershoot = 2.0;

constexpr int ScreenshareLayers::kMaxNumTemporalLayers;

// Always emit a frame at least this often, even if bitrate targets have
// been exceeded. This prevents needless keyframe requests.
const int ScreenshareLayers::kMaxFrameIntervalMs = 2750;

ScreenshareLayers::ScreenshareLayers(int num_temporal_layers, Clock* clock)
    : clock_(clock),
      number_of_temporal_layers_(
          std::min(kMaxNumTemporalLayers, num_temporal_layers)),
      active_layer_(-1),
      last_timestamp_(-1),
      last_sync_timestamp_(-1),
      last_emitted_tl0_timestamp_(-1),
      last_frame_time_ms_(-1),
      min_qp_(-1),
      max_qp_(-1),
      max_debt_bytes_(0),
      encode_framerate_(1000.0f, 1000.0f),  // 1 second window, second scale.
      bitrate_updated_(false) {
  RTC_CHECK_GT(number_of_temporal_layers_, 0);
  RTC_CHECK_LE(number_of_temporal_layers_, kMaxNumTemporalLayers);
}

ScreenshareLayers::~ScreenshareLayers() {
  UpdateHistograms();
}

TemporalLayers::FrameConfig ScreenshareLayers::UpdateLayerConfig(
    uint32_t timestamp) {
  if (number_of_temporal_layers_ <= 1) {
    // No flags needed for 1 layer screenshare.
    // TODO(pbos): Consider updating only last, and not all buffers.
    TemporalLayers::FrameConfig tl_config(
        kReferenceAndUpdate, kReferenceAndUpdate, kReferenceAndUpdate);
    return tl_config;
  }

  const int64_t now_ms = clock_->TimeInMilliseconds();

  int64_t unwrapped_timestamp = time_wrap_handler_.Unwrap(timestamp);
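  // Compute the timestamp delta (in 90 kHz units) since the previous frame.
  // For the very first frame there is no previous timestamp, so assume one
  // nominal frame interval at the capture (or, if unknown, target) framerate.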
  int64_t ts_diff;
  if (last_timestamp_ == -1) {
    ts_diff = kOneSecond90Khz / capture_framerate_.value_or(*target_framerate_);
  } else {
    ts_diff = unwrapped_timestamp - last_timestamp_;
  }

  if (target_framerate_) {
    // If the input frame rate exceeds the target frame rate, either over a
    // one second averaging window, or because the frame interval is below 85%
    // of the desired value, drop the frame.
    if (encode_framerate_.Rate(now_ms).value_or(0) > *target_framerate_)
      return TemporalLayers::FrameConfig(kNone, kNone, kNone);

    // Primarily check whether the frame interval is too short using frame
    // timestamps, since if those are correct they won't be affected by
    // queuing in webrtc.
    const int64_t expected_frame_interval_90khz =
        kOneSecond90Khz / *target_framerate_;
    if (last_timestamp_ != -1 && ts_diff > 0) {
      if (ts_diff < 85 * expected_frame_interval_90khz / 100) {
        return TemporalLayers::FrameConfig(kNone, kNone, kNone);
      }
    } else {
      // Timestamps look off; use the realtime clock here instead.
      const int64_t expected_frame_interval_ms = 1000 / *target_framerate_;
      if (last_frame_time_ms_ != -1 &&
          now_ms - last_frame_time_ms_ <
              (85 * expected_frame_interval_ms) / 100) {
        return TemporalLayers::FrameConfig(kNone, kNone, kNone);
      }
    }
  }

  if (stats_.first_frame_time_ms_ == -1)
    stats_.first_frame_time_ms_ = now_ms;

  // Make sure both frame droppers leak out bits.
  layers_[0].UpdateDebt(ts_diff / 90);
  layers_[1].UpdateDebt(ts_diff / 90);
  last_timestamp_ = timestamp;
  last_frame_time_ms_ = now_ms;

  TemporalLayerState layer_state = TemporalLayerState::kDrop;

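  // Select which layer to encode this frame into based on accumulated rate
  // debt: prefer TL0, fall back to TL1 when TL0 has exceeded its byte budget,
  // and drop the frame entirely when both layers are over budget. If the
  // previous frame on the selected layer was dropped by the encoder, keep the
  // previous selection.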
  if (active_layer_ == -1 ||
      layers_[active_layer_].state != TemporalLayer::State::kDropped) {
    if (last_emitted_tl0_timestamp_ != -1 &&
        (unwrapped_timestamp - last_emitted_tl0_timestamp_) / 90 >
            kMaxFrameIntervalMs) {
      // Too much time has passed since the last frame was emitted; cancel
      // enough debt to allow a single frame.
      layers_[0].debt_bytes_ = max_debt_bytes_ - 1;
    }
    if (layers_[0].debt_bytes_ > max_debt_bytes_) {
      // Must drop TL0, encode TL1 instead.
      if (layers_[1].debt_bytes_ > max_debt_bytes_) {
        // Must drop both TL0 and TL1.
        active_layer_ = -1;
      } else {
        active_layer_ = 1;
      }
    } else {
      active_layer_ = 0;
    }
  }

  switch (active_layer_) {
    case 0:
      layer_state = TemporalLayerState::kTl0;
      last_emitted_tl0_timestamp_ = unwrapped_timestamp;
      break;
    case 1:
      if (layers_[1].state != TemporalLayer::State::kDropped) {
        if (TimeToSync(unwrapped_timestamp) ||
            layers_[1].state == TemporalLayer::State::kKeyFrame) {
          last_sync_timestamp_ = unwrapped_timestamp;
          layer_state = TemporalLayerState::kTl1Sync;
        } else {
          layer_state = TemporalLayerState::kTl1;
        }
      } else {
        layer_state = last_sync_timestamp_ == unwrapped_timestamp
                          ? TemporalLayerState::kTl1Sync
                          : TemporalLayerState::kTl1;
      }
      break;
    case -1:
      layer_state = TemporalLayerState::kDrop;
      ++stats_.num_dropped_frames_;
      break;
    default:
      RTC_NOTREACHED();
  }

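  // Translate the selected temporal layer into VP8 reference-buffer flags.
  // TL0 frames live in the 'last' buffer and TL1 frames in the 'golden'
  // buffer; the 'arf' buffer is not used by this layering scheme.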
  TemporalLayers::FrameConfig tl_config;
  // TODO(pbos): Consider referencing but not updating the 'alt' buffer for all
  // layers.
  switch (layer_state) {
    case TemporalLayerState::kDrop:
      tl_config = TemporalLayers::FrameConfig(kNone, kNone, kNone);
      break;
    case TemporalLayerState::kTl0:
      // TL0 only references and updates 'last'.
      tl_config =
          TemporalLayers::FrameConfig(kReferenceAndUpdate, kNone, kNone);
      tl_config.packetizer_temporal_idx = 0;
      break;
    case TemporalLayerState::kTl1:
      // TL1 references both 'last' and 'golden' but only updates 'golden'.
      tl_config =
          TemporalLayers::FrameConfig(kReference, kReferenceAndUpdate, kNone);
      tl_config.packetizer_temporal_idx = 1;
      break;
    case TemporalLayerState::kTl1Sync:
      // Predict from only TL0 to allow participants to switch to the high
      // bitrate stream. Updates 'golden' so that TL1 can continue to refer to
      // and update 'golden' from this point on.
      tl_config = TemporalLayers::FrameConfig(kReference, kUpdate, kNone);
      tl_config.packetizer_temporal_idx = 1;
      break;
  }

  tl_config.layer_sync = layer_state == TemporalLayerState::kTl1Sync;
  return tl_config;
}

void ScreenshareLayers::OnRatesUpdated(
    const std::vector<uint32_t>& bitrates_bps,
    int framerate_fps) {
  RTC_DCHECK_GT(framerate_fps, 0);
  RTC_DCHECK_GE(bitrates_bps.size(), 1);
  RTC_DCHECK_LE(bitrates_bps.size(), 2);

  // |bitrates_bps| uses individual rates per layer, but we want to use the
  // accumulated rate here.
  uint32_t tl0_kbps = bitrates_bps[0] / 1000;
  uint32_t tl1_kbps = tl0_kbps;
  if (bitrates_bps.size() > 1) {
    tl1_kbps += bitrates_bps[1] / 1000;
  }

  if (!target_framerate_) {
    // First OnRatesUpdated() is called during construction, with the
    // configured targets as parameters.
    target_framerate_ = framerate_fps;
    capture_framerate_ = target_framerate_;
    bitrate_updated_ = true;
  } else {
    if ((capture_framerate_ &&
         framerate_fps != static_cast<int>(*capture_framerate_)) ||
        (tl0_kbps != layers_[0].target_rate_kbps_) ||
        (tl1_kbps != layers_[1].target_rate_kbps_)) {
      bitrate_updated_ = true;
    }

    if (framerate_fps < 0) {
      capture_framerate_.reset();
    } else {
      capture_framerate_ = framerate_fps;
    }
  }

  layers_[0].target_rate_kbps_ = tl0_kbps;
  layers_[1].target_rate_kbps_ = tl1_kbps;
}

void ScreenshareLayers::FrameEncoded(uint32_t timestamp, size_t size, int qp) {
  if (size > 0)
    encode_framerate_.Update(1, clock_->TimeInMilliseconds());

  if (number_of_temporal_layers_ == 1)
    return;

  RTC_DCHECK_NE(-1, active_layer_);
  if (size == 0) {
    layers_[active_layer_].state = TemporalLayer::State::kDropped;
    ++stats_.num_overshoots_;
    return;
  }

  if (layers_[active_layer_].state == TemporalLayer::State::kDropped) {
    layers_[active_layer_].state = TemporalLayer::State::kQualityBoost;
  }

  if (qp != -1)
    layers_[active_layer_].last_qp = qp;

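  // Charge the encoded bytes against the rate debt. TL0 frames count against
  // both layers, since the TL1 budget is the accumulated TL0 + TL1 rate, while
  // TL1 frames only count against TL1.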
  if (active_layer_ == 0) {
    layers_[0].debt_bytes_ += size;
    layers_[1].debt_bytes_ += size;
    ++stats_.num_tl0_frames_;
    stats_.tl0_target_bitrate_sum_ += layers_[0].target_rate_kbps_;
    stats_.tl0_qp_sum_ += qp;
  } else if (active_layer_ == 1) {
    layers_[1].debt_bytes_ += size;
    ++stats_.num_tl1_frames_;
    stats_.tl1_target_bitrate_sum_ += layers_[1].target_rate_kbps_;
    stats_.tl1_qp_sum_ += qp;
  }
}

void ScreenshareLayers::PopulateCodecSpecific(
    bool frame_is_keyframe,
    const TemporalLayers::FrameConfig& tl_config,
    CodecSpecificInfoVP8* vp8_info,
    uint32_t timestamp) {
  if (number_of_temporal_layers_ == 1) {
    vp8_info->temporalIdx = kNoTemporalIdx;
    vp8_info->layerSync = false;
  } else {
    int64_t unwrapped_timestamp = time_wrap_handler_.Unwrap(timestamp);
    vp8_info->temporalIdx = tl_config.packetizer_temporal_idx;
    vp8_info->layerSync = tl_config.layer_sync;
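    // A keyframe refreshes all reference buffers and therefore acts as a TL0
    // sync point. Marking both layers as kKeyFrame also makes the next TL1
    // frame be encoded as a sync frame (see UpdateLayerConfig()).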
    if (frame_is_keyframe) {
      vp8_info->temporalIdx = 0;
      last_sync_timestamp_ = unwrapped_timestamp;
      vp8_info->layerSync = true;
      layers_[0].state = TemporalLayer::State::kKeyFrame;
      layers_[1].state = TemporalLayer::State::kKeyFrame;
    }
  }
}

bool ScreenshareLayers::TimeToSync(int64_t timestamp) const {
  RTC_DCHECK_EQ(1, active_layer_);
  RTC_DCHECK_NE(-1, layers_[0].last_qp);
  if (layers_[1].last_qp == -1) {
    // First frame in TL1 should only depend on TL0 since there are no
    // previous frames in TL1.
    return true;
  }

  RTC_DCHECK_NE(-1, last_sync_timestamp_);
  int64_t timestamp_diff = timestamp - last_sync_timestamp_;
  if (timestamp_diff > kMaxTimeBetweenSyncs) {
    // After a certain time, force a sync frame.
    return true;
  } else if (timestamp_diff < kMinTimeBetweenSyncs) {
    // If too soon from previous sync frame, don't issue a new one.
    return false;
  }
  // Issue a sync frame if difference in quality between TL0 and TL1 isn't too
  // large.
  if (layers_[0].last_qp - layers_[1].last_qp < kQpDeltaThresholdForSync)
    return true;
  return false;
}

uint32_t ScreenshareLayers::GetCodecTargetBitrateKbps() const {
  uint32_t target_bitrate_kbps = layers_[0].target_rate_kbps_;

  if (number_of_temporal_layers_ > 1) {
    // Calculate a codec target bitrate. This may be higher than TL0, gaining
    // quality at the expense of frame rate at TL0. Constraints:
    // - TL0 frame rate no less than framerate / kMaxTL0FpsReduction.
    // - Target rate * kAcceptableTargetOvershoot should not exceed TL1 rate.
    target_bitrate_kbps =
        std::min(layers_[0].target_rate_kbps_ * kMaxTL0FpsReduction,
                 layers_[1].target_rate_kbps_ / kAcceptableTargetOvershoot);
  }

  return std::max(layers_[0].target_rate_kbps_, target_bitrate_kbps);
}

bool ScreenshareLayers::UpdateConfiguration(Vp8EncoderConfig* cfg) {
  bool cfg_updated = false;
  uint32_t target_bitrate_kbps = GetCodecTargetBitrateKbps();

  // TODO(sprang): We _really_ need to make an overhaul of this class. :(
  // If we're dropping frames in order to meet a target framerate, adjust the
  // bitrate assigned to the encoder so the total average bitrate is correct.
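  // For example, a 30 fps capture limited to a 5 fps encoding target gets 6x
  // the nominal bitrate, since only roughly one in six incoming frames ends up
  // being encoded.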
  float encoder_config_bitrate_kbps = target_bitrate_kbps;
  if (target_framerate_ && capture_framerate_ &&
      *target_framerate_ < *capture_framerate_) {
    encoder_config_bitrate_kbps *=
        static_cast<float>(*capture_framerate_) / *target_framerate_;
  }

  if (bitrate_updated_ ||
      cfg->rc_target_bitrate != encoder_config_bitrate_kbps) {
    cfg->rc_target_bitrate = encoder_config_bitrate_kbps;

    // Don't reconfigure qp limits during quality boost frames.
    if (active_layer_ == -1 ||
        layers_[active_layer_].state != TemporalLayer::State::kQualityBoost) {
      min_qp_ = cfg->rc_min_quantizer;
      max_qp_ = cfg->rc_max_quantizer;
      // After a dropped frame, a frame with max qp will be encoded and the
      // quality will then ramp up from there. To boost the speed of recovery,
      // encode the next frame with lower max qp, if there is sufficient
      // bandwidth to do so without causing excessive delay.
      // TL0 is the most important to improve since the errors in this layer
      // will propagate to TL1.
      // Currently, reduce max qp by 20% for TL0 and 15% for TL1.
      if (layers_[1].target_rate_kbps_ >= kMinBitrateKbpsForQpBoost) {
        layers_[0].enhanced_max_qp =
            min_qp_ + (((max_qp_ - min_qp_) * 80) / 100);
        layers_[1].enhanced_max_qp =
            min_qp_ + (((max_qp_ - min_qp_) * 85) / 100);
      } else {
        layers_[0].enhanced_max_qp = -1;
        layers_[1].enhanced_max_qp = -1;
      }
    }

    if (capture_framerate_) {
      int avg_frame_size =
          (target_bitrate_kbps * 1000) / (8 * *capture_framerate_);
      // Allow max debt to be the size of a single optimal frame.
      // TODO(sprang): Determine if this needs to be adjusted by some factor.
      // (Lower values may cause more frame drops, higher may lead to queuing
      // delays.)
      max_debt_bytes_ = avg_frame_size;
    }

    bitrate_updated_ = false;
    cfg_updated = true;
  }

  // Don't try to update the boost state if no layer is active yet.
  if (active_layer_ == -1)
    return cfg_updated;

  if (max_qp_ == -1 || number_of_temporal_layers_ <= 1)
    return cfg_updated;

  // If the layer is in the quality boost state (following a dropped frame),
  // update the configuration with the adjusted (lower) qp and set the state
  // back to normal.
  unsigned int adjusted_max_qp;
  if (layers_[active_layer_].state == TemporalLayer::State::kQualityBoost &&
      layers_[active_layer_].enhanced_max_qp != -1) {
    adjusted_max_qp = layers_[active_layer_].enhanced_max_qp;
    layers_[active_layer_].state = TemporalLayer::State::kNormal;
  } else {
    adjusted_max_qp = max_qp_;  // Set the normal max qp.
  }

  if (adjusted_max_qp == cfg->rc_max_quantizer)
    return cfg_updated;

  cfg->rc_max_quantizer = adjusted_max_qp;
  cfg_updated = true;

  return cfg_updated;
}

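// Drain the accumulated rate debt at the layer's target bitrate:
// target_rate_kbps_ * delta_ms / 8 is the number of bytes that leak out over
// |delta_ms| milliseconds, and the debt is clamped at zero so it never goes
// negative.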
void ScreenshareLayers::TemporalLayer::UpdateDebt(int64_t delta_ms) {
  uint32_t debt_reduction_bytes = target_rate_kbps_ * delta_ms / 8;
  if (debt_reduction_bytes >= debt_bytes_) {
    debt_bytes_ = 0;
  } else {
    debt_bytes_ -= debt_reduction_bytes;
  }
}

void ScreenshareLayers::UpdateHistograms() {
  if (stats_.first_frame_time_ms_ == -1)
    return;
  int64_t duration_sec =
      (clock_->TimeInMilliseconds() - stats_.first_frame_time_ms_ + 500) / 1000;
  if (duration_sec >= metrics::kMinRunTimeInSeconds) {
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.Screenshare.Layer0.FrameRate",
        (stats_.num_tl0_frames_ + (duration_sec / 2)) / duration_sec);
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.Screenshare.Layer1.FrameRate",
        (stats_.num_tl1_frames_ + (duration_sec / 2)) / duration_sec);
    int total_frames = stats_.num_tl0_frames_ + stats_.num_tl1_frames_;
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.Screenshare.FramesPerDrop",
        (stats_.num_dropped_frames_ == 0
             ? 0
             : total_frames / stats_.num_dropped_frames_));
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.Screenshare.FramesPerOvershoot",
        (stats_.num_overshoots_ == 0 ? 0
                                     : total_frames / stats_.num_overshoots_));
    if (stats_.num_tl0_frames_ > 0) {
      RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.Layer0.Qp",
                                 stats_.tl0_qp_sum_ / stats_.num_tl0_frames_);
      RTC_HISTOGRAM_COUNTS_10000(
          "WebRTC.Video.Screenshare.Layer0.TargetBitrate",
          stats_.tl0_target_bitrate_sum_ / stats_.num_tl0_frames_);
    }
    if (stats_.num_tl1_frames_ > 0) {
      RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.Layer1.Qp",
                                 stats_.tl1_qp_sum_ / stats_.num_tl1_frames_);
      RTC_HISTOGRAM_COUNTS_10000(
          "WebRTC.Video.Screenshare.Layer1.TargetBitrate",
          stats_.tl1_target_bitrate_sum_ / stats_.num_tl1_frames_);
    }
  }
}
}  // namespace webrtc