Erik Språng | 6a7baa7 | 2019-02-26 18:31:00 +0100 | [diff] [blame^] | 1 | /* |
| 2 | * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
| 11 | #include "video/frame_encode_timer.h" |
| 12 | |
| 13 | #include <algorithm> |
| 14 | |
| 15 | #include "modules/include/module_common_types_public.h" |
| 16 | #include "modules/video_coding/include/video_coding_defines.h" |
| 17 | #include "rtc_base/logging.h" |
| 18 | #include "rtc_base/time_utils.h" |
| 19 | |
| 20 | namespace webrtc { |
namespace {
// After this many occurrences of a given warning, further instances are
// throttled (see the `% kThrottleRatio` checks below).
constexpr int kMessagesThrottlingThreshold = 2;
// Once throttled, a warning is emitted only every kThrottleRatio occurrences.
constexpr int kThrottleRatio = 100000;
}  // namespace
| 25 | |
// Out-of-line defaulted special members for the per-layer bookkeeping struct;
// its members are declared (and initialized) in the header.
FrameEncodeTimer::TimingFramesLayerInfo::TimingFramesLayerInfo() = default;
FrameEncodeTimer::TimingFramesLayerInfo::~TimingFramesLayerInfo() = default;
| 28 | |
// |frame_drop_callback| is notified (as kDroppedByEncoder) whenever this
// class concludes the encoder silently dropped a frame; it must outlive this
// object.
FrameEncodeTimer::FrameEncodeTimer(EncodedImageCallback* frame_drop_callback)
    : frame_drop_callback_(frame_drop_callback),
      internal_source_(false),
      framerate_fps_(0),
      last_timing_frame_time_ms_(-1),  // -1 == no timing frame sent yet.
      incorrect_capture_time_logged_messages_(0),
      reordered_frames_logged_messages_(0),
      stalled_encoder_logged_messages_(0) {
  // Disable timing-frame triggering until OnEncoderInit() installs real
  // codec settings ({delay_ms, outlier_ratio_percent} placeholder).
  codec_settings_.timing_frame_thresholds = {-1, 0};
}
FrameEncodeTimer::~FrameEncodeTimer() {}
| 40 | |
| 41 | void FrameEncodeTimer::OnEncoderInit(const VideoCodec& codec, |
| 42 | bool internal_source) { |
| 43 | rtc::CritScope cs(&lock_); |
| 44 | codec_settings_ = codec; |
| 45 | internal_source_ = internal_source; |
| 46 | } |
| 47 | |
| 48 | void FrameEncodeTimer::OnSetRates( |
| 49 | const VideoBitrateAllocation& bitrate_allocation, |
| 50 | uint32_t framerate_fps) { |
| 51 | rtc::CritScope cs(&lock_); |
| 52 | framerate_fps_ = framerate_fps; |
| 53 | const size_t num_spatial_layers = NumSpatialLayers(); |
| 54 | if (timing_frames_info_.size() < num_spatial_layers) { |
| 55 | timing_frames_info_.resize(num_spatial_layers); |
| 56 | } |
| 57 | for (size_t i = 0; i < num_spatial_layers; ++i) { |
| 58 | timing_frames_info_[i].target_bitrate_bytes_per_sec = |
| 59 | bitrate_allocation.GetSpatialLayerSum(i) / 8; |
| 60 | } |
| 61 | } |
| 62 | |
// Records the encode-start wall-clock time of the frame identified by
// |rtp_timestamp| / |capture_time_ms| on every active spatial layer, so that
// FillTimingInfo() can later compute encode duration. Also detects a stalled
// encoder (bounded pending list) and reports overflowed entries as drops.
void FrameEncodeTimer::OnEncodeStarted(uint32_t rtp_timestamp,
                                       int64_t capture_time_ms) {
  rtc::CritScope cs(&lock_);
  // Encoders with internal sources don't go through this path; their
  // timestamps are handled by the workaround in FillTimingInfo().
  if (internal_source_) {
    return;
  }

  const size_t num_spatial_layers = NumSpatialLayers();
  timing_frames_info_.resize(num_spatial_layers);
  for (size_t si = 0; si < num_spatial_layers; ++si) {
    // Frames must be submitted in non-decreasing capture-time order; the
    // pending list below relies on that ordering.
    RTC_DCHECK(
        timing_frames_info_[si].encode_start_list.empty() ||
        rtc::TimeDiff(
            capture_time_ms,
            timing_frames_info_[si].encode_start_list.back().capture_time_ms) >=
            0);
    // If stream is disabled due to low bandwidth OnEncodeStarted still will be
    // called and have to be ignored.
    // NOTE(review): this is a |return|, not |continue| — layers after the
    // first zero-bitrate one are skipped as well. TODO: confirm intended.
    if (timing_frames_info_[si].target_bitrate_bytes_per_sec == 0)
      return;
    // A full pending list means frames went in but never came out: the
    // encoder likely stalled. Report the oldest entry as a drop and make
    // room, throttling the warning after kMessagesThrottlingThreshold.
    if (timing_frames_info_[si].encode_start_list.size() ==
        kMaxEncodeStartTimeListSize) {
      ++stalled_encoder_logged_messages_;
      if (stalled_encoder_logged_messages_ <= kMessagesThrottlingThreshold ||
          stalled_encoder_logged_messages_ % kThrottleRatio == 0) {
        RTC_LOG(LS_WARNING) << "Too many frames in the encode_start_list."
                               " Did encoder stall?";
        if (stalled_encoder_logged_messages_ == kMessagesThrottlingThreshold) {
          RTC_LOG(LS_WARNING) << "Too many log messages. Further stalled encoder"
                                 "warnings will be throttled.";
        }
      }
      frame_drop_callback_->OnDroppedFrame(
          EncodedImageCallback::DropReason::kDroppedByEncoder);
      timing_frames_info_[si].encode_start_list.pop_front();
    }
    timing_frames_info_[si].encode_start_list.emplace_back(
        rtp_timestamp, capture_time_ms, rtc::TimeMillis());
  }
}
| 104 | |
// Fills in the timing metadata of |encoded_image| (encode start/finish and
// timing-frame trigger flags) for the frame produced on spatial/simulcast
// layer |simulcast_svc_idx|. |encode_done_ms| is the local wall-clock time
// (rtc::TimeMillis() based) at which encoding completed.
void FrameEncodeTimer::FillTimingInfo(size_t simulcast_svc_idx,
                                      EncodedImage* encoded_image,
                                      int64_t encode_done_ms) {
  rtc::CritScope cs(&lock_);
  absl::optional<size_t> outlier_frame_size;
  absl::optional<int64_t> encode_start_ms;
  uint8_t timing_flags = VideoSendTiming::kNotTriggered;

  // Encoders with internal sources do not call OnEncodeStarted
  // |timing_frames_info_| may be not filled here.
  if (!internal_source_) {
    // Note: may also report intermediate frames dropped by the encoder and
    // fix up |encoded_image->capture_time_ms_| as a side effect.
    encode_start_ms = ExtractEncodeStartTime(simulcast_svc_idx, encoded_image);
  }

  // Compute the "outlier" size threshold from the layer's target bitrate and
  // framerate, when both were reported via OnSetRates().
  if (timing_frames_info_.size() > simulcast_svc_idx) {
    size_t target_bitrate =
        timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec;
    if (framerate_fps_ > 0 && target_bitrate > 0) {
      // framerate and target bitrate were reported by encoder.
      size_t average_frame_size = target_bitrate / framerate_fps_;
      outlier_frame_size.emplace(
          average_frame_size *
          codec_settings_.timing_frame_thresholds.outlier_ratio_percent / 100);
    }
  }

  // Outliers trigger timing frames, but do not affect scheduled timing
  // frames.
  if (outlier_frame_size && encoded_image->size() >= *outlier_frame_size) {
    timing_flags |= VideoSendTiming::kTriggeredBySize;
  }

  // Check if it's time to send a timing frame.
  int64_t timing_frame_delay_ms =
      encoded_image->capture_time_ms_ - last_timing_frame_time_ms_;
  // Trigger threshold if it's a first frame, too long passed since the last
  // timing frame, or we already sent timing frame on a different simulcast
  // stream with the same capture time.
  if (last_timing_frame_time_ms_ == -1 ||
      timing_frame_delay_ms >=
          codec_settings_.timing_frame_thresholds.delay_ms ||
      timing_frame_delay_ms == 0) {
    timing_flags |= VideoSendTiming::kTriggeredByTimer;
    last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
  }

  // Workaround for chromoting encoder: it passes encode start and finished
  // timestamps in |timing_| field, but they (together with capture timestamp)
  // are not in the WebRTC clock.
  if (internal_source_ && encoded_image->timing_.encode_finish_ms > 0 &&
      encoded_image->timing_.encode_start_ms > 0) {
    int64_t clock_offset_ms =
        encode_done_ms - encoded_image->timing_.encode_finish_ms;
    // Translate capture timestamp to local WebRTC clock.
    encoded_image->capture_time_ms_ += clock_offset_ms;
    // Keep the RTP timestamp consistent with the shifted capture time
    // (90 kHz RTP clock: 90 ticks per millisecond).
    encoded_image->SetTimestamp(
        static_cast<uint32_t>(encoded_image->capture_time_ms_ * 90));
    encode_start_ms.emplace(encoded_image->timing_.encode_start_ms +
                            clock_offset_ms);
  }

  // If encode start is not available that means that encoder uses internal
  // source. In that case capture timestamp may be from a different clock with a
  // drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
  // because to being sent in the network capture time required to be less than
  // all the other timestamps.
  if (encode_start_ms) {
    encoded_image->SetEncodeTime(*encode_start_ms, encode_done_ms);
    encoded_image->timing_.flags = timing_flags;
  } else {
    encoded_image->timing_.flags = VideoSendTiming::kInvalid;
  }
}
| 178 | |
| 179 | void FrameEncodeTimer::Reset() { |
| 180 | rtc::CritScope cs(&lock_); |
| 181 | timing_frames_info_.clear(); |
| 182 | last_timing_frame_time_ms_ = -1; |
| 183 | reordered_frames_logged_messages_ = 0; |
| 184 | stalled_encoder_logged_messages_ = 0; |
| 185 | } |
| 186 | |
// Matches |encoded_image| against the recorded encode-start entries of layer
// |simulcast_svc_idx| by RTP timestamp. Entries older than the image are
// reported to |frame_drop_callback_| as encoder-internal drops and discarded.
// Returns the matching encode-start time, or nullopt when no entry matches
// (encoder reordered frames or rewrote RTP timestamps). As a side effect,
// rewrites |encoded_image->capture_time_ms_| if the encoder did not preserve
// the capture timestamp.
absl::optional<int64_t> FrameEncodeTimer::ExtractEncodeStartTime(
    size_t simulcast_svc_idx,
    EncodedImage* encoded_image) {
  absl::optional<int64_t> result;
  size_t num_simulcast_svc_streams = timing_frames_info_.size();
  if (simulcast_svc_idx < num_simulcast_svc_streams) {
    auto encode_start_list =
        &timing_frames_info_[simulcast_svc_idx].encode_start_list;
    // Skip frames for which there was OnEncodeStarted but no OnEncodedImage
    // call. These are dropped by encoder internally.
    // Because some hardware encoders don't preserve capture timestamp we
    // use RTP timestamps here.
    while (!encode_start_list->empty() &&
           IsNewerTimestamp(encoded_image->Timestamp(),
                            encode_start_list->front().rtp_timestamp)) {
      frame_drop_callback_->OnDroppedFrame(
          EncodedImageCallback::DropReason::kDroppedByEncoder);
      encode_start_list->pop_front();
    }
    // After the skip loop, the head entry is either this frame or a newer one.
    if (encode_start_list->size() > 0 &&
        encode_start_list->front().rtp_timestamp ==
            encoded_image->Timestamp()) {
      result.emplace(encode_start_list->front().encode_start_time_ms);
      if (encoded_image->capture_time_ms_ !=
          encode_start_list->front().capture_time_ms) {
        // Force correct capture timestamp.
        encoded_image->capture_time_ms_ =
            encode_start_list->front().capture_time_ms;
        // Warn (throttled after kMessagesThrottlingThreshold occurrences).
        ++incorrect_capture_time_logged_messages_;
        if (incorrect_capture_time_logged_messages_ <=
                kMessagesThrottlingThreshold ||
            incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
          RTC_LOG(LS_WARNING)
              << "Encoder is not preserving capture timestamps.";
          if (incorrect_capture_time_logged_messages_ ==
              kMessagesThrottlingThreshold) {
            RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
                                   "timestamps warnings will be throttled.";
          }
        }
      }
      encode_start_list->pop_front();
    } else {
      // No recorded start time for this frame: the encoder either reordered
      // frames or changed their RTP timestamps. Warn (throttled).
      ++reordered_frames_logged_messages_;
      if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
          reordered_frames_logged_messages_ % kThrottleRatio == 0) {
        RTC_LOG(LS_WARNING) << "Frame with no encode started time recordings. "
                               "Encoder may be reordering frames "
                               "or not preserving RTP timestamps.";
        if (reordered_frames_logged_messages_ == kMessagesThrottlingThreshold) {
          RTC_LOG(LS_WARNING) << "Too many log messages. Further frames "
                                 "reordering warnings will be throttled.";
        }
      }
    }
  }
  return result;
}
| 245 | |
| 246 | size_t FrameEncodeTimer::NumSpatialLayers() const { |
| 247 | size_t num_spatial_layers = codec_settings_.numberOfSimulcastStreams; |
| 248 | if (codec_settings_.codecType == kVideoCodecVP9) { |
| 249 | num_spatial_layers = std::max( |
| 250 | num_spatial_layers, |
| 251 | static_cast<size_t>(codec_settings_.VP9().numberOfSpatialLayers)); |
| 252 | } |
| 253 | return std::max(num_spatial_layers, size_t{1}); |
| 254 | } |
| 255 | |
| 256 | } // namespace webrtc |