/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
#include "modules/video_coding/frame_buffer2.h"

#include <algorithm>
#include <cstdlib>
#include <iterator>
#include <queue>
#include <utility>
#include <vector>

#include "api/video/encoded_image.h"
#include "api/video/video_timing.h"
#include "common_types.h"  // NOLINT(build/include)
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/jitter_estimator.h"
#include "modules/video_coding/timing.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/rtt_mult_experiment.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
philipelbe7a9e52016-05-19 12:19:35 +020033
34namespace webrtc {
35namespace video_coding {
36
37namespace {
philipele0b2f152016-09-28 10:23:49 +020038// Max number of frames the buffer will hold.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +010039constexpr size_t kMaxFramesBuffered = 800;
philipelbe7a9e52016-05-19 12:19:35 +020040
philipele0b2f152016-09-28 10:23:49 +020041// Max number of decoded frame info that will be saved.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +010042constexpr int kMaxFramesHistory = 1 << 13;
philipel65e1f942017-07-24 08:26:53 -070043
Ilya Nikolaevskiy8c4fe162018-02-27 15:49:47 +010044// The time it's allowed for a frame to be late to its rendering prediction and
45// still be rendered.
Ilya Nikolaevskiy7eef0072018-02-28 09:59:26 +010046constexpr int kMaxAllowedFrameDelayMs = 5;
Ilya Nikolaevskiy8c4fe162018-02-27 15:49:47 +010047
philipel65e1f942017-07-24 08:26:53 -070048constexpr int64_t kLogNonDecodedIntervalMs = 5000;
philipelbe7a9e52016-05-19 12:19:35 +020049} // namespace
50
philipelbe7a9e52016-05-19 12:19:35 +020051FrameBuffer::FrameBuffer(Clock* clock,
52 VCMJitterEstimator* jitter_estimator,
philipela45102f2017-02-22 05:30:39 -080053 VCMTiming* timing,
54 VCMReceiveStatisticsCallback* stats_callback)
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +010055 : decoded_frames_history_(kMaxFramesHistory),
56 clock_(clock),
philipelbe7a9e52016-05-19 12:19:35 +020057 jitter_estimator_(jitter_estimator),
58 timing_(timing),
philipel4f6cd6a2016-08-03 10:59:32 +020059 inter_frame_delay_(clock_->TimeInMilliseconds()),
philipel29f730e2017-03-15 08:10:08 -070060 stopped_(false),
philipela45102f2017-02-22 05:30:39 -080061 protection_mode_(kProtectionNack),
philipel65e1f942017-07-24 08:26:53 -070062 stats_callback_(stats_callback),
Elad Alone4b50232019-01-14 18:56:14 +010063 last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
64 add_rtt_to_playout_delay_(
65 webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")) {}
philipel266f0a42016-11-28 08:49:07 -080066
philipela45102f2017-02-22 05:30:39 -080067FrameBuffer::~FrameBuffer() {}
philipelbe7a9e52016-05-19 12:19:35 +020068
philipel75562822016-09-05 10:57:41 +020069FrameBuffer::ReturnReason FrameBuffer::NextFrame(
70 int64_t max_wait_time_ms,
philipele7c891f2018-02-22 14:35:06 +010071 std::unique_ptr<EncodedFrame>* frame_out,
philipel3042c2d2017-08-18 04:55:02 -070072 bool keyframe_required) {
tommidb23ea62017-03-03 07:21:18 -080073 TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
philipel1c056252017-01-31 09:53:12 -080074 int64_t latest_return_time_ms =
75 clock_->TimeInMilliseconds() + max_wait_time_ms;
philipel504c47d2016-06-30 17:33:02 +020076 int64_t wait_ms = max_wait_time_ms;
philipel29f730e2017-03-15 08:10:08 -070077 int64_t now_ms = 0;
philipele0b2f152016-09-28 10:23:49 +020078
79 do {
philipel29f730e2017-03-15 08:10:08 -070080 now_ms = clock_->TimeInMilliseconds();
philipel504c47d2016-06-30 17:33:02 +020081 {
82 rtc::CritScope lock(&crit_);
tommi0a735642017-03-14 06:23:57 -070083 new_continuous_frame_event_.Reset();
philipel29f730e2017-03-15 08:10:08 -070084 if (stopped_)
85 return kStopped;
86
87 wait_ms = max_wait_time_ms;
88
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +010089 // Need to hold |crit_| in order to access frames_to_decode_. therefore we
philipele0b2f152016-09-28 10:23:49 +020090 // set it here in the loop instead of outside the loop in order to not
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +010091 // acquire the lock unnecessarily.
92 frames_to_decode_.clear();
philipelbe7a9e52016-05-19 12:19:35 +020093
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +010094 // |last_continuous_frame_| may be empty below, but nullopt is smaller
95 // than everything else and loop will immediately terminate as expected.
96 for (auto frame_it = frames_.begin();
97 frame_it != frames_.end() &&
98 frame_it->first <= last_continuous_frame_;
philipel146a48b2017-04-20 04:04:38 -070099 ++frame_it) {
philipel93e451b2016-10-06 12:25:13 +0200100 if (!frame_it->second.continuous ||
101 frame_it->second.num_missing_decodable > 0) {
philipele0b2f152016-09-28 10:23:49 +0200102 continue;
philipel93e451b2016-10-06 12:25:13 +0200103 }
philipele0b2f152016-09-28 10:23:49 +0200104
philipele7c891f2018-02-22 14:35:06 +0100105 EncodedFrame* frame = frame_it->second.frame.get();
philipel3042c2d2017-08-18 04:55:02 -0700106
107 if (keyframe_required && !frame->is_keyframe())
108 continue;
109
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100110 auto last_decoded_frame_timestamp =
111 decoded_frames_history_.GetLastDecodedFrameTimestamp();
112
Ilya Nikolaevskiye6a2d942018-11-07 14:32:28 +0100113 // TODO(https://bugs.webrtc.org/9974): consider removing this check
114 // as it may make a stream undecodable after a very long delay between
115 // frames.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100116 if (last_decoded_frame_timestamp &&
117 AheadOf(*last_decoded_frame_timestamp, frame->Timestamp())) {
philipel6d216502018-10-22 14:36:45 +0200118 continue;
119 }
120
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100121 // Only ever return all parts of a superframe. Therefore skip this
122 // frame if it's not a beginning of a superframe.
123 if (frame->inter_layer_predicted) {
124 continue;
125 }
126
127 // Gather all remaining frames for the same superframe.
128 std::vector<FrameMap::iterator> current_superframe;
129 current_superframe.push_back(frame_it);
130 bool last_layer_completed =
131 frame_it->second.frame->is_last_spatial_layer;
132 FrameMap::iterator next_frame_it = frame_it;
133 while (true) {
134 ++next_frame_it;
135 if (next_frame_it == frames_.end() ||
136 next_frame_it->first.picture_id != frame->id.picture_id ||
137 !next_frame_it->second.continuous) {
138 break;
139 }
140 // Check if the next frame has some undecoded references other than
141 // the previous frame in the same superframe.
142 size_t num_allowed_undecoded_refs =
143 (next_frame_it->second.frame->inter_layer_predicted) ? 1 : 0;
144 if (next_frame_it->second.num_missing_decodable >
145 num_allowed_undecoded_refs) {
146 break;
147 }
148 // All frames in the superframe should have the same timestamp.
149 if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) {
150 RTC_LOG(LS_WARNING)
151 << "Frames in a single superframe have different"
152 " timestamps. Skipping undecodable superframe.";
153 break;
154 }
155 current_superframe.push_back(next_frame_it);
156 last_layer_completed =
157 next_frame_it->second.frame->is_last_spatial_layer;
158 }
159 // Check if the current superframe is complete.
160 // TODO(bugs.webrtc.org/10064): consider returning all available to
161 // decode frames even if the superframe is not complete yet.
162 if (!last_layer_completed) {
163 continue;
164 }
165
166 frames_to_decode_ = std::move(current_superframe);
167
philipel6d216502018-10-22 14:36:45 +0200168 if (frame->RenderTime() == -1) {
Niels Möller23775882018-08-16 10:24:12 +0200169 frame->SetRenderTime(
170 timing_->RenderTimeMs(frame->Timestamp(), now_ms));
philipel6d216502018-10-22 14:36:45 +0200171 }
philipele0b2f152016-09-28 10:23:49 +0200172 wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);
173
174 // This will cause the frame buffer to prefer high framerate rather
175 // than high resolution in the case of the decoder not decoding fast
176 // enough and the stream has multiple spatial and temporal layers.
Ilya Nikolaevskiy8c4fe162018-02-27 15:49:47 +0100177 // For multiple temporal layers it may cause non-base layer frames to be
178 // skipped if they are late.
Ilya Nikolaevskiy7eef0072018-02-28 09:59:26 +0100179 if (wait_ms < -kMaxAllowedFrameDelayMs)
philipele0b2f152016-09-28 10:23:49 +0200180 continue;
181
182 break;
183 }
184 } // rtc::Critscope lock(&crit_);
185
philipel1c056252017-01-31 09:53:12 -0800186 wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms - now_ms);
philipele0b2f152016-09-28 10:23:49 +0200187 wait_ms = std::max<int64_t>(wait_ms, 0);
tommi0a735642017-03-14 06:23:57 -0700188 } while (new_continuous_frame_event_.Wait(wait_ms));
philipele0b2f152016-09-28 10:23:49 +0200189
philipel29f730e2017-03-15 08:10:08 -0700190 {
191 rtc::CritScope lock(&crit_);
192 now_ms = clock_->TimeInMilliseconds();
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100193 // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100194 std::vector<EncodedFrame*> frames_out;
philipele0b2f152016-09-28 10:23:49 +0200195
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100196 if (!frames_to_decode_.empty()) {
197 bool superframe_delayed_by_retransmission = false;
198 size_t superframe_size = 0;
199 EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();
200 int64_t render_time_ms = first_frame->RenderTime();
201 int64_t receive_time_ms = first_frame->ReceivedTime();
202 // Gracefully handle bad RTP timestamps and render time issues.
203 if (HasBadRenderTiming(*first_frame, now_ms)) {
204 jitter_estimator_->Reset();
205 timing_->Reset();
206 render_time_ms =
207 timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
208 }
209
210 for (FrameMap::iterator& frame_it : frames_to_decode_) {
211 RTC_DCHECK(frame_it != frames_.end());
212 EncodedFrame* frame = frame_it->second.frame.release();
213
214 frame->SetRenderTime(render_time_ms);
215
216 superframe_delayed_by_retransmission |=
217 frame->delayed_by_retransmission();
218 receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
219 superframe_size += frame->size();
220
221 PropagateDecodability(frame_it->second);
222 decoded_frames_history_.InsertDecoded(frame_it->first,
223 frame->Timestamp());
224
225 // Remove decoded frame and all undecoded frames before it.
226 frames_.erase(frames_.begin(), ++frame_it);
227
228 frames_out.push_back(frame);
229 }
230
231 if (!superframe_delayed_by_retransmission) {
philipel29f730e2017-03-15 08:10:08 -0700232 int64_t frame_delay;
philipele0754302017-01-25 08:56:23 -0800233
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100234 if (inter_frame_delay_.CalculateDelay(first_frame->Timestamp(),
235 &frame_delay, receive_time_ms)) {
236 jitter_estimator_->UpdateEstimate(frame_delay, superframe_size);
philipel29f730e2017-03-15 08:10:08 -0700237 }
238
239 float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
“Michaelf9fc1712018-08-27 10:08:58 -0500240 if (RttMultExperiment::RttMultEnabled()) {
241 rtt_mult = RttMultExperiment::GetRttMultValue();
242 }
philipel29f730e2017-03-15 08:10:08 -0700243 timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100244 timing_->UpdateCurrentDelay(render_time_ms, now_ms);
philipele21be1d2017-09-25 06:37:12 -0700245 } else {
Elad Alone4b50232019-01-14 18:56:14 +0100246 if (RttMultExperiment::RttMultEnabled() || add_rtt_to_playout_delay_)
philipel707f2782017-10-02 14:10:28 +0200247 jitter_estimator_->FrameNacked();
philipele0754302017-01-25 08:56:23 -0800248 }
249
philipel29f730e2017-03-15 08:10:08 -0700250 UpdateJitterDelay();
ilnik2edc6842017-07-06 03:06:50 -0700251 UpdateTimingFrameInfo();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100252 }
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100253 if (!frames_out.empty()) {
254 if (frames_out.size() == 1) {
255 frame_out->reset(frames_out[0]);
256 } else {
257 frame_out->reset(CombineAndDeleteFrames(frames_out));
258 }
philipel29f730e2017-03-15 08:10:08 -0700259 return kFrameFound;
philipelbe7a9e52016-05-19 12:19:35 +0200260 }
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100261 } // rtc::Critscope lock(&crit_)
tommi0a735642017-03-14 06:23:57 -0700262
263 if (latest_return_time_ms - now_ms > 0) {
philipel1c056252017-01-31 09:53:12 -0800264 // If |next_frame_it_ == frames_.end()| and there is still time left, it
265 // means that the frame buffer was cleared as the thread in this function
266 // was waiting to acquire |crit_| in order to return. Wait for the
267 // remaining time and then return.
268 return NextFrame(latest_return_time_ms - now_ms, frame_out);
philipelbe7a9e52016-05-19 12:19:35 +0200269 }
tommi0a735642017-03-14 06:23:57 -0700270 return kTimeout;
philipelbe7a9e52016-05-19 12:19:35 +0200271}
272
philipele7c891f2018-02-22 14:35:06 +0100273bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
274 int64_t now_ms) {
stefan95e97542017-05-23 09:52:18 -0700275 // Assume that render timing errors are due to changes in the video stream.
276 int64_t render_time_ms = frame.RenderTimeMs();
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200277 // Zero render time means render immediately.
278 if (render_time_ms == 0) {
279 return false;
280 }
stefan95e97542017-05-23 09:52:18 -0700281 if (render_time_ms < 0) {
282 return true;
283 }
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200284 const int64_t kMaxVideoDelayMs = 10000;
stefan95e97542017-05-23 09:52:18 -0700285 if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
286 int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
Mirko Bonadei675513b2017-11-09 11:09:25 +0100287 RTC_LOG(LS_WARNING)
288 << "A frame about to be decoded is out of the configured "
289 << "delay bounds (" << frame_delay << " > " << kMaxVideoDelayMs
290 << "). Resetting the video jitter buffer.";
stefan95e97542017-05-23 09:52:18 -0700291 return true;
292 }
293 if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100294 RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
295 << kMaxVideoDelayMs << " ms.";
stefan95e97542017-05-23 09:52:18 -0700296 return true;
297 }
298 return false;
299}
300
philipel4f6cd6a2016-08-03 10:59:32 +0200301void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
tommidb23ea62017-03-03 07:21:18 -0800302 TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
philipel4f6cd6a2016-08-03 10:59:32 +0200303 rtc::CritScope lock(&crit_);
304 protection_mode_ = mode;
305}
306
philipel504c47d2016-06-30 17:33:02 +0200307void FrameBuffer::Start() {
tommidb23ea62017-03-03 07:21:18 -0800308 TRACE_EVENT0("webrtc", "FrameBuffer::Start");
philipel29f730e2017-03-15 08:10:08 -0700309 rtc::CritScope lock(&crit_);
310 stopped_ = false;
philipel504c47d2016-06-30 17:33:02 +0200311}
312
313void FrameBuffer::Stop() {
tommidb23ea62017-03-03 07:21:18 -0800314 TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
philipel29f730e2017-03-15 08:10:08 -0700315 rtc::CritScope lock(&crit_);
316 stopped_ = true;
tommi0a735642017-03-14 06:23:57 -0700317 new_continuous_frame_event_.Set();
philipel504c47d2016-06-30 17:33:02 +0200318}
319
Ilya Nikolaevskiye6a2d942018-11-07 14:32:28 +0100320void FrameBuffer::Clear() {
321 rtc::CritScope lock(&crit_);
322 ClearFramesAndHistory();
323}
324
philipele21be1d2017-09-25 06:37:12 -0700325void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
326 rtc::CritScope lock(&crit_);
327 jitter_estimator_->UpdateRtt(rtt_ms);
328}
329
philipele7c891f2018-02-22 14:35:06 +0100330bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
philipel112adf92017-06-15 09:06:21 -0700331 for (size_t i = 0; i < frame.num_references; ++i) {
Philip Eliasson1f850a62019-03-19 12:15:00 +0000332 if (frame.references[i] >= frame.id.picture_id)
philipel112adf92017-06-15 09:06:21 -0700333 return false;
philipel3b3c9c42017-09-11 09:38:36 -0700334
philipel112adf92017-06-15 09:06:21 -0700335 for (size_t j = i + 1; j < frame.num_references; ++j) {
336 if (frame.references[i] == frame.references[j])
337 return false;
338 }
339 }
340
philipel0fa82a62018-03-19 15:34:53 +0100341 if (frame.inter_layer_predicted && frame.id.spatial_layer == 0)
philipel112adf92017-06-15 09:06:21 -0700342 return false;
343
344 return true;
345}
346
philipele7c891f2018-02-22 14:35:06 +0100347int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
tommidb23ea62017-03-03 07:21:18 -0800348 TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
philipel93e451b2016-10-06 12:25:13 +0200349 RTC_DCHECK(frame);
philipela45102f2017-02-22 05:30:39 -0800350 if (stats_callback_)
ilnik6d5b4d62017-08-30 03:32:14 -0700351 stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
352 frame->contentType());
philipel0fa82a62018-03-19 15:34:53 +0100353 const VideoLayerFrameId& id = frame->id;
tommi0a735642017-03-14 06:23:57 -0700354
355 rtc::CritScope lock(&crit_);
philipel29f730e2017-03-15 08:10:08 -0700356
philipel1610f942017-12-12 13:58:31 +0100357 int64_t last_continuous_picture_id =
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100358 !last_continuous_frame_ ? -1 : last_continuous_frame_->picture_id;
philipele0b2f152016-09-28 10:23:49 +0200359
philipel112adf92017-06-15 09:06:21 -0700360 if (!ValidReferences(*frame)) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100361 RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
philipel0fa82a62018-03-19 15:34:53 +0100362 << id.picture_id << ":"
363 << static_cast<int>(id.spatial_layer)
Mirko Bonadei675513b2017-11-09 11:09:25 +0100364 << ") has invalid frame references, dropping frame.";
philipel112adf92017-06-15 09:06:21 -0700365 return last_continuous_picture_id;
366 }
367
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100368 if (frames_.size() >= kMaxFramesBuffered) {
philipel9771c502018-03-02 11:06:27 +0100369 if (frame->is_keyframe()) {
370 RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) ("
philipel0fa82a62018-03-19 15:34:53 +0100371 << id.picture_id << ":"
372 << static_cast<int>(id.spatial_layer)
philipel9771c502018-03-02 11:06:27 +0100373 << ") but buffer is full, clearing"
374 << " buffer and inserting the frame.";
375 ClearFramesAndHistory();
376 } else {
377 RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
philipel0fa82a62018-03-19 15:34:53 +0100378 << id.picture_id << ":"
379 << static_cast<int>(id.spatial_layer)
philipel9771c502018-03-02 11:06:27 +0100380 << ") could not be inserted due to the frame "
381 << "buffer being full, dropping frame.";
382 return last_continuous_picture_id;
383 }
philipele0b2f152016-09-28 10:23:49 +0200384 }
385
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100386 auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
387 auto last_decoded_frame_timestamp =
388 decoded_frames_history_.GetLastDecodedFrameTimestamp();
389 if (last_decoded_frame && id <= *last_decoded_frame) {
390 if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) &&
philipel3042c2d2017-08-18 04:55:02 -0700391 frame->is_keyframe()) {
philipelfcc60062017-01-18 05:35:20 -0800392 // If this frame has a newer timestamp but an earlier picture id then we
393 // assume there has been a jump in the picture id due to some encoder
394 // reconfiguration or some other reason. Even though this is not according
395 // to spec we can still continue to decode from this frame if it is a
396 // keyframe.
Mirko Bonadei675513b2017-11-09 11:09:25 +0100397 RTC_LOG(LS_WARNING)
398 << "A jump in picture id was detected, clearing buffer.";
philipelfcc60062017-01-18 05:35:20 -0800399 ClearFramesAndHistory();
400 last_continuous_picture_id = -1;
401 } else {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100402 RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
philipel0fa82a62018-03-19 15:34:53 +0100403 << id.picture_id << ":"
404 << static_cast<int>(id.spatial_layer)
Mirko Bonadei675513b2017-11-09 11:09:25 +0100405 << ") inserted after frame ("
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100406 << last_decoded_frame->picture_id << ":"
407 << static_cast<int>(last_decoded_frame->spatial_layer)
Mirko Bonadei675513b2017-11-09 11:09:25 +0100408 << ") was handed off for decoding, dropping frame.";
philipelfcc60062017-01-18 05:35:20 -0800409 return last_continuous_picture_id;
410 }
philipele0b2f152016-09-28 10:23:49 +0200411 }
412
philipel146a48b2017-04-20 04:04:38 -0700413 // Test if inserting this frame would cause the order of the frames to become
414 // ambiguous (covering more than half the interval of 2^16). This can happen
415 // when the picture id make large jumps mid stream.
philipel0fa82a62018-03-19 15:34:53 +0100416 if (!frames_.empty() && id < frames_.begin()->first &&
417 frames_.rbegin()->first < id) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100418 RTC_LOG(LS_WARNING)
419 << "A jump in picture id was detected, clearing buffer.";
philipel146a48b2017-04-20 04:04:38 -0700420 ClearFramesAndHistory();
421 last_continuous_picture_id = -1;
422 }
423
philipel0fa82a62018-03-19 15:34:53 +0100424 auto info = frames_.emplace(id, FrameInfo()).first;
philipele0b2f152016-09-28 10:23:49 +0200425
philipel93e451b2016-10-06 12:25:13 +0200426 if (info->second.frame) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100427 RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
philipel0fa82a62018-03-19 15:34:53 +0100428 << id.picture_id << ":"
429 << static_cast<int>(id.spatial_layer)
Mirko Bonadei675513b2017-11-09 11:09:25 +0100430 << ") already inserted, dropping frame.";
philipele0b2f152016-09-28 10:23:49 +0200431 return last_continuous_picture_id;
432 }
433
philipel93e451b2016-10-06 12:25:13 +0200434 if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
435 return last_continuous_picture_id;
Ruslan Burakov493a6502019-02-27 15:32:48 +0100436
437 if (!frame->delayed_by_retransmission())
438 timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());
philipel0a9f6de2018-02-28 11:29:47 +0100439
philipele0b2f152016-09-28 10:23:49 +0200440 info->second.frame = std::move(frame);
philipele0b2f152016-09-28 10:23:49 +0200441
442 if (info->second.num_missing_continuous == 0) {
443 info->second.continuous = true;
444 PropagateContinuity(info);
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100445 last_continuous_picture_id = last_continuous_frame_->picture_id;
philipele0b2f152016-09-28 10:23:49 +0200446
447 // Since we now have new continuous frames there might be a better frame
448 // to return from NextFrame. Signal that thread so that it again can choose
449 // which frame to return.
tommi0a735642017-03-14 06:23:57 -0700450 new_continuous_frame_event_.Set();
philipele0b2f152016-09-28 10:23:49 +0200451 }
452
453 return last_continuous_picture_id;
philipelbe7a9e52016-05-19 12:19:35 +0200454}
455
philipele0b2f152016-09-28 10:23:49 +0200456void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
tommidb23ea62017-03-03 07:21:18 -0800457 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
philipele0b2f152016-09-28 10:23:49 +0200458 RTC_DCHECK(start->second.continuous);
philipele0b2f152016-09-28 10:23:49 +0200459
460 std::queue<FrameMap::iterator> continuous_frames;
461 continuous_frames.push(start);
462
463 // A simple BFS to traverse continuous frames.
464 while (!continuous_frames.empty()) {
465 auto frame = continuous_frames.front();
466 continuous_frames.pop();
467
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100468 if (!last_continuous_frame_ || *last_continuous_frame_ < frame->first) {
469 last_continuous_frame_ = frame->first;
470 }
philipele0b2f152016-09-28 10:23:49 +0200471
472 // Loop through all dependent frames, and if that frame no longer has
473 // any unfulfilled dependencies then that frame is continuous as well.
Elad Alon69321dd2019-01-10 15:02:54 +0100474 for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200475 auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
philipel112adf92017-06-15 09:06:21 -0700476 RTC_DCHECK(frame_ref != frames_.end());
philipele0b2f152016-09-28 10:23:49 +0200477
philipel112adf92017-06-15 09:06:21 -0700478 // TODO(philipel): Look into why we've seen this happen.
479 if (frame_ref != frames_.end()) {
480 --frame_ref->second.num_missing_continuous;
481 if (frame_ref->second.num_missing_continuous == 0) {
482 frame_ref->second.continuous = true;
483 continuous_frames.push(frame_ref);
484 }
philipele0b2f152016-09-28 10:23:49 +0200485 }
486 }
487 }
488}
489
490void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
tommidb23ea62017-03-03 07:21:18 -0800491 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
Elad Alon69321dd2019-01-10 15:02:54 +0100492 for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200493 auto ref_info = frames_.find(info.dependent_frames[d]);
philipel93e451b2016-10-06 12:25:13 +0200494 RTC_DCHECK(ref_info != frames_.end());
tommie95b78b2017-05-14 07:23:11 -0700495 // TODO(philipel): Look into why we've seen this happen.
496 if (ref_info != frames_.end()) {
497 RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
498 --ref_info->second.num_missing_decodable;
499 }
philipele0b2f152016-09-28 10:23:49 +0200500 }
501}
502
philipele7c891f2018-02-22 14:35:06 +0100503bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
philipele0b2f152016-09-28 10:23:49 +0200504 FrameMap::iterator info) {
tommidb23ea62017-03-03 07:21:18 -0800505 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
philipel0fa82a62018-03-19 15:34:53 +0100506 const VideoLayerFrameId& id = frame.id;
philipele0b2f152016-09-28 10:23:49 +0200507
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100508 auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
509 RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first);
philipele0b2f152016-09-28 10:23:49 +0200510
philipel798b2822018-06-11 13:10:14 +0200511 // In this function we determine how many missing dependencies this |frame|
512 // has to become continuous/decodable. If a frame that this |frame| depend
513 // on has already been decoded then we can ignore that dependency since it has
514 // already been fulfilled.
515 //
516 // For all other frames we will register a backwards reference to this |frame|
517 // so that |num_missing_continuous| and |num_missing_decodable| can be
518 // decremented as frames become continuous/are decoded.
519 struct Dependency {
520 VideoLayerFrameId id;
521 bool continuous;
522 };
523 std::vector<Dependency> not_yet_fulfilled_dependencies;
524
525 // Find all dependencies that have not yet been fulfilled.
philipele0b2f152016-09-28 10:23:49 +0200526 for (size_t i = 0; i < frame.num_references; ++i) {
philipel0fa82a62018-03-19 15:34:53 +0100527 VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer);
philipel798b2822018-06-11 13:10:14 +0200528 // Does |frame| depend on a frame earlier than the last decoded one?
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100529 if (last_decoded_frame && ref_key <= *last_decoded_frame) {
philipel798b2822018-06-11 13:10:14 +0200530 // Was that frame decoded? If not, this |frame| will never become
531 // decodable.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100532 if (!decoded_frames_history_.WasDecoded(ref_key)) {
philipel65e1f942017-07-24 08:26:53 -0700533 int64_t now_ms = clock_->TimeInMilliseconds();
534 if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100535 RTC_LOG(LS_WARNING)
philipel0fa82a62018-03-19 15:34:53 +0100536 << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
537 << static_cast<int>(id.spatial_layer)
philipel65e1f942017-07-24 08:26:53 -0700538 << ") depends on a non-decoded frame more previous than"
539 << " the last decoded frame, dropping frame.";
540 last_log_non_decoded_ms_ = now_ms;
541 }
philipele0b2f152016-09-28 10:23:49 +0200542 return false;
543 }
philipele0b2f152016-09-28 10:23:49 +0200544 } else {
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100545 auto ref_info = frames_.find(ref_key);
philipel798b2822018-06-11 13:10:14 +0200546 bool ref_continuous =
547 ref_info != frames_.end() && ref_info->second.continuous;
548 not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous});
philipele0b2f152016-09-28 10:23:49 +0200549 }
philipelbe7a9e52016-05-19 12:19:35 +0200550 }
551
philipel798b2822018-06-11 13:10:14 +0200552 // Does |frame| depend on the lower spatial layer?
philipelbe7a9e52016-05-19 12:19:35 +0200553 if (frame.inter_layer_predicted) {
philipel0fa82a62018-03-19 15:34:53 +0100554 VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1);
philipel798b2822018-06-11 13:10:14 +0200555 auto ref_info = frames_.find(ref_key);
philipele0b2f152016-09-28 10:23:49 +0200556
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100557 bool lower_layer_decoded =
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100558 last_decoded_frame && *last_decoded_frame == ref_key;
philipel798b2822018-06-11 13:10:14 +0200559 bool lower_layer_continuous =
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100560 lower_layer_decoded ||
561 (ref_info != frames_.end() && ref_info->second.continuous);
philipel798b2822018-06-11 13:10:14 +0200562
563 if (!lower_layer_continuous || !lower_layer_decoded) {
564 not_yet_fulfilled_dependencies.push_back(
565 {ref_key, lower_layer_continuous});
philipele0b2f152016-09-28 10:23:49 +0200566 }
philipelbe7a9e52016-05-19 12:19:35 +0200567 }
568
philipel798b2822018-06-11 13:10:14 +0200569 info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
570 info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();
571
572 for (const Dependency& dep : not_yet_fulfilled_dependencies) {
573 if (dep.continuous)
574 --info->second.num_missing_continuous;
575
Elad Alon69321dd2019-01-10 15:02:54 +0100576 frames_[dep.id].dependent_frames.push_back(id);
philipel798b2822018-06-11 13:10:14 +0200577 }
philipel93e451b2016-10-06 12:25:13 +0200578
philipelbe7a9e52016-05-19 12:19:35 +0200579 return true;
580}
581
philipelbe742702016-11-30 01:31:40 -0800582void FrameBuffer::UpdateJitterDelay() {
tommidb23ea62017-03-03 07:21:18 -0800583 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
philipela45102f2017-02-22 05:30:39 -0800584 if (!stats_callback_)
585 return;
philipelbe742702016-11-30 01:31:40 -0800586
philipela45102f2017-02-22 05:30:39 -0800587 int decode_ms;
588 int max_decode_ms;
589 int current_delay_ms;
590 int target_delay_ms;
591 int jitter_buffer_ms;
592 int min_playout_delay_ms;
593 int render_delay_ms;
594 if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
595 &target_delay_ms, &jitter_buffer_ms,
596 &min_playout_delay_ms, &render_delay_ms)) {
597 stats_callback_->OnFrameBufferTimingsUpdated(
598 decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
599 jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
philipelbe742702016-11-30 01:31:40 -0800600 }
philipel266f0a42016-11-28 08:49:07 -0800601}
602
ilnik2edc6842017-07-06 03:06:50 -0700603void FrameBuffer::UpdateTimingFrameInfo() {
604 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
Danil Chapovalov0040b662018-06-18 10:48:16 +0200605 absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
philipel97187112018-03-23 10:43:21 +0100606 if (info && stats_callback_)
ilnik2edc6842017-07-06 03:06:50 -0700607 stats_callback_->OnTimingFrameInfoUpdated(*info);
608}
609
philipelfcc60062017-01-18 05:35:20 -0800610void FrameBuffer::ClearFramesAndHistory() {
ilnik2edc6842017-07-06 03:06:50 -0700611 TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
philipelfcc60062017-01-18 05:35:20 -0800612 frames_.clear();
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100613 last_continuous_frame_.reset();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100614 frames_to_decode_.clear();
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100615 decoded_frames_history_.Clear();
philipelfcc60062017-01-18 05:35:20 -0800616}
617
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100618EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
619 const std::vector<EncodedFrame*>& frames) const {
620 RTC_DCHECK(!frames.empty());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100621 EncodedFrame* first_frame = frames[0];
622 EncodedFrame* last_frame = frames.back();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100623 size_t total_length = 0;
624 for (size_t i = 0; i < frames.size(); ++i) {
625 total_length += frames[i]->size();
626 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100627 first_frame->VerifyAndAllocate(total_length);
628
629 // Spatial index of combined frame is set equal to spatial index of its top
630 // spatial layer.
631 first_frame->SetSpatialIndex(last_frame->id.spatial_layer);
632 first_frame->id.spatial_layer = last_frame->id.spatial_layer;
633
634 first_frame->video_timing_mutable()->network2_timestamp_ms =
635 last_frame->video_timing().network2_timestamp_ms;
636 first_frame->video_timing_mutable()->receive_finish_ms =
637 last_frame->video_timing().receive_finish_ms;
638
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100639 // Append all remaining frames to the first one.
Niels Möller9c843902019-01-11 10:21:35 +0100640 uint8_t* buffer = first_frame->data() + first_frame->size();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100641 for (size_t i = 1; i < frames.size(); ++i) {
Sergey Silkin61832dd2018-12-20 14:32:14 +0100642 EncodedFrame* next_frame = frames[i];
Niels Möller9c843902019-01-11 10:21:35 +0100643 memcpy(buffer, next_frame->data(), next_frame->size());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100644 buffer += next_frame->size();
645 delete next_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100646 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100647 first_frame->set_size(total_length);
648 return first_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100649}
650
Niels Möllerbe682d42018-03-27 08:31:45 +0200651FrameBuffer::FrameInfo::FrameInfo() = default;
652FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
653FrameBuffer::FrameInfo::~FrameInfo() = default;
654
philipelbe7a9e52016-05-19 12:19:35 +0200655} // namespace video_coding
656} // namespace webrtc