blob: bd1d321e014b78a277c1f9b117d47e797f095da3 [file] [log] [blame]
philipelbe7a9e52016-05-19 12:19:35 +02001/*
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020011#include "modules/video_coding/frame_buffer2.h"
philipelbe7a9e52016-05-19 12:19:35 +020012
13#include <algorithm>
Yves Gerey3e707812018-11-28 16:47:49 +010014#include <cstdlib>
15#include <iterator>
philipele0b2f152016-09-28 10:23:49 +020016#include <queue>
Yves Gerey3e707812018-11-28 16:47:49 +010017#include <utility>
philipel798b2822018-06-11 13:10:14 +020018#include <vector>
philipelbe7a9e52016-05-19 12:19:35 +020019
Yves Gerey3e707812018-11-28 16:47:49 +010020#include "api/video/encoded_image.h"
21#include "api/video/video_timing.h"
22#include "common_types.h" // NOLINT(build/include)
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020023#include "modules/video_coding/include/video_coding_defines.h"
24#include "modules/video_coding/jitter_estimator.h"
25#include "modules/video_coding/timing.h"
26#include "rtc_base/checks.h"
Yves Gerey3e707812018-11-28 16:47:49 +010027#include "rtc_base/experiments/rtt_mult_experiment.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020028#include "rtc_base/logging.h"
Yves Gerey3e707812018-11-28 16:47:49 +010029#include "rtc_base/numerics/sequence_number_util.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020030#include "rtc_base/trace_event.h"
31#include "system_wrappers/include/clock.h"
philipel707f2782017-10-02 14:10:28 +020032#include "system_wrappers/include/field_trial.h"
philipelbe7a9e52016-05-19 12:19:35 +020033
34namespace webrtc {
35namespace video_coding {
36
namespace {
// Max number of frames the buffer will hold.
constexpr int kMaxFramesBuffered = 600;

// Max number of decoded frame info that will be saved.
constexpr int kMaxFramesHistory = 50;

// The time it's allowed for a frame to be late to its rendering prediction and
// still be rendered.
constexpr int kMaxAllowedFrameDelayMs = 5;

// Minimum interval between "depends on a non-decoded frame" warnings, used to
// rate-limit logging in UpdateFrameInfoWithIncomingFrame().
constexpr int64_t kLogNonDecodedIntervalMs = 5000;
}  // namespace
50
// All injected dependencies (clock, jitter estimator, timing, stats callback)
// are borrowed, not owned; they must outlive this FrameBuffer.
FrameBuffer::FrameBuffer(Clock* clock,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : clock_(clock),
      jitter_estimator_(jitter_estimator),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      last_decoded_frame_it_(frames_.end()),
      last_continuous_frame_it_(frames_.end()),
      num_frames_history_(0),
      num_frames_buffered_(0),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback),
      // Start one full interval in the past so the first non-decoded-frame
      // warning is logged immediately (see kLogNonDecodedIntervalMs).
      last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs) {}

FrameBuffer::~FrameBuffer() {}
philipelbe7a9e52016-05-19 12:19:35 +020069
// Blocks up to |max_wait_time_ms| waiting for a continuous, decodable frame
// (or a complete superframe, i.e. all spatial layers of one picture), then
// hands ownership to the caller via |frame_out|.
// Returns kFrameFound, kTimeout, or kStopped (if Stop() was called).
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    std::unique_ptr<EncodedFrame>* frame_out,
    bool keyframe_required) {
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  int64_t latest_return_time_ms =
      clock_->TimeInMilliseconds() + max_wait_time_ms;
  int64_t wait_ms = max_wait_time_ms;
  int64_t now_ms = 0;

  // Phase 1: repeatedly scan the buffer for a decodable (super)frame,
  // sleeping on |new_continuous_frame_event_| between scans until either a
  // frame is found or the deadline passes.
  do {
    now_ms = clock_->TimeInMilliseconds();
    {
      rtc::CritScope lock(&crit_);
      new_continuous_frame_event_.Reset();
      if (stopped_)
        return kStopped;

      wait_ms = max_wait_time_ms;

      // Need to hold |crit_| in order to access frames_to_decode_. therefore we
      // set it here in the loop instead of outside the loop in order to not
      // acquire the lock unnecessarily.
      frames_to_decode_.clear();

      // |frame_it| points to the first frame after the
      // |last_decoded_frame_it_|.
      auto frame_it = frames_.end();
      if (last_decoded_frame_it_ == frames_.end()) {
        frame_it = frames_.begin();
      } else {
        frame_it = last_decoded_frame_it_;
        ++frame_it;
      }

      // |continuous_end_it| points to the first frame after the
      // |last_continuous_frame_it_|.
      auto continuous_end_it = last_continuous_frame_it_;
      if (continuous_end_it != frames_.end())
        ++continuous_end_it;

      // Scan only the continuous prefix of undecoded frames.
      for (; frame_it != continuous_end_it && frame_it != frames_.end();
           ++frame_it) {
        if (!frame_it->second.continuous ||
            frame_it->second.num_missing_decodable > 0) {
          continue;
        }

        EncodedFrame* frame = frame_it->second.frame.get();

        if (keyframe_required && !frame->is_keyframe())
          continue;

        // TODO(https://bugs.webrtc.org/9974): consider removing this check
        // as it may make a stream undecodable after a very long delay between
        // frames.
        if (last_decoded_frame_timestamp_ &&
            AheadOf(*last_decoded_frame_timestamp_, frame->Timestamp())) {
          continue;
        }

        // Only ever return all parts of a superframe. Therefore skip this
        // frame if it's not a beginning of a superframe.
        if (frame->inter_layer_predicted) {
          continue;
        }

        // Gather all remaining frames for the same superframe.
        std::vector<FrameMap::iterator> current_superframe;
        current_superframe.push_back(frame_it);
        bool last_layer_completed =
            frame_it->second.frame->is_last_spatial_layer;
        FrameMap::iterator next_frame_it = frame_it;
        while (true) {
          ++next_frame_it;
          if (next_frame_it == frames_.end() ||
              next_frame_it->first.picture_id != frame->id.picture_id ||
              !next_frame_it->second.continuous) {
            break;
          }
          // Check if the next frame has some undecoded references other than
          // the previous frame in the same superframe.
          size_t num_allowed_undecoded_refs =
              (next_frame_it->second.frame->inter_layer_predicted) ? 1 : 0;
          if (next_frame_it->second.num_missing_decodable >
              num_allowed_undecoded_refs) {
            break;
          }
          // All frames in the superframe should have the same timestamp.
          if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) {
            RTC_LOG(LS_WARNING)
                << "Frames in a single superframe have different"
                   " timestamps. Skipping undecodable superframe.";
            break;
          }
          current_superframe.push_back(next_frame_it);
          last_layer_completed =
              next_frame_it->second.frame->is_last_spatial_layer;
        }
        // Check if the current superframe is complete.
        // TODO(bugs.webrtc.org/10064): consider returning all available to
        // decode frames even if the superframe is not complete yet.
        if (!last_layer_completed) {
          continue;
        }

        frames_to_decode_ = std::move(current_superframe);

        // -1 means no render time has been set yet; compute one now.
        if (frame->RenderTime() == -1) {
          frame->SetRenderTime(
              timing_->RenderTimeMs(frame->Timestamp(), now_ms));
        }
        wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

        // This will cause the frame buffer to prefer high framerate rather
        // than high resolution in the case of the decoder not decoding fast
        // enough and the stream has multiple spatial and temporal layers.
        // For multiple temporal layers it may cause non-base layer frames to be
        // skipped if they are late.
        if (wait_ms < -kMaxAllowedFrameDelayMs)
          continue;

        break;
      }
    }  // rtc::Critscope lock(&crit_);

    // Never wait past the caller's deadline.
    wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms - now_ms);
    wait_ms = std::max<int64_t>(wait_ms, 0);
  } while (new_continuous_frame_event_.Wait(wait_ms));

  // Phase 2: hand off the selected frames, updating jitter/timing state and
  // the decode history for each one.
  {
    rtc::CritScope lock(&crit_);
    now_ms = clock_->TimeInMilliseconds();
    std::vector<EncodedFrame*> frames_out;
    for (const FrameMap::iterator& frame_it : frames_to_decode_) {
      RTC_DCHECK(frame_it != frames_.end());
      EncodedFrame* frame = frame_it->second.frame.release();

      if (!frame->delayed_by_retransmission()) {
        // Feed inter-frame delay into the jitter estimator; retransmitted
        // frames are excluded since their delay is not representative.
        int64_t frame_delay;

        if (inter_frame_delay_.CalculateDelay(frame->Timestamp(), &frame_delay,
                                              frame->ReceivedTime())) {
          jitter_estimator_->UpdateEstimate(frame_delay, frame->size());
        }

        float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
        if (RttMultExperiment::RttMultEnabled()) {
          rtt_mult = RttMultExperiment::GetRttMultValue();
        }
        timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
        timing_->UpdateCurrentDelay(frame->RenderTime(), now_ms);
      } else {
        if (RttMultExperiment::RttMultEnabled() ||
            webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay"))
          jitter_estimator_->FrameNacked();
      }

      // Gracefully handle bad RTP timestamps and render time issues.
      if (HasBadRenderTiming(*frame, now_ms)) {
        jitter_estimator_->Reset();
        timing_->Reset();
        frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
      }

      UpdateJitterDelay();
      UpdateTimingFrameInfo();
      PropagateDecodability(frame_it->second);

      AdvanceLastDecodedFrame(frame_it);
      last_decoded_frame_timestamp_ = frame->Timestamp();
      frames_out.push_back(frame);
    }

    if (!frames_out.empty()) {
      if (frames_out.size() == 1) {
        frame_out->reset(frames_out[0]);
      } else {
        // Multiple spatial layers: merge them into a single output frame.
        frame_out->reset(CombineAndDeleteFrames(frames_out));
      }
      return kFrameFound;
    }
  }  // rtc::Critscope lock(&crit_)

  if (latest_return_time_ms - now_ms > 0) {
    // If |next_frame_it_ == frames_.end()| and there is still time left, it
    // means that the frame buffer was cleared as the thread in this function
    // was waiting to acquire |crit_| in order to return. Wait for the
    // remaining time and then return.
    // NOTE(review): this recursive call does not forward |keyframe_required|,
    // so the retry presumably falls back to the parameter's default value --
    // confirm this is intentional.
    return NextFrame(latest_return_time_ms - now_ms, frame_out);
  }
  return kTimeout;
}
263
philipele7c891f2018-02-22 14:35:06 +0100264bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
265 int64_t now_ms) {
stefan95e97542017-05-23 09:52:18 -0700266 // Assume that render timing errors are due to changes in the video stream.
267 int64_t render_time_ms = frame.RenderTimeMs();
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200268 // Zero render time means render immediately.
269 if (render_time_ms == 0) {
270 return false;
271 }
stefan95e97542017-05-23 09:52:18 -0700272 if (render_time_ms < 0) {
273 return true;
274 }
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200275 const int64_t kMaxVideoDelayMs = 10000;
stefan95e97542017-05-23 09:52:18 -0700276 if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
277 int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
Mirko Bonadei675513b2017-11-09 11:09:25 +0100278 RTC_LOG(LS_WARNING)
279 << "A frame about to be decoded is out of the configured "
280 << "delay bounds (" << frame_delay << " > " << kMaxVideoDelayMs
281 << "). Resetting the video jitter buffer.";
stefan95e97542017-05-23 09:52:18 -0700282 return true;
283 }
284 if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100285 RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
286 << kMaxVideoDelayMs << " ms.";
stefan95e97542017-05-23 09:52:18 -0700287 return true;
288 }
289 return false;
290}
291
// Selects the loss-protection mode; NextFrame() uses this to pick the
// rtt multiplier applied to the jitter estimate (0.0 for NACK+FEC).
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  rtc::CritScope lock(&crit_);
  protection_mode_ = mode;
}
297
// (Re-)enables the buffer after a Stop(); NextFrame() will serve frames again.
void FrameBuffer::Start() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Start");
  rtc::CritScope lock(&crit_);
  stopped_ = false;
}
303
// Marks the buffer stopped and signals the event so any thread blocked in
// NextFrame() wakes up and returns kStopped.
void FrameBuffer::Stop() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
  rtc::CritScope lock(&crit_);
  stopped_ = true;
  new_continuous_frame_event_.Set();
}
310
// Drops all buffered frames and resets the decode/continuity history.
void FrameBuffer::Clear() {
  rtc::CritScope lock(&crit_);
  ClearFramesAndHistory();
}
315
// Forwards the latest round-trip-time sample to the jitter estimator.
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  rtc::CritScope lock(&crit_);
  jitter_estimator_->UpdateRtt(rtt_ms);
}
320
philipele7c891f2018-02-22 14:35:06 +0100321bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
philipel0fa82a62018-03-19 15:34:53 +0100322 if (frame.id.picture_id < 0)
philipel3b3c9c42017-09-11 09:38:36 -0700323 return false;
324
philipel112adf92017-06-15 09:06:21 -0700325 for (size_t i = 0; i < frame.num_references; ++i) {
philipel0fa82a62018-03-19 15:34:53 +0100326 if (frame.references[i] < 0 || frame.references[i] >= frame.id.picture_id)
philipel112adf92017-06-15 09:06:21 -0700327 return false;
philipel3b3c9c42017-09-11 09:38:36 -0700328
philipel112adf92017-06-15 09:06:21 -0700329 for (size_t j = i + 1; j < frame.num_references; ++j) {
330 if (frame.references[i] == frame.references[j])
331 return false;
332 }
333 }
334
philipel0fa82a62018-03-19 15:34:53 +0100335 if (frame.inter_layer_predicted && frame.id.spatial_layer == 0)
philipel112adf92017-06-15 09:06:21 -0700336 return false;
337
338 return true;
339}
340
// Forwards the playout-delay limits carried by |frame| to the timing module
// and, for frames not delayed by retransmission, registers the frame's
// receive time for the incoming-timestamp model.
void FrameBuffer::UpdatePlayoutDelays(const EncodedFrame& frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdatePlayoutDelays");
  PlayoutDelay playout_delay = frame.EncodedImage().playout_delay_;
  // Negative values mean "not specified"; only forward explicit limits.
  if (playout_delay.min_ms >= 0)
    timing_->set_min_playout_delay(playout_delay.min_ms);

  if (playout_delay.max_ms >= 0)
    timing_->set_max_playout_delay(playout_delay.max_ms);

  // Retransmitted frames would skew the receive-time statistics, so skip them.
  if (!frame.delayed_by_retransmission())
    timing_->IncomingTimestamp(frame.Timestamp(), frame.ReceivedTime());
}
353
// Inserts a complete frame into the buffer, updating continuity bookkeeping.
// Returns the picture id of the last continuous frame (or -1 if none), which
// the caller can use as a keyframe-request hint. Frames may be dropped here
// for invalid references, buffer overflow, or arriving after the decoder has
// already moved past them.
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);
  if (stats_callback_)
    stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
                                     frame->contentType());
  const VideoLayerFrameId& id = frame->id;

  rtc::CritScope lock(&crit_);

  int64_t last_continuous_picture_id =
      last_continuous_frame_it_ == frames_.end()
          ? -1
          : last_continuous_frame_it_->first.picture_id;

  if (!ValidReferences(*frame)) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") has invalid frame references, dropping frame.";
    return last_continuous_picture_id;
  }

  if (num_frames_buffered_ >= kMaxFramesBuffered) {
    // A keyframe lets us restart decoding, so it is worth flushing the full
    // buffer for it; any other frame is simply dropped.
    if (frame->is_keyframe()) {
      RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") but buffer is full, clearing"
                          << " buffer and inserting the frame.";
      ClearFramesAndHistory();
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") could not be inserted due to the frame "
                          << "buffer being full, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  if (last_decoded_frame_it_ != frames_.end() &&
      id <= last_decoded_frame_it_->first) {
    if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp_) &&
        frame->is_keyframe()) {
      // If this frame has a newer timestamp but an earlier picture id then we
      // assume there has been a jump in the picture id due to some encoder
      // reconfiguration or some other reason. Even though this is not according
      // to spec we can still continue to decode from this frame if it is a
      // keyframe.
      RTC_LOG(LS_WARNING)
          << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") inserted after frame ("
                          << last_decoded_frame_it_->first.picture_id << ":"
                          << static_cast<int>(
                                 last_decoded_frame_it_->first.spatial_layer)
                          << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the picture id make large jumps mid stream.
  if (!frames_.empty() && id < frames_.begin()->first &&
      frames_.rbegin()->first < id) {
    RTC_LOG(LS_WARNING)
        << "A jump in picture id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_picture_id = -1;
  }

  auto info = frames_.emplace(id, FrameInfo()).first;

  if (info->second.frame) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") already inserted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;
  UpdatePlayoutDelays(*frame);

  info->second.frame = std::move(frame);
  ++num_frames_buffered_;

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_it_->first.picture_id;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame. Signal that thread so that it again can choose
    // which frame to return.
    new_continuous_frame_event_.Set();
  }

  return last_continuous_picture_id;
}
462
philipele0b2f152016-09-28 10:23:49 +0200463void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
tommidb23ea62017-03-03 07:21:18 -0800464 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
philipele0b2f152016-09-28 10:23:49 +0200465 RTC_DCHECK(start->second.continuous);
466 if (last_continuous_frame_it_ == frames_.end())
467 last_continuous_frame_it_ = start;
468
469 std::queue<FrameMap::iterator> continuous_frames;
470 continuous_frames.push(start);
471
472 // A simple BFS to traverse continuous frames.
473 while (!continuous_frames.empty()) {
474 auto frame = continuous_frames.front();
475 continuous_frames.pop();
476
477 if (last_continuous_frame_it_->first < frame->first)
478 last_continuous_frame_it_ = frame;
479
480 // Loop through all dependent frames, and if that frame no longer has
481 // any unfulfilled dependencies then that frame is continuous as well.
Elad Alon69321dd2019-01-10 15:02:54 +0100482 for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200483 auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
philipel112adf92017-06-15 09:06:21 -0700484 RTC_DCHECK(frame_ref != frames_.end());
philipele0b2f152016-09-28 10:23:49 +0200485
philipel112adf92017-06-15 09:06:21 -0700486 // TODO(philipel): Look into why we've seen this happen.
487 if (frame_ref != frames_.end()) {
488 --frame_ref->second.num_missing_continuous;
489 if (frame_ref->second.num_missing_continuous == 0) {
490 frame_ref->second.continuous = true;
491 continuous_frames.push(frame_ref);
492 }
philipele0b2f152016-09-28 10:23:49 +0200493 }
494 }
495 }
496}
497
498void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
tommidb23ea62017-03-03 07:21:18 -0800499 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
Elad Alon69321dd2019-01-10 15:02:54 +0100500 for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200501 auto ref_info = frames_.find(info.dependent_frames[d]);
philipel93e451b2016-10-06 12:25:13 +0200502 RTC_DCHECK(ref_info != frames_.end());
tommie95b78b2017-05-14 07:23:11 -0700503 // TODO(philipel): Look into why we've seen this happen.
504 if (ref_info != frames_.end()) {
505 RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
506 --ref_info->second.num_missing_decodable;
507 }
philipele0b2f152016-09-28 10:23:49 +0200508 }
509}
510
// Moves |last_decoded_frame_it_| forward to |decoded|, erasing any skipped
// (never-decoded) frames in between and bounding the retained history.
void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
  TRACE_EVENT0("webrtc", "FrameBuffer::AdvanceLastDecodedFrame");
  if (last_decoded_frame_it_ == frames_.end()) {
    last_decoded_frame_it_ = frames_.begin();
  } else {
    RTC_DCHECK(last_decoded_frame_it_->first < decoded->first);
    ++last_decoded_frame_it_;
  }
  // |decoded| leaves the buffered set and enters the history set.
  --num_frames_buffered_;
  ++num_frames_history_;

  // First, delete non-decoded frames from the history.
  while (last_decoded_frame_it_ != decoded) {
    if (last_decoded_frame_it_->second.frame)
      --num_frames_buffered_;
    // map::erase returns the iterator following the erased element, keeping
    // the loop's iterator valid.
    last_decoded_frame_it_ = frames_.erase(last_decoded_frame_it_);
  }

  // Then remove old history if we have too much history saved.
  if (num_frames_history_ > kMaxFramesHistory) {
    frames_.erase(frames_.begin());
    --num_frames_history_;
  }
}
535
// Computes the number of unfulfilled dependencies of |frame| and registers
// back-references from each unfulfilled dependency to |info| so continuity
// and decodability can later be propagated. Returns false if |frame| can
// never become decodable (it depends on a frame that was skipped, not
// decoded), in which case the caller should drop it.
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  const VideoLayerFrameId& id = frame.id;

  RTC_DCHECK(last_decoded_frame_it_ == frames_.end() ||
             last_decoded_frame_it_->first < info->first);

  // In this function we determine how many missing dependencies this |frame|
  // has to become continuous/decodable. If a frame that this |frame| depend
  // on has already been decoded then we can ignore that dependency since it has
  // already been fulfilled.
  //
  // For all other frames we will register a backwards reference to this |frame|
  // so that |num_missing_continuous| and |num_missing_decodable| can be
  // decremented as frames become continuous/are decoded.
  struct Dependency {
    VideoLayerFrameId id;
    bool continuous;
  };
  std::vector<Dependency> not_yet_fulfilled_dependencies;

  // Find all dependencies that have not yet been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer);
    auto ref_info = frames_.find(ref_key);

    // Does |frame| depend on a frame earlier than the last decoded one?
    if (last_decoded_frame_it_ != frames_.end() &&
        ref_key <= last_decoded_frame_it_->first) {
      // Was that frame decoded? If not, this |frame| will never become
      // decodable.
      if (ref_info == frames_.end()) {
        int64_t now_ms = clock_->TimeInMilliseconds();
        // Rate-limited warning; see kLogNonDecodedIntervalMs.
        if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
          RTC_LOG(LS_WARNING)
              << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
              << static_cast<int>(id.spatial_layer)
              << ") depends on a non-decoded frame more previous than"
              << " the last decoded frame, dropping frame.";
          last_log_non_decoded_ms_ = now_ms;
        }
        return false;
      }
    } else {
      bool ref_continuous =
          ref_info != frames_.end() && ref_info->second.continuous;
      not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous});
    }
  }

  // Does |frame| depend on the lower spatial layer?
  if (frame.inter_layer_predicted) {
    VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1);
    auto ref_info = frames_.find(ref_key);

    bool lower_layer_continuous =
        ref_info != frames_.end() && ref_info->second.continuous;
    bool lower_layer_decoded = last_decoded_frame_it_ != frames_.end() &&
                               last_decoded_frame_it_->first == ref_key;

    if (!lower_layer_continuous || !lower_layer_decoded) {
      not_yet_fulfilled_dependencies.push_back(
          {ref_key, lower_layer_continuous});
    }
  }

  info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
  info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();

  for (const Dependency& dep : not_yet_fulfilled_dependencies) {
    // Continuity is already satisfied for continuous dependencies; only
    // decodability still has to be propagated when they are decoded.
    if (dep.continuous)
      --info->second.num_missing_continuous;

    frames_[dep.id].dependent_frames.push_back(id);
  }

  return true;
}
615
philipelbe742702016-11-30 01:31:40 -0800616void FrameBuffer::UpdateJitterDelay() {
tommidb23ea62017-03-03 07:21:18 -0800617 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
philipela45102f2017-02-22 05:30:39 -0800618 if (!stats_callback_)
619 return;
philipelbe742702016-11-30 01:31:40 -0800620
philipela45102f2017-02-22 05:30:39 -0800621 int decode_ms;
622 int max_decode_ms;
623 int current_delay_ms;
624 int target_delay_ms;
625 int jitter_buffer_ms;
626 int min_playout_delay_ms;
627 int render_delay_ms;
628 if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
629 &target_delay_ms, &jitter_buffer_ms,
630 &min_playout_delay_ms, &render_delay_ms)) {
631 stats_callback_->OnFrameBufferTimingsUpdated(
632 decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
633 jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
philipelbe742702016-11-30 01:31:40 -0800634 }
philipel266f0a42016-11-28 08:49:07 -0800635}
636
ilnik2edc6842017-07-06 03:06:50 -0700637void FrameBuffer::UpdateTimingFrameInfo() {
638 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
Danil Chapovalov0040b662018-06-18 10:48:16 +0200639 absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
philipel97187112018-03-23 10:43:21 +0100640 if (info && stats_callback_)
ilnik2edc6842017-07-06 03:06:50 -0700641 stats_callback_->OnTimingFrameInfoUpdated(*info);
642}
643
philipelfcc60062017-01-18 05:35:20 -0800644void FrameBuffer::ClearFramesAndHistory() {
ilnik2edc6842017-07-06 03:06:50 -0700645 TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
philipelfcc60062017-01-18 05:35:20 -0800646 frames_.clear();
647 last_decoded_frame_it_ = frames_.end();
648 last_continuous_frame_it_ = frames_.end();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100649 frames_to_decode_.clear();
philipelfcc60062017-01-18 05:35:20 -0800650 num_frames_history_ = 0;
651 num_frames_buffered_ = 0;
652}
653
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100654EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
655 const std::vector<EncodedFrame*>& frames) const {
656 RTC_DCHECK(!frames.empty());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100657 EncodedFrame* first_frame = frames[0];
658 EncodedFrame* last_frame = frames.back();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100659 size_t total_length = 0;
660 for (size_t i = 0; i < frames.size(); ++i) {
661 total_length += frames[i]->size();
662 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100663 first_frame->VerifyAndAllocate(total_length);
664
665 // Spatial index of combined frame is set equal to spatial index of its top
666 // spatial layer.
667 first_frame->SetSpatialIndex(last_frame->id.spatial_layer);
668 first_frame->id.spatial_layer = last_frame->id.spatial_layer;
669
670 first_frame->video_timing_mutable()->network2_timestamp_ms =
671 last_frame->video_timing().network2_timestamp_ms;
672 first_frame->video_timing_mutable()->receive_finish_ms =
673 last_frame->video_timing().receive_finish_ms;
674
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100675 // Append all remaining frames to the first one.
Sergey Silkin61832dd2018-12-20 14:32:14 +0100676 uint8_t* buffer = first_frame->MutableBuffer() + first_frame->size();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100677 for (size_t i = 1; i < frames.size(); ++i) {
Sergey Silkin61832dd2018-12-20 14:32:14 +0100678 EncodedFrame* next_frame = frames[i];
679 memcpy(buffer, next_frame->Buffer(), next_frame->size());
680 buffer += next_frame->size();
681 delete next_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100682 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100683 first_frame->set_size(total_length);
684 return first_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100685}
686
// Out-of-line definitions of FrameInfo's special member functions.
// NOTE(review): presumably defined here (rather than defaulted in the
// header) so that FrameInfo's member types only need to be complete in
// this translation unit — confirm against frame_buffer2.h.
FrameBuffer::FrameInfo::FrameInfo() = default;
FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
FrameBuffer::FrameInfo::~FrameInfo() = default;
690
philipelbe7a9e52016-05-19 12:19:35 +0200691} // namespace video_coding
692} // namespace webrtc