blob: 9b2b9079fa901faf33db446d0314df6827bc214d [file] [log] [blame]
/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020011#include "modules/video_coding/frame_buffer2.h"
philipelbe7a9e52016-05-19 12:19:35 +020012
13#include <algorithm>
Yves Gerey3e707812018-11-28 16:47:49 +010014#include <cstdlib>
15#include <iterator>
philipele0b2f152016-09-28 10:23:49 +020016#include <queue>
Yves Gerey3e707812018-11-28 16:47:49 +010017#include <utility>
philipel798b2822018-06-11 13:10:14 +020018#include <vector>
philipelbe7a9e52016-05-19 12:19:35 +020019
Sebastian Jansson11d0d7b2019-04-11 12:39:34 +020020#include "absl/memory/memory.h"
Yves Gerey3e707812018-11-28 16:47:49 +010021#include "api/video/encoded_image.h"
22#include "api/video/video_timing.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020023#include "modules/video_coding/include/video_coding_defines.h"
24#include "modules/video_coding/jitter_estimator.h"
25#include "modules/video_coding/timing.h"
26#include "rtc_base/checks.h"
Yves Gerey3e707812018-11-28 16:47:49 +010027#include "rtc_base/experiments/rtt_mult_experiment.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020028#include "rtc_base/logging.h"
Yves Gerey3e707812018-11-28 16:47:49 +010029#include "rtc_base/numerics/sequence_number_util.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020030#include "rtc_base/trace_event.h"
31#include "system_wrappers/include/clock.h"
philipel707f2782017-10-02 14:10:28 +020032#include "system_wrappers/include/field_trial.h"
philipelbe7a9e52016-05-19 12:19:35 +020033
34namespace webrtc {
35namespace video_coding {
36
namespace {
// Max number of frames the buffer will hold.
constexpr size_t kMaxFramesBuffered = 800;

// Max number of decoded frame info that will be saved.
constexpr int kMaxFramesHistory = 1 << 13;

// The time it's allowed for a frame to be late to its rendering prediction and
// still be rendered.
constexpr int kMaxAllowedFrameDelayMs = 5;

// Minimum interval between warnings about frames dropped because they depend
// on a frame that was never decoded.
constexpr int64_t kLogNonDecodedIntervalMs = 5000;
}  // namespace
50
// Constructs the frame buffer. |clock| and |timing| are borrowed pointers and
// are dereferenced throughout the buffer's lifetime, so they must outlive this
// object; |stats_callback| may be null if no receive statistics are wanted.
// |last_log_non_decoded_ms_| starts one full log interval in the past so that
// the very first non-decodable-frame drop is logged immediately.
FrameBuffer::FrameBuffer(Clock* clock,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : decoded_frames_history_(kMaxFramesHistory),
      clock_(clock),
      callback_queue_(nullptr),
      jitter_estimator_(clock),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback),
      last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
      add_rtt_to_playout_delay_(
          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")) {}
philipel266f0a42016-11-28 08:49:07 -080066
philipela45102f2017-02-22 05:30:39 -080067FrameBuffer::~FrameBuffer() {}
philipelbe7a9e52016-05-19 12:19:35 +020068
// Asynchronous variant of NextFrame(): registers |handler| to be invoked on
// |callback_queue| with either the next decodable frame (kFrameFound) or
// nullptr (kTimeout) once |max_wait_time_ms| has elapsed without a frame.
// Must itself be called on |callback_queue| (enforced by RTC_DCHECK_RUN_ON).
// Does nothing if the buffer has been stopped.
void FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    bool keyframe_required,
    rtc::TaskQueue* callback_queue,
    std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
  RTC_DCHECK_RUN_ON(callback_queue);
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  // Compute the absolute deadline before taking the lock.
  int64_t latest_return_time_ms =
      clock_->TimeInMilliseconds() + max_wait_time_ms;
  rtc::CritScope lock(&crit_);
  if (stopped_) {
    return;
  }
  latest_return_time_ms_ = latest_return_time_ms;
  keyframe_required_ = keyframe_required;
  frame_handler_ = handler;
  callback_queue_ = callback_queue;
  StartWaitForNextFrameOnQueue();
}
88
// Schedules a delayed task on |callback_queue_| that fires when the next
// decodable frame is due (per FindNextFrame()) and delivers it through
// |frame_handler_|, or signals kTimeout once the deadline passes. The task
// repeats (re-arming with a fresh wait) if the buffer was cleared in the
// meantime. Callers hold |crit_|; InsertFrame() cancels and restarts the task
// when new continuous frames arrive.
void FrameBuffer::StartWaitForNextFrameOnQueue() {
  RTC_DCHECK(callback_queue_);
  RTC_DCHECK(!callback_task_.Running());
  int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
  callback_task_ = RepeatingTaskHandle::DelayedStart(
      callback_queue_->Get(), TimeDelta::ms(wait_ms), [this] {
        // If this task has not been cancelled, we did not get any new frames
        // while waiting. Continue with frame delivery.
        rtc::CritScope lock(&crit_);
        if (!frames_to_decode_.empty()) {
          // We have frames, deliver!
          frame_handler_(absl::WrapUnique(GetNextFrame()), kFrameFound);
          CancelCallback();
          return TimeDelta::Zero();  // Ignored.
        } else if (clock_->TimeInMilliseconds() >= latest_return_time_ms_) {
          // We have timed out, signal this and stop repeating.
          frame_handler_(nullptr, kTimeout);
          CancelCallback();
          return TimeDelta::Zero();  // Ignored.
        } else {
          // If there's no frames to decode and there is still time left, it
          // means that the frame buffer was cleared between creation and
          // execution of this task. Continue waiting for the remaining time.
          int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
          return TimeDelta::ms(wait_ms);
        }
      });
}
117
philipel75562822016-09-05 10:57:41 +0200118FrameBuffer::ReturnReason FrameBuffer::NextFrame(
119 int64_t max_wait_time_ms,
philipele7c891f2018-02-22 14:35:06 +0100120 std::unique_ptr<EncodedFrame>* frame_out,
philipel3042c2d2017-08-18 04:55:02 -0700121 bool keyframe_required) {
tommidb23ea62017-03-03 07:21:18 -0800122 TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
philipel1c056252017-01-31 09:53:12 -0800123 int64_t latest_return_time_ms =
124 clock_->TimeInMilliseconds() + max_wait_time_ms;
philipel504c47d2016-06-30 17:33:02 +0200125 int64_t wait_ms = max_wait_time_ms;
philipel29f730e2017-03-15 08:10:08 -0700126 int64_t now_ms = 0;
philipele0b2f152016-09-28 10:23:49 +0200127
128 do {
philipel29f730e2017-03-15 08:10:08 -0700129 now_ms = clock_->TimeInMilliseconds();
philipel504c47d2016-06-30 17:33:02 +0200130 {
131 rtc::CritScope lock(&crit_);
tommi0a735642017-03-14 06:23:57 -0700132 new_continuous_frame_event_.Reset();
philipel29f730e2017-03-15 08:10:08 -0700133 if (stopped_)
134 return kStopped;
135
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200136 keyframe_required_ = keyframe_required;
137 latest_return_time_ms_ = latest_return_time_ms;
138 wait_ms = FindNextFrame(now_ms);
139 }
tommi0a735642017-03-14 06:23:57 -0700140 } while (new_continuous_frame_event_.Wait(wait_ms));
philipele0b2f152016-09-28 10:23:49 +0200141
philipel29f730e2017-03-15 08:10:08 -0700142 {
143 rtc::CritScope lock(&crit_);
philipele0b2f152016-09-28 10:23:49 +0200144
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100145 if (!frames_to_decode_.empty()) {
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200146 frame_out->reset(GetNextFrame());
philipel29f730e2017-03-15 08:10:08 -0700147 return kFrameFound;
philipelbe7a9e52016-05-19 12:19:35 +0200148 }
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200149 }
tommi0a735642017-03-14 06:23:57 -0700150
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200151 if (latest_return_time_ms - clock_->TimeInMilliseconds() > 0) {
philipel1c056252017-01-31 09:53:12 -0800152 // If |next_frame_it_ == frames_.end()| and there is still time left, it
153 // means that the frame buffer was cleared as the thread in this function
154 // was waiting to acquire |crit_| in order to return. Wait for the
155 // remaining time and then return.
156 return NextFrame(latest_return_time_ms - now_ms, frame_out);
philipelbe7a9e52016-05-19 12:19:35 +0200157 }
tommi0a735642017-03-14 06:23:57 -0700158 return kTimeout;
philipelbe7a9e52016-05-19 12:19:35 +0200159}
160
// Scans |frames_| for the next continuous, fully-decodable (super)frame,
// stores the iterators of its layer frames in |frames_to_decode_| and returns
// how many milliseconds the caller should wait before decoding it. The result
// is clamped to [0, latest_return_time_ms_ - now_ms]. If no frame is
// selected, |frames_to_decode_| is left empty and the remaining wait time is
// returned. Callers hold |crit_|.
int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
  int64_t wait_ms = latest_return_time_ms_ - now_ms;
  frames_to_decode_.clear();

  // |last_continuous_frame_| may be empty below, but nullopt is smaller
  // than everything else and loop will immediately terminate as expected.
  for (auto frame_it = frames_.begin();
       frame_it != frames_.end() && frame_it->first <= last_continuous_frame_;
       ++frame_it) {
    // Skip frames that are not yet continuous or still miss references.
    if (!frame_it->second.continuous ||
        frame_it->second.num_missing_decodable > 0) {
      continue;
    }

    EncodedFrame* frame = frame_it->second.frame.get();

    if (keyframe_required_ && !frame->is_keyframe())
      continue;

    auto last_decoded_frame_timestamp =
        decoded_frames_history_.GetLastDecodedFrameTimestamp();

    // TODO(https://bugs.webrtc.org/9974): consider removing this check
    // as it may make a stream undecodable after a very long delay between
    // frames.
    if (last_decoded_frame_timestamp &&
        AheadOf(*last_decoded_frame_timestamp, frame->Timestamp())) {
      continue;
    }

    // Only ever return all parts of a superframe. Therefore skip this
    // frame if it's not a beginning of a superframe.
    if (frame->inter_layer_predicted) {
      continue;
    }

    // Gather all remaining frames for the same superframe.
    std::vector<FrameMap::iterator> current_superframe;
    current_superframe.push_back(frame_it);
    bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer;
    FrameMap::iterator next_frame_it = frame_it;
    while (true) {
      ++next_frame_it;
      if (next_frame_it == frames_.end() ||
          next_frame_it->first.picture_id != frame->id.picture_id ||
          !next_frame_it->second.continuous) {
        break;
      }
      // Check if the next frame has some undecoded references other than
      // the previous frame in the same superframe.
      size_t num_allowed_undecoded_refs =
          (next_frame_it->second.frame->inter_layer_predicted) ? 1 : 0;
      if (next_frame_it->second.num_missing_decodable >
          num_allowed_undecoded_refs) {
        break;
      }
      // All frames in the superframe should have the same timestamp.
      if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) {
        RTC_LOG(LS_WARNING) << "Frames in a single superframe have different"
                               " timestamps. Skipping undecodable superframe.";
        break;
      }
      current_superframe.push_back(next_frame_it);
      last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer;
    }
    // Check if the current superframe is complete.
    // TODO(bugs.webrtc.org/10064): consider returning all available to
    // decode frames even if the superframe is not complete yet.
    if (!last_layer_completed) {
      continue;
    }

    frames_to_decode_ = std::move(current_superframe);

    // Lazily compute the render time for frames that do not have one yet.
    if (frame->RenderTime() == -1) {
      frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
    }
    wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

    // This will cause the frame buffer to prefer high framerate rather
    // than high resolution in the case of the decoder not decoding fast
    // enough and the stream has multiple spatial and temporal layers.
    // For multiple temporal layers it may cause non-base layer frames to be
    // skipped if they are late.
    if (wait_ms < -kMaxAllowedFrameDelayMs)
      continue;

    break;
  }
  wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
  wait_ms = std::max<int64_t>(wait_ms, 0);
  return wait_ms;
}
254
// Extracts the frames selected by FindNextFrame() from |frames_|, records
// them as decoded, updates the jitter estimator and render timing, and
// returns the frame to hand to the decoder. Ownership of the returned raw
// pointer is transferred to the caller (the async path wraps it in a
// unique_ptr via absl::WrapUnique). When the superframe has several spatial
// layers they are merged by CombineAndDeleteFrames(). Callers hold |crit_|.
EncodedFrame* FrameBuffer::GetNextFrame() {
  int64_t now_ms = clock_->TimeInMilliseconds();
  // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
  std::vector<EncodedFrame*> frames_out;

  RTC_DCHECK(!frames_to_decode_.empty());
  bool superframe_delayed_by_retransmission = false;
  size_t superframe_size = 0;
  EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();
  int64_t render_time_ms = first_frame->RenderTime();
  int64_t receive_time_ms = first_frame->ReceivedTime();
  // Gracefully handle bad RTP timestamps and render time issues.
  if (HasBadRenderTiming(*first_frame, now_ms)) {
    jitter_estimator_.Reset();
    timing_->Reset();
    render_time_ms = timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
  }

  for (FrameMap::iterator& frame_it : frames_to_decode_) {
    RTC_DCHECK(frame_it != frames_.end());
    // Release ownership from the buffer; the frame is handed to the caller.
    EncodedFrame* frame = frame_it->second.frame.release();

    // All layers of the superframe share the first frame's render time.
    frame->SetRenderTime(render_time_ms);

    superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
    receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
    superframe_size += frame->size();

    PropagateDecodability(frame_it->second);
    decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());

    // Remove decoded frame and all undecoded frames before it.
    frames_.erase(frames_.begin(), ++frame_it);

    frames_out.push_back(frame);
  }

  // Retransmitted frames would skew the inter-frame delay, so only feed the
  // jitter estimator when the superframe arrived without retransmission.
  if (!superframe_delayed_by_retransmission) {
    int64_t frame_delay;

    if (inter_frame_delay_.CalculateDelay(first_frame->Timestamp(),
                                          &frame_delay, receive_time_ms)) {
      jitter_estimator_.UpdateEstimate(frame_delay, superframe_size);
    }

    // With NACK+FEC the RTT is not added to the jitter estimate.
    float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
    float jitter_est_cap_ms = 300.0;
    if (RttMultExperiment::RttMultEnabled()) {
      rtt_mult = RttMultExperiment::GetRttMultValue();
      // TODO(mhoro): add RttMultExperiment::GetJitterEstCapValue();
      jitter_est_cap_ms = 300.0;
    }
    timing_->SetJitterDelay(
        jitter_estimator_.GetJitterEstimate(rtt_mult, jitter_est_cap_ms));
    timing_->UpdateCurrentDelay(render_time_ms, now_ms);
  } else {
    if (RttMultExperiment::RttMultEnabled() || add_rtt_to_playout_delay_)
      jitter_estimator_.FrameNacked();
  }

  UpdateJitterDelay();
  UpdateTimingFrameInfo();

  if (frames_out.size() == 1) {
    return frames_out[0];
  } else {
    return CombineAndDeleteFrames(frames_out);
  }
}
324
philipele7c891f2018-02-22 14:35:06 +0100325bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
326 int64_t now_ms) {
stefan95e97542017-05-23 09:52:18 -0700327 // Assume that render timing errors are due to changes in the video stream.
328 int64_t render_time_ms = frame.RenderTimeMs();
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200329 // Zero render time means render immediately.
330 if (render_time_ms == 0) {
331 return false;
332 }
stefan95e97542017-05-23 09:52:18 -0700333 if (render_time_ms < 0) {
334 return true;
335 }
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200336 const int64_t kMaxVideoDelayMs = 10000;
stefan95e97542017-05-23 09:52:18 -0700337 if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
338 int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
Mirko Bonadei675513b2017-11-09 11:09:25 +0100339 RTC_LOG(LS_WARNING)
340 << "A frame about to be decoded is out of the configured "
341 << "delay bounds (" << frame_delay << " > " << kMaxVideoDelayMs
342 << "). Resetting the video jitter buffer.";
stefan95e97542017-05-23 09:52:18 -0700343 return true;
344 }
345 if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100346 RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
347 << kMaxVideoDelayMs << " ms.";
stefan95e97542017-05-23 09:52:18 -0700348 return true;
349 }
350 return false;
351}
352
// Sets the protection mode; with kProtectionNackFEC the RTT multiplier used
// in the jitter estimate becomes 0 (see GetNextFrame()). Thread-safe.
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  rtc::CritScope lock(&crit_);
  protection_mode_ = mode;
}
358
// Re-enables frame delivery after a previous Stop(). Thread-safe.
void FrameBuffer::Start() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Start");
  rtc::CritScope lock(&crit_);
  stopped_ = false;
}
364
// Stops frame delivery: subsequent blocking NextFrame() calls return
// kStopped, any currently-blocked waiter is woken via the event, and a
// pending async callback is cancelled. Thread-safe.
void FrameBuffer::Stop() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
  rtc::CritScope lock(&crit_);
  stopped_ = true;
  new_continuous_frame_event_.Set();
  CancelCallback();
}
372
// Drops all buffered frames and the decoded-frames history. Thread-safe.
void FrameBuffer::Clear() {
  rtc::CritScope lock(&crit_);
  ClearFramesAndHistory();
}
377
// Forwards the latest round-trip time estimate to the jitter estimator.
// Thread-safe.
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  rtc::CritScope lock(&crit_);
  jitter_estimator_.UpdateRtt(rtt_ms);
}
382
philipele7c891f2018-02-22 14:35:06 +0100383bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
philipel112adf92017-06-15 09:06:21 -0700384 for (size_t i = 0; i < frame.num_references; ++i) {
Philip Eliasson1f850a62019-03-19 12:15:00 +0000385 if (frame.references[i] >= frame.id.picture_id)
philipel112adf92017-06-15 09:06:21 -0700386 return false;
philipel3b3c9c42017-09-11 09:38:36 -0700387
philipel112adf92017-06-15 09:06:21 -0700388 for (size_t j = i + 1; j < frame.num_references; ++j) {
389 if (frame.references[i] == frame.references[j])
390 return false;
391 }
392 }
393
philipel0fa82a62018-03-19 15:34:53 +0100394 if (frame.inter_layer_predicted && frame.id.spatial_layer == 0)
philipel112adf92017-06-15 09:06:21 -0700395 return false;
396
397 return true;
398}
399
// Tears down the async NextFrame() state: drops the stored handler, stops the
// repeating delivery task and detaches the callback queue. Callers hold
// |crit_|.
void FrameBuffer::CancelCallback() {
  frame_handler_ = {};
  callback_task_.Stop();
  callback_queue_ = nullptr;
}
405
// Returns true when every spatial layer of the superframe that |frame|
// belongs to is already present in |frames_|: walks lower layers down the
// inter-layer prediction chain and higher layers up to the one flagged
// is_last_spatial_layer. Used to decide when to fire the OnCompleteFrame
// statistics callback. Callers hold |crit_|.
bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) {
  if (frame.inter_layer_predicted) {
    // Check that all previous spatial layers are already inserted.
    VideoLayerFrameId id = frame.id;
    RTC_DCHECK_GT(id.spatial_layer, 0);
    --id.spatial_layer;
    FrameMap::iterator prev_frame = frames_.find(id);
    if (prev_frame == frames_.end() || !prev_frame->second.frame)
      return false;
    while (prev_frame->second.frame->inter_layer_predicted) {
      if (prev_frame == frames_.begin())
        return false;
      --prev_frame;
      --id.spatial_layer;
      if (!prev_frame->second.frame ||
          prev_frame->first.picture_id != id.picture_id ||
          prev_frame->first.spatial_layer != id.spatial_layer) {
        return false;
      }
    }
  }

  if (!frame.is_last_spatial_layer) {
    // Check that all following spatial layers are already inserted.
    VideoLayerFrameId id = frame.id;
    ++id.spatial_layer;
    FrameMap::iterator next_frame = frames_.find(id);
    if (next_frame == frames_.end() || !next_frame->second.frame)
      return false;
    while (!next_frame->second.frame->is_last_spatial_layer) {
      ++next_frame;
      ++id.spatial_layer;
      if (next_frame == frames_.end() || !next_frame->second.frame ||
          next_frame->first.picture_id != id.picture_id ||
          next_frame->first.spatial_layer != id.spatial_layer) {
        return false;
      }
    }
  }

  return true;
}
448
// Inserts |frame| into the buffer and returns the picture id of the last
// continuous frame, or -1 if there is none. The frame is dropped (buffer
// unchanged) when its references are invalid, the buffer is full and the
// frame is not a keyframe, it precedes the last decoded frame, or it is a
// duplicate. A keyframe arriving to a full buffer, or a detected picture-id
// jump, clears the buffer first. When the insertion produces new continuous
// frames, any waiting NextFrame() caller is woken up. Thread-safe.
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);

  rtc::CritScope lock(&crit_);

  if (stats_callback_ && IsCompleteSuperFrame(*frame)) {
    stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
                                     frame->contentType());
  }
  const VideoLayerFrameId& id = frame->id;

  int64_t last_continuous_picture_id =
      !last_continuous_frame_ ? -1 : last_continuous_frame_->picture_id;

  if (!ValidReferences(*frame)) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") has invalid frame references, dropping frame.";
    return last_continuous_picture_id;
  }

  if (frames_.size() >= kMaxFramesBuffered) {
    if (frame->is_keyframe()) {
      RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") but buffer is full, clearing"
                          << " buffer and inserting the frame.";
      ClearFramesAndHistory();
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") could not be inserted due to the frame "
                          << "buffer being full, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  auto last_decoded_frame_timestamp =
      decoded_frames_history_.GetLastDecodedFrameTimestamp();
  if (last_decoded_frame && id <= *last_decoded_frame) {
    if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) &&
        frame->is_keyframe()) {
      // If this frame has a newer timestamp but an earlier picture id then we
      // assume there has been a jump in the picture id due to some encoder
      // reconfiguration or some other reason. Even though this is not according
      // to spec we can still continue to decode from this frame if it is a
      // keyframe.
      RTC_LOG(LS_WARNING)
          << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") inserted after frame ("
                          << last_decoded_frame->picture_id << ":"
                          << static_cast<int>(last_decoded_frame->spatial_layer)
                          << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the picture id make large jumps mid stream.
  if (!frames_.empty() && id < frames_.begin()->first &&
      frames_.rbegin()->first < id) {
    RTC_LOG(LS_WARNING)
        << "A jump in picture id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_picture_id = -1;
  }

  auto info = frames_.emplace(id, FrameInfo()).first;

  if (info->second.frame) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") already inserted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;

  if (!frame->delayed_by_retransmission())
    timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());

  info->second.frame = std::move(frame);

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_->picture_id;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame.
    new_continuous_frame_event_.Set();

    // Restart the async delivery task so it can re-evaluate the best frame.
    if (callback_queue_) {
      callback_queue_->PostTask([this] {
        rtc::CritScope lock(&crit_);
        if (!callback_task_.Running())
          return;
        RTC_CHECK(frame_handler_);
        callback_task_.Stop();
        StartWaitForNextFrameOnQueue();
      });
    }
  }

  return last_continuous_picture_id;
}
569
// Starting from |start| (which must already be continuous), walks all frames
// that depend on it, decrementing their missing-continuous counts and marking
// them continuous once the count reaches zero. Also advances
// |last_continuous_frame_|. Callers hold |crit_|.
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
  RTC_DCHECK(start->second.continuous);

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (!last_continuous_frame_ || *last_continuous_frame_ < frame->first) {
      last_continuous_frame_ = frame->first;
    }

    // Loop through all dependent frames, and if that frame no longer has
    // any unfulfilled dependencies then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      RTC_DCHECK(frame_ref != frames_.end());

      // TODO(philipel): Look into why we've seen this happen.
      if (frame_ref != frames_.end()) {
        --frame_ref->second.num_missing_continuous;
        if (frame_ref->second.num_missing_continuous == 0) {
          frame_ref->second.continuous = true;
          continuous_frames.push(frame_ref);
        }
      }
    }
  }
}
603
604void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
tommidb23ea62017-03-03 07:21:18 -0800605 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
Elad Alon69321dd2019-01-10 15:02:54 +0100606 for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200607 auto ref_info = frames_.find(info.dependent_frames[d]);
philipel93e451b2016-10-06 12:25:13 +0200608 RTC_DCHECK(ref_info != frames_.end());
tommie95b78b2017-05-14 07:23:11 -0700609 // TODO(philipel): Look into why we've seen this happen.
610 if (ref_info != frames_.end()) {
611 RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
612 --ref_info->second.num_missing_decodable;
613 }
philipele0b2f152016-09-28 10:23:49 +0200614 }
615}
616
// Computes how many of |frame|'s dependencies are still unfulfilled and
// registers backwards references from each unfulfilled dependency to |frame|,
// so that |num_missing_continuous| / |num_missing_decodable| on |info| can be
// decremented later as those frames become continuous / are decoded.
//
// Returns false if |frame| can never become decodable: it references a frame
// older than the last decoded frame that was itself never decoded. Returns
// true otherwise (the frame may still be waiting on dependencies).
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  const VideoLayerFrameId& id = frame.id;

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  // Frames at or before the last decoded one are cleaned out of |frames_|
  // elsewhere, so an incoming entry must be newer than it.
  RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first);

  // In this function we determine how many missing dependencies this |frame|
  // has to become continuous/decodable. If a frame that this |frame| depend
  // on has already been decoded then we can ignore that dependency since it has
  // already been fulfilled.
  //
  // For all other frames we will register a backwards reference to this |frame|
  // so that |num_missing_continuous| and |num_missing_decodable| can be
  // decremented as frames become continuous/are decoded.
  struct Dependency {
    VideoLayerFrameId id;
    bool continuous;
  };
  std::vector<Dependency> not_yet_fulfilled_dependencies;

  // Find all dependencies that have not yet been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    // Temporal references stay within the same spatial layer.
    VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer);
    // Does |frame| depend on a frame earlier than the last decoded one?
    if (last_decoded_frame && ref_key <= *last_decoded_frame) {
      // Was that frame decoded? If not, this |frame| will never become
      // decodable.
      if (!decoded_frames_history_.WasDecoded(ref_key)) {
        int64_t now_ms = clock_->TimeInMilliseconds();
        // Rate-limit this warning to once per kLogNonDecodedIntervalMs.
        if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
          RTC_LOG(LS_WARNING)
              << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
              << static_cast<int>(id.spatial_layer)
              << ") depends on a non-decoded frame more previous than"
              << " the last decoded frame, dropping frame.";
          last_log_non_decoded_ms_ = now_ms;
        }
        return false;
      }
    } else {
      // Dependency is newer than the last decoded frame; it is unfulfilled.
      // Record whether it is already known to be continuous (it may not even
      // have arrived yet, in which case it is not in |frames_| at all).
      auto ref_info = frames_.find(ref_key);
      bool ref_continuous =
          ref_info != frames_.end() && ref_info->second.continuous;
      not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous});
    }
  }

  // Does |frame| depend on the lower spatial layer?
  if (frame.inter_layer_predicted) {
    // Inter-layer reference: same picture_id, one spatial layer below.
    VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1);
    auto ref_info = frames_.find(ref_key);

    bool lower_layer_decoded =
        last_decoded_frame && *last_decoded_frame == ref_key;
    bool lower_layer_continuous =
        lower_layer_decoded ||
        (ref_info != frames_.end() && ref_info->second.continuous);

    // The inter-layer dependency counts as unfulfilled unless the lower layer
    // is both continuous and already decoded.
    if (!lower_layer_continuous || !lower_layer_decoded) {
      not_yet_fulfilled_dependencies.push_back(
          {ref_key, lower_layer_continuous});
    }
  }

  info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
  info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();

  for (const Dependency& dep : not_yet_fulfilled_dependencies) {
    // Continuous-but-not-decoded dependencies only count towards decodability.
    if (dep.continuous)
      --info->second.num_missing_continuous;

    // NOTE: operator[] creates a placeholder FrameInfo entry if the
    // dependency has not arrived yet; the backwards reference is registered
    // on that placeholder.
    frames_[dep.id].dependent_frames.push_back(id);
  }

  return true;
}
695
philipelbe742702016-11-30 01:31:40 -0800696void FrameBuffer::UpdateJitterDelay() {
tommidb23ea62017-03-03 07:21:18 -0800697 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
philipela45102f2017-02-22 05:30:39 -0800698 if (!stats_callback_)
699 return;
philipelbe742702016-11-30 01:31:40 -0800700
philipela45102f2017-02-22 05:30:39 -0800701 int decode_ms;
702 int max_decode_ms;
703 int current_delay_ms;
704 int target_delay_ms;
705 int jitter_buffer_ms;
706 int min_playout_delay_ms;
707 int render_delay_ms;
708 if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
709 &target_delay_ms, &jitter_buffer_ms,
710 &min_playout_delay_ms, &render_delay_ms)) {
711 stats_callback_->OnFrameBufferTimingsUpdated(
712 decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
713 jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
philipelbe742702016-11-30 01:31:40 -0800714 }
philipel266f0a42016-11-28 08:49:07 -0800715}
716
ilnik2edc6842017-07-06 03:06:50 -0700717void FrameBuffer::UpdateTimingFrameInfo() {
718 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
Danil Chapovalov0040b662018-06-18 10:48:16 +0200719 absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
philipel97187112018-03-23 10:43:21 +0100720 if (info && stats_callback_)
ilnik2edc6842017-07-06 03:06:50 -0700721 stats_callback_->OnTimingFrameInfoUpdated(*info);
722}
723
// Resets the buffer to its initial state: drops all buffered frames, the
// pending decode selection, the continuity marker, and the record of which
// frames were already decoded.
void FrameBuffer::ClearFramesAndHistory() {
  TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
  frames_.clear();
  last_continuous_frame_.reset();
  frames_to_decode_.clear();
  decoded_frames_history_.Clear();
}
731
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100732EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
733 const std::vector<EncodedFrame*>& frames) const {
734 RTC_DCHECK(!frames.empty());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100735 EncodedFrame* first_frame = frames[0];
736 EncodedFrame* last_frame = frames.back();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100737 size_t total_length = 0;
738 for (size_t i = 0; i < frames.size(); ++i) {
739 total_length += frames[i]->size();
740 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100741 first_frame->VerifyAndAllocate(total_length);
742
743 // Spatial index of combined frame is set equal to spatial index of its top
744 // spatial layer.
745 first_frame->SetSpatialIndex(last_frame->id.spatial_layer);
746 first_frame->id.spatial_layer = last_frame->id.spatial_layer;
747
748 first_frame->video_timing_mutable()->network2_timestamp_ms =
749 last_frame->video_timing().network2_timestamp_ms;
750 first_frame->video_timing_mutable()->receive_finish_ms =
751 last_frame->video_timing().receive_finish_ms;
752
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100753 // Append all remaining frames to the first one.
Niels Möller9c843902019-01-11 10:21:35 +0100754 uint8_t* buffer = first_frame->data() + first_frame->size();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100755 for (size_t i = 1; i < frames.size(); ++i) {
Sergey Silkin61832dd2018-12-20 14:32:14 +0100756 EncodedFrame* next_frame = frames[i];
Niels Möller9c843902019-01-11 10:21:35 +0100757 memcpy(buffer, next_frame->data(), next_frame->size());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100758 buffer += next_frame->size();
759 delete next_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100760 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100761 first_frame->set_size(total_length);
762 return first_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100763}
764
// FrameInfo's special members are defaulted out-of-line — presumably so the
// header can hold members of incomplete type; confirm against the header.
// Copy operations are not defaulted here, so FrameInfo is move-only unless
// the header says otherwise.
FrameBuffer::FrameInfo::FrameInfo() = default;
FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
FrameBuffer::FrameInfo::~FrameInfo() = default;
768
philipelbe7a9e52016-05-19 12:19:35 +0200769} // namespace video_coding
770} // namespace webrtc