/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/frame_buffer2.h"

#include <algorithm>
#include <cstdlib>
#include <iterator>
#include <queue>
#include <utility>
#include <vector>

#include "absl/memory/memory.h"
#include "api/video/encoded_image.h"
#include "api/video/video_timing.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/jitter_estimator.h"
#include "modules/video_coding/timing.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/rtt_mult_experiment.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {
namespace video_coding {

namespace {
// Max number of frames the buffer will hold.
constexpr size_t kMaxFramesBuffered = 800;

// Default value for the maximum decode queue size that is used when the
// low-latency renderer is used.
constexpr size_t kZeroPlayoutDelayDefaultMaxDecodeQueueSize = 8;

// Max number of decoded frame info entries that will be saved.
constexpr int kMaxFramesHistory = 1 << 13;

// The maximum time a frame is allowed to be late, relative to its predicted
// render time, and still be rendered.
constexpr int kMaxAllowedFrameDelayMs = 5;

constexpr int64_t kLogNonDecodedIntervalMs = 5000;
}  // namespace

FrameBuffer::FrameBuffer(Clock* clock,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : decoded_frames_history_(kMaxFramesHistory),
      clock_(clock),
      callback_queue_(nullptr),
      jitter_estimator_(clock),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback),
      last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
      add_rtt_to_playout_delay_(
          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
      rtt_mult_settings_(RttMultExperiment::GetRttMultValue()),
      zero_playout_delay_max_decode_queue_size_(
          "max_decode_queue_size",
          kZeroPlayoutDelayDefaultMaxDecodeQueueSize) {
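  // Override the default max decode queue size with the value from the
  // "WebRTC-ZeroPlayoutDelay" field trial, if present (expected as a
  // "max_decode_queue_size:<n>" entry in the trial string).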
  ParseFieldTrial({&zero_playout_delay_max_decode_queue_size_},
                  field_trial::FindFullName("WebRTC-ZeroPlayoutDelay"));
  callback_checker_.Detach();
}

FrameBuffer::~FrameBuffer() {
  RTC_DCHECK_RUN_ON(&construction_checker_);
}

void FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    bool keyframe_required,
    rtc::TaskQueue* callback_queue,
    std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
  RTC_DCHECK_RUN_ON(&callback_checker_);
  RTC_DCHECK(callback_queue->IsCurrent());
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  int64_t latest_return_time_ms =
      clock_->TimeInMilliseconds() + max_wait_time_ms;

  MutexLock lock(&mutex_);
  if (stopped_) {
    return;
  }
  latest_return_time_ms_ = latest_return_time_ms;
  keyframe_required_ = keyframe_required;
  frame_handler_ = handler;
  callback_queue_ = callback_queue;
  StartWaitForNextFrameOnQueue();
}

void FrameBuffer::StartWaitForNextFrameOnQueue() {
  RTC_DCHECK(callback_queue_);
  RTC_DCHECK(!callback_task_.Running());
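  // Determine how long to wait before the next decodable frame is expected to
  // be ready for delivery; the repeating task below re-evaluates this until a
  // frame is delivered or the deadline passes.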
  int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
  callback_task_ = RepeatingTaskHandle::DelayedStart(
      callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] {
        RTC_DCHECK_RUN_ON(&callback_checker_);
        // If this task has not been cancelled, we did not get any new frames
        // while waiting. Continue with frame delivery.
        std::unique_ptr<EncodedFrame> frame;
        std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)>
            frame_handler;
        {
          MutexLock lock(&mutex_);
          if (!frames_to_decode_.empty()) {
            // We have frames, deliver!
            frame = absl::WrapUnique(GetNextFrame());
            timing_->SetLastDecodeScheduledTimestamp(
                clock_->TimeInMilliseconds());
          } else if (clock_->TimeInMilliseconds() < latest_return_time_ms_) {
            // If there are no frames to decode and there is still time left,
            // it means that the frame buffer was cleared between creation and
            // execution of this task. Continue waiting for the remaining time.
            int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
            return TimeDelta::Millis(wait_ms);
          }
          frame_handler = std::move(frame_handler_);
          CancelCallback();
        }
        // Deliver frame, if any. Otherwise signal timeout.
        ReturnReason reason = frame ? kFrameFound : kTimeout;
        frame_handler(std::move(frame), reason);
        return TimeDelta::Zero();  // Ignored.
      });
}

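// Scans the buffered frames for the next decodable superframe, fills
// `frames_to_decode_` if one is found, and returns how many milliseconds to
// wait before it should be delivered (0 if it can be delivered immediately).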
int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
  int64_t wait_ms = latest_return_time_ms_ - now_ms;
  frames_to_decode_.clear();

  // `last_continuous_frame_` may be empty below, but nullopt is smaller than
  // everything else, so the loop will immediately terminate as expected.
147 for (auto frame_it = frames_.begin();
148 frame_it != frames_.end() && frame_it->first <= last_continuous_frame_;
149 ++frame_it) {
150 if (!frame_it->second.continuous ||
151 frame_it->second.num_missing_decodable > 0) {
152 continue;
153 }
154
155 EncodedFrame* frame = frame_it->second.frame.get();
156
157 if (keyframe_required_ && !frame->is_keyframe())
158 continue;
159
160 auto last_decoded_frame_timestamp =
161 decoded_frames_history_.GetLastDecodedFrameTimestamp();
162
163 // TODO(https://bugs.webrtc.org/9974): consider removing this check
164 // as it may make a stream undecodable after a very long delay between
165 // frames.
166 if (last_decoded_frame_timestamp &&
167 AheadOf(*last_decoded_frame_timestamp, frame->Timestamp())) {
168 continue;
169 }
170
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200171 // Gather all remaining frames for the same superframe.
172 std::vector<FrameMap::iterator> current_superframe;
173 current_superframe.push_back(frame_it);
174 bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer;
175 FrameMap::iterator next_frame_it = frame_it;
philipela65d7852020-11-20 17:49:24 +0100176 while (!last_layer_completed) {
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200177 ++next_frame_it;
philipela65d7852020-11-20 17:49:24 +0100178
179 if (next_frame_it == frames_.end() || !next_frame_it->second.frame) {
180 break;
181 }
182
183 if (next_frame_it->second.frame->Timestamp() != frame->Timestamp() ||
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200184 !next_frame_it->second.continuous) {
185 break;
186 }
philipela65d7852020-11-20 17:49:24 +0100187
188 if (next_frame_it->second.num_missing_decodable > 0) {
philipelcb327d92020-12-10 10:49:20 +0100189 bool has_inter_layer_dependency = false;
190 for (size_t i = 0; i < EncodedFrame::kMaxFrameReferences &&
philipela65d7852020-11-20 17:49:24 +0100191 i < next_frame_it->second.frame->num_references;
192 ++i) {
philipel9aa9b8d2021-02-15 13:31:29 +0100193 if (next_frame_it->second.frame->references[i] >= frame_it->first) {
philipela65d7852020-11-20 17:49:24 +0100194 has_inter_layer_dependency = true;
philipelcb327d92020-12-10 10:49:20 +0100195 break;
philipela65d7852020-11-20 17:49:24 +0100196 }
197 }
198
199 // If the frame has an undecoded dependency that is not within the same
philipelcb327d92020-12-10 10:49:20 +0100200 // temporal unit then this frame is not yet ready to be decoded. If it
philipela65d7852020-11-20 17:49:24 +0100201 // is within the same temporal unit then the not yet decoded dependency
202 // is just a lower spatial frame, which is ok.
203 if (!has_inter_layer_dependency ||
204 next_frame_it->second.num_missing_decodable > 1) {
205 break;
206 }
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200207 }
philipela65d7852020-11-20 17:49:24 +0100208
Sebastian Jansson1c747f52019-04-04 13:01:39 +0200209 current_superframe.push_back(next_frame_it);
210 last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer;
211 }
212 // Check if the current superframe is complete.
213 // TODO(bugs.webrtc.org/10064): consider returning all available to
214 // decode frames even if the superframe is not complete yet.
215 if (!last_layer_completed) {
216 continue;
217 }
218
219 frames_to_decode_ = std::move(current_superframe);
220
221 if (frame->RenderTime() == -1) {
222 frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
223 }
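    // Ask the timing module how long this frame may wait before decoding,
    // letting it know whether more frames are queued than the low-latency
    // renderer's decode queue limit allows.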
    bool too_many_frames_queued =
        frames_.size() > zero_playout_delay_max_decode_queue_size_;
    wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms,
                                      too_many_frames_queued);

    // This will cause the frame buffer to prefer a high framerate over a high
    // resolution when the decoder is not decoding fast enough and the stream
    // has multiple spatial and temporal layers. With multiple temporal layers
    // it may cause non-base-layer frames to be skipped if they are late.
    if (wait_ms < -kMaxAllowedFrameDelayMs)
      continue;

    break;
  }
  wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
  wait_ms = std::max<int64_t>(wait_ms, 0);
  return wait_ms;
}

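// Pops the frames selected by FindNextFrame() out of the buffer, updates the
// timing and jitter estimates, and returns them as a single frame (spatial
// layers are combined). Ownership is transferred to the caller.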
EncodedFrame* FrameBuffer::GetNextFrame() {
  RTC_DCHECK_RUN_ON(&callback_checker_);
  int64_t now_ms = clock_->TimeInMilliseconds();
  // TODO(ilnik): remove `frames_out`, use `frames_to_decode_` directly.
  std::vector<EncodedFrame*> frames_out;

  RTC_DCHECK(!frames_to_decode_.empty());
  bool superframe_delayed_by_retransmission = false;
  size_t superframe_size = 0;
  EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();
  int64_t render_time_ms = first_frame->RenderTime();
  int64_t receive_time_ms = first_frame->ReceivedTime();
  // Gracefully handle bad RTP timestamps and render time issues.
  if (HasBadRenderTiming(*first_frame, now_ms)) {
    jitter_estimator_.Reset();
    timing_->Reset();
    render_time_ms = timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
  }

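  // Extract each layer frame of the superframe, mark its dependents as one
  // step closer to decodable, and drop any older frames still in the buffer.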
  for (FrameMap::iterator& frame_it : frames_to_decode_) {
    RTC_DCHECK(frame_it != frames_.end());
    EncodedFrame* frame = frame_it->second.frame.release();

    frame->SetRenderTime(render_time_ms);

    superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
    receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
    superframe_size += frame->size();

    PropagateDecodability(frame_it->second);
    decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());

    // Remove decoded frame and all undecoded frames before it.
    if (stats_callback_) {
      unsigned int dropped_frames =
          std::count_if(frames_.begin(), frame_it,
                        [](const std::pair<const int64_t, FrameInfo>& frame) {
                          return frame.second.frame != nullptr;
                        });
      if (dropped_frames > 0) {
        stats_callback_->OnDroppedFrames(dropped_frames);
      }
    }

    frames_.erase(frames_.begin(), ++frame_it);

    frames_out.push_back(frame);
  }

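  // Frames delayed by retransmission are excluded from the inter-frame delay
  // measurement, since their extra delay reflects loss recovery rather than
  // network jitter.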
  if (!superframe_delayed_by_retransmission) {
    int64_t frame_delay;

    if (inter_frame_delay_.CalculateDelay(first_frame->Timestamp(),
                                          &frame_delay, receive_time_ms)) {
      jitter_estimator_.UpdateEstimate(frame_delay, superframe_size);
    }

    float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
    absl::optional<float> rtt_mult_add_cap_ms = absl::nullopt;
    if (rtt_mult_settings_.has_value()) {
      rtt_mult = rtt_mult_settings_->rtt_mult_setting;
      rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
    }
    timing_->SetJitterDelay(
        jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
    timing_->UpdateCurrentDelay(render_time_ms, now_ms);
  } else {
    if (RttMultExperiment::RttMultEnabled() || add_rtt_to_playout_delay_)
      jitter_estimator_.FrameNacked();
  }

  UpdateJitterDelay();
  UpdateTimingFrameInfo();

  if (frames_out.size() == 1) {
    return frames_out[0];
  } else {
    return CombineAndDeleteFrames(frames_out);
  }
}

bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
                                     int64_t now_ms) {
  // Assume that render timing errors are due to changes in the video stream.
  int64_t render_time_ms = frame.RenderTimeMs();
  // Zero render time means render immediately.
  if (render_time_ms == 0) {
    return false;
  }
  if (render_time_ms < 0) {
    return true;
  }
  const int64_t kMaxVideoDelayMs = 10000;
  if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
    int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
    RTC_LOG(LS_WARNING)
        << "A frame about to be decoded is out of the configured "
           "delay bounds ("
        << frame_delay << " > " << kMaxVideoDelayMs
        << "). Resetting the video jitter buffer.";
    return true;
  }
  if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
    RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
                        << kMaxVideoDelayMs << " ms.";
    return true;
  }
  return false;
}

void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  MutexLock lock(&mutex_);
  protection_mode_ = mode;
}

void FrameBuffer::Stop() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
  MutexLock lock(&mutex_);
  if (stopped_)
    return;
  stopped_ = true;

  CancelCallback();
}

void FrameBuffer::Clear() {
  MutexLock lock(&mutex_);
  ClearFramesAndHistory();
}

int FrameBuffer::Size() {
  MutexLock lock(&mutex_);
  return frames_.size();
}

void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  MutexLock lock(&mutex_);
  jitter_estimator_.UpdateRtt(rtt_ms);
}

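// A frame's references are valid only if every reference points to an earlier
// frame id and no reference is repeated.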
bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
  for (size_t i = 0; i < frame.num_references; ++i) {
    if (frame.references[i] >= frame.Id())
      return false;

    for (size_t j = i + 1; j < frame.num_references; ++j) {
      if (frame.references[i] == frame.references[j])
        return false;
    }
  }

  return true;
}

void FrameBuffer::CancelCallback() {
  // Called from the callback queue or from within Stop().
  frame_handler_ = {};
  callback_task_.Stop();
  callback_queue_ = nullptr;
  callback_checker_.Detach();
}

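// Inserts `frame` into the buffer, updating continuity and dependency
// bookkeeping, and returns the id of the last continuous frame (or -1 if
// there is none).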
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);

  MutexLock lock(&mutex_);

  int64_t last_continuous_frame_id = last_continuous_frame_.value_or(-1);

  if (!ValidReferences(*frame)) {
    RTC_LOG(LS_WARNING) << "Frame " << frame->Id()
                        << " has invalid frame references, dropping frame.";
    return last_continuous_frame_id;
  }

  if (frames_.size() >= kMaxFramesBuffered) {
    if (frame->is_keyframe()) {
      RTC_LOG(LS_WARNING) << "Inserting keyframe " << frame->Id()
                          << " but buffer is full, clearing"
                             " buffer and inserting the frame.";
      ClearFramesAndHistory();
    } else {
      RTC_LOG(LS_WARNING) << "Frame " << frame->Id()
                          << " could not be inserted due to the frame "
                             "buffer being full, dropping frame.";
      return last_continuous_frame_id;
    }
  }

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  auto last_decoded_frame_timestamp =
      decoded_frames_history_.GetLastDecodedFrameTimestamp();
  if (last_decoded_frame && frame->Id() <= *last_decoded_frame) {
    if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) &&
        frame->is_keyframe()) {
      // If this frame has a newer timestamp but an earlier frame id then we
      // assume there has been a jump in the frame id due to some encoder
      // reconfiguration or some other reason. Even though this is not according
      // to spec we can still continue to decode from this frame if it is a
      // keyframe.
      RTC_LOG(LS_WARNING)
          << "A jump in frame id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_frame_id = -1;
    } else {
      RTC_LOG(LS_WARNING) << "Frame " << frame->Id() << " inserted after frame "
                          << *last_decoded_frame
                          << " was handed off for decoding, dropping frame.";
      return last_continuous_frame_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the frame id makes large jumps mid-stream.
  if (!frames_.empty() && frame->Id() < frames_.begin()->first &&
      frames_.rbegin()->first < frame->Id()) {
    RTC_LOG(LS_WARNING) << "A jump in frame id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_frame_id = -1;
  }

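  // Get or create the bookkeeping entry for this frame id; if an entry with a
  // frame already exists, this is a duplicate and can be ignored.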
  auto info = frames_.emplace(frame->Id(), FrameInfo()).first;

  if (info->second.frame) {
    return last_continuous_frame_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_frame_id;

  if (!frame->delayed_by_retransmission())
    timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());

  // It can happen that a frame will be reported as fully received even if a
  // lower spatial layer frame is missing.
  if (stats_callback_ && frame->is_last_spatial_layer) {
    stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
                                     frame->contentType());
  }

  info->second.frame = std::move(frame);

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_frame_id = *last_continuous_frame_;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame.
    if (callback_queue_) {
      callback_queue_->PostTask([this] {
        MutexLock lock(&mutex_);
        if (!callback_task_.Running())
          return;
        RTC_CHECK(frame_handler_);
        callback_task_.Stop();
        StartWaitForNextFrameOnQueue();
      });
    }
  }

  return last_continuous_frame_id;
}

void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
  RTC_DCHECK(start->second.continuous);

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (!last_continuous_frame_ || *last_continuous_frame_ < frame->first) {
      last_continuous_frame_ = frame->first;
    }

    // Loop through all dependent frames, and if that frame no longer has
    // any unfulfilled dependencies then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      RTC_DCHECK(frame_ref != frames_.end());

      // TODO(philipel): Look into why we've seen this happen.
      if (frame_ref != frames_.end()) {
        --frame_ref->second.num_missing_continuous;
        if (frame_ref->second.num_missing_continuous == 0) {
          frame_ref->second.continuous = true;
          continuous_frames.push(frame_ref);
        }
      }
    }
  }
}

void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
  for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
    auto ref_info = frames_.find(info.dependent_frames[d]);
    RTC_DCHECK(ref_info != frames_.end());
    // TODO(philipel): Look into why we've seen this happen.
    if (ref_info != frames_.end()) {
      RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
      --ref_info->second.num_missing_decodable;
    }
  }
}

bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first);

  // In this function we determine how many missing dependencies this `frame`
  // has to become continuous/decodable. If a frame that this `frame` depends
  // on has already been decoded then we can ignore that dependency since it
  // has already been fulfilled.
  //
  // For all other frames we will register a backwards reference to this `frame`
  // so that `num_missing_continuous` and `num_missing_decodable` can be
  // decremented as frames become continuous/are decoded.
  struct Dependency {
    int64_t frame_id;
    bool continuous;
  };
  std::vector<Dependency> not_yet_fulfilled_dependencies;

  // Find all dependencies that have not yet been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    // Does `frame` depend on a frame earlier than the last decoded one?
    if (last_decoded_frame && frame.references[i] <= *last_decoded_frame) {
      // Was that frame decoded? If not, this `frame` will never become
      // decodable.
      if (!decoded_frames_history_.WasDecoded(frame.references[i])) {
        int64_t now_ms = clock_->TimeInMilliseconds();
        if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
          RTC_LOG(LS_WARNING)
              << "Frame " << frame.Id()
              << " depends on a non-decoded frame earlier than the last "
                 "decoded frame, dropping frame.";
          last_log_non_decoded_ms_ = now_ms;
        }
        return false;
      }
    } else {
      auto ref_info = frames_.find(frame.references[i]);
      bool ref_continuous =
          ref_info != frames_.end() && ref_info->second.continuous;
      not_yet_fulfilled_dependencies.push_back(
          {frame.references[i], ref_continuous});
    }
  }

  info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
  info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();

  for (const Dependency& dep : not_yet_fulfilled_dependencies) {
    if (dep.continuous)
      --info->second.num_missing_continuous;

    frames_[dep.frame_id].dependent_frames.push_back(frame.Id());
  }

  return true;
}

void FrameBuffer::UpdateJitterDelay() {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
  if (!stats_callback_)
    return;

  int max_decode_ms;
  int current_delay_ms;
  int target_delay_ms;
  int jitter_buffer_ms;
  int min_playout_delay_ms;
  int render_delay_ms;
  if (timing_->GetTimings(&max_decode_ms, &current_delay_ms, &target_delay_ms,
                          &jitter_buffer_ms, &min_playout_delay_ms,
                          &render_delay_ms)) {
    stats_callback_->OnFrameBufferTimingsUpdated(
        max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
        min_playout_delay_ms, render_delay_ms);
  }
}

void FrameBuffer::UpdateTimingFrameInfo() {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
  absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
  if (info && stats_callback_)
    stats_callback_->OnTimingFrameInfoUpdated(*info);
}

void FrameBuffer::ClearFramesAndHistory() {
  TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
  if (stats_callback_) {
    unsigned int dropped_frames =
        std::count_if(frames_.begin(), frames_.end(),
                      [](const std::pair<const int64_t, FrameInfo>& frame) {
                        return frame.second.frame != nullptr;
                      });
    if (dropped_frames > 0) {
      stats_callback_->OnDroppedFrames(dropped_frames);
    }
  }
  frames_.clear();
  last_continuous_frame_.reset();
  frames_to_decode_.clear();
  decoded_frames_history_.Clear();
}

// TODO(philipel): Avoid the concatenation of frames here, by replacing
// NextFrame and GetNextFrame with methods returning multiple frames.
EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
    const std::vector<EncodedFrame*>& frames) const {
  RTC_DCHECK(!frames.empty());
  EncodedFrame* first_frame = frames[0];
  EncodedFrame* last_frame = frames.back();
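  // Allocate one buffer large enough to hold the payloads of all spatial
  // layers, which are then copied into it back to back.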
  size_t total_length = 0;
  for (size_t i = 0; i < frames.size(); ++i) {
    total_length += frames[i]->size();
  }
  auto encoded_image_buffer = EncodedImageBuffer::Create(total_length);
  uint8_t* buffer = encoded_image_buffer->data();
  first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0),
                                        first_frame->size());
  memcpy(buffer, first_frame->data(), first_frame->size());
  buffer += first_frame->size();

  // Spatial index of combined frame is set equal to spatial index of its top
  // spatial layer.
  first_frame->SetSpatialIndex(last_frame->SpatialIndex().value_or(0));

  first_frame->video_timing_mutable()->network2_timestamp_ms =
      last_frame->video_timing().network2_timestamp_ms;
  first_frame->video_timing_mutable()->receive_finish_ms =
      last_frame->video_timing().receive_finish_ms;

  // Append all remaining frames to the first one.
  for (size_t i = 1; i < frames.size(); ++i) {
    EncodedFrame* next_frame = frames[i];
    first_frame->SetSpatialLayerFrameSize(
        next_frame->SpatialIndex().value_or(0), next_frame->size());
    memcpy(buffer, next_frame->data(), next_frame->size());
    buffer += next_frame->size();
    delete next_frame;
  }
  first_frame->SetEncodedData(encoded_image_buffer);
  return first_frame;
}

FrameBuffer::FrameInfo::FrameInfo() = default;
FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
FrameBuffer::FrameInfo::~FrameInfo() = default;

}  // namespace video_coding
}  // namespace webrtc