blob: be97b645bacab531197b558cb2f0d55d1e21f838 [file] [log] [blame]
philipelbe7a9e52016-05-19 12:19:35 +02001/*
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020011#include "modules/video_coding/frame_buffer2.h"
philipelbe7a9e52016-05-19 12:19:35 +020012
13#include <algorithm>
Yves Gerey3e707812018-11-28 16:47:49 +010014#include <cstdlib>
15#include <iterator>
philipele0b2f152016-09-28 10:23:49 +020016#include <queue>
Yves Gerey3e707812018-11-28 16:47:49 +010017#include <utility>
philipel798b2822018-06-11 13:10:14 +020018#include <vector>
philipelbe7a9e52016-05-19 12:19:35 +020019
Sebastian Jansson11d0d7b2019-04-11 12:39:34 +020020#include "absl/memory/memory.h"
Yves Gerey3e707812018-11-28 16:47:49 +010021#include "api/video/encoded_image.h"
22#include "api/video/video_timing.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020023#include "modules/video_coding/include/video_coding_defines.h"
24#include "modules/video_coding/jitter_estimator.h"
25#include "modules/video_coding/timing.h"
26#include "rtc_base/checks.h"
Yves Gerey3e707812018-11-28 16:47:49 +010027#include "rtc_base/experiments/rtt_mult_experiment.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020028#include "rtc_base/logging.h"
Yves Gerey3e707812018-11-28 16:47:49 +010029#include "rtc_base/numerics/sequence_number_util.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020030#include "rtc_base/trace_event.h"
31#include "system_wrappers/include/clock.h"
philipel707f2782017-10-02 14:10:28 +020032#include "system_wrappers/include/field_trial.h"
philipelbe7a9e52016-05-19 12:19:35 +020033
34namespace webrtc {
35namespace video_coding {
36
37namespace {
philipele0b2f152016-09-28 10:23:49 +020038// Max number of frames the buffer will hold.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +010039constexpr size_t kMaxFramesBuffered = 800;
philipelbe7a9e52016-05-19 12:19:35 +020040
philipele0b2f152016-09-28 10:23:49 +020041// Max number of decoded frame info that will be saved.
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +010042constexpr int kMaxFramesHistory = 1 << 13;
philipel65e1f942017-07-24 08:26:53 -070043
Ilya Nikolaevskiy8c4fe162018-02-27 15:49:47 +010044// The time it's allowed for a frame to be late to its rendering prediction and
45// still be rendered.
Ilya Nikolaevskiy7eef0072018-02-28 09:59:26 +010046constexpr int kMaxAllowedFrameDelayMs = 5;
Ilya Nikolaevskiy8c4fe162018-02-27 15:49:47 +010047
philipel65e1f942017-07-24 08:26:53 -070048constexpr int64_t kLogNonDecodedIntervalMs = 5000;
philipelbe7a9e52016-05-19 12:19:35 +020049} // namespace
50
// Constructs the frame buffer. |clock| and |timing| must outlive this object;
// |stats_callback| may be null, in which case no statistics are reported.
FrameBuffer::FrameBuffer(Clock* clock,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : decoded_frames_history_(kMaxFramesHistory),
      clock_(clock),
      callback_queue_(nullptr),
      jitter_estimator_(clock),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback),
      // Initialized to -interval so the very first non-decoded-dependency
      // warning is not suppressed by the rate limiter.
      last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
      add_rtt_to_playout_delay_(
          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
      rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {
  // The callback sequence is not known yet; it is bound on first use in
  // NextFrame()/the delayed task.
  callback_checker_.Detach();
}
philipel266f0a42016-11-28 08:49:07 -080069
FrameBuffer::~FrameBuffer() {
  // Destruction must happen on the same sequence that constructed the buffer.
  RTC_DCHECK_RUN_ON(&construction_checker_);
}
philipelbe7a9e52016-05-19 12:19:35 +020073
// Requests the next decodable frame. Must be called on |callback_queue|;
// |handler| is invoked later on that queue with either a frame (kFrameFound)
// or nullptr (kTimeout) once |max_wait_time_ms| has elapsed. A no-op if the
// buffer has been stopped.
void FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    bool keyframe_required,
    rtc::TaskQueue* callback_queue,
    std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
  RTC_DCHECK_RUN_ON(&callback_checker_);
  RTC_DCHECK(callback_queue->IsCurrent());
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  // Compute the absolute deadline before taking the lock.
  int64_t latest_return_time_ms =
      clock_->TimeInMilliseconds() + max_wait_time_ms;

  MutexLock lock(&mutex_);
  if (stopped_) {
    return;
  }
  latest_return_time_ms_ = latest_return_time_ms;
  keyframe_required_ = keyframe_required;
  frame_handler_ = handler;
  callback_queue_ = callback_queue;
  StartWaitForNextFrameOnQueue();
}
95
// Schedules a delayed task on |callback_queue_| that fires when the next
// frame is expected to be ready (per FindNextFrame). Called with |mutex_|
// held. The task either delivers a frame, signals a timeout, or reschedules
// itself if the buffer was cleared in the meantime.
void FrameBuffer::StartWaitForNextFrameOnQueue() {
  RTC_DCHECK(callback_queue_);
  RTC_DCHECK(!callback_task_.Running());
  int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
  callback_task_ = RepeatingTaskHandle::DelayedStart(
      callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] {
        RTC_DCHECK_RUN_ON(&callback_checker_);
        // If this task has not been cancelled, we did not get any new frames
        // while waiting. Continue with frame delivery.
        MutexLock lock(&mutex_);
        if (!frames_to_decode_.empty()) {
          // We have frames, deliver!
          frame_handler_(absl::WrapUnique(GetNextFrame()), kFrameFound);
          CancelCallback();
          return TimeDelta::Zero();  // Ignored.
        } else if (clock_->TimeInMilliseconds() >= latest_return_time_ms_) {
          // We have timed out, signal this and stop repeating.
          frame_handler_(nullptr, kTimeout);
          CancelCallback();
          return TimeDelta::Zero();  // Ignored.
        } else {
          // If there's no frames to decode and there is still time left, it
          // means that the frame buffer was cleared between creation and
          // execution of this task. Continue waiting for the remaining time.
          int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
          return TimeDelta::Millis(wait_ms);
        }
      });
}
125
// Scans the buffer for the next complete, decodable superframe and stores its
// frames (in spatial-layer order) in |frames_to_decode_|. Returns how many
// milliseconds to wait before that superframe should be delivered, clamped to
// [0, latest_return_time_ms_ - now_ms]. Called with |mutex_| held.
int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
  int64_t wait_ms = latest_return_time_ms_ - now_ms;
  frames_to_decode_.clear();

  // |last_continuous_frame_| may be empty below, but nullopt is smaller
  // than everything else and loop will immediately terminate as expected.
  for (auto frame_it = frames_.begin();
       frame_it != frames_.end() && frame_it->first <= last_continuous_frame_;
       ++frame_it) {
    // Only consider frames that are continuous and whose references have all
    // been decoded.
    if (!frame_it->second.continuous ||
        frame_it->second.num_missing_decodable > 0) {
      continue;
    }

    EncodedFrame* frame = frame_it->second.frame.get();

    if (keyframe_required_ && !frame->is_keyframe())
      continue;

    auto last_decoded_frame_timestamp =
        decoded_frames_history_.GetLastDecodedFrameTimestamp();

    // TODO(https://bugs.webrtc.org/9974): consider removing this check
    // as it may make a stream undecodable after a very long delay between
    // frames.
    if (last_decoded_frame_timestamp &&
        AheadOf(*last_decoded_frame_timestamp, frame->Timestamp())) {
      continue;
    }

    // Gather all remaining frames for the same superframe.
    std::vector<FrameMap::iterator> current_superframe;
    current_superframe.push_back(frame_it);
    bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer;
    FrameMap::iterator next_frame_it = frame_it;
    // Walk forward through the map collecting higher spatial layers with the
    // same RTP timestamp until the last spatial layer is found or a gap stops
    // us.
    while (!last_layer_completed) {
      ++next_frame_it;

      if (next_frame_it == frames_.end() || !next_frame_it->second.frame) {
        break;
      }

      if (next_frame_it->second.frame->Timestamp() != frame->Timestamp() ||
          !next_frame_it->second.continuous) {
        break;
      }

      if (next_frame_it->second.num_missing_decodable > 0) {
        // For now VP9 uses the inter_layer_predicted to signal a dependency
        // instead of adding it as a reference.
        // TODO(webrtc:12206): Stop using inter_layer_predicted for VP9.
        bool has_inter_layer_dependency =
            next_frame_it->second.frame->inter_layer_predicted;
        for (size_t i = 0; !has_inter_layer_dependency &&
                           i < EncodedFrame::kMaxFrameReferences &&
                           i < next_frame_it->second.frame->num_references;
             ++i) {
          if (next_frame_it->second.frame->references[i] >=
              frame_it->first.picture_id) {
            has_inter_layer_dependency = true;
          }
        }

        // If the frame has an undecoded dependency that is not within the same
        // temporal unit then this frame is not ready to be decoded yet. If it
        // is within the same temporal unit then the not yet decoded dependency
        // is just a lower spatial frame, which is ok.
        if (!has_inter_layer_dependency ||
            next_frame_it->second.num_missing_decodable > 1) {
          break;
        }
      }

      current_superframe.push_back(next_frame_it);
      last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer;
    }
    // Check if the current superframe is complete.
    // TODO(bugs.webrtc.org/10064): consider returning all available to
    // decode frames even if the superframe is not complete yet.
    if (!last_layer_completed) {
      continue;
    }

    frames_to_decode_ = std::move(current_superframe);

    // -1 means the render time has not been set yet; compute it now.
    if (frame->RenderTime() == -1) {
      frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
    }
    wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

    // This will cause the frame buffer to prefer high framerate rather
    // than high resolution in the case of the decoder not decoding fast
    // enough and the stream has multiple spatial and temporal layers.
    // For multiple temporal layers it may cause non-base layer frames to be
    // skipped if they are late.
    if (wait_ms < -kMaxAllowedFrameDelayMs)
      continue;

    break;
  }
  wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
  wait_ms = std::max<int64_t>(wait_ms, 0);
  return wait_ms;
}
230
// Extracts the superframe previously selected by FindNextFrame (in
// |frames_to_decode_|), updates jitter/timing estimates and stats, erases the
// delivered frames (plus any older undecoded frames) from the buffer, and
// returns ownership of the resulting frame to the caller. Called on the
// callback queue with |mutex_| held; |frames_to_decode_| must be non-empty.
EncodedFrame* FrameBuffer::GetNextFrame() {
  RTC_DCHECK_RUN_ON(&callback_checker_);
  int64_t now_ms = clock_->TimeInMilliseconds();
  // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
  std::vector<EncodedFrame*> frames_out;

  RTC_DCHECK(!frames_to_decode_.empty());
  bool superframe_delayed_by_retransmission = false;
  size_t superframe_size = 0;
  EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();
  int64_t render_time_ms = first_frame->RenderTime();
  int64_t receive_time_ms = first_frame->ReceivedTime();
  // Gracefully handle bad RTP timestamps and render time issues.
  if (HasBadRenderTiming(*first_frame, now_ms)) {
    jitter_estimator_.Reset();
    timing_->Reset();
    render_time_ms = timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
  }

  for (FrameMap::iterator& frame_it : frames_to_decode_) {
    RTC_DCHECK(frame_it != frames_.end());
    // Take ownership out of the map entry; the entry itself is erased below.
    EncodedFrame* frame = frame_it->second.frame.release();

    frame->SetRenderTime(render_time_ms);

    superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
    receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
    superframe_size += frame->size();

    PropagateDecodability(frame_it->second);
    decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());

    // Remove decoded frame and all undecoded frames before it.
    if (stats_callback_) {
      // Entries before |frame_it| that still hold a frame were never decoded;
      // report them as dropped before they are erased.
      unsigned int dropped_frames = std::count_if(
          frames_.begin(), frame_it,
          [](const std::pair<const VideoLayerFrameId, FrameInfo>& frame) {
            return frame.second.frame != nullptr;
          });
      if (dropped_frames > 0) {
        stats_callback_->OnDroppedFrames(dropped_frames);
      }
    }

    frames_.erase(frames_.begin(), ++frame_it);

    frames_out.push_back(frame);
  }

  if (!superframe_delayed_by_retransmission) {
    int64_t frame_delay;

    if (inter_frame_delay_.CalculateDelay(first_frame->Timestamp(),
                                          &frame_delay, receive_time_ms)) {
      jitter_estimator_.UpdateEstimate(frame_delay, superframe_size);
    }

    // With NACK+FEC protection the RTT is not added to the jitter estimate
    // unless the RttMult experiment overrides it.
    float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
    absl::optional<float> rtt_mult_add_cap_ms = absl::nullopt;
    if (rtt_mult_settings_.has_value()) {
      rtt_mult = rtt_mult_settings_->rtt_mult_setting;
      rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms;
    }
    timing_->SetJitterDelay(
        jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms));
    timing_->UpdateCurrentDelay(render_time_ms, now_ms);
  } else {
    if (RttMultExperiment::RttMultEnabled() || add_rtt_to_playout_delay_)
      jitter_estimator_.FrameNacked();
  }

  UpdateJitterDelay();
  UpdateTimingFrameInfo();

  // Multiple spatial layers are combined into a single output frame.
  if (frames_out.size() == 1) {
    return frames_out[0];
  } else {
    return CombineAndDeleteFrames(frames_out);
  }
}
311
philipele7c891f2018-02-22 14:35:06 +0100312bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
313 int64_t now_ms) {
stefan95e97542017-05-23 09:52:18 -0700314 // Assume that render timing errors are due to changes in the video stream.
315 int64_t render_time_ms = frame.RenderTimeMs();
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200316 // Zero render time means render immediately.
317 if (render_time_ms == 0) {
318 return false;
319 }
stefan95e97542017-05-23 09:52:18 -0700320 if (render_time_ms < 0) {
321 return true;
322 }
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200323 const int64_t kMaxVideoDelayMs = 10000;
stefan95e97542017-05-23 09:52:18 -0700324 if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
325 int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
Mirko Bonadei675513b2017-11-09 11:09:25 +0100326 RTC_LOG(LS_WARNING)
327 << "A frame about to be decoded is out of the configured "
Jonas Olssonb2b20312020-01-14 12:11:31 +0100328 "delay bounds ("
329 << frame_delay << " > " << kMaxVideoDelayMs
Mirko Bonadei675513b2017-11-09 11:09:25 +0100330 << "). Resetting the video jitter buffer.";
stefan95e97542017-05-23 09:52:18 -0700331 return true;
332 }
333 if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100334 RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
335 << kMaxVideoDelayMs << " ms.";
stefan95e97542017-05-23 09:52:18 -0700336 return true;
337 }
338 return false;
339}
340
// Sets the protection mode (e.g. NACK vs NACK+FEC), which affects how the RTT
// is factored into the jitter estimate in GetNextFrame.
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  MutexLock lock(&mutex_);
  protection_mode_ = mode;
}
346
philipel504c47d2016-06-30 17:33:02 +0200347void FrameBuffer::Stop() {
tommidb23ea62017-03-03 07:21:18 -0800348 TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
Markus Handell6deec382020-07-07 12:17:12 +0200349 MutexLock lock(&mutex_);
Tommi430951a2020-05-19 23:27:29 +0200350 if (stopped_)
351 return;
Henrik Boströmc680c4a2019-04-03 10:27:36 +0000352 stopped_ = true;
Tommi430951a2020-05-19 23:27:29 +0200353
Sebastian Jansson11d0d7b2019-04-11 12:39:34 +0200354 CancelCallback();
philipel504c47d2016-06-30 17:33:02 +0200355}
356
// Drops all buffered frames and decode history.
void FrameBuffer::Clear() {
  MutexLock lock(&mutex_);
  ClearFramesAndHistory();
}
361
// Returns the number of frame entries currently buffered.
int FrameBuffer::Size() {
  MutexLock lock(&mutex_);
  return frames_.size();
}
366
// Forwards the latest round-trip time to the jitter estimator.
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  MutexLock lock(&mutex_);
  jitter_estimator_.UpdateRtt(rtt_ms);
}
371
philipele7c891f2018-02-22 14:35:06 +0100372bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
philipel112adf92017-06-15 09:06:21 -0700373 for (size_t i = 0; i < frame.num_references; ++i) {
Philip Eliasson1f850a62019-03-19 12:15:00 +0000374 if (frame.references[i] >= frame.id.picture_id)
philipel112adf92017-06-15 09:06:21 -0700375 return false;
philipel3b3c9c42017-09-11 09:38:36 -0700376
philipel112adf92017-06-15 09:06:21 -0700377 for (size_t j = i + 1; j < frame.num_references; ++j) {
378 if (frame.references[i] == frame.references[j])
379 return false;
380 }
381 }
382
philipel0fa82a62018-03-19 15:34:53 +0100383 if (frame.inter_layer_predicted && frame.id.spatial_layer == 0)
philipel112adf92017-06-15 09:06:21 -0700384 return false;
385
386 return true;
387}
388
// Clears the pending frame-delivery state: handler, repeating task and queue.
// Detaches |callback_checker_| so a later NextFrame may come from a different
// task queue.
void FrameBuffer::CancelCallback() {
  // Called from the callback queue or from within Stop().
  frame_handler_ = {};
  callback_task_.Stop();
  callback_queue_ = nullptr;
  callback_checker_.Detach();
}
396
// Inserts a frame into the buffer. Returns the picture id of the last
// continuous frame (or -1 if there is none), which lets the caller detect
// whether continuity advanced. Frames with invalid references, frames older
// than the last decoded frame, and inserts into a full buffer may be dropped.
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);

  MutexLock lock(&mutex_);

  const VideoLayerFrameId& id = frame->id;
  int64_t last_continuous_picture_id =
      !last_continuous_frame_ ? -1 : last_continuous_frame_->picture_id;

  if (!ValidReferences(*frame)) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") has invalid frame references, dropping frame.";
    return last_continuous_picture_id;
  }

  if (frames_.size() >= kMaxFramesBuffered) {
    // A keyframe lets us restart from scratch; anything else is dropped.
    if (frame->is_keyframe()) {
      RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") but buffer is full, clearing"
                             " buffer and inserting the frame.";
      ClearFramesAndHistory();
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") could not be inserted due to the frame "
                             "buffer being full, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  auto last_decoded_frame_timestamp =
      decoded_frames_history_.GetLastDecodedFrameTimestamp();
  if (last_decoded_frame && id <= *last_decoded_frame) {
    if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) &&
        frame->is_keyframe()) {
      // If this frame has a newer timestamp but an earlier picture id then we
      // assume there has been a jump in the picture id due to some encoder
      // reconfiguration or some other reason. Even though this is not according
      // to spec we can still continue to decode from this frame if it is a
      // keyframe.
      RTC_LOG(LS_WARNING)
          << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") inserted after frame ("
                          << last_decoded_frame->picture_id << ":"
                          << static_cast<int>(last_decoded_frame->spatial_layer)
                          << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the picture id make large jumps mid stream.
  if (!frames_.empty() && id < frames_.begin()->first &&
      frames_.rbegin()->first < id) {
    RTC_LOG(LS_WARNING)
        << "A jump in picture id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_picture_id = -1;
  }

  auto info = frames_.emplace(id, FrameInfo()).first;

  // Duplicate insert: an entry with a frame already exists for this id.
  if (info->second.frame) {
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;

  if (!frame->delayed_by_retransmission())
    timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());

  // It can happen that a frame will be reported as fully received even if a
  // lower spatial layer frame is missing.
  if (stats_callback_ && frame->is_last_spatial_layer) {
    stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
                                     frame->contentType());
  }

  info->second.frame = std::move(frame);

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_->picture_id;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame.
    if (callback_queue_) {
      callback_queue_->PostTask([this] {
        MutexLock lock(&mutex_);
        if (!callback_task_.Running())
          return;
        RTC_CHECK(frame_handler_);
        callback_task_.Stop();
        StartWaitForNextFrameOnQueue();
      });
    }
  }

  return last_continuous_picture_id;
}
513
// Marks frames that depend (transitively) on |start| as continuous once all
// their dependencies are continuous, and advances |last_continuous_frame_|.
// |start| itself must already be continuous.
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
  RTC_DCHECK(start->second.continuous);

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (!last_continuous_frame_ || *last_continuous_frame_ < frame->first) {
      last_continuous_frame_ = frame->first;
    }

    // Loop through all dependent frames, and if that frame no longer has
    // any unfulfilled dependencies then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      RTC_DCHECK(frame_ref != frames_.end());

      // TODO(philipel): Look into why we've seen this happen.
      if (frame_ref != frames_.end()) {
        --frame_ref->second.num_missing_continuous;
        if (frame_ref->second.num_missing_continuous == 0) {
          frame_ref->second.continuous = true;
          continuous_frames.push(frame_ref);
        }
      }
    }
  }
}
547
// Decrements |num_missing_decodable| on every frame that listed |info|'s
// frame as a dependency. Called when that frame is handed off for decoding.
void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
  for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
    auto ref_info = frames_.find(info.dependent_frames[d]);
    RTC_DCHECK(ref_info != frames_.end());
    // TODO(philipel): Look into why we've seen this happen.
    if (ref_info != frames_.end()) {
      RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
      --ref_info->second.num_missing_decodable;
    }
  }
}
560
// Computes the dependency bookkeeping for a newly inserted |frame|: counts how
// many of its references are still missing (for continuity and decodability)
// and registers back-references on those dependencies. Returns false if the
// frame can never become decodable (it depends on a frame that was skipped,
// never decoded, and already purged from history range).
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  const VideoLayerFrameId& id = frame.id;

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first);

  // In this function we determine how many missing dependencies this |frame|
  // has to become continuous/decodable. If a frame that this |frame| depend
  // on has already been decoded then we can ignore that dependency since it has
  // already been fulfilled.
  //
  // For all other frames we will register a backwards reference to this |frame|
  // so that |num_missing_continuous| and |num_missing_decodable| can be
  // decremented as frames become continuous/are decoded.
  struct Dependency {
    VideoLayerFrameId id;
    bool continuous;
  };
  std::vector<Dependency> not_yet_fulfilled_dependencies;

  // Find all dependencies that have not yet been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer);
    // Does |frame| depend on a frame earlier than the last decoded one?
    if (last_decoded_frame && ref_key <= *last_decoded_frame) {
      // Was that frame decoded? If not, this |frame| will never become
      // decodable.
      if (!decoded_frames_history_.WasDecoded(ref_key)) {
        int64_t now_ms = clock_->TimeInMilliseconds();
        // Rate-limited warning (at most once per kLogNonDecodedIntervalMs).
        if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
          RTC_LOG(LS_WARNING)
              << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
              << static_cast<int>(id.spatial_layer)
              << ") depends on a non-decoded frame more previous than"
                 " the last decoded frame, dropping frame.";
          last_log_non_decoded_ms_ = now_ms;
        }
        return false;
      }
    } else {
      auto ref_info = frames_.find(ref_key);
      bool ref_continuous =
          ref_info != frames_.end() && ref_info->second.continuous;
      not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous});
    }
  }

  // Does |frame| depend on the lower spatial layer?
  if (frame.inter_layer_predicted) {
    VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1);
    auto ref_info = frames_.find(ref_key);

    bool lower_layer_decoded =
        last_decoded_frame && *last_decoded_frame == ref_key;
    bool lower_layer_continuous =
        lower_layer_decoded ||
        (ref_info != frames_.end() && ref_info->second.continuous);

    if (!lower_layer_continuous || !lower_layer_decoded) {
      not_yet_fulfilled_dependencies.push_back(
          {ref_key, lower_layer_continuous});
    }
  }

  info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
  info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();

  for (const Dependency& dep : not_yet_fulfilled_dependencies) {
    if (dep.continuous)
      --info->second.num_missing_continuous;

    frames_[dep.id].dependent_frames.push_back(id);
  }

  return true;
}
639
philipelbe742702016-11-30 01:31:40 -0800640void FrameBuffer::UpdateJitterDelay() {
tommidb23ea62017-03-03 07:21:18 -0800641 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
philipela45102f2017-02-22 05:30:39 -0800642 if (!stats_callback_)
643 return;
philipelbe742702016-11-30 01:31:40 -0800644
philipela45102f2017-02-22 05:30:39 -0800645 int max_decode_ms;
646 int current_delay_ms;
647 int target_delay_ms;
648 int jitter_buffer_ms;
649 int min_playout_delay_ms;
650 int render_delay_ms;
Johannes Kronbfd343b2019-07-01 10:07:50 +0200651 if (timing_->GetTimings(&max_decode_ms, &current_delay_ms, &target_delay_ms,
652 &jitter_buffer_ms, &min_playout_delay_ms,
653 &render_delay_ms)) {
philipela45102f2017-02-22 05:30:39 -0800654 stats_callback_->OnFrameBufferTimingsUpdated(
Johannes Kronbfd343b2019-07-01 10:07:50 +0200655 max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms,
656 min_playout_delay_ms, render_delay_ms);
philipelbe742702016-11-30 01:31:40 -0800657 }
philipel266f0a42016-11-28 08:49:07 -0800658}
659
ilnik2edc6842017-07-06 03:06:50 -0700660void FrameBuffer::UpdateTimingFrameInfo() {
661 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
Danil Chapovalov0040b662018-06-18 10:48:16 +0200662 absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
philipel97187112018-03-23 10:43:21 +0100663 if (info && stats_callback_)
ilnik2edc6842017-07-06 03:06:50 -0700664 stats_callback_->OnTimingFrameInfoUpdated(*info);
665}
666
philipelfcc60062017-01-18 05:35:20 -0800667void FrameBuffer::ClearFramesAndHistory() {
ilnik2edc6842017-07-06 03:06:50 -0700668 TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
Johannes Kron0c141c52019-08-26 15:04:43 +0200669 if (stats_callback_) {
670 unsigned int dropped_frames = std::count_if(
671 frames_.begin(), frames_.end(),
672 [](const std::pair<const VideoLayerFrameId, FrameInfo>& frame) {
673 return frame.second.frame != nullptr;
674 });
675 if (dropped_frames > 0) {
676 stats_callback_->OnDroppedFrames(dropped_frames);
677 }
678 }
philipelfcc60062017-01-18 05:35:20 -0800679 frames_.clear();
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100680 last_continuous_frame_.reset();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100681 frames_to_decode_.clear();
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100682 decoded_frames_history_.Clear();
philipelfcc60062017-01-18 05:35:20 -0800683}
684
Niels Möllerff2e2152019-09-27 10:29:30 +0200685// TODO(philipel): Avoid the concatenation of frames here, by replacing
686// NextFrame and GetNextFrame with methods returning multiple frames.
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100687EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
688 const std::vector<EncodedFrame*>& frames) const {
689 RTC_DCHECK(!frames.empty());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100690 EncodedFrame* first_frame = frames[0];
691 EncodedFrame* last_frame = frames.back();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100692 size_t total_length = 0;
693 for (size_t i = 0; i < frames.size(); ++i) {
694 total_length += frames[i]->size();
695 }
Niels Möllerff2e2152019-09-27 10:29:30 +0200696 auto encoded_image_buffer = EncodedImageBuffer::Create(total_length);
697 uint8_t* buffer = encoded_image_buffer->data();
philipel0cb73262020-12-08 17:36:53 +0100698 first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0),
Sergey Silkin2799e632019-05-17 09:51:39 +0200699 first_frame->size());
Niels Möllerff2e2152019-09-27 10:29:30 +0200700 memcpy(buffer, first_frame->data(), first_frame->size());
701 buffer += first_frame->size();
Sergey Silkin2799e632019-05-17 09:51:39 +0200702
Sergey Silkin61832dd2018-12-20 14:32:14 +0100703 // Spatial index of combined frame is set equal to spatial index of its top
704 // spatial layer.
philipel0cb73262020-12-08 17:36:53 +0100705 first_frame->SetSpatialIndex(last_frame->SpatialIndex().value_or(0));
Sergey Silkin61832dd2018-12-20 14:32:14 +0100706 first_frame->id.spatial_layer = last_frame->id.spatial_layer;
707
708 first_frame->video_timing_mutable()->network2_timestamp_ms =
709 last_frame->video_timing().network2_timestamp_ms;
710 first_frame->video_timing_mutable()->receive_finish_ms =
711 last_frame->video_timing().receive_finish_ms;
712
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100713 // Append all remaining frames to the first one.
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100714 for (size_t i = 1; i < frames.size(); ++i) {
Sergey Silkin61832dd2018-12-20 14:32:14 +0100715 EncodedFrame* next_frame = frames[i];
philipel0cb73262020-12-08 17:36:53 +0100716 first_frame->SetSpatialLayerFrameSize(
717 next_frame->SpatialIndex().value_or(0), next_frame->size());
Niels Möller9c843902019-01-11 10:21:35 +0100718 memcpy(buffer, next_frame->data(), next_frame->size());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100719 buffer += next_frame->size();
720 delete next_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100721 }
Niels Möllerff2e2152019-09-27 10:29:30 +0200722 first_frame->SetEncodedData(encoded_image_buffer);
Sergey Silkin61832dd2018-12-20 14:32:14 +0100723 return first_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100724}
725
// Out-of-line defaulted special members for FrameBuffer::FrameInfo (move-only:
// no copy operations are defaulted here).
// NOTE(review): presumably defined out-of-line to keep heavy member types out
// of the header — confirm against frame_buffer2.h.
FrameBuffer::FrameInfo::FrameInfo() = default;
FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
FrameBuffer::FrameInfo::~FrameInfo() = default;
729
philipelbe7a9e52016-05-19 12:19:35 +0200730} // namespace video_coding
731} // namespace webrtc