blob: a65401901d356ede8cda5df21530d0998143e0a3 [file] [log] [blame]
philipelbe7a9e52016-05-19 12:19:35 +02001/*
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020011#include "modules/video_coding/frame_buffer2.h"
philipelbe7a9e52016-05-19 12:19:35 +020012
13#include <algorithm>
Yves Gerey3e707812018-11-28 16:47:49 +010014#include <cstdlib>
15#include <iterator>
philipele0b2f152016-09-28 10:23:49 +020016#include <queue>
Yves Gerey3e707812018-11-28 16:47:49 +010017#include <utility>
philipel798b2822018-06-11 13:10:14 +020018#include <vector>
philipelbe7a9e52016-05-19 12:19:35 +020019
Sebastian Jansson13943b72019-04-02 15:08:14 +020020#include "absl/memory/memory.h"
21#include "api/task_queue/global_task_queue_factory.h"
Yves Gerey3e707812018-11-28 16:47:49 +010022#include "api/video/encoded_image.h"
23#include "api/video/video_timing.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020024#include "modules/video_coding/include/video_coding_defines.h"
25#include "modules/video_coding/jitter_estimator.h"
26#include "modules/video_coding/timing.h"
27#include "rtc_base/checks.h"
Yves Gerey3e707812018-11-28 16:47:49 +010028#include "rtc_base/experiments/rtt_mult_experiment.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020029#include "rtc_base/logging.h"
Yves Gerey3e707812018-11-28 16:47:49 +010030#include "rtc_base/numerics/sequence_number_util.h"
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020031#include "rtc_base/trace_event.h"
32#include "system_wrappers/include/clock.h"
philipel707f2782017-10-02 14:10:28 +020033#include "system_wrappers/include/field_trial.h"
philipelbe7a9e52016-05-19 12:19:35 +020034
35namespace webrtc {
36namespace video_coding {
37
namespace {
// Max number of frames the buffer will hold.
constexpr size_t kMaxFramesBuffered = 800;

// Max number of decoded frame info that will be saved.
constexpr int kMaxFramesHistory = 1 << 13;

// The time it's allowed for a frame to be late to its rendering prediction and
// still be rendered.
constexpr int kMaxAllowedFrameDelayMs = 5;

// Rate limit for the "depends on a non-decoded frame" warning log.
constexpr int64_t kLogNonDecodedIntervalMs = 5000;

// Creates the high-priority task queue used by FrameBuffer. Falls back to
// the process-global factory when no factory is injected, so a queue always
// exists even in the legacy (non-task-queue) mode.
std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateQueue(
    TaskQueueFactory* task_queue_factory) {
  if (!task_queue_factory)
    task_queue_factory = &GlobalTaskQueueFactory();
  return task_queue_factory->CreateTaskQueue("FrameBuffer",
                                             TaskQueueFactory::Priority::HIGH);
}
}  // namespace
philipelbe7a9e52016-05-19 12:19:35 +020058} // namespace
59
// Legacy constructor without an injected TaskQueueFactory. Delegates to the
// factory-aware constructor with a null factory, which selects the
// event-driven mode (|use_task_queue_| == false, blocking NextFrame()).
FrameBuffer::FrameBuffer(Clock* clock,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_proxy)
    : FrameBuffer(clock, nullptr, jitter_estimator, timing, stats_proxy) {}
65
FrameBuffer::FrameBuffer(Clock* clock,
                         TaskQueueFactory* task_queue_factory,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : decoded_frames_history_(kMaxFramesHistory),
      clock_(clock),
      // Task-queue mode is enabled only when a factory was explicitly
      // injected; a null factory keeps the legacy event-driven mode.
      use_task_queue_(task_queue_factory != nullptr),
      jitter_estimator_(jitter_estimator),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback),
      // Start negative so the very first non-decoded warning is not
      // suppressed by the rate limit.
      last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
      add_rtt_to_playout_delay_(
          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
      // CreateQueue() falls back to the global factory when
      // |task_queue_factory| is null, so |task_queue_| always exists.
      task_queue_(CreateQueue(task_queue_factory)) {}
philipel266f0a42016-11-28 08:49:07 -080084
philipela45102f2017-02-22 05:30:39 -080085FrameBuffer::~FrameBuffer() {}
philipelbe7a9e52016-05-19 12:19:35 +020086
// Asynchronous variant of NextFrame(); only valid in task-queue mode.
// |handler| is invoked later on |task_queue_| with either a frame
// (kFrameFound) or nullptr (kTimeout). If the buffer is stopped before the
// posted task runs, |handler| is never called.
void FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    bool keyframe_required,
    std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
  RTC_DCHECK(use_task_queue_);
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  // Compute the absolute deadline on the caller's thread so that any
  // queueing delay counts against |max_wait_time_ms|.
  int64_t latest_return_time_ms =
      clock_->TimeInMilliseconds() + max_wait_time_ms;
  task_queue_.PostTask([=] {
    RTC_DCHECK_RUN_ON(&task_queue_);
    rtc::CritScope lock(&crit_);
    if (stopped_) {
      // Stop() already ran; drop the request without calling back.
      return;
    }
    latest_return_time_ms_ = latest_return_time_ms;
    keyframe_required_ = keyframe_required;
    frame_handler_ = handler;
    NextFrameOnQueue();
  });
}
107
// Computes the wait time for the pending frame request and arms a delayed
// task that delivers a frame, signals a timeout, or re-arms itself. Must be
// called on |task_queue_| with no delivery task already running; InsertFrame
// cancels and re-calls this when a new continuous frame arrives.
void FrameBuffer::NextFrameOnQueue() {
  RTC_DCHECK(use_task_queue_);
  RTC_DCHECK(!callback_task_.Running());
  int64_t wait_ms = UpdateFramesToDecode(clock_->TimeInMilliseconds());
  callback_task_ = RepeatingTaskHandle::DelayedStart(
      task_queue_.Get(), TimeDelta::ms(wait_ms), [this] {
        // If this task has not been cancelled, we did not get any new frames
        // while waiting. Continue with frame delivery.
        RTC_DCHECK_RUN_ON(&task_queue_);
        rtc::CritScope lock(&crit_);
        if (!frames_to_decode_.empty()) {
          // We have frames, deliver!
          frame_handler_(absl::WrapUnique(GetFrameToDecode()), kFrameFound);
          frame_handler_ = {};
          callback_task_.Stop();
          return TimeDelta::Zero();  // Ignored.
        } else if (clock_->TimeInMilliseconds() >= latest_return_time_ms_) {
          // We have timed out, signal this and stop repeating.
          frame_handler_(nullptr, kTimeout);
          frame_handler_ = {};
          callback_task_.Stop();
          return TimeDelta::Zero();  // Ignored.
        } else {
          // If there's no frames to decode and there is still time left, it
          // means that the frame buffer was cleared between creation and
          // execution of this task. Continue waiting for the remaining time.
          int64_t wait_ms = UpdateFramesToDecode(clock_->TimeInMilliseconds());
          return TimeDelta::ms(wait_ms);
        }
      });
}
139
philipel75562822016-09-05 10:57:41 +0200140FrameBuffer::ReturnReason FrameBuffer::NextFrame(
141 int64_t max_wait_time_ms,
philipele7c891f2018-02-22 14:35:06 +0100142 std::unique_ptr<EncodedFrame>* frame_out,
philipel3042c2d2017-08-18 04:55:02 -0700143 bool keyframe_required) {
Sebastian Jansson13943b72019-04-02 15:08:14 +0200144 RTC_DCHECK(!use_task_queue_);
tommidb23ea62017-03-03 07:21:18 -0800145 TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
philipel1c056252017-01-31 09:53:12 -0800146 int64_t latest_return_time_ms =
147 clock_->TimeInMilliseconds() + max_wait_time_ms;
philipel504c47d2016-06-30 17:33:02 +0200148 int64_t wait_ms = max_wait_time_ms;
philipel29f730e2017-03-15 08:10:08 -0700149 int64_t now_ms = 0;
philipele0b2f152016-09-28 10:23:49 +0200150
151 do {
philipel29f730e2017-03-15 08:10:08 -0700152 now_ms = clock_->TimeInMilliseconds();
philipel504c47d2016-06-30 17:33:02 +0200153 {
154 rtc::CritScope lock(&crit_);
tommi0a735642017-03-14 06:23:57 -0700155 new_continuous_frame_event_.Reset();
philipel29f730e2017-03-15 08:10:08 -0700156 if (stopped_)
157 return kStopped;
158
Sebastian Jansson13943b72019-04-02 15:08:14 +0200159 // Need to hold |crit_| in order to access the members. therefore we
philipele0b2f152016-09-28 10:23:49 +0200160 // set it here in the loop instead of outside the loop in order to not
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100161 // acquire the lock unnecessarily.
Sebastian Jansson13943b72019-04-02 15:08:14 +0200162 keyframe_required_ = keyframe_required;
163 latest_return_time_ms_ = latest_return_time_ms;
164 wait_ms = UpdateFramesToDecode(now_ms);
165 }
tommi0a735642017-03-14 06:23:57 -0700166 } while (new_continuous_frame_event_.Wait(wait_ms));
philipele0b2f152016-09-28 10:23:49 +0200167
philipel29f730e2017-03-15 08:10:08 -0700168 {
169 rtc::CritScope lock(&crit_);
philipele0b2f152016-09-28 10:23:49 +0200170
Ilya Nikolaevskiyfdfe1c92019-01-18 13:33:38 +0100171 if (!frames_to_decode_.empty()) {
Sebastian Jansson13943b72019-04-02 15:08:14 +0200172 frame_out->reset(GetFrameToDecode());
philipel29f730e2017-03-15 08:10:08 -0700173 return kFrameFound;
philipelbe7a9e52016-05-19 12:19:35 +0200174 }
Sebastian Jansson13943b72019-04-02 15:08:14 +0200175 }
tommi0a735642017-03-14 06:23:57 -0700176
Sebastian Jansson13943b72019-04-02 15:08:14 +0200177 if (latest_return_time_ms - clock_->TimeInMilliseconds() > 0) {
philipel1c056252017-01-31 09:53:12 -0800178 // If |next_frame_it_ == frames_.end()| and there is still time left, it
179 // means that the frame buffer was cleared as the thread in this function
180 // was waiting to acquire |crit_| in order to return. Wait for the
181 // remaining time and then return.
182 return NextFrame(latest_return_time_ms - now_ms, frame_out);
philipelbe7a9e52016-05-19 12:19:35 +0200183 }
tommi0a735642017-03-14 06:23:57 -0700184 return kTimeout;
philipelbe7a9e52016-05-19 12:19:35 +0200185}
186
// Scans the continuous, fully-decodable frames in |frames_| and fills
// |frames_to_decode_| with the next complete superframe (all spatial layers
// sharing one picture id), leaving it empty if none qualifies yet. Returns
// how many ms the caller should wait before delivering, clamped to
// [0, |latest_return_time_ms_| - now_ms]. Caller must hold |crit_|.
int64_t FrameBuffer::UpdateFramesToDecode(int64_t now_ms) {
  int64_t wait_ms = latest_return_time_ms_ - now_ms;
  frames_to_decode_.clear();

  // |last_continuous_frame_| may be empty below, but nullopt is smaller
  // than everything else and loop will immediately terminate as expected.
  for (auto frame_it = frames_.begin();
       frame_it != frames_.end() && frame_it->first <= last_continuous_frame_;
       ++frame_it) {
    // Skip frames that are not continuous or still have undecoded refs.
    if (!frame_it->second.continuous ||
        frame_it->second.num_missing_decodable > 0) {
      continue;
    }

    EncodedFrame* frame = frame_it->second.frame.get();

    if (keyframe_required_ && !frame->is_keyframe())
      continue;

    auto last_decoded_frame_timestamp =
        decoded_frames_history_.GetLastDecodedFrameTimestamp();

    // TODO(https://bugs.webrtc.org/9974): consider removing this check
    // as it may make a stream undecodable after a very long delay between
    // frames.
    if (last_decoded_frame_timestamp &&
        AheadOf(*last_decoded_frame_timestamp, frame->Timestamp())) {
      continue;
    }

    // Only ever return all parts of a superframe. Therefore skip this
    // frame if it's not a beginning of a superframe.
    if (frame->inter_layer_predicted) {
      continue;
    }

    // Gather all remaining frames for the same superframe.
    std::vector<FrameMap::iterator> current_superframe;
    current_superframe.push_back(frame_it);
    bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer;
    FrameMap::iterator next_frame_it = frame_it;
    while (true) {
      ++next_frame_it;
      // Stop at the end of the map, a different picture id, or a
      // non-continuous upper layer.
      if (next_frame_it == frames_.end() ||
          next_frame_it->first.picture_id != frame->id.picture_id ||
          !next_frame_it->second.continuous) {
        break;
      }
      // Check if the next frame has some undecoded references other than
      // the previous frame in the same superframe.
      size_t num_allowed_undecoded_refs =
          (next_frame_it->second.frame->inter_layer_predicted) ? 1 : 0;
      if (next_frame_it->second.num_missing_decodable >
          num_allowed_undecoded_refs) {
        break;
      }
      // All frames in the superframe should have the same timestamp.
      if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) {
        RTC_LOG(LS_WARNING) << "Frames in a single superframe have different"
                               " timestamps. Skipping undecodable superframe.";
        break;
      }
      current_superframe.push_back(next_frame_it);
      last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer;
    }
    // Check if the current superframe is complete.
    // TODO(bugs.webrtc.org/10064): consider returning all available to
    // decode frames even if the superframe is not complete yet.
    if (!last_layer_completed) {
      continue;
    }

    frames_to_decode_ = std::move(current_superframe);

    // -1 means the render time has not been set yet; derive it from timing.
    if (frame->RenderTime() == -1) {
      frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
    }
    wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

    // This will cause the frame buffer to prefer high framerate rather
    // than high resolution in the case of the decoder not decoding fast
    // enough and the stream has multiple spatial and temporal layers.
    // For multiple temporal layers it may cause non-base layer frames to be
    // skipped if they are late.
    if (wait_ms < -kMaxAllowedFrameDelayMs)
      continue;

    break;
  }
  // Never wait past the caller's deadline, and never a negative amount.
  wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
  wait_ms = std::max<int64_t>(wait_ms, 0);
  return wait_ms;
}
280
// Extracts the superframe previously selected by UpdateFramesToDecode():
// releases the frames from |frames_|, updates decode history, jitter and
// timing state, and returns a single EncodedFrame (layers combined when the
// superframe has more than one). Caller must hold |crit_| and
// |frames_to_decode_| must be non-empty; ownership passes to the caller.
EncodedFrame* FrameBuffer::GetFrameToDecode() {
  int64_t now_ms = clock_->TimeInMilliseconds();
  // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
  std::vector<EncodedFrame*> frames_out;

  RTC_DCHECK(!frames_to_decode_.empty());
  bool superframe_delayed_by_retransmission = false;
  size_t superframe_size = 0;
  EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();
  int64_t render_time_ms = first_frame->RenderTime();
  int64_t receive_time_ms = first_frame->ReceivedTime();
  // Gracefully handle bad RTP timestamps and render time issues.
  if (HasBadRenderTiming(*first_frame, now_ms)) {
    jitter_estimator_->Reset();
    timing_->Reset();
    render_time_ms = timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
  }

  for (FrameMap::iterator& frame_it : frames_to_decode_) {
    RTC_DCHECK(frame_it != frames_.end());
    // Take ownership out of the map entry; the entry is erased below.
    EncodedFrame* frame = frame_it->second.frame.release();

    // All layers of the superframe share the first frame's render time.
    frame->SetRenderTime(render_time_ms);

    superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
    receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
    superframe_size += frame->size();

    PropagateDecodability(frame_it->second);
    decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());

    // Remove decoded frame and all undecoded frames before it.
    frames_.erase(frames_.begin(), ++frame_it);

    frames_out.push_back(frame);
  }

  if (!superframe_delayed_by_retransmission) {
    int64_t frame_delay;

    if (inter_frame_delay_.CalculateDelay(first_frame->Timestamp(),
                                          &frame_delay, receive_time_ms)) {
      jitter_estimator_->UpdateEstimate(frame_delay, superframe_size);
    }

    // With NACK+FEC protection the rtt term is excluded from the jitter
    // estimate (rtt_mult == 0) unless the experiment overrides it.
    float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
    if (RttMultExperiment::RttMultEnabled()) {
      rtt_mult = RttMultExperiment::GetRttMultValue();
    }
    timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
    timing_->UpdateCurrentDelay(render_time_ms, now_ms);
  } else {
    // Retransmitted frames would skew the inter-frame delay; only record
    // the NACK event when the rtt actually feeds into playout delay.
    if (RttMultExperiment::RttMultEnabled() || add_rtt_to_playout_delay_)
      jitter_estimator_->FrameNacked();
  }

  UpdateJitterDelay();
  UpdateTimingFrameInfo();

  if (frames_out.size() == 1) {
    return frames_out[0];
  } else {
    return CombineAndDeleteFrames(frames_out);
  }
}
346
philipele7c891f2018-02-22 14:35:06 +0100347bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
348 int64_t now_ms) {
stefan95e97542017-05-23 09:52:18 -0700349 // Assume that render timing errors are due to changes in the video stream.
350 int64_t render_time_ms = frame.RenderTimeMs();
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200351 // Zero render time means render immediately.
352 if (render_time_ms == 0) {
353 return false;
354 }
stefan95e97542017-05-23 09:52:18 -0700355 if (render_time_ms < 0) {
356 return true;
357 }
Stefan Holmer812ceaf2018-05-15 13:00:10 +0200358 const int64_t kMaxVideoDelayMs = 10000;
stefan95e97542017-05-23 09:52:18 -0700359 if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) {
360 int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
Mirko Bonadei675513b2017-11-09 11:09:25 +0100361 RTC_LOG(LS_WARNING)
362 << "A frame about to be decoded is out of the configured "
363 << "delay bounds (" << frame_delay << " > " << kMaxVideoDelayMs
364 << "). Resetting the video jitter buffer.";
stefan95e97542017-05-23 09:52:18 -0700365 return true;
366 }
367 if (static_cast<int>(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) {
Mirko Bonadei675513b2017-11-09 11:09:25 +0100368 RTC_LOG(LS_WARNING) << "The video target delay has grown larger than "
369 << kMaxVideoDelayMs << " ms.";
stefan95e97542017-05-23 09:52:18 -0700370 return true;
371 }
372 return false;
373}
374
Sebastian Jansson13943b72019-04-02 15:08:14 +0200375void FrameBuffer::SafePost(std::function<void()> func) {
376 if (!use_task_queue_) {
377 func();
378 } else {
379 task_queue_.PostTask(func);
380 }
381}
// Switches the protection mode (e.g. NACK vs NACK+FEC); this feeds into the
// rtt multiplier used when fetching the jitter estimate.
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  SafePost([this, mode] {
    rtc::CritScope lock(&crit_);
    protection_mode_ = mode;
  });
}
389
// (Re-)enables the buffer; clears the |stopped_| flag set by Stop() so that
// frame requests are served again.
void FrameBuffer::Start() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Start");
  SafePost([this] {
    rtc::CritScope lock(&crit_);
    stopped_ = false;
  });
}
397
// Stops serving frames. In legacy mode this wakes any thread blocked in the
// synchronous NextFrame() so it can observe |stopped_| and return kStopped.
// In task-queue mode it synchronously cancels any pending delivery task and
// drops the stored handler before returning, guaranteeing that no frame
// callback fires after Stop() completes.
void FrameBuffer::Stop() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
  if (!use_task_queue_) {
    rtc::CritScope lock(&crit_);
    stopped_ = true;
    // Unblock the waiter in the blocking NextFrame().
    new_continuous_frame_event_.Set();
  } else {
    rtc::Event done;
    task_queue_.PostTask([this, &done] {
      rtc::CritScope lock(&crit_);
      stopped_ = true;
      if (frame_handler_) {
        RTC_DCHECK(callback_task_.Running());
        callback_task_.Stop();
        frame_handler_ = {};
      }
      done.Set();
    });
    // |done| lives on this stack, so we must block until the task has run.
    done.Wait(rtc::Event::kForever);
  }
}
419
// Drops all buffered frames and the decoded-frame history (asynchronously
// when in task-queue mode).
void FrameBuffer::Clear() {
  SafePost([this] {
    rtc::CritScope lock(&crit_);
    ClearFramesAndHistory();
  });
}
426
// Forwards the latest round-trip time estimate to the jitter estimator.
void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  SafePost([this, rtt_ms] {
    rtc::CritScope lock(&crit_);
    jitter_estimator_->UpdateRtt(rtt_ms);
  });
}
433
philipele7c891f2018-02-22 14:35:06 +0100434bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
philipel112adf92017-06-15 09:06:21 -0700435 for (size_t i = 0; i < frame.num_references; ++i) {
Philip Eliasson1f850a62019-03-19 12:15:00 +0000436 if (frame.references[i] >= frame.id.picture_id)
philipel112adf92017-06-15 09:06:21 -0700437 return false;
philipel3b3c9c42017-09-11 09:38:36 -0700438
philipel112adf92017-06-15 09:06:21 -0700439 for (size_t j = i + 1; j < frame.num_references; ++j) {
440 if (frame.references[i] == frame.references[j])
441 return false;
442 }
443 }
444
philipel0fa82a62018-03-19 15:34:53 +0100445 if (frame.inter_layer_predicted && frame.id.spatial_layer == 0)
philipel112adf92017-06-15 09:06:21 -0700446 return false;
447
448 return true;
449}
450
// Returns true iff every spatial layer of |frame|'s superframe (all frames
// sharing its picture id) is already present in |frames_|: all lower layers
// down to the non-inter-layer-predicted base, and all upper layers up to the
// one marked |is_last_spatial_layer|. Used to report OnCompleteFrame stats.
bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) {
  if (frame.inter_layer_predicted) {
    // Check that all previous spatial layers are already inserted.
    VideoLayerFrameId id = frame.id;
    RTC_DCHECK_GT(id.spatial_layer, 0);
    --id.spatial_layer;
    FrameMap::iterator prev_frame = frames_.find(id);
    if (prev_frame == frames_.end())
      return false;
    while (prev_frame->second.frame->inter_layer_predicted) {
      // NOTE(review): if |prev_frame| is frames_.begin() here, the decrement
      // below is undefined behavior, and the subsequent "== frames_.end()"
      // check cannot detect it — confirm that an inter-layer-predicted entry
      // is always preceded by its lower layer in the map.
      --prev_frame;
      --id.spatial_layer;
      if (prev_frame == frames_.end() ||
          prev_frame->first.picture_id != id.picture_id ||
          prev_frame->first.spatial_layer != id.spatial_layer) {
        return false;
      }
    }
  }

  if (!frame.is_last_spatial_layer) {
    // Check that all following spatial layers are already inserted.
    VideoLayerFrameId id = frame.id;
    ++id.spatial_layer;
    FrameMap::iterator next_frame = frames_.find(id);
    if (next_frame == frames_.end())
      return false;
    while (!next_frame->second.frame->is_last_spatial_layer) {
      ++next_frame;
      ++id.spatial_layer;
      if (next_frame == frames_.end() ||
          next_frame->first.picture_id != id.picture_id ||
          next_frame->first.spatial_layer != id.spatial_layer) {
        return false;
      }
    }
  }

  return true;
}
491
// Asynchronous insert used in task-queue mode: hops to |task_queue_| and
// reports the resulting last continuous picture id via |picture_id_handler|.
void FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame,
                              std::function<void(int64_t)> picture_id_handler) {
  // A callable struct is used instead of a lambda so the move-only |frame|
  // can be carried as a member into the posted task.
  struct InsertFrameTask {
    void operator()() {
      RTC_DCHECK_RUN_ON(&frame_buffer->task_queue_);
      int64_t last_continuous_pid = frame_buffer->InsertFrame(std::move(frame));
      picture_id_handler(last_continuous_pid);
    }
    FrameBuffer* frame_buffer;
    std::unique_ptr<EncodedFrame> frame;
    std::function<void(int64_t)> picture_id_handler;
  };
  // NOTE(review): unlike NextFrame(), there is no RTC_DCHECK(use_task_queue_)
  // here even though this always posts to |task_queue_| — confirm callers in
  // legacy mode use the synchronous overload.
  task_queue_.PostTask(
      InsertFrameTask{this, std::move(frame), std::move(picture_id_handler)});
}
507
// Inserts |frame| into the buffer, updating continuity and decodability
// bookkeeping. Returns the picture id of the last continuous frame after the
// insert (-1 if the buffer was cleared and nothing is continuous). The frame
// may be dropped (invalid references, buffer full, older than the last
// decoded frame, duplicate) — in every such case the current last continuous
// picture id is still returned.
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);

  rtc::CritScope lock(&crit_);

  // Report complete-superframe stats before any of the drop paths below.
  if (stats_callback_ && IsCompleteSuperFrame(*frame)) {
    stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
                                     frame->contentType());
  }
  const VideoLayerFrameId& id = frame->id;

  int64_t last_continuous_picture_id =
      !last_continuous_frame_ ? -1 : last_continuous_frame_->picture_id;

  if (!ValidReferences(*frame)) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") has invalid frame references, dropping frame.";
    return last_continuous_picture_id;
  }

  if (frames_.size() >= kMaxFramesBuffered) {
    // A keyframe lets us restart cleanly, so clear and keep it; any other
    // frame is dropped.
    if (frame->is_keyframe()) {
      RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") but buffer is full, clearing"
                          << " buffer and inserting the frame.";
      ClearFramesAndHistory();
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") could not be inserted due to the frame "
                          << "buffer being full, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  auto last_decoded_frame_timestamp =
      decoded_frames_history_.GetLastDecodedFrameTimestamp();
  if (last_decoded_frame && id <= *last_decoded_frame) {
    if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) &&
        frame->is_keyframe()) {
      // If this frame has a newer timestamp but an earlier picture id then we
      // assume there has been a jump in the picture id due to some encoder
      // reconfiguration or some other reason. Even though this is not according
      // to spec we can still continue to decode from this frame if it is a
      // keyframe.
      RTC_LOG(LS_WARNING)
          << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                          << id.picture_id << ":"
                          << static_cast<int>(id.spatial_layer)
                          << ") inserted after frame ("
                          << last_decoded_frame->picture_id << ":"
                          << static_cast<int>(last_decoded_frame->spatial_layer)
                          << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the picture id make large jumps mid stream.
  if (!frames_.empty() && id < frames_.begin()->first &&
      frames_.rbegin()->first < id) {
    RTC_LOG(LS_WARNING)
        << "A jump in picture id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_picture_id = -1;
  }

  auto info = frames_.emplace(id, FrameInfo()).first;

  // emplace() is a no-op when the key already exists; a populated |frame|
  // there means this is a duplicate insert.
  if (info->second.frame) {
    RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << id.picture_id << ":"
                        << static_cast<int>(id.spatial_layer)
                        << ") already inserted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;

  // Retransmitted frames would skew the incoming-timestamp statistics.
  if (!frame->delayed_by_retransmission())
    timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());

  info->second.frame = std::move(frame);

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_->picture_id;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame.
    if (!use_task_queue_) {
      new_continuous_frame_event_.Set();
    } else if (callback_task_.Running()) {
      // Cancel the armed delivery task and re-evaluate immediately.
      RTC_CHECK(frame_handler_);
      callback_task_.Stop();
      NextFrameOnQueue();
    }
  }

  return last_continuous_picture_id;
}
623
// Marks |start| — and, transitively, every frame whose dependencies have all
// become continuous — as continuous, and advances |last_continuous_frame_|.
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
  RTC_DCHECK(start->second.continuous);

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (!last_continuous_frame_ || *last_continuous_frame_ < frame->first) {
      last_continuous_frame_ = frame->first;
    }

    // Loop through all dependent frames, and if that frame no longer has
    // any unfulfilled dependencies then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.dependent_frames.size(); ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      RTC_DCHECK(frame_ref != frames_.end());

      // TODO(philipel): Look into why we've seen this happen.
      // (The DCHECK above fires in debug; in release the lookup failure is
      // tolerated and the dependent frame is simply skipped.)
      if (frame_ref != frames_.end()) {
        --frame_ref->second.num_missing_continuous;
        if (frame_ref->second.num_missing_continuous == 0) {
          frame_ref->second.continuous = true;
          continuous_frames.push(frame_ref);
        }
      }
    }
  }
}
657
658void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
tommidb23ea62017-03-03 07:21:18 -0800659 TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
Elad Alon69321dd2019-01-10 15:02:54 +0100660 for (size_t d = 0; d < info.dependent_frames.size(); ++d) {
philipele0b2f152016-09-28 10:23:49 +0200661 auto ref_info = frames_.find(info.dependent_frames[d]);
philipel93e451b2016-10-06 12:25:13 +0200662 RTC_DCHECK(ref_info != frames_.end());
tommie95b78b2017-05-14 07:23:11 -0700663 // TODO(philipel): Look into why we've seen this happen.
664 if (ref_info != frames_.end()) {
665 RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
666 --ref_info->second.num_missing_decodable;
667 }
philipele0b2f152016-09-28 10:23:49 +0200668 }
669}
670
// Registers the dependency bookkeeping for a newly inserted |frame|.
// Returns false (and logs, rate-limited) when the frame references a frame
// older than the last decoded one that was never decoded — such a frame can
// never become decodable and should be dropped by the caller.
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  const VideoLayerFrameId& id = frame.id;

  auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId();
  RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first);

  // In this function we determine how many missing dependencies this |frame|
  // has to become continuous/decodable. If a frame that this |frame| depend
  // on has already been decoded then we can ignore that dependency since it has
  // already been fulfilled.
  //
  // For all other frames we will register a backwards reference to this |frame|
  // so that |num_missing_continuous| and |num_missing_decodable| can be
  // decremented as frames become continuous/are decoded.
  struct Dependency {
    VideoLayerFrameId id;
    bool continuous;
  };
  std::vector<Dependency> not_yet_fulfilled_dependencies;

  // Find all dependencies that have not yet been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    // References are picture ids within the same spatial layer.
    VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer);
    // Does |frame| depend on a frame earlier than the last decoded one?
    if (last_decoded_frame && ref_key <= *last_decoded_frame) {
      // Was that frame decoded? If not, this |frame| will never become
      // decodable.
      if (!decoded_frames_history_.WasDecoded(ref_key)) {
        int64_t now_ms = clock_->TimeInMilliseconds();
        // Rate-limit the warning to once per kLogNonDecodedIntervalMs.
        if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) {
          RTC_LOG(LS_WARNING)
              << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
              << static_cast<int>(id.spatial_layer)
              << ") depends on a non-decoded frame more previous than"
              << " the last decoded frame, dropping frame.";
          last_log_non_decoded_ms_ = now_ms;
        }
        return false;
      }
    } else {
      // Dependency not yet decoded: record it, noting whether the referenced
      // frame (if already buffered) is itself continuous.
      auto ref_info = frames_.find(ref_key);
      bool ref_continuous =
          ref_info != frames_.end() && ref_info->second.continuous;
      not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous});
    }
  }

  // Does |frame| depend on the lower spatial layer?
  if (frame.inter_layer_predicted) {
    VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1);
    auto ref_info = frames_.find(ref_key);

    // The lower layer counts as decoded only if it was the very last frame
    // decoded (same picture id, one spatial layer below).
    bool lower_layer_decoded =
        last_decoded_frame && *last_decoded_frame == ref_key;
    bool lower_layer_continuous =
        lower_layer_decoded ||
        (ref_info != frames_.end() && ref_info->second.continuous);

    if (!lower_layer_continuous || !lower_layer_decoded) {
      not_yet_fulfilled_dependencies.push_back(
          {ref_key, lower_layer_continuous});
    }
  }

  // Start with all unfulfilled dependencies counted as missing, then credit
  // the ones that are already continuous.
  info->second.num_missing_continuous = not_yet_fulfilled_dependencies.size();
  info->second.num_missing_decodable = not_yet_fulfilled_dependencies.size();

  for (const Dependency& dep : not_yet_fulfilled_dependencies) {
    if (dep.continuous)
      --info->second.num_missing_continuous;

    // Backwards reference so PropagateContinuity/PropagateDecodability can
    // decrement this frame's counters later. Note: operator[] creates a
    // placeholder FrameInfo if the dependency has not been received yet.
    frames_[dep.id].dependent_frames.push_back(id);
  }

  return true;
}
749
philipelbe742702016-11-30 01:31:40 -0800750void FrameBuffer::UpdateJitterDelay() {
tommidb23ea62017-03-03 07:21:18 -0800751 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
philipela45102f2017-02-22 05:30:39 -0800752 if (!stats_callback_)
753 return;
philipelbe742702016-11-30 01:31:40 -0800754
philipela45102f2017-02-22 05:30:39 -0800755 int decode_ms;
756 int max_decode_ms;
757 int current_delay_ms;
758 int target_delay_ms;
759 int jitter_buffer_ms;
760 int min_playout_delay_ms;
761 int render_delay_ms;
762 if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
763 &target_delay_ms, &jitter_buffer_ms,
764 &min_playout_delay_ms, &render_delay_ms)) {
765 stats_callback_->OnFrameBufferTimingsUpdated(
766 decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
767 jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
philipelbe742702016-11-30 01:31:40 -0800768 }
philipel266f0a42016-11-28 08:49:07 -0800769}
770
ilnik2edc6842017-07-06 03:06:50 -0700771void FrameBuffer::UpdateTimingFrameInfo() {
772 TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
Danil Chapovalov0040b662018-06-18 10:48:16 +0200773 absl::optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
philipel97187112018-03-23 10:43:21 +0100774 if (info && stats_callback_)
ilnik2edc6842017-07-06 03:06:50 -0700775 stats_callback_->OnTimingFrameInfoUpdated(*info);
776}
777
philipelfcc60062017-01-18 05:35:20 -0800778void FrameBuffer::ClearFramesAndHistory() {
ilnik2edc6842017-07-06 03:06:50 -0700779 TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
philipelfcc60062017-01-18 05:35:20 -0800780 frames_.clear();
Ilya Nikolaevskiy6551faf2019-01-10 15:16:47 +0100781 last_continuous_frame_.reset();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100782 frames_to_decode_.clear();
Ilya Nikolaevskiy13717842019-01-14 13:24:22 +0100783 decoded_frames_history_.Clear();
philipelfcc60062017-01-18 05:35:20 -0800784}
785
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100786EncodedFrame* FrameBuffer::CombineAndDeleteFrames(
787 const std::vector<EncodedFrame*>& frames) const {
788 RTC_DCHECK(!frames.empty());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100789 EncodedFrame* first_frame = frames[0];
790 EncodedFrame* last_frame = frames.back();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100791 size_t total_length = 0;
792 for (size_t i = 0; i < frames.size(); ++i) {
793 total_length += frames[i]->size();
794 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100795 first_frame->VerifyAndAllocate(total_length);
796
797 // Spatial index of combined frame is set equal to spatial index of its top
798 // spatial layer.
799 first_frame->SetSpatialIndex(last_frame->id.spatial_layer);
800 first_frame->id.spatial_layer = last_frame->id.spatial_layer;
801
802 first_frame->video_timing_mutable()->network2_timestamp_ms =
803 last_frame->video_timing().network2_timestamp_ms;
804 first_frame->video_timing_mutable()->receive_finish_ms =
805 last_frame->video_timing().receive_finish_ms;
806
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100807 // Append all remaining frames to the first one.
Niels Möller9c843902019-01-11 10:21:35 +0100808 uint8_t* buffer = first_frame->data() + first_frame->size();
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100809 for (size_t i = 1; i < frames.size(); ++i) {
Sergey Silkin61832dd2018-12-20 14:32:14 +0100810 EncodedFrame* next_frame = frames[i];
Niels Möller9c843902019-01-11 10:21:35 +0100811 memcpy(buffer, next_frame->data(), next_frame->size());
Sergey Silkin61832dd2018-12-20 14:32:14 +0100812 buffer += next_frame->size();
813 delete next_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100814 }
Sergey Silkin61832dd2018-12-20 14:32:14 +0100815 first_frame->set_size(total_length);
816 return first_frame;
Ilya Nikolaevskiy5546aef2018-12-04 15:54:52 +0100817}
818
// Out-of-line defaulted special members for FrameInfo (presumably defined
// here rather than in the header to keep member-type definitions out of the
// header — confirm against frame_buffer2.h).
FrameBuffer::FrameInfo::FrameInfo() = default;
FrameBuffer::FrameInfo::FrameInfo(FrameInfo&&) = default;
FrameBuffer::FrameInfo::~FrameInfo() = default;
822
philipelbe7a9e52016-05-19 12:19:35 +0200823} // namespace video_coding
824} // namespace webrtc