/*
 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/frame_buffer2.h"

#include <algorithm>
#include <cstring>
#include <queue>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_coding/jitter_estimator.h"
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/metrics.h"

namespace webrtc {
namespace video_coding {

namespace {
// Max number of frames the buffer will hold.
constexpr int kMaxFramesBuffered = 600;

// Max number of decoded frames for which info is kept in the history.
constexpr int kMaxFramesHistory = 50;
}  // namespace

FrameBuffer::FrameBuffer(Clock* clock,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing,
                         VCMReceiveStatisticsCallback* stats_callback)
    : clock_(clock),
      new_continuous_frame_event_(false, false),
      jitter_estimator_(jitter_estimator),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      last_decoded_frame_it_(frames_.end()),
      last_continuous_frame_it_(frames_.end()),
      num_frames_history_(0),
      num_frames_buffered_(0),
      stopped_(false),
      protection_mode_(kProtectionNack),
      stats_callback_(stats_callback) {}

FrameBuffer::~FrameBuffer() {}
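
// Waits until a decodable frame is available or until |max_wait_time_ms| has
// passed, handing the frame to the caller through |frame_out|. Returns
// kFrameFound, kTimeout or kStopped accordingly.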
philipelbe7a9e52016-05-19 12:19:35 +020055
philipel75562822016-09-05 10:57:41 +020056FrameBuffer::ReturnReason FrameBuffer::NextFrame(
57 int64_t max_wait_time_ms,
58 std::unique_ptr<FrameObject>* frame_out) {
tommidb23ea62017-03-03 07:21:18 -080059 TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
philipel1c056252017-01-31 09:53:12 -080060 int64_t latest_return_time_ms =
61 clock_->TimeInMilliseconds() + max_wait_time_ms;
philipel504c47d2016-06-30 17:33:02 +020062 int64_t wait_ms = max_wait_time_ms;
philipel29f730e2017-03-15 08:10:08 -070063 int64_t now_ms = 0;
philipele0b2f152016-09-28 10:23:49 +020064
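  // Repeatedly scan for the next decodable frame while holding the lock, then
  // wait outside the critical section until either a new continuous frame is
  // inserted or the computed wait time expires.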
  do {
    now_ms = clock_->TimeInMilliseconds();
    {
      rtc::CritScope lock(&crit_);
      new_continuous_frame_event_.Reset();
      if (stopped_)
        return kStopped;

      wait_ms = max_wait_time_ms;

      // Need to hold |crit_| in order to use |frames_|, therefore we
      // set it here in the loop instead of outside the loop in order to not
      // acquire the lock unnecessarily.
      next_frame_it_ = frames_.end();

      // |frame_it| points to the first frame after
      // |last_decoded_frame_it_|.
      auto frame_it = frames_.end();
      if (last_decoded_frame_it_ == frames_.end()) {
        frame_it = frames_.begin();
      } else {
        frame_it = last_decoded_frame_it_;
        ++frame_it;
      }

      // |continuous_end_it| points to the first frame after
      // |last_continuous_frame_it_|.
      auto continuous_end_it = last_continuous_frame_it_;
      if (continuous_end_it != frames_.end())
        ++continuous_end_it;

      for (; frame_it != continuous_end_it && frame_it != frames_.end();
           ++frame_it) {
        if (!frame_it->second.continuous ||
            frame_it->second.num_missing_decodable > 0) {
          continue;
        }

        FrameObject* frame = frame_it->second.frame.get();
        next_frame_it_ = frame_it;
        if (frame->RenderTime() == -1)
          frame->SetRenderTime(timing_->RenderTimeMs(frame->timestamp, now_ms));
        wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

        // This will cause the frame buffer to prefer high framerate rather
        // than high resolution when the decoder is not able to decode fast
        // enough and the stream has multiple spatial and temporal layers.
        if (wait_ms == 0)
          continue;

        break;
      }
    }  // rtc::CritScope lock(&crit_);


    wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms - now_ms);
    wait_ms = std::max<int64_t>(wait_ms, 0);
  } while (new_continuous_frame_event_.Wait(wait_ms));

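  // Either a frame is ready to be returned (|next_frame_it_| points at it) or
  // the wait timed out. Re-acquire the lock to hand the frame off and to
  // update the timing and jitter estimates.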
  {
    rtc::CritScope lock(&crit_);
    now_ms = clock_->TimeInMilliseconds();
    if (next_frame_it_ != frames_.end()) {
      std::unique_ptr<FrameObject> frame =
          std::move(next_frame_it_->second.frame);

      if (!frame->delayed_by_retransmission()) {
        int64_t frame_delay;

        if (inter_frame_delay_.CalculateDelay(frame->timestamp, &frame_delay,
                                              frame->ReceivedTime())) {
          jitter_estimator_->UpdateEstimate(frame_delay, frame->size());
        }

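        // With NACK+FEC protection, losses are expected to be recovered by
        // FEC rather than by waiting a full RTT for retransmission, so the
        // RTT term is left out of the jitter estimate (rtt_mult = 0).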
        float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
        timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
        timing_->UpdateCurrentDelay(frame->RenderTime(), now_ms);
      }

      UpdateJitterDelay();

      PropagateDecodability(next_frame_it_->second);
      AdvanceLastDecodedFrame(next_frame_it_);
      last_decoded_frame_timestamp_ = frame->timestamp;
      *frame_out = std::move(frame);
      return kFrameFound;
    }
  }

  if (latest_return_time_ms - now_ms > 0) {
    // If |next_frame_it_ == frames_.end()| and there is still time left, it
    // means that the frame buffer was cleared while the thread in this
    // function was waiting to acquire |crit_| in order to return. Wait for
    // the remaining time and then return.
    return NextFrame(latest_return_time_ms - now_ms, frame_out);
  }

  return kTimeout;
}

void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
  rtc::CritScope lock(&crit_);
  protection_mode_ = mode;
}

void FrameBuffer::Start() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Start");
  rtc::CritScope lock(&crit_);
  stopped_ = false;
}

void FrameBuffer::Stop() {
  TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
  rtc::CritScope lock(&crit_);
  stopped_ = true;
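  // Wake up any thread blocked in NextFrame() so that it can return kStopped.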
  new_continuous_frame_event_.Set();
}

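// Inserts |frame| into the buffer, updates its dependency bookkeeping and
// continuity, and returns the picture id of the last continuous frame (or -1
// if there is none).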
int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);
  if (stats_callback_)
    stats_callback_->OnCompleteFrame(frame->num_references == 0, frame->size());
  FrameKey key(frame->picture_id, frame->spatial_layer);

  rtc::CritScope lock(&crit_);

  int last_continuous_picture_id =
      last_continuous_frame_it_ == frames_.end()
          ? -1
          : last_continuous_frame_it_->first.picture_id;

  if (num_frames_buffered_ >= kMaxFramesBuffered) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") could not be inserted due to the frame "
                    << "buffer being full, dropping frame.";
    return last_continuous_picture_id;
  }

  if (frame->inter_layer_predicted && frame->spatial_layer == 0) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") is marked as inter layer predicted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (last_decoded_frame_it_ != frames_.end() &&
      key < last_decoded_frame_it_->first) {
    if (AheadOf(frame->timestamp, last_decoded_frame_timestamp_) &&
        frame->num_references == 0) {
      // If this frame has a newer timestamp but an earlier picture id, then we
      // assume there has been a jump in the picture id due to some encoder
      // reconfiguration or some other reason. Even though this is not
      // according to spec, we can still continue to decode from this frame if
      // it is a keyframe.
      LOG(LS_WARNING) << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                      << key.picture_id << ":"
                      << static_cast<int>(key.spatial_layer)
                      << ") inserted after frame ("
                      << last_decoded_frame_it_->first.picture_id << ":"
                      << static_cast<int>(
                             last_decoded_frame_it_->first.spatial_layer)
                      << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  // Test if inserting this frame would cause the order of the frames to become
  // ambiguous (covering more than half the interval of 2^16). This can happen
  // when the picture id makes large jumps mid stream.
  if (!frames_.empty() &&
      key < frames_.begin()->first &&
      frames_.rbegin()->first < key) {
    LOG(LS_WARNING) << "A jump in picture id was detected, clearing buffer.";
    ClearFramesAndHistory();
    last_continuous_picture_id = -1;
  }

  auto info = frames_.insert(std::make_pair(key, FrameInfo())).first;

  if (info->second.frame) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") already inserted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;

  info->second.frame = std::move(frame);
  ++num_frames_buffered_;

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_it_->first.picture_id;

    // Since we now have new continuous frames there might be a better frame
    // to return from NextFrame. Signal that thread so that it again can choose
    // which frame to return.
    new_continuous_frame_event_.Set();
  }

  return last_continuous_picture_id;
}

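// Marks |start| as continuous and, via a breadth-first traversal of its
// dependents, marks every frame whose dependencies are now all continuous.
// Also advances |last_continuous_frame_it_|.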
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
  RTC_DCHECK(start->second.continuous);
  if (last_continuous_frame_it_ == frames_.end())
    last_continuous_frame_it_ = start;

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (last_continuous_frame_it_->first < frame->first)
      last_continuous_frame_it_ = frame;

    // Loop through all dependent frames, and if that frame no longer has
    // any unfulfilled dependencies then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.num_dependent_frames; ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      --frame_ref->second.num_missing_continuous;

      if (frame_ref->second.num_missing_continuous == 0) {
        frame_ref->second.continuous = true;
        continuous_frames.push(frame_ref);
      }
    }
  }
}

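// Called when the frame described by |info| is handed off for decoding;
// decrements the missing-decodable count of every frame that depends on it.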
void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
  for (size_t d = 0; d < info.num_dependent_frames; ++d) {
    auto ref_info = frames_.find(info.dependent_frames[d]);
    RTC_DCHECK(ref_info != frames_.end());
    RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
    --ref_info->second.num_missing_decodable;
  }
}

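// Moves |last_decoded_frame_it_| forward to |decoded|, erasing any skipped
// (never decoded) frames along the way and trimming the decoded-frame history
// to at most kMaxFramesHistory entries.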
void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
  TRACE_EVENT0("webrtc", "FrameBuffer::AdvanceLastDecodedFrame");
  if (last_decoded_frame_it_ == frames_.end()) {
    last_decoded_frame_it_ = frames_.begin();
  } else {
    RTC_DCHECK(last_decoded_frame_it_->first < decoded->first);
    ++last_decoded_frame_it_;
  }
  --num_frames_buffered_;
  ++num_frames_history_;

  // First, delete non-decoded frames from the history.
  while (last_decoded_frame_it_ != decoded) {
    if (last_decoded_frame_it_->second.frame)
      --num_frames_buffered_;
    last_decoded_frame_it_ = frames_.erase(last_decoded_frame_it_);
  }

  // Then remove old history if we have too much history saved.
  if (num_frames_history_ > kMaxFramesHistory) {
    frames_.erase(frames_.begin());
    --num_frames_history_;
  }
}

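// Fills in the dependency bookkeeping for |frame|: counts how many of its
// references are still missing (for continuity and decodability) and registers
// backwards references so those counters can be updated as the referenced
// frames arrive or are decoded. Returns false if the frame should be dropped.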
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  FrameKey key(frame.picture_id, frame.spatial_layer);
  info->second.num_missing_continuous = frame.num_references;
  info->second.num_missing_decodable = frame.num_references;

  RTC_DCHECK(last_decoded_frame_it_ == frames_.end() ||
             last_decoded_frame_it_->first < info->first);

  // Check how many of the dependencies have already been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    FrameKey ref_key(frame.references[i], frame.spatial_layer);
    auto ref_info = frames_.find(ref_key);

    // Does |frame| depend on a frame earlier than the last decoded frame?
    if (last_decoded_frame_it_ != frames_.end() &&
        ref_key <= last_decoded_frame_it_->first) {
      if (ref_info == frames_.end()) {
        LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << key.picture_id << ":"
                        << static_cast<int>(key.spatial_layer)
                        << ") depends on a non-decoded frame earlier than "
                        << "the last decoded frame, dropping frame.";
        return false;
      }

      --info->second.num_missing_continuous;
      --info->second.num_missing_decodable;
    } else {
      if (ref_info == frames_.end())
        ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;

      if (ref_info->second.continuous)
        --info->second.num_missing_continuous;

      // Add a backwards reference so |frame| can be updated when new
      // frames are inserted or decoded.
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  // Check if we have the lower spatial layer frame.
  if (frame.inter_layer_predicted) {
    ++info->second.num_missing_continuous;
    ++info->second.num_missing_decodable;

    FrameKey ref_key(frame.picture_id, frame.spatial_layer - 1);
    // Get or create the FrameInfo for the referenced lower-layer frame.
    auto ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;
    if (ref_info->second.continuous)
      --info->second.num_missing_continuous;

    if (ref_info == last_decoded_frame_it_) {
      --info->second.num_missing_decodable;
    } else {
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  RTC_DCHECK_LE(info->second.num_missing_continuous,
                info->second.num_missing_decodable);

  return true;
}

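// Reports the current timing values (decode time, jitter buffer delay, target
// and current delay, etc.) to the receive-statistics callback, if one was
// provided.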
void FrameBuffer::UpdateJitterDelay() {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
  if (!stats_callback_)
    return;

  int decode_ms;
  int max_decode_ms;
  int current_delay_ms;
  int target_delay_ms;
  int jitter_buffer_ms;
  int min_playout_delay_ms;
  int render_delay_ms;
  if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
                          &target_delay_ms, &jitter_buffer_ms,
                          &min_playout_delay_ms, &render_delay_ms)) {
    stats_callback_->OnFrameBufferTimingsUpdated(
        decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
        jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
  }
}

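// Drops all buffered frames and resets the continuity/decodability
// bookkeeping. Called from InsertFrame when a picture id jump makes the
// existing frame ordering unusable.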
void FrameBuffer::ClearFramesAndHistory() {
  TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
  frames_.clear();
  last_decoded_frame_it_ = frames_.end();
  last_continuous_frame_it_ = frames_.end();
  next_frame_it_ = frames_.end();
  num_frames_history_ = 0;
  num_frames_buffered_ = 0;
}

}  // namespace video_coding
}  // namespace webrtc