/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/main/source/receiver.h"

#include <assert.h>

#include <cstdlib>

#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"

namespace webrtc {

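// Upper bound, in milliseconds, on the extra delay an application may request
// through SetMinReceiverDelay().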
enum { kMaxReceiverDelayMs = 10000 };

VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         EventFactory* event_factory,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      clock_(clock),
      master_(master),
      jitter_buffer_(clock_, event_factory),
      timing_(timing),
      render_wait_event_(event_factory->CreateEvent()),
      state_(kPassive),
      max_video_delay_ms_(kMaxVideoDelayMs) {}

VCMReceiver::~VCMReceiver() {
  render_wait_event_->Set();
  delete crit_sect_;
}

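// Starts the jitter buffer if it is not already running, flushes it otherwise,
// and resets the receiver state (kReceiving for the master receiver, kPassive
// for the dual receiver).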
void VCMReceiver::Reset() {
  CriticalSectionScoped cs(crit_sect_);
  if (!jitter_buffer_.Running()) {
    jitter_buffer_.Start();
  } else {
    jitter_buffer_.Flush();
  }
  render_wait_event_->Reset();
  if (master_) {
    state_ = kReceiving;
  } else {
    state_ = kPassive;
  }
}

int32_t VCMReceiver::Initialize() {
  Reset();
  CriticalSectionScoped cs(crit_sect_);
  if (!master_) {
    SetNackMode(kNoNack, -1, -1);
  }
  return VCM_OK;
}

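// Forwards the latest round-trip time estimate to the jitter buffer.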
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}

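// Inserts a packet into the jitter buffer. Returns VCM_FLUSH_INDICATOR when
// the jitter buffer asks to be flushed, VCM_JITTER_BUFFER_ERROR on other
// jitter buffer errors, and VCM_OK otherwise (old packets are ignored and
// also return VCM_OK).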
int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
                                  uint16_t frame_width,
                                  uint16_t frame_height) {
  // Insert the packet into the jitter buffer. The packet can either be empty
  // or contain media at this point.
  bool retransmitted = false;
  const VCMFrameBufferEnum ret = jitter_buffer_.InsertPacket(packet,
                                                             &retransmitted);
  if (ret == kOldPacket) {
    return VCM_OK;
  } else if (ret == kFlushIndicator) {
    return VCM_FLUSH_INDICATOR;
  } else if (ret < 0) {
    return VCM_JITTER_BUFFER_ERROR;
  }
  if (ret == kCompleteSession && !retransmitted) {
    // We don't want to include timestamps which have suffered from
    // retransmission here, since we compensate with extra retransmission
    // delay within the jitter estimate.
    timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
  }
  return VCM_OK;
}

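// Returns the next frame to hand to the decoder, or NULL if no (sufficiently
// complete) frame became available within max_wait_time_ms. The computed
// render timestamp is returned through next_render_time_ms. A minimal usage
// sketch of the call sequence follows; the constant kMaxWaitTimeMs and the
// synchronous decode step are illustrative assumptions, not part of this
// file:
//
//   int64_t render_time_ms = 0;
//   VCMEncodedFrame* frame = receiver.FrameForDecoding(
//       kMaxWaitTimeMs, render_time_ms, true /* render_timing */, NULL);
//   if (frame != NULL) {
//     // Decode the frame, then hand it back to the jitter buffer.
//     receiver.ReleaseFrame(frame);
//   }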
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t& next_render_time_ms,
    bool render_timing,
    VCMReceiver* dual_receiver) {
  const int64_t start_time_ms = clock_->TimeInMilliseconds();
  uint32_t frame_timestamp = 0;
  // Exhaust wait time to get a complete frame for decoding.
  bool found_frame = jitter_buffer_.NextCompleteTimestamp(
      max_wait_time_ms, &frame_timestamp);

  if (!found_frame) {
    // Get an incomplete frame when enabled.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }
    found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(
        &frame_timestamp);
  }

  if (!found_frame) {
    return NULL;
  }

  // We have a frame - Set timing and render timestamp.
  timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
  const int64_t now_ms = clock_->TimeInMilliseconds();
  timing_->UpdateCurrentDelay(frame_timestamp);
  next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
  // Check render timing.
  bool timing_error = false;
  // Assume that render timing errors are due to changes in the video stream.
  if (next_render_time_ms < 0) {
    timing_error = true;
  } else if (std::abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
    int frame_delay = static_cast<int>(std::abs(next_render_time_ms - now_ms));
    LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
                    << "delay bounds (" << frame_delay << " > "
                    << max_video_delay_ms_
                    << "). Resetting the video jitter buffer.";
    timing_error = true;
  } else if (static_cast<int>(timing_->TargetVideoDelay()) >
             max_video_delay_ms_) {
    LOG(LS_WARNING) << "The video target delay has grown larger than "
                    << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
    timing_error = true;
  }

  if (timing_error) {
    // Timing error => reset timing and flush the jitter buffer.
    jitter_buffer_.Flush();
    timing_->Reset();
    return NULL;
  }

  if (!render_timing) {
    // Decode frame as close as possible to the render timestamp.
    const int32_t available_wait_time = max_wait_time_ms -
        static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
    uint16_t new_max_wait_time = static_cast<uint16_t>(
        VCM_MAX(available_wait_time, 0));
    uint32_t wait_time_ms = timing_->MaxWaitingTime(
        next_render_time_ms, clock_->TimeInMilliseconds());
    if (new_max_wait_time < wait_time_ms) {
      // We are not allowed to wait until the frame is supposed to be
      // rendered. Wait as long as we are allowed, to avoid busy looping, and
      // then return NULL. The next call to this function might return the
      // frame.
      render_wait_event_->Wait(max_wait_time_ms);
      return NULL;
    }
    // Wait until it's time to render.
    render_wait_event_->Wait(wait_time_ms);
  }

  // Extract the frame from the jitter buffer and set the render time.
  VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
  if (frame == NULL) {
    return NULL;
  }
  frame->SetRenderTime(next_render_time_ms);
  TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->TimeStamp(),
                          "SetRenderTS", "render_time", next_render_time_ms);
  if (dual_receiver != NULL) {
    dual_receiver->UpdateState(*frame);
  }
  if (!frame->Complete()) {
    // Update stats for incomplete frames.
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
    }
  }
  return frame;
}

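// Hands a frame obtained from FrameForDecoding() back to the jitter buffer.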
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}

void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
}

void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
  assert(frame_count);
  std::map<FrameType, uint32_t> counts(jitter_buffer_.FrameStatistics());
  frame_count->numDeltaFrames = counts[kVideoFrameDelta];
  frame_count->numKeyFrames = counts[kVideoFrameKey];
}

uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}

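// Configures the NACK mode and the RTT thresholds of the jitter buffer. For
// the dual (non-master) receiver this also resets the state to passive.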
void VCMReceiver::SetNackMode(VCMNackMode nackMode,
                              int low_rtt_nack_threshold_ms,
                              int high_rtt_nack_threshold_ms) {
  CriticalSectionScoped cs(crit_sect_);
  // Default to always having NACK enabled in hybrid mode.
  jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
                             high_rtt_nack_threshold_ms);
  if (!master_) {
    state_ = kPassive;  // The dual decoder defaults to passive.
  }
}

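// Passes the NACK list limits (list size, packet age, incomplete time) on to
// the jitter buffer.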
void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack,
                                  int max_incomplete_time_ms) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack,
                                 max_incomplete_time_ms);
}

VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}

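// Copies the jitter buffer's current NACK list into |nack_list| (which must
// hold at least |size| entries) and reports its length through
// |nack_list_length|. Returns kNackKeyFrameRequest when the jitter buffer
// requests a key frame instead of retransmissions, kNackOk otherwise.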
VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
                                    uint16_t size,
                                    uint16_t* nack_list_length) {
  bool request_key_frame = false;
  uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
      nack_list_length, &request_key_frame);
  assert(*nack_list_length <= size);
  if (internal_nack_list != NULL && *nack_list_length > 0) {
    memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
  }
  if (request_key_frame) {
    return kNackKeyFrameRequest;
  }
  return kNackOk;
}

// Decide whether we should change decoder state. This should be done if the
// dual decoder has caught up with the decoder decoding with packet losses.
bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
                                      VCMReceiver& dual_receiver) const {
  if (dual_frame == NULL) {
    return false;
  }
  if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
    dual_receiver.UpdateState(kWaitForPrimaryDecode);
    return true;
  }
  return false;
}

void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}

VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}

void VCMReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
  jitter_buffer_.SetDecodeErrorMode(decode_error_mode);
}

VCMDecodeErrorMode VCMReceiver::DecodeErrorMode() const {
  return jitter_buffer_.decode_error_mode();
}

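// Sets the minimum playout delay. Returns -1 if |desired_delay_ms| is
// negative or exceeds kMaxReceiverDelayMs; otherwise widens the allowed video
// delay by the requested amount and returns 0.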
int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
  CriticalSectionScoped cs(crit_sect_);
  if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
    return -1;
  }
  max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
  // Initialize timing to the desired delay.
  timing_->set_min_playout_delay(desired_delay_ms);
  return 0;
}

int VCMReceiver::RenderBufferSizeMs() {
  uint32_t timestamp_start = 0u;
  uint32_t timestamp_end = 0u;
  // Render timestamps are computed just prior to decoding. Therefore this is
  // only an estimate based on frames' timestamps and current timing state.
  jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
  if (timestamp_start == timestamp_end) {
    return 0;
  }
  // Update timing.
  const int64_t now_ms = clock_->TimeInMilliseconds();
  timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
  // Get render timestamps.
  uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
  uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
  return render_end - render_start;
}

void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}

void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
  if (jitter_buffer_.nack_mode() == kNoNack) {
    // Dual decoder mode has not been enabled.
    return;
  }
  // Update the dual receiver state.
  if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
    UpdateState(kPassive);
  }
  if (State() == kWaitForPrimaryDecode &&
      frame.Complete() && !frame.MissingFrame()) {
    UpdateState(kPassive);
  }
  if (frame.MissingFrame() || !frame.Complete()) {
    // State was corrupted, enable dual receiver.
    UpdateState(kReceiving);
  }
}
}  // namespace webrtc