blob: ce209e130dc3ec8d174e253adb63ee773c6dab25 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
stefan@webrtc.org91c63082012-01-31 10:49:08 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000011#include "webrtc/modules/video_coding/main/source/receiver.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
13#include <assert.h>
14
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000015#include "webrtc/modules/video_coding/main/interface/video_coding.h"
16#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
17#include "webrtc/modules/video_coding/main/source/internal_defines.h"
18#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
19#include "webrtc/modules/video_coding/main/source/tick_time_base.h"
20#include "webrtc/system_wrappers/interface/trace.h"
stefan@webrtc.org91c63082012-01-31 10:49:08 +000021
niklase@google.com470e71d2011-07-07 08:21:25 +000022namespace webrtc {
23
// Constructs a receiver. |timing| and |clock| are borrowed, not owned.
// |master| selects the primary receiver; a non-master (dual) receiver
// starts in the passive state (see Reset()/SetNackMode()).
// NOTE(review): |clock_| is passed to |jitter_buffer_|'s constructor in
// this init list — safe only if |clock_| is declared before
// |jitter_buffer_| in the class; confirm declaration order in the header.
VCMReceiver::VCMReceiver(VCMTiming* timing,
                         TickTimeBase* clock,
                         int32_t vcm_id,
                         int32_t receiver_id,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      vcm_id_(vcm_id),
      clock_(clock),
      receiver_id_(receiver_id),
      master_(master),
      jitter_buffer_(clock_, vcm_id, receiver_id, master),
      timing_(timing),
      render_wait_event_(),
      state_(kPassive) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000038
VCMReceiver::~VCMReceiver() {
  // Wake any thread blocked in FrameForRendering()'s wait before tearing
  // down, so it does not outlive this object's members.
  render_wait_event_.Set();
  // crit_sect_ was allocated in the constructor via CreateCriticalSection().
  delete crit_sect_;
}
43
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000044void VCMReceiver::Reset() {
45 CriticalSectionScoped cs(crit_sect_);
46 if (!jitter_buffer_.Running()) {
47 jitter_buffer_.Start();
48 } else {
49 jitter_buffer_.Flush();
50 }
51 render_wait_event_.Reset();
52 if (master_) {
53 state_ = kReceiving;
54 } else {
55 state_ = kPassive;
56 }
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000057}
58
// Initializes the receiver: resets buffers/state and, for a dual
// receiver, disables NACK by default. Always returns VCM_OK.
int32_t VCMReceiver::Initialize() {
  CriticalSectionScoped cs(crit_sect_);
  // Reset() also takes crit_sect_ — presumably CriticalSectionWrapper is
  // recursive; confirm, otherwise this self-deadlocks. TODO(review).
  Reset();
  if (!master_) {
    SetNackMode(kNoNack);
  }
  return VCM_OK;
}
67
// Forwards the latest round-trip-time estimate (ms) to the jitter buffer.
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}
71
72int32_t VCMReceiver::InsertPacket(const VCMPacket& packet, uint16_t frame_width,
73 uint16_t frame_height) {
74 // Find an empty frame.
75 VCMEncodedFrame* buffer = NULL;
76 const int32_t error = jitter_buffer_.GetFrame(packet, buffer);
77 if (error == VCM_OLD_PACKET_ERROR) {
niklase@google.com470e71d2011-07-07 08:21:25 +000078 return VCM_OK;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000079 } else if (error != VCM_OK) {
80 return error;
81 }
82 assert(buffer);
83 {
84 CriticalSectionScoped cs(crit_sect_);
85
86 if (frame_width && frame_height) {
87 buffer->SetEncodedSize(static_cast<uint32_t>(frame_width),
88 static_cast<uint32_t>(frame_height));
89 }
90
91 if (master_) {
92 // Only trace the primary receiver to make it possible to parse and plot
93 // the trace file.
94 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
95 VCMId(vcm_id_, receiver_id_),
96 "Packet seq_no %u of frame %u at %u",
97 packet.seqNum, packet.timestamp,
98 MaskWord64ToUWord32(clock_->MillisecondTimestamp()));
99 }
100
101 const int64_t now_ms = clock_->MillisecondTimestamp();
102
103 int64_t render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
104
105 if (render_time_ms < 0) {
106 // Render time error. Assume that this is due to some change in the
107 // incoming video stream and reset the JB and the timing.
108 jitter_buffer_.Flush();
109 timing_->Reset(clock_->MillisecondTimestamp());
110 return VCM_FLUSH_INDICATOR;
111 } else if (render_time_ms < now_ms - kMaxVideoDelayMs) {
112 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
113 VCMId(vcm_id_, receiver_id_),
114 "This frame should have been rendered more than %u ms ago."
115 "Flushing jitter buffer and resetting timing.",
116 kMaxVideoDelayMs);
117 jitter_buffer_.Flush();
118 timing_->Reset(clock_->MillisecondTimestamp());
119 return VCM_FLUSH_INDICATOR;
120 } else if (timing_->TargetVideoDelay() > kMaxVideoDelayMs) {
121 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
122 VCMId(vcm_id_, receiver_id_),
123 "More than %u ms target delay. Flushing jitter buffer and"
124 "resetting timing.", kMaxVideoDelayMs);
125 jitter_buffer_.Flush();
126 timing_->Reset(clock_->MillisecondTimestamp());
127 return VCM_FLUSH_INDICATOR;
128 }
129
130 // First packet received belonging to this frame.
131 if (buffer->Length() == 0) {
132 const int64_t now_ms = clock_->MillisecondTimestamp();
133 if (master_) {
134 // Only trace the primary receiver to make it possible to parse and plot
135 // the trace file.
136 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
137 VCMId(vcm_id_, receiver_id_),
138 "First packet of frame %u at %u", packet.timestamp,
139 MaskWord64ToUWord32(now_ms));
140 }
141 render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
142 if (render_time_ms >= 0) {
143 buffer->SetRenderTime(render_time_ms);
144 } else {
145 buffer->SetRenderTime(now_ms);
146 }
147 }
148
149 // Insert packet into the jitter buffer both media and empty packets.
150 const VCMFrameBufferEnum
151 ret = jitter_buffer_.InsertPacket(buffer, packet);
152 if (ret == kFlushIndicator) {
153 return VCM_FLUSH_INDICATOR;
154 } else if (ret < 0) {
155 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
156 VCMId(vcm_id_, receiver_id_),
157 "Error inserting packet seq_no=%u, time_stamp=%u",
158 packet.seqNum, packet.timestamp);
159 return VCM_JITTER_BUFFER_ERROR;
160 }
161 }
162 return VCM_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +0000163}
164
// Returns the next frame to decode, waiting at most |max_wait_time_ms|,
// or NULL if none is available in time. Out-param |next_render_time_ms|
// receives the frame's scheduled render time (-1 if unknown). Also feeds
// the timing module with jitter/delay updates for the chosen timestamp.
// NOTE(review): when |render_timing| is true this calls the 3-arg
// FrameForDecoding(), and when false it calls FrameForRendering() — the
// naming looks inverted relative to the flag; confirm against callers.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t& next_render_time_ms,
    bool render_timing,
    VCMReceiver* dual_receiver) {
  // No need to enter the critical section here since the jitter buffer
  // is thread-safe.
  FrameType incoming_frame_type = kVideoFrameDelta;
  next_render_time_ms = -1;
  const int64_t start_time_ms = clock_->MillisecondTimestamp();
  // NextTimestamp() may block up to |max_wait_time_ms| waiting for data.
  int64_t ret = jitter_buffer_.NextTimestamp(max_wait_time_ms,
                                             &incoming_frame_type,
                                             &next_render_time_ms);
  if (ret < 0) {
    // No timestamp in jitter buffer at the moment.
    return NULL;
  }
  const uint32_t time_stamp = static_cast<uint32_t>(ret);

  // Update the timing.
  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
  timing_->UpdateCurrentDelay(time_stamp);

  // Deduct the time already spent waiting in NextTimestamp() from the
  // caller's budget; clamp at zero.
  const int32_t temp_wait_time = max_wait_time_ms -
      static_cast<int32_t>(clock_->MillisecondTimestamp() - start_time_ms);
  uint16_t new_max_wait_time = static_cast<uint16_t>(VCM_MAX(temp_wait_time,
                                                             0));

  VCMEncodedFrame* frame = NULL;

  if (render_timing) {
    frame = FrameForDecoding(new_max_wait_time, next_render_time_ms,
                             dual_receiver);
  } else {
    frame = FrameForRendering(new_max_wait_time, next_render_time_ms,
                              dual_receiver);
  }

  if (frame != NULL) {
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(time_stamp, last_packet_time_ms);
    }
    if (dual_receiver != NULL) {
      // Let the dual receiver react to the frame we are about to decode.
      dual_receiver->UpdateState(*frame);
    }
  }
  return frame;
}
219
// Fetches a frame for immediate decoding: prefers a complete frame, and
// only falls back to an incomplete one when the decode deadline (derived
// from |next_render_time_ms|) leaves no time to keep waiting. When an
// incomplete frame may corrupt jitter-buffer state and a passive dual
// receiver (infinite NACK) is available, the current jitter-buffer state
// is copied to it first so it can recover. Returns NULL if nothing is
// available within |max_wait_time_ms|.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t next_render_time_ms,
    VCMReceiver* dual_receiver) {
  // How long can we wait until we must decode the next frame.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->MillisecondTimestamp());

  // Try to get a complete frame from the jitter buffer.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL && max_wait_time_ms == 0 && wait_time_ms > 0) {
    // If we're not allowed to wait for frames to get complete we must
    // calculate if it's time to decode, and if it's not we will just return
    // for now.
    return NULL;
  }

  if (frame == NULL && VCM_MIN(wait_time_ms, max_wait_time_ms) == 0) {
    // No time to wait for a complete frame, check if we have an incomplete.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNackInfinite);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
      frame = jitter_buffer_.GetFrameForDecoding();
      assert(frame);
    } else {
      frame = jitter_buffer_.GetFrameForDecoding();
    }
  }
  if (frame == NULL) {
    // Wait for a complete frame.
    frame = jitter_buffer_.GetCompleteFrameForDecoding(max_wait_time_ms);
  }
  if (frame == NULL) {
    // Get an incomplete frame.
    if (timing_->MaxWaitingTime(next_render_time_ms,
                                clock_->MillisecondTimestamp()) > 0) {
      // Still time to wait for a complete frame.
      return NULL;
    }

    // No time left to wait, we must decode this frame now.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNackInfinite);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
279
// Fetches a frame for a renderer that renders as soon as decode finishes:
// blocks (on |render_wait_event_|) until just before the frame's render
// deadline, then returns a complete frame if available, otherwise an
// incomplete one. Returns NULL when the caller's |max_wait_time_ms|
// budget is smaller than the required wait. The destructor sets
// |render_wait_event_| to unblock this wait on shutdown.
VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
                                                int64_t next_render_time_ms,
                                                VCMReceiver* dual_receiver) {
  // How long MUST we wait until we must decode the next frame. This is
  // different for the case where we have a renderer which can render at a
  // specified time. Here we must wait as long as possible before giving the
  // frame to the decoder, which will render the frame as soon as it has been
  // decoded.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->MillisecondTimestamp());
  if (max_wait_time_ms < wait_time_ms) {
    // If we're not allowed to wait until the frame is supposed to be rendered
    // we will have to return NULL for now.
    return NULL;
  }
  // Wait until it's time to render.
  render_wait_event_.Wait(wait_time_ms);

  // Get a complete frame if possible.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL) {
    // Get an incomplete frame.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNackInfinite);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
316
// Returns a frame obtained from FrameForDecoding() back to the jitter
// buffer for reuse.
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}
320
// Reports incoming-stream statistics. |bitrate| is written in kbps,
// |framerate| in frames per second (as provided by the jitter buffer).
// Both out-params must be non-NULL.
void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
  *bitrate /= 1000;  // Should be in kbps.
}
328
// Fills |frame_count| with the number of received delta and key frames.
// |frame_count| must be non-NULL.
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
  assert(frame_count);
  jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
                                 &frame_count->numKeyFrames);
}
334
// Returns the number of packets the jitter buffer has discarded.
uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}
338
// Sets the NACK mode on the jitter buffer and, for a dual receiver,
// drops back to the passive state.
void VCMReceiver::SetNackMode(VCMNackMode nackMode) {
  CriticalSectionScoped cs(crit_sect_);
  // Default to always having NACK enabled in hybrid mode.
  // kLowRttNackMs is the low-RTT threshold; -1 presumably means "no high
  // threshold" — confirm against VCMJitterBuffer::SetNackMode.
  jitter_buffer_.SetNackMode(nackMode, kLowRttNackMs, -1);
  if (!master_) {
    state_ = kPassive;  // The dual decoder defaults to passive.
  }
}
347
// Returns the jitter buffer's current NACK mode.
VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}
352
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000353VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
354 uint16_t* size) {
355 bool extended = false;
356 uint16_t nack_list_size = 0;
357 uint16_t* internal_nack_list = jitter_buffer_.CreateNackList(&nack_list_size,
358 &extended);
359 if (internal_nack_list == NULL && nack_list_size == 0xffff) {
360 // This combination is used to trigger key frame requests.
361 *size = 0;
362 return kNackKeyFrameRequest;
363 }
364 if (nack_list_size > *size) {
365 *size = nack_list_size;
366 return kNackNeedMoreMemory;
367 }
368 if (internal_nack_list != NULL && nack_list_size > 0) {
369 memcpy(nack_list, internal_nack_list, nack_list_size * sizeof(uint16_t));
370 }
371 *size = nack_list_size;
372 return kNackOk;
niklase@google.com470e71d2011-07-07 08:21:25 +0000373}
374
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000375// Decide whether we should change decoder state. This should be done if the
376// dual decoder has caught up with the decoder decoding with packet losses.
377bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
378 VCMReceiver& dual_receiver) const {
379 if (dual_frame == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000380 return false;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000381 }
382 if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
383 dual_receiver.UpdateState(kWaitForPrimaryDecode);
384 return true;
385 }
386 return false;
niklase@google.com470e71d2011-07-07 08:21:25 +0000387}
388
// Copies |receiver|'s jitter-buffer state into this receiver, so a dual
// receiver can take over from the point where the primary risks
// corruption (see FrameForDecoding/FrameForRendering).
void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}
393
// Returns the receiver's current state (thread-safe accessor).
VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}
398
// Transitions the receiver state. A passive receiver must never be moved
// directly to kWaitForPrimaryDecode (invariant enforced by the assert).
void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}
404
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000405void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
406 if (jitter_buffer_.nack_mode() == kNoNack) {
407 // Dual decoder mode has not been enabled.
408 return;
409 }
410 // Update the dual receiver state.
411 if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
412 UpdateState(kPassive);
413 }
414 if (State() == kWaitForPrimaryDecode &&
415 frame.Complete() && !frame.MissingFrame()) {
416 UpdateState(kPassive);
417 }
418 if (frame.MissingFrame() || !frame.Complete()) {
419 // State was corrupted, enable dual receiver.
420 UpdateState(kReceiving);
421 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000422}
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000423} // namespace webrtc