blob: d2705bbbed94c3e11c9396643c882190be919708 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
stefan@webrtc.org91c63082012-01-31 10:49:08 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000011#include "webrtc/modules/video_coding/main/source/receiver.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
13#include <assert.h>
14
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000015#include "webrtc/modules/video_coding/main/interface/video_coding.h"
16#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
17#include "webrtc/modules/video_coding/main/source/internal_defines.h"
18#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +000019#include "webrtc/system_wrappers/interface/clock.h"
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000020#include "webrtc/system_wrappers/interface/trace.h"
hclam@chromium.org806dc3b2013-04-09 19:54:10 +000021#include "webrtc/system_wrappers/interface/trace_event.h"
stefan@webrtc.org91c63082012-01-31 10:49:08 +000022
niklase@google.com470e71d2011-07-07 08:21:25 +000023namespace webrtc {
24
// Upper bound (in ms) accepted by SetMinReceiverDelay(); larger requested
// delays are rejected.
enum { kMaxReceiverDelayMs = 10000 };
26
// Constructs a receiver wrapping a jitter buffer and timing module.
// |timing| and |clock| are borrowed, not owned. |master| selects the primary
// receiver; the non-master ("dual") receiver starts passive and is only used
// for loss recovery. kPassive/kMaxVideoDelayMs are the defaults until
// Reset()/SetMinReceiverDelay() are called.
VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         EventFactory* event_factory,
                         int32_t vcm_id,
                         int32_t receiver_id,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      vcm_id_(vcm_id),
      clock_(clock),
      receiver_id_(receiver_id),
      master_(master),
      jitter_buffer_(clock_, event_factory, vcm_id, receiver_id, master),
      timing_(timing),
      render_wait_event_(event_factory->CreateEvent()),
      state_(kPassive),
      max_video_delay_ms_(kMaxVideoDelayMs) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000043
VCMReceiver::~VCMReceiver() {
  // Wake any thread blocked in FrameForRendering() before tearing down.
  render_wait_event_->Set();
  delete crit_sect_;
}
48
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000049void VCMReceiver::Reset() {
50 CriticalSectionScoped cs(crit_sect_);
51 if (!jitter_buffer_.Running()) {
52 jitter_buffer_.Start();
53 } else {
54 jitter_buffer_.Flush();
55 }
stefan@webrtc.org2baf5f52013-03-13 08:46:25 +000056 render_wait_event_->Reset();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000057 if (master_) {
58 state_ = kReceiving;
59 } else {
60 state_ = kPassive;
61 }
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000062}
63
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000064int32_t VCMReceiver::Initialize() {
65 CriticalSectionScoped cs(crit_sect_);
66 Reset();
67 if (!master_) {
stefan@webrtc.orga64300a2013-03-04 15:24:40 +000068 SetNackMode(kNoNack, -1, -1);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000069 }
70 return VCM_OK;
71}
72
// Forwards the latest round-trip-time estimate (ms) to the jitter buffer,
// which uses it for its NACK/jitter calculations.
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}
76
stefan@webrtc.orga64300a2013-03-04 15:24:40 +000077int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
78 uint16_t frame_width,
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000079 uint16_t frame_height) {
80 // Find an empty frame.
81 VCMEncodedFrame* buffer = NULL;
82 const int32_t error = jitter_buffer_.GetFrame(packet, buffer);
83 if (error == VCM_OLD_PACKET_ERROR) {
niklase@google.com470e71d2011-07-07 08:21:25 +000084 return VCM_OK;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000085 } else if (error != VCM_OK) {
86 return error;
87 }
88 assert(buffer);
89 {
90 CriticalSectionScoped cs(crit_sect_);
91
92 if (frame_width && frame_height) {
93 buffer->SetEncodedSize(static_cast<uint32_t>(frame_width),
94 static_cast<uint32_t>(frame_height));
95 }
96
97 if (master_) {
98 // Only trace the primary receiver to make it possible to parse and plot
99 // the trace file.
100 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
101 VCMId(vcm_id_, receiver_id_),
102 "Packet seq_no %u of frame %u at %u",
103 packet.seqNum, packet.timestamp,
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000104 MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000105 }
106
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000107 const int64_t now_ms = clock_->TimeInMilliseconds();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000108
109 int64_t render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
110
111 if (render_time_ms < 0) {
112 // Render time error. Assume that this is due to some change in the
113 // incoming video stream and reset the JB and the timing.
114 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000115 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000116 return VCM_FLUSH_INDICATOR;
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000117 } else if (render_time_ms < now_ms - max_video_delay_ms_) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000118 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
119 VCMId(vcm_id_, receiver_id_),
120 "This frame should have been rendered more than %u ms ago."
121 "Flushing jitter buffer and resetting timing.",
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000122 max_video_delay_ms_);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000123 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000124 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000125 return VCM_FLUSH_INDICATOR;
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000126 } else if (static_cast<int>(timing_->TargetVideoDelay()) >
127 max_video_delay_ms_) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000128 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
129 VCMId(vcm_id_, receiver_id_),
130 "More than %u ms target delay. Flushing jitter buffer and"
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000131 "resetting timing.", max_video_delay_ms_);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000132 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000133 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000134 return VCM_FLUSH_INDICATOR;
135 }
136
137 // First packet received belonging to this frame.
138 if (buffer->Length() == 0) {
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000139 const int64_t now_ms = clock_->TimeInMilliseconds();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000140 if (master_) {
141 // Only trace the primary receiver to make it possible to parse and plot
142 // the trace file.
143 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
144 VCMId(vcm_id_, receiver_id_),
145 "First packet of frame %u at %u", packet.timestamp,
146 MaskWord64ToUWord32(now_ms));
147 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000148 }
149
150 // Insert packet into the jitter buffer both media and empty packets.
151 const VCMFrameBufferEnum
152 ret = jitter_buffer_.InsertPacket(buffer, packet);
153 if (ret == kFlushIndicator) {
154 return VCM_FLUSH_INDICATOR;
155 } else if (ret < 0) {
156 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
157 VCMId(vcm_id_, receiver_id_),
158 "Error inserting packet seq_no=%u, time_stamp=%u",
159 packet.seqNum, packet.timestamp);
160 return VCM_JITTER_BUFFER_ERROR;
161 }
162 }
163 return VCM_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +0000164}
165
// Fetches the next frame for decoding, waiting at most |max_wait_time_ms|.
// On success the frame's render time is set and |next_render_time_ms| is
// updated (set to -1 if no timestamp is available). Returns NULL when no
// frame is ready in time. Notifies |dual_receiver| (may be NULL) of the
// retrieved frame so it can update its recovery state.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t& next_render_time_ms,
    bool render_timing,
    VCMReceiver* dual_receiver) {
  TRACE_EVENT0("webrtc", "Recv::FrameForDecoding");
  // No need to enter the critical section here since the jitter buffer
  // is thread-safe.
  FrameType incoming_frame_type = kVideoFrameDelta;
  next_render_time_ms = -1;
  const int64_t start_time_ms = clock_->TimeInMilliseconds();
  // NextTimestamp() may block up to |max_wait_time_ms|; |ret| is the RTP
  // timestamp of the next frame, or < 0 if none arrived.
  int64_t ret = jitter_buffer_.NextTimestamp(max_wait_time_ms,
                                             &incoming_frame_type,
                                             &next_render_time_ms);
  if (ret < 0) {
    // No timestamp in jitter buffer at the moment.
    return NULL;
  }

  // Feed the current jitter estimate into the timing module and compute the
  // render time for the pending timestamp.
  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
  timing_->UpdateCurrentDelay(ret);
  const int64_t now_ms = clock_->TimeInMilliseconds();
  next_render_time_ms = timing_->RenderTimeMs(ret, now_ms);

  // Shrink the remaining wait budget by the time already spent in
  // NextTimestamp(), clamped at zero.
  const int32_t temp_wait_time = max_wait_time_ms -
      static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
  uint16_t new_max_wait_time = static_cast<uint16_t>(VCM_MAX(temp_wait_time,
                                                             0));

  VCMEncodedFrame* frame = NULL;

  // NOTE(review): the branch looks inverted relative to the names —
  // render_timing == true takes FrameForDecoding() while false takes
  // FrameForRendering() — confirm this is intentional against callers.
  if (render_timing) {
    frame = FrameForDecoding(new_max_wait_time, next_render_time_ms,
                             dual_receiver);
  } else {
    frame = FrameForRendering(new_max_wait_time, next_render_time_ms,
                              dual_receiver);
  }

  if (frame != NULL) {
    // Set render time. The clock is deliberately re-sampled here: the calls
    // above may have blocked, so the earlier |now_ms| can be stale.
    const int64_t now_ms = clock_->TimeInMilliseconds();
    const int64_t render_time_ms = timing_->RenderTimeMs(frame->TimeStamp(),
                                                         now_ms);
    if (render_time_ms >= 0) {
      frame->SetRenderTime(render_time_ms);
    } else {
      // Render time could not be computed; render immediately.
      frame->SetRenderTime(now_ms);
    }
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(frame->TimeStamp(), last_packet_time_ms);
    }
    if (dual_receiver != NULL) {
      dual_receiver->UpdateState(*frame);
    }
  }
  return frame;
}
230
// Fetches a frame for immediate decoding: prefers a complete frame, but when
// the render deadline (|next_render_time_ms|) leaves no time to wait it will
// hand out an incomplete frame. Before releasing an incomplete frame that
// may corrupt decoding, the jitter buffer state is copied into a passive
// |dual_receiver| (if provided and NACK-enabled) so it can recover the loss.
// Returns NULL when it is still worth waiting for a complete frame.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t next_render_time_ms,
    VCMReceiver* dual_receiver) {
  TRACE_EVENT1("webrtc", "FrameForDecoding",
               "max_wait", max_wait_time_ms);
  // How long can we wait until we must decode the next frame.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->TimeInMilliseconds());

  // Try to get a complete frame from the jitter buffer.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL && max_wait_time_ms == 0 && wait_time_ms > 0) {
    // If we're not allowed to wait for frames to get complete we must
    // calculate if it's time to decode, and if it's not we will just return
    // for now.
    return NULL;
  }

  if (frame == NULL && VCM_MIN(wait_time_ms, max_wait_time_ms) == 0) {
    // No time to wait for a complete frame, check if we have an incomplete.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
      frame = jitter_buffer_.GetFrameForDecoding();
      assert(frame);
    } else {
      frame = jitter_buffer_.GetFrameForDecoding();
    }
  }
  if (frame == NULL) {
    // Wait for a complete frame. This call may block up to
    // |max_wait_time_ms|.
    frame = jitter_buffer_.GetCompleteFrameForDecoding(max_wait_time_ms);
  }
  if (frame == NULL) {
    // Get an incomplete frame.
    if (timing_->MaxWaitingTime(next_render_time_ms,
                                clock_->TimeInMilliseconds()) > 0) {
      // Still time to wait for a complete frame.
      return NULL;
    }

    // No time left to wait, we must decode this frame now.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
292
// Fetches a frame for a renderer that displays immediately after decode.
// Blocks (via |render_wait_event_|) until just before the frame's render
// deadline, then returns a complete frame if available, or an incomplete one
// (after handing jitter buffer state to a passive |dual_receiver| for
// recovery). Returns NULL if |max_wait_time_ms| is too short to reach the
// deadline.
VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
                                                int64_t next_render_time_ms,
                                                VCMReceiver* dual_receiver) {
  TRACE_EVENT0("webrtc", "FrameForRendering");
  // How long MUST we wait until we must decode the next frame. This is
  // different for the case where we have a renderer which can render at a
  // specified time. Here we must wait as long as possible before giving the
  // frame to the decoder, which will render the frame as soon as it has been
  // decoded.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->TimeInMilliseconds());
  if (max_wait_time_ms < wait_time_ms) {
    // If we're not allowed to wait until the frame is supposed to be rendered
    // we will have to return NULL for now.
    return NULL;
  }
  // Wait until it's time to render. The destructor Set()s this event to
  // unblock a waiting thread during teardown.
  render_wait_event_->Wait(wait_time_ms);

  // Get a complete frame if possible.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL) {
    // Get an incomplete frame.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
330
// Returns a frame previously handed out by FrameForDecoding()/
// FrameForRendering() to the jitter buffer for reuse.
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}
334
// Reads the incoming bitrate and framerate estimates from the jitter buffer.
// Both out-params must be non-NULL.
void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
}
341
// Fills |frame_count| (must be non-NULL) with the number of received delta
// and key frames, as counted by the jitter buffer.
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
  assert(frame_count);
  jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
                                 &frame_count->numKeyFrames);
}
347
// Returns the total number of packets the jitter buffer has discarded.
uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}
351
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000352void VCMReceiver::SetNackMode(VCMNackMode nackMode,
353 int low_rtt_nack_threshold_ms,
354 int high_rtt_nack_threshold_ms) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000355 CriticalSectionScoped cs(crit_sect_);
356 // Default to always having NACK enabled in hybrid mode.
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000357 jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
358 high_rtt_nack_threshold_ms);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000359 if (!master_) {
360 state_ = kPassive; // The dual decoder defaults to passive.
361 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000362}
363
// Forwards NACK list sizing to the jitter buffer: the maximum number of
// sequence numbers kept in the NACK list and how old (in packets) a missing
// packet may be before it is no longer NACKed.
void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack);
}
369
// Returns the jitter buffer's currently configured NACK mode.
VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}
374
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000375VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000376 uint16_t size,
377 uint16_t* nack_list_length) {
378 bool request_key_frame = false;
379 uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
380 nack_list_length, &request_key_frame);
381 if (request_key_frame) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000382 // This combination is used to trigger key frame requests.
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000383 return kNackKeyFrameRequest;
384 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000385 if (*nack_list_length > size) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000386 return kNackNeedMoreMemory;
387 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000388 if (internal_nack_list != NULL && *nack_list_length > 0) {
389 memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000390 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000391 return kNackOk;
niklase@google.com470e71d2011-07-07 08:21:25 +0000392}
393
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000394// Decide whether we should change decoder state. This should be done if the
395// dual decoder has caught up with the decoder decoding with packet losses.
396bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
397 VCMReceiver& dual_receiver) const {
398 if (dual_frame == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000399 return false;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000400 }
401 if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
402 dual_receiver.UpdateState(kWaitForPrimaryDecode);
403 return true;
404 }
405 return false;
niklase@google.com470e71d2011-07-07 08:21:25 +0000406}
407
// Clones |receiver|'s jitter buffer contents into this receiver's jitter
// buffer, used when activating the dual receiver for loss recovery.
void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}
412
// Returns the receiver's current state (kPassive / kReceiving /
// kWaitForPrimaryDecode) under the lock.
VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}
417
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000418int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
419 CriticalSectionScoped cs(crit_sect_);
420 if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
421 return -1;
422 }
423 jitter_buffer_.SetMaxJitterEstimate(desired_delay_ms);
424 max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
425 timing_->SetMaxVideoDelay(max_video_delay_ms_);
mikhal@webrtc.orgdbd6a6d2013-04-17 16:23:22 +0000426 // Initializing timing to the desired delay.
427 timing_->SetRequiredDelay(desired_delay_ms);
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000428 return 0;
429}
430
// Transitions the receiver to |new_state|. A passive receiver must not jump
// straight to kWaitForPrimaryDecode (it has to go through kReceiving first).
void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}
436
// Updates the dual receiver's state based on a frame just handed to the
// decoder. The three checks are evaluated in order and a later one may
// override an earlier transition — do not reorder them.
void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
  if (jitter_buffer_.nack_mode() == kNoNack) {
    // Dual decoder mode has not been enabled.
    return;
  }
  // Update the dual receiver state.
  // A complete key frame resynchronizes the stream; recovery is done.
  if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
    UpdateState(kPassive);
  }
  // The primary has decoded a complete, non-missing frame while we were
  // waiting for it: recovery is done.
  if (State() == kWaitForPrimaryDecode &&
      frame.Complete() && !frame.MissingFrame()) {
    UpdateState(kPassive);
  }
  if (frame.MissingFrame() || !frame.Complete()) {
    // State was corrupted, enable dual receiver.
    UpdateState(kReceiving);
  }
}
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000455} // namespace webrtc