blob: 04fd13101497358f27026d20b762a41ee67937f9 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
stefan@webrtc.org91c63082012-01-31 10:49:08 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000011#include "webrtc/modules/video_coding/main/source/receiver.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
13#include <assert.h>
14
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000015#include "webrtc/modules/video_coding/main/interface/video_coding.h"
16#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
17#include "webrtc/modules/video_coding/main/source/internal_defines.h"
18#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +000019#include "webrtc/system_wrappers/interface/clock.h"
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000020#include "webrtc/system_wrappers/interface/trace.h"
stefan@webrtc.org91c63082012-01-31 10:49:08 +000021
niklase@google.com470e71d2011-07-07 08:21:25 +000022namespace webrtc {
23
// Upper bound, in milliseconds, on the extra receiver delay that can be
// requested via SetMinReceiverDelay().
enum { kMaxReceiverDelayMs = 10000 };
25
// Constructs a receiver around a jitter buffer and a timing module.
// |timing| and |clock| are borrowed, not owned, and must outlive this
// object. |master| selects the primary receiver; a non-master (dual)
// receiver starts out passive (see Reset()).
VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         int32_t vcm_id,
                         int32_t receiver_id,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      vcm_id_(vcm_id),
      clock_(clock),
      receiver_id_(receiver_id),
      master_(master),
      jitter_buffer_(clock_, vcm_id, receiver_id, master),
      timing_(timing),
      render_wait_event_(),
      state_(kPassive),
      max_video_delay_ms_(kMaxVideoDelayMs) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000041
VCMReceiver::~VCMReceiver() {
  // Wake any thread blocked in render_wait_event_.Wait() (see
  // FrameForRendering()) before the event is destroyed.
  render_wait_event_.Set();
  delete crit_sect_;
}
46
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000047void VCMReceiver::Reset() {
48 CriticalSectionScoped cs(crit_sect_);
49 if (!jitter_buffer_.Running()) {
50 jitter_buffer_.Start();
51 } else {
52 jitter_buffer_.Flush();
53 }
54 render_wait_event_.Reset();
55 if (master_) {
56 state_ = kReceiving;
57 } else {
58 state_ = kPassive;
59 }
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000060}
61
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000062int32_t VCMReceiver::Initialize() {
63 CriticalSectionScoped cs(crit_sect_);
64 Reset();
65 if (!master_) {
stefan@webrtc.orga64300a2013-03-04 15:24:40 +000066 SetNackMode(kNoNack, -1, -1);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000067 }
68 return VCM_OK;
69}
70
// Forwards the latest round-trip-time estimate (milliseconds) to the
// jitter buffer, which uses it for its NACK decisions.
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}
74
stefan@webrtc.orga64300a2013-03-04 15:24:40 +000075int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
76 uint16_t frame_width,
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000077 uint16_t frame_height) {
78 // Find an empty frame.
79 VCMEncodedFrame* buffer = NULL;
80 const int32_t error = jitter_buffer_.GetFrame(packet, buffer);
81 if (error == VCM_OLD_PACKET_ERROR) {
niklase@google.com470e71d2011-07-07 08:21:25 +000082 return VCM_OK;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000083 } else if (error != VCM_OK) {
84 return error;
85 }
86 assert(buffer);
87 {
88 CriticalSectionScoped cs(crit_sect_);
89
90 if (frame_width && frame_height) {
91 buffer->SetEncodedSize(static_cast<uint32_t>(frame_width),
92 static_cast<uint32_t>(frame_height));
93 }
94
95 if (master_) {
96 // Only trace the primary receiver to make it possible to parse and plot
97 // the trace file.
98 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
99 VCMId(vcm_id_, receiver_id_),
100 "Packet seq_no %u of frame %u at %u",
101 packet.seqNum, packet.timestamp,
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000102 MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000103 }
104
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000105 const int64_t now_ms = clock_->TimeInMilliseconds();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000106
107 int64_t render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
108
109 if (render_time_ms < 0) {
110 // Render time error. Assume that this is due to some change in the
111 // incoming video stream and reset the JB and the timing.
112 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000113 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000114 return VCM_FLUSH_INDICATOR;
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000115 } else if (render_time_ms < now_ms - max_video_delay_ms_) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000116 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
117 VCMId(vcm_id_, receiver_id_),
118 "This frame should have been rendered more than %u ms ago."
119 "Flushing jitter buffer and resetting timing.",
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000120 max_video_delay_ms_);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000121 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000122 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000123 return VCM_FLUSH_INDICATOR;
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000124 } else if (static_cast<int>(timing_->TargetVideoDelay()) >
125 max_video_delay_ms_) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000126 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
127 VCMId(vcm_id_, receiver_id_),
128 "More than %u ms target delay. Flushing jitter buffer and"
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000129 "resetting timing.", max_video_delay_ms_);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000130 jitter_buffer_.Flush();
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000131 timing_->Reset(clock_->TimeInMilliseconds());
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000132 return VCM_FLUSH_INDICATOR;
133 }
134
135 // First packet received belonging to this frame.
136 if (buffer->Length() == 0) {
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000137 const int64_t now_ms = clock_->TimeInMilliseconds();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000138 if (master_) {
139 // Only trace the primary receiver to make it possible to parse and plot
140 // the trace file.
141 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
142 VCMId(vcm_id_, receiver_id_),
143 "First packet of frame %u at %u", packet.timestamp,
144 MaskWord64ToUWord32(now_ms));
145 }
146 render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
147 if (render_time_ms >= 0) {
148 buffer->SetRenderTime(render_time_ms);
149 } else {
150 buffer->SetRenderTime(now_ms);
151 }
152 }
153
154 // Insert packet into the jitter buffer both media and empty packets.
155 const VCMFrameBufferEnum
156 ret = jitter_buffer_.InsertPacket(buffer, packet);
157 if (ret == kFlushIndicator) {
158 return VCM_FLUSH_INDICATOR;
159 } else if (ret < 0) {
160 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
161 VCMId(vcm_id_, receiver_id_),
162 "Error inserting packet seq_no=%u, time_stamp=%u",
163 packet.seqNum, packet.timestamp);
164 return VCM_JITTER_BUFFER_ERROR;
165 }
166 }
167 return VCM_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +0000168}
169
// Returns the next frame to decode, waiting at most |max_wait_time_ms|,
// or NULL if none becomes available. |next_render_time_ms| is an output:
// the frame's render time, or -1 when no timestamp was found. When
// |render_timing| is true the decode-time path is used; otherwise the
// frame is held until its render time (FrameForRendering). If a
// |dual_receiver| is supplied its state is updated with each delivered
// frame.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t& next_render_time_ms,
    bool render_timing,
    VCMReceiver* dual_receiver) {
  // No need to enter the critical section here since the jitter buffer
  // is thread-safe.
  FrameType incoming_frame_type = kVideoFrameDelta;
  next_render_time_ms = -1;
  const int64_t start_time_ms = clock_->TimeInMilliseconds();
  int64_t ret = jitter_buffer_.NextTimestamp(max_wait_time_ms,
                                             &incoming_frame_type,
                                             &next_render_time_ms);
  if (ret < 0) {
    // No timestamp in jitter buffer at the moment.
    return NULL;
  }
  const uint32_t time_stamp = static_cast<uint32_t>(ret);

  // Update the timing.
  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
  timing_->UpdateCurrentDelay(time_stamp);

  // Remaining wait budget: subtract the time already spent waiting for a
  // timestamp above.
  const int32_t temp_wait_time = max_wait_time_ms -
      static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
  uint16_t new_max_wait_time = static_cast<uint16_t>(VCM_MAX(temp_wait_time,
                                                             0));

  VCMEncodedFrame* frame = NULL;

  if (render_timing) {
    frame = FrameForDecoding(new_max_wait_time, next_render_time_ms,
                             dual_receiver);
  } else {
    frame = FrameForRendering(new_max_wait_time, next_render_time_ms,
                              dual_receiver);
  }

  if (frame != NULL) {
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(time_stamp, last_packet_time_ms);
    }
    if (dual_receiver != NULL) {
      dual_receiver->UpdateState(*frame);
    }
  }
  return frame;
}
224
// Decode-time variant: tries to return a complete frame, but hands out an
// incomplete frame once there is no time left to wait (relative to
// |next_render_time_ms|). When the dual receiver is passive with NACK
// enabled and the sequence is not complete, the jitter buffer state is
// copied to the dual receiver first so decoding an incomplete frame does
// not corrupt recovery. Returns NULL when it is not yet time to decode.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t next_render_time_ms,
    VCMReceiver* dual_receiver) {
  // How long can we wait until we must decode the next frame.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->TimeInMilliseconds());

  // Try to get a complete frame from the jitter buffer.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL && max_wait_time_ms == 0 && wait_time_ms > 0) {
    // If we're not allowed to wait for frames to get complete we must
    // calculate if it's time to decode, and if it's not we will just return
    // for now.
    return NULL;
  }

  if (frame == NULL && VCM_MIN(wait_time_ms, max_wait_time_ms) == 0) {
    // No time to wait for a complete frame, check if we have an incomplete.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
      frame = jitter_buffer_.GetFrameForDecoding();
      assert(frame);
    } else {
      frame = jitter_buffer_.GetFrameForDecoding();
    }
  }
  if (frame == NULL) {
    // Wait for a complete frame.
    frame = jitter_buffer_.GetCompleteFrameForDecoding(max_wait_time_ms);
  }
  if (frame == NULL) {
    // Get an incomplete frame.
    if (timing_->MaxWaitingTime(next_render_time_ms,
                                clock_->TimeInMilliseconds()) > 0) {
      // Still time to wait for a complete frame.
      return NULL;
    }

    // No time left to wait, we must decode this frame now.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
284
// Render-time variant: blocks on |render_wait_event_| until the frame's
// render deadline before handing it out, so a renderer that displays
// immediately after decode shows it at the right time. Returns NULL when
// |max_wait_time_ms| is shorter than the required wait.
VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
                                                int64_t next_render_time_ms,
                                                VCMReceiver* dual_receiver) {
  // How long MUST we wait until we must decode the next frame. This is
  // different for the case where we have a renderer which can render at a
  // specified time. Here we must wait as long as possible before giving the
  // frame to the decoder, which will render the frame as soon as it has been
  // decoded.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->TimeInMilliseconds());
  if (max_wait_time_ms < wait_time_ms) {
    // If we're not allowed to wait until the frame is supposed to be rendered
    // we will have to return NULL for now.
    return NULL;
  }
  // Wait until it's time to render.
  render_wait_event_.Wait(wait_time_ms);

  // Get a complete frame if possible.
  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);

  if (frame == NULL) {
    // Get an incomplete frame.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }

    frame = jitter_buffer_.GetFrameForDecoding();
  }
  return frame;
}
321
// Returns a frame previously obtained from FrameForDecoding() to the
// jitter buffer for reuse.
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}
325
// Reports incoming-stream statistics. |bitrate| is returned in kbps and
// |framerate| in frames per second; both pointers must be non-NULL.
void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
  *bitrate /= 1000;  // Should be in kbps.
}
333
// Fills |frame_count| with the number of delta and key frames received so
// far, as tracked by the jitter buffer. |frame_count| must be non-NULL.
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
  assert(frame_count);
  jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
                                 &frame_count->numKeyFrames);
}
339
// Returns the total number of packets the jitter buffer has discarded.
uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}
343
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000344void VCMReceiver::SetNackMode(VCMNackMode nackMode,
345 int low_rtt_nack_threshold_ms,
346 int high_rtt_nack_threshold_ms) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000347 CriticalSectionScoped cs(crit_sect_);
348 // Default to always having NACK enabled in hybrid mode.
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000349 jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
350 high_rtt_nack_threshold_ms);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000351 if (!master_) {
352 state_ = kPassive; // The dual decoder defaults to passive.
353 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000354}
355
// Passes NACK list limits through to the jitter buffer: the maximum
// number of sequence numbers kept and the maximum age (in packets) before
// a missing packet is no longer NACKed.
void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack);
}
361
// Returns the jitter buffer's current NACK mode.
VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}
366
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000367VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000368 uint16_t size,
369 uint16_t* nack_list_length) {
370 bool request_key_frame = false;
371 uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
372 nack_list_length, &request_key_frame);
373 if (request_key_frame) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000374 // This combination is used to trigger key frame requests.
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000375 return kNackKeyFrameRequest;
376 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000377 if (*nack_list_length > size) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000378 return kNackNeedMoreMemory;
379 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000380 if (internal_nack_list != NULL && *nack_list_length > 0) {
381 memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000382 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000383 return kNackOk;
niklase@google.com470e71d2011-07-07 08:21:25 +0000384}
385
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000386// Decide whether we should change decoder state. This should be done if the
387// dual decoder has caught up with the decoder decoding with packet losses.
388bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
389 VCMReceiver& dual_receiver) const {
390 if (dual_frame == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000391 return false;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000392 }
393 if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
394 dual_receiver.UpdateState(kWaitForPrimaryDecode);
395 return true;
396 }
397 return false;
niklase@google.com470e71d2011-07-07 08:21:25 +0000398}
399
// Clones |receiver|'s jitter buffer contents into this receiver's jitter
// buffer (used when activating the dual receiver).
void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}
404
// Returns the receiver's current state under the lock.
VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}
409
// Imposes an extra minimum receiver-side delay of |desired_delay_ms|.
// The jitter estimate is capped accordingly and the maximum tolerated
// video delay is raised by the same amount. Returns 0 on success, -1 if
// |desired_delay_ms| is negative or exceeds kMaxReceiverDelayMs.
int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
  CriticalSectionScoped cs(crit_sect_);
  if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
    return -1;
  }
  jitter_buffer_.SetMaxJitterEstimate(desired_delay_ms);
  // Grow the flush threshold used in InsertPacket() by the requested delay.
  max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
  timing_->SetMaxVideoDelay(max_video_delay_ms_);
  return 0;
}
420
// Transitions the receiver to |new_state|. A passive receiver may not go
// directly to kWaitForPrimaryDecode (asserted); it must pass through
// kReceiving first.
void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}
426
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000427void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
428 if (jitter_buffer_.nack_mode() == kNoNack) {
429 // Dual decoder mode has not been enabled.
430 return;
431 }
432 // Update the dual receiver state.
433 if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
434 UpdateState(kPassive);
435 }
436 if (State() == kWaitForPrimaryDecode &&
437 frame.Complete() && !frame.MissingFrame()) {
438 UpdateState(kPassive);
439 }
440 if (frame.MissingFrame() || !frame.Complete()) {
441 // State was corrupted, enable dual receiver.
442 UpdateState(kReceiving);
443 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000444}
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000445} // namespace webrtc