blob: e3fc0ceac76acc24a5e6f951cd3360c816a4591b [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
stefan@webrtc.org91c63082012-01-31 10:49:08 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000011#include "webrtc/modules/video_coding/main/source/receiver.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
13#include <assert.h>
14
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000015#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
16#include "webrtc/modules/video_coding/main/source/internal_defines.h"
17#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +000018#include "webrtc/system_wrappers/interface/clock.h"
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000019#include "webrtc/system_wrappers/interface/trace.h"
hclam@chromium.org806dc3b2013-04-09 19:54:10 +000020#include "webrtc/system_wrappers/interface/trace_event.h"
stefan@webrtc.org91c63082012-01-31 10:49:08 +000021
namespace webrtc {

// Upper bound (in ms) accepted by SetMinReceiverDelay().
enum { kMaxReceiverDelayMs = 10000 };
25
// Constructs a receiver around externally owned |timing| and |clock|.
// |master| selects the primary receiver; a non-master ("dual") receiver
// starts in (and defaults back to) the passive state.
// Note: members are initialized in the order listed; this must match the
// declaration order in receiver.h.
VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         EventFactory* event_factory,
                         int32_t vcm_id,
                         int32_t receiver_id,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      vcm_id_(vcm_id),
      clock_(clock),
      receiver_id_(receiver_id),
      master_(master),
      jitter_buffer_(clock_, event_factory, vcm_id, receiver_id, master),
      timing_(timing),
      render_wait_event_(event_factory->CreateEvent()),
      state_(kPassive),
      max_video_delay_ms_(kMaxVideoDelayMs) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000042
VCMReceiver::~VCMReceiver() {
  // Wake any thread blocked in FrameForDecoding() so it can exit before
  // the object is torn down.
  render_wait_event_->Set();
  // crit_sect_ was allocated via CreateCriticalSection() in the constructor.
  // NOTE(review): render_wait_event_ is not deleted here — presumably it is
  // a scoped pointer declared in receiver.h; confirm it owns the event
  // created by event_factory->CreateEvent().
  delete crit_sect_;
}
47
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000048void VCMReceiver::Reset() {
49 CriticalSectionScoped cs(crit_sect_);
50 if (!jitter_buffer_.Running()) {
51 jitter_buffer_.Start();
52 } else {
53 jitter_buffer_.Flush();
54 }
stefan@webrtc.org2baf5f52013-03-13 08:46:25 +000055 render_wait_event_->Reset();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000056 if (master_) {
57 state_ = kReceiving;
58 } else {
59 state_ = kPassive;
60 }
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000061}
62
// Resets the receiver to a freshly started state. Always returns VCM_OK.
int32_t VCMReceiver::Initialize() {
  Reset();
  CriticalSectionScoped cs(crit_sect_);
  if (!master_) {
    // The dual receiver never sends NACKs of its own.
    // NOTE(review): SetNackMode() re-acquires crit_sect_ while we hold it —
    // presumably the critical section is recursive; confirm.
    SetNackMode(kNoNack, -1, -1);
  }
  return VCM_OK;
}
71
// Forwards the latest round-trip-time estimate (in ms) to the jitter buffer.
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}
75
stefan@webrtc.orga64300a2013-03-04 15:24:40 +000076int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
77 uint16_t frame_width,
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000078 uint16_t frame_height) {
stefan@webrtc.orga7dc37d2013-05-23 07:21:05 +000079 if (packet.frameType == kVideoFrameKey) {
80 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCoding,
81 VCMId(vcm_id_, receiver_id_),
82 "Inserting key frame packet seqnum=%u, timestamp=%u",
83 packet.seqNum, packet.timestamp);
84 }
hclam@chromium.org8c49c1e2013-05-22 21:18:59 +000085
stefan@webrtc.org3417eb42013-05-21 15:25:53 +000086 // Insert the packet into the jitter buffer. The packet can either be empty or
87 // contain media at this point.
88 bool retransmitted = false;
89 const VCMFrameBufferEnum ret = jitter_buffer_.InsertPacket(packet,
90 &retransmitted);
91 if (ret == kOldPacket) {
niklase@google.com470e71d2011-07-07 08:21:25 +000092 return VCM_OK;
stefan@webrtc.org3417eb42013-05-21 15:25:53 +000093 } else if (ret == kFlushIndicator) {
94 return VCM_FLUSH_INDICATOR;
95 } else if (ret < 0) {
96 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
97 VCMId(vcm_id_, receiver_id_),
98 "Error inserting packet seqnum=%u, timestamp=%u",
99 packet.seqNum, packet.timestamp);
100 return VCM_JITTER_BUFFER_ERROR;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000101 }
stefan@webrtc.org3417eb42013-05-21 15:25:53 +0000102 if (ret == kCompleteSession && !retransmitted) {
103 // We don't want to include timestamps which have suffered from
104 // retransmission here, since we compensate with extra retransmission
105 // delay within the jitter estimate.
106 timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
107 }
108 if (master_) {
109 // Only trace the primary receiver to make it possible to parse and plot
110 // the trace file.
111 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
112 VCMId(vcm_id_, receiver_id_),
113 "Packet seqnum=%u timestamp=%u inserted at %u",
114 packet.seqNum, packet.timestamp,
115 MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000116 }
117 return VCM_OK;
niklase@google.com470e71d2011-07-07 08:21:25 +0000118}
119
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000120VCMEncodedFrame* VCMReceiver::FrameForDecoding(
121 uint16_t max_wait_time_ms,
122 int64_t& next_render_time_ms,
123 bool render_timing,
124 VCMReceiver* dual_receiver) {
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +0000125 const int64_t start_time_ms = clock_->TimeInMilliseconds();
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000126 uint32_t frame_timestamp = 0;
127 // Exhaust wait time to get a complete frame for decoding.
128 bool found_frame = jitter_buffer_.NextCompleteTimestamp(
129 max_wait_time_ms, &frame_timestamp);
130
131 if (!found_frame) {
132 // Get an incomplete frame when enabled.
133 const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
134 dual_receiver->State() == kPassive &&
135 dual_receiver->NackMode() == kNack);
136 if (dual_receiver_enabled_and_passive &&
137 !jitter_buffer_.CompleteSequenceWithNextFrame()) {
138 // Jitter buffer state might get corrupt with this frame.
139 dual_receiver->CopyJitterBufferStateFromReceiver(*this);
140 }
141 found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(
142 &frame_timestamp);
143 }
144
145 if (!found_frame) {
stefan@webrtc.org4ce19b12013-05-06 13:16:51 +0000146 return NULL;
147 }
mikhal@webrtc.orgd3cd5652013-05-03 17:54:18 +0000148
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000149 // We have a frame - Set timing and render timestamp.
mikhal@webrtc.orgadc64a72013-05-30 16:20:18 +0000150 timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000151 const int64_t now_ms = clock_->TimeInMilliseconds();
152 timing_->UpdateCurrentDelay(frame_timestamp);
153 next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
154 // Check render timing.
155 bool timing_error = false;
156 // Assume that render timing errors are due to changes in the video stream.
157 if (next_render_time_ms < 0) {
158 timing_error = true;
pbos@webrtc.org0117d1c2014-03-03 16:47:03 +0000159 } else if (labs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000160 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
161 VCMId(vcm_id_, receiver_id_),
stefan@webrtc.org554d1582013-09-11 08:45:26 +0000162 "This frame is out of our delay bounds, resetting jitter "
163 "buffer: %d > %d",
pbos@webrtc.org0117d1c2014-03-03 16:47:03 +0000164 static_cast<int>(labs(next_render_time_ms - now_ms)),
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000165 max_video_delay_ms_);
166 timing_error = true;
167 } else if (static_cast<int>(timing_->TargetVideoDelay()) >
168 max_video_delay_ms_) {
169 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
170 VCMId(vcm_id_, receiver_id_),
171 "More than %u ms target delay. Flushing jitter buffer and"
172 "resetting timing.", max_video_delay_ms_);
173 timing_error = true;
stefan@webrtc.org4ce19b12013-05-06 13:16:51 +0000174 }
175
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000176 if (timing_error) {
177 // Timing error => reset timing and flush the jitter buffer.
178 jitter_buffer_.Flush();
stefan@webrtc.org9f557c12013-05-17 12:55:07 +0000179 timing_->Reset();
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000180 return NULL;
181 }
182
183 if (!render_timing) {
184 // Decode frame as close as possible to the render timestamp.
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000185 const int32_t available_wait_time = max_wait_time_ms -
186 static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
187 uint16_t new_max_wait_time = static_cast<uint16_t>(
188 VCM_MAX(available_wait_time, 0));
189 uint32_t wait_time_ms = timing_->MaxWaitingTime(
190 next_render_time_ms, clock_->TimeInMilliseconds());
191 if (new_max_wait_time < wait_time_ms) {
192 // We're not allowed to wait until the frame is supposed to be rendered,
193 // waiting as long as we're allowed to avoid busy looping, and then return
194 // NULL. Next call to this function might return the frame.
195 render_wait_event_->Wait(max_wait_time_ms);
196 return NULL;
197 }
198 // Wait until it's time to render.
199 render_wait_event_->Wait(wait_time_ms);
200 }
201
202 // Extract the frame from the jitter buffer and set the render time.
203 VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
mikhal@webrtc.org8f86cc82013-05-07 18:05:21 +0000204 if (frame == NULL) {
205 return NULL;
206 }
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000207 frame->SetRenderTime(next_render_time_ms);
hclam@chromium.org1a7b9b92013-07-08 21:31:18 +0000208 TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->TimeStamp(),
209 "SetRenderTS", "render_time", next_render_time_ms);
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000210 if (dual_receiver != NULL) {
211 dual_receiver->UpdateState(*frame);
212 }
213 if (!frame->Complete()) {
214 // Update stats for incomplete frames.
stefan@webrtc.org4ce19b12013-05-06 13:16:51 +0000215 bool retransmitted = false;
216 const int64_t last_packet_time_ms =
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000217 jitter_buffer_.LastPacketTime(frame, &retransmitted);
stefan@webrtc.org4ce19b12013-05-06 13:16:51 +0000218 if (last_packet_time_ms >= 0 && !retransmitted) {
219 // We don't want to include timestamps which have suffered from
220 // retransmission here, since we compensate with extra retransmission
221 // delay within the jitter estimate.
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000222 timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
stefan@webrtc.org4ce19b12013-05-06 13:16:51 +0000223 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000224 }
225 return frame;
niklase@google.com470e71d2011-07-07 08:21:25 +0000226}
227
// Returns a frame obtained from FrameForDecoding() to the jitter buffer.
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}
231
// Reports the incoming rate statistics measured by the jitter buffer.
// Both out-params must be non-NULL. Note the argument order swap: the
// jitter buffer API takes (framerate, bitrate).
void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
}
238
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000239void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
240 assert(frame_count);
sprang@webrtc.org71f055f2013-12-04 15:09:27 +0000241 std::map<FrameType, uint32_t> counts(jitter_buffer_.FrameStatistics());
242 frame_count->numDeltaFrames = counts[kVideoFrameDelta];
243 frame_count->numKeyFrames = counts[kVideoFrameKey];
niklase@google.com470e71d2011-07-07 08:21:25 +0000244}
245
// Total number of packets discarded by the jitter buffer so far.
uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}
249
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000250void VCMReceiver::SetNackMode(VCMNackMode nackMode,
251 int low_rtt_nack_threshold_ms,
252 int high_rtt_nack_threshold_ms) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000253 CriticalSectionScoped cs(crit_sect_);
254 // Default to always having NACK enabled in hybrid mode.
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000255 jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
256 high_rtt_nack_threshold_ms);
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000257 if (!master_) {
258 state_ = kPassive; // The dual decoder defaults to passive.
259 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000260}
261
// Pass-through configuration of the jitter buffer's NACK list: its maximum
// size, the maximum packet age to NACK, and the maximum time to keep
// incomplete frames (all as interpreted by the jitter buffer).
void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack,
                                  int max_incomplete_time_ms) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack,
                                 max_incomplete_time_ms);
}
269
// Returns the jitter buffer's current NACK mode, read under the receiver
// lock.
VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}
274
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000275VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000276 uint16_t size,
277 uint16_t* nack_list_length) {
278 bool request_key_frame = false;
279 uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
280 nack_list_length, &request_key_frame);
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000281 if (*nack_list_length > size) {
stefan@webrtc.orgef144882013-05-07 19:16:33 +0000282 *nack_list_length = 0;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000283 return kNackNeedMoreMemory;
284 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000285 if (internal_nack_list != NULL && *nack_list_length > 0) {
286 memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000287 }
stefan@webrtc.orgef144882013-05-07 19:16:33 +0000288 if (request_key_frame) {
289 return kNackKeyFrameRequest;
290 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000291 return kNackOk;
niklase@google.com470e71d2011-07-07 08:21:25 +0000292}
293
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000294// Decide whether we should change decoder state. This should be done if the
295// dual decoder has caught up with the decoder decoding with packet losses.
296bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
297 VCMReceiver& dual_receiver) const {
298 if (dual_frame == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000299 return false;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000300 }
301 if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
302 dual_receiver.UpdateState(kWaitForPrimaryDecode);
303 return true;
304 }
305 return false;
niklase@google.com470e71d2011-07-07 08:21:25 +0000306}
307
// Clones |receiver|'s jitter buffer state into this receiver's jitter
// buffer (used to seed the dual receiver, see FrameForDecoding()).
void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}
312
// Current receiver state (kReceiving, kPassive, or kWaitForPrimaryDecode),
// read under the receiver lock.
VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}
317
// Forwards the decode-error policy to the jitter buffer.
void VCMReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
  jitter_buffer_.SetDecodeErrorMode(decode_error_mode);
}
321
// Returns the jitter buffer's current decode-error policy.
// NOTE(review): unlike NackMode(), this accessor does not take crit_sect_ —
// confirm the jitter buffer getter provides its own synchronization.
VCMDecodeErrorMode VCMReceiver::DecodeErrorMode() const {
  return jitter_buffer_.decode_error_mode();
}
325
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000326int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
327 CriticalSectionScoped cs(crit_sect_);
328 if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
329 return -1;
330 }
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000331 max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
mikhal@webrtc.orgdbd6a6d2013-04-17 16:23:22 +0000332 // Initializing timing to the desired delay.
mikhal@webrtc.orgadc64a72013-05-30 16:20:18 +0000333 timing_->set_min_playout_delay(desired_delay_ms);
mikhal@webrtc.orgef9f76a2013-02-15 23:22:18 +0000334 return 0;
335}
336
mikhal@webrtc.org381da4b2013-04-25 21:45:29 +0000337int VCMReceiver::RenderBufferSizeMs() {
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000338 uint32_t timestamp_start = 0u;
339 uint32_t timestamp_end = 0u;
340 // Render timestamps are computed just prior to decoding. Therefore this is
341 // only an estimate based on frames' timestamps and current timing state.
342 jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
343 if (timestamp_start == timestamp_end) {
344 return 0;
345 }
346 // Update timing.
347 const int64_t now_ms = clock_->TimeInMilliseconds();
mikhal@webrtc.orgadc64a72013-05-30 16:20:18 +0000348 timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000349 // Get render timestamps.
350 uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
351 uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
352 return render_end - render_start;
mikhal@webrtc.org381da4b2013-04-25 21:45:29 +0000353}
354
// Sets the receiver state under the lock. A passive receiver may not jump
// straight to kWaitForPrimaryDecode (enforced by the assert).
void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}
360
// Updates the dual-receiver state machine based on a frame just handed to
// the decoder. No-op when NACK is disabled (dual decoding not in use).
// The three checks below run in order and may each transition the state;
// the last matching one wins.
void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
  if (jitter_buffer_.nack_mode() == kNoNack) {
    // Dual decoder mode has not been enabled.
    return;
  }
  // Update the dual receiver state.
  if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
    // A complete key frame resynchronizes the stream; go passive.
    UpdateState(kPassive);
  }
  if (State() == kWaitForPrimaryDecode &&
      frame.Complete() && !frame.MissingFrame()) {
    // Primary has decoded a complete frame; the dual receiver can go passive.
    UpdateState(kPassive);
  }
  if (frame.MissingFrame() || !frame.Complete()) {
    // State was corrupted, enable dual receiver.
    UpdateState(kReceiving);
  }
}
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000379} // namespace webrtc