blob: 38c5db680f6bc1ed365715daee4e4fad994660ad [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
stefan@webrtc.org91c63082012-01-31 10:49:08 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000011#include "webrtc/modules/video_coding/main/source/receiver.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
13#include <assert.h>
14
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000015#include "webrtc/modules/video_coding/main/interface/video_coding.h"
16#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
17#include "webrtc/modules/video_coding/main/source/internal_defines.h"
18#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
stefan@webrtc.orga678a3b2013-01-21 07:42:11 +000019#include "webrtc/system_wrappers/interface/clock.h"
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000020#include "webrtc/system_wrappers/interface/trace.h"
hclam@chromium.org806dc3b2013-04-09 19:54:10 +000021#include "webrtc/system_wrappers/interface/trace_event.h"
stefan@webrtc.org91c63082012-01-31 10:49:08 +000022
niklase@google.com470e71d2011-07-07 08:21:25 +000023namespace webrtc {
24
// Upper bound (in ms) accepted by SetMinReceiverDelay() below.
enum { kMaxReceiverDelayMs = 10000 };
26
// Constructs a receiver wrapping a jitter buffer and render timing.
// |timing| and |clock| are borrowed, not owned — the caller keeps them alive
// for the receiver's lifetime. |master| selects the primary receiver; a
// non-master ("dual") receiver starts and stays in the kPassive state until
// explicitly activated. |vcm_id|/|receiver_id| are only used for tracing.
VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         EventFactory* event_factory,
                         int32_t vcm_id,
                         int32_t receiver_id,
                         bool master)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      vcm_id_(vcm_id),
      clock_(clock),
      receiver_id_(receiver_id),
      master_(master),
      jitter_buffer_(clock_, event_factory, vcm_id, receiver_id, master),
      timing_(timing),
      render_wait_event_(event_factory->CreateEvent()),
      state_(kPassive),
      max_video_delay_ms_(kMaxVideoDelayMs) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000043
VCMReceiver::~VCMReceiver() {
  // Wake any thread blocked in FrameForDecoding() so it can exit before the
  // receiver is torn down.
  render_wait_event_->Set();
  delete crit_sect_;
}
48
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000049void VCMReceiver::Reset() {
50 CriticalSectionScoped cs(crit_sect_);
51 if (!jitter_buffer_.Running()) {
52 jitter_buffer_.Start();
53 } else {
54 jitter_buffer_.Flush();
55 }
stefan@webrtc.org2baf5f52013-03-13 08:46:25 +000056 render_wait_event_->Reset();
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +000057 if (master_) {
58 state_ = kReceiving;
59 } else {
60 state_ = kPassive;
61 }
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000062}
63
// Resets the receiver; additionally disables NACK for a dual (non-master)
// receiver. Always returns VCM_OK.
// NOTE(review): Reset() and SetNackMode() both take crit_sect_ while it is
// already held here — presumably CriticalSectionWrapper is recursive; confirm.
int32_t VCMReceiver::Initialize() {
  CriticalSectionScoped cs(crit_sect_);
  Reset();
  if (!master_) {
    SetNackMode(kNoNack, -1, -1);
  }
  return VCM_OK;
}
72
// Forwards the latest round-trip-time estimate (ms) to the jitter buffer.
void VCMReceiver::UpdateRtt(uint32_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}
76
// Inserts one incoming RTP |packet| into the jitter buffer. When
// |frame_width|/|frame_height| are both non-zero they are recorded on the
// frame the packet belongs to. Returns VCM_OK on success (packets the jitter
// buffer reports as "old" are silently accepted), VCM_FLUSH_INDICATOR when
// the insert triggered a jitter-buffer flush, VCM_JITTER_BUFFER_ERROR on an
// insert failure, or the error from GetFrame().
int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
                                  uint16_t frame_width,
                                  uint16_t frame_height) {
  // Find an empty frame.
  VCMEncodedFrame* buffer = NULL;
  const int32_t error = jitter_buffer_.GetFrame(packet, buffer);
  if (error == VCM_OLD_PACKET_ERROR) {
    // Packets for already-decoded frames are dropped without reporting an
    // error to the caller.
    return VCM_OK;
  } else if (error != VCM_OK) {
    return error;
  }
  assert(buffer);
  {
    CriticalSectionScoped cs(crit_sect_);

    if (frame_width && frame_height) {
      buffer->SetEncodedSize(static_cast<uint32_t>(frame_width),
                             static_cast<uint32_t>(frame_height));
    }

    if (master_) {
      // Only trace the primary receiver to make it possible to parse and plot
      // the trace file.
      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
                   VCMId(vcm_id_, receiver_id_),
                   "Packet seq_no %u of frame %u at %u",
                   packet.seqNum, packet.timestamp,
                   MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
    }
    // First packet received belonging to this frame (buffer still empty).
    if (buffer->Length() == 0 && master_) {
      const int64_t now_ms = clock_->TimeInMilliseconds();
      // Only trace the primary receiver to make it possible to parse and plot
      // the trace file.
      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
                   VCMId(vcm_id_, receiver_id_),
                   "First packet of frame %u at %u", packet.timestamp,
                   MaskWord64ToUWord32(now_ms));
    }

    // Insert packet into the jitter buffer — both media and empty packets.
    const VCMFrameBufferEnum
    ret = jitter_buffer_.InsertPacket(buffer, packet);
    if (ret == kCompleteSession) {
      // The frame just became complete; feed its arrival time to the timing
      // module so the jitter estimate can be updated.
      bool retransmitted = false;
      const int64_t last_packet_time_ms =
          jitter_buffer_.LastPacketTime(buffer, &retransmitted);
      if (last_packet_time_ms >= 0 && !retransmitted) {
        // We don't want to include timestamps which have suffered from
        // retransmission here, since we compensate with extra retransmission
        // delay within the jitter estimate.
        timing_->IncomingTimestamp(packet.timestamp, last_packet_time_ms);
      }
    }
    if (ret == kFlushIndicator) {
      return VCM_FLUSH_INDICATOR;
    } else if (ret < 0) {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
                   VCMId(vcm_id_, receiver_id_),
                   "Error inserting packet seq_no=%u, time_stamp=%u",
                   packet.seqNum, packet.timestamp);
      return VCM_JITTER_BUFFER_ERROR;
    }
  }
  return VCM_OK;
}
143
// Fetches the next frame to hand to the decoder, waiting up to
// |max_wait_time_ms| for a complete frame. On success the frame's render time
// is returned through |next_render_time_ms| and set on the frame. Returns
// NULL when no frame is available within the budget, or when a render-timing
// error forced a jitter-buffer flush. |render_timing| controls whether the
// frame is released immediately or held until close to its render deadline
// (see NOTE below). |dual_receiver|, when non-NULL, is the secondary
// error-recovery receiver whose state is kept in sync.
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
    uint16_t max_wait_time_ms,
    int64_t& next_render_time_ms,
    bool render_timing,
    VCMReceiver* dual_receiver) {
  TRACE_EVENT0("webrtc", "Recv::FrameForDecoding");
  const int64_t start_time_ms = clock_->TimeInMilliseconds();
  uint32_t frame_timestamp = 0;
  // Exhaust wait time to get a complete frame for decoding.
  bool found_frame = jitter_buffer_.NextCompleteTimestamp(
      max_wait_time_ms, &frame_timestamp);

  if (!found_frame) {
    // No complete frame — consider handing out an incomplete one.
    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
    if (dual_receiver_enabled_and_passive &&
        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
      // Decoding an incomplete frame may corrupt our state; snapshot it into
      // the dual receiver first so it can recover.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }
    found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(
        &frame_timestamp);
  }

  if (!found_frame) {
    return NULL;
  }

  // We have a frame - Set timing and render timestamp.
  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
  const int64_t now_ms = clock_->TimeInMilliseconds();
  timing_->UpdateCurrentDelay(frame_timestamp);
  next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
  // Check render timing.
  bool timing_error = false;
  // Assume that render timing errors are due to changes in the video stream.
  if (next_render_time_ms < 0) {
    timing_error = true;
  } else if (next_render_time_ms < now_ms - max_video_delay_ms_) {
    // The frame is far too late to render.
    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
                 VCMId(vcm_id_, receiver_id_),
                 "This frame should have been rendered more than %u ms ago."
                 "Flushing jitter buffer and resetting timing.",
                 max_video_delay_ms_);
    timing_error = true;
  } else if (static_cast<int>(timing_->TargetVideoDelay()) >
             max_video_delay_ms_) {
    // The accumulated target delay has grown past the allowed maximum.
    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
                 VCMId(vcm_id_, receiver_id_),
                 "More than %u ms target delay. Flushing jitter buffer and"
                 "resetting timing.", max_video_delay_ms_);
    timing_error = true;
  }

  if (timing_error) {
    // Timing error => reset timing and flush the jitter buffer.
    jitter_buffer_.Flush();
    timing_->Reset(clock_->TimeInMilliseconds());
    return NULL;
  }

  if (!render_timing) {
    // Decode frame as close as possible to the render timestamp.
    // NOTE(review): the hold-back logic runs when |render_timing| is FALSE —
    // presumably the flag means "the codec handles render timing itself";
    // confirm against the caller in video_coding_impl.
    TRACE_EVENT0("webrtc", "FrameForRendering");
    const int32_t available_wait_time = max_wait_time_ms -
        static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
    uint16_t new_max_wait_time = static_cast<uint16_t>(
        VCM_MAX(available_wait_time, 0));
    uint32_t wait_time_ms = timing_->MaxWaitingTime(
        next_render_time_ms, clock_->TimeInMilliseconds());
    if (new_max_wait_time < wait_time_ms) {
      // We're not allowed to wait until the frame is supposed to be rendered,
      // waiting as long as we're allowed to avoid busy looping, and then return
      // NULL. Next call to this function might return the frame.
      // NOTE(review): this waits the full |max_wait_time_ms| even though part
      // of that budget was consumed above; |new_max_wait_time| is the actual
      // remaining budget — confirm whether the over-wait is intentional.
      render_wait_event_->Wait(max_wait_time_ms);
      return NULL;
    }
    // Wait until it's time to render.
    render_wait_event_->Wait(wait_time_ms);
  }

  // Extract the frame from the jitter buffer and set the render time.
  VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
  if (frame == NULL) {
    // The frame disappeared while we waited (e.g. a concurrent flush).
    return NULL;
  }
  frame->SetRenderTime(next_render_time_ms);
  if (dual_receiver != NULL) {
    // Let the dual receiver react to the (in)completeness of this frame.
    dual_receiver->UpdateState(*frame);
  }
  if (!frame->Complete()) {
    // Update stats for incomplete frames.
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
    }
  }
  return frame;
}
250
// Hands |frame| back to the jitter buffer once the caller is done with it.
void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}
254
// Reports the incoming bitrate and framerate measured by the jitter buffer.
// Both out-params must be non-NULL. Note the deliberate argument-order swap:
// the jitter buffer API takes (framerate, bitrate).
void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
}
261
// Fills |frame_count| (must be non-NULL) with the number of delta and key
// frames seen by the jitter buffer.
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
  assert(frame_count);
  jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
                                 &frame_count->numKeyFrames);
}
267
// Number of packets the jitter buffer has discarded so far.
uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}
271
// Configures the jitter buffer's NACK mode together with the low/high RTT
// thresholds (ms) it uses. Changing the mode drops a dual (non-master)
// receiver back to the passive state.
void VCMReceiver::SetNackMode(VCMNackMode nackMode,
                              int low_rtt_nack_threshold_ms,
                              int high_rtt_nack_threshold_ms) {
  CriticalSectionScoped cs(crit_sect_);
  // Default to always having NACK enabled in hybrid mode.
  jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
                             high_rtt_nack_threshold_ms);
  if (!master_) {
    state_ = kPassive;  // The dual decoder defaults to passive.
  }
}
283
// Forwards the NACK list capacity and the maximum packet age (in packets)
// still worth NACKing to the jitter buffer.
void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack);
}
289
// Current NACK mode, as held by the jitter buffer.
VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}
294
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000295VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000296 uint16_t size,
297 uint16_t* nack_list_length) {
298 bool request_key_frame = false;
299 uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
300 nack_list_length, &request_key_frame);
301 if (request_key_frame) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000302 // This combination is used to trigger key frame requests.
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000303 return kNackKeyFrameRequest;
304 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000305 if (*nack_list_length > size) {
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000306 return kNackNeedMoreMemory;
307 }
stefan@webrtc.orga64300a2013-03-04 15:24:40 +0000308 if (internal_nack_list != NULL && *nack_list_length > 0) {
309 memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000310 }
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000311 return kNackOk;
niklase@google.com470e71d2011-07-07 08:21:25 +0000312}
313
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000314// Decide whether we should change decoder state. This should be done if the
315// dual decoder has caught up with the decoder decoding with packet losses.
316bool VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dual_frame,
317 VCMReceiver& dual_receiver) const {
318 if (dual_frame == NULL) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000319 return false;
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000320 }
321 if (jitter_buffer_.LastDecodedTimestamp() == dual_frame->TimeStamp()) {
322 dual_receiver.UpdateState(kWaitForPrimaryDecode);
323 return true;
324 }
325 return false;
niklase@google.com470e71d2011-07-07 08:21:25 +0000326}
327
// Clones |receiver|'s jitter-buffer state into this receiver (used before the
// primary decodes a possibly state-corrupting incomplete frame).
void VCMReceiver::CopyJitterBufferStateFromReceiver(
    const VCMReceiver& receiver) {
  jitter_buffer_.CopyFrom(receiver.jitter_buffer_);
}
332
// Thread-safe accessor for the receiver state.
VCMReceiverState VCMReceiver::State() const {
  CriticalSectionScoped cs(crit_sect_);
  return state_;
}
337
mikhal@webrtc.orgdc3cd212013-04-25 20:27:04 +0000338void VCMReceiver::SetDecodeWithErrors(bool enable){
339 CriticalSectionScoped cs(crit_sect_);
340 jitter_buffer_.DecodeWithErrors(enable);
341}
342
// Whether decoding of incomplete frames is currently enabled.
bool VCMReceiver::DecodeWithErrors() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.decode_with_errors();
}
347
// Requests an extra minimum receiver-side delay of |desired_delay_ms|.
// Returns -1 if the value is negative or exceeds kMaxReceiverDelayMs,
// 0 on success.
int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
  CriticalSectionScoped cs(crit_sect_);
  if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
    return -1;
  }
  // Presumably switches the jitter buffer to its max jitter estimate while an
  // extra delay is in effect — confirm in jitter_buffer.
  jitter_buffer_.SetMaxJitterEstimate(desired_delay_ms > 0);
  // Grow the render-timing sanity bound by the requested delay so the checks
  // in FrameForDecoding() don't trip on the intentionally added latency.
  max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
  // Initializing timing to the desired delay.
  timing_->SetMinimumTotalDelay(desired_delay_ms);
  return 0;
}
359
mikhal@webrtc.org381da4b2013-04-25 21:45:29 +0000360int VCMReceiver::RenderBufferSizeMs() {
mikhal@webrtc.org759b0412013-05-07 16:36:00 +0000361 uint32_t timestamp_start = 0u;
362 uint32_t timestamp_end = 0u;
363 // Render timestamps are computed just prior to decoding. Therefore this is
364 // only an estimate based on frames' timestamps and current timing state.
365 jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
366 if (timestamp_start == timestamp_end) {
367 return 0;
368 }
369 // Update timing.
370 const int64_t now_ms = clock_->TimeInMilliseconds();
371 timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
372 // Get render timestamps.
373 uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
374 uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
375 return render_end - render_start;
mikhal@webrtc.org381da4b2013-04-25 21:45:29 +0000376}
377
// Moves the receiver to |new_state|. A passive receiver may not jump straight
// to kWaitForPrimaryDecode (enforced by the assert).
void VCMReceiver::UpdateState(VCMReceiverState new_state) {
  CriticalSectionScoped cs(crit_sect_);
  assert(!(state_ == kPassive && new_state == kWaitForPrimaryDecode));
  state_ = new_state;
}
383
// Updates the dual-receiver state machine based on a frame handed to the
// decoder. The three checks below are deliberately sequential (not else-if):
// they are evaluated in order and the last matching rule wins.
void VCMReceiver::UpdateState(const VCMEncodedFrame& frame) {
  if (jitter_buffer_.nack_mode() == kNoNack) {
    // Dual decoder mode has not been enabled.
    return;
  }
  // Update the dual receiver state.
  if (frame.Complete() && frame.FrameType() == kVideoFrameKey) {
    // A complete key frame resynchronizes the stream; go passive.
    UpdateState(kPassive);
  }
  if (State() == kWaitForPrimaryDecode &&
      frame.Complete() && !frame.MissingFrame()) {
    // The primary decoded a clean frame; the dual is no longer needed.
    UpdateState(kPassive);
  }
  if (frame.MissingFrame() || !frame.Complete()) {
    // State was corrupted, enable dual receiver.
    UpdateState(kReceiving);
  }
}
stefan@webrtc.org1ea4b502013-01-07 08:49:41 +0000402} // namespace webrtc