blob: dcedd757aa50b70f513da237eebe3819bc3235dd [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
ivoc14d5dbe2016-07-04 07:06:55 -070022#include "webrtc/call/rtc_event_log.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000023#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020024#include "webrtc/config.h"
ossue3525782016-05-25 07:37:43 -070025#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000026#include "webrtc/modules/audio_device/include/audio_device.h"
27#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010028#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010029#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010030#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
31#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
32#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000033#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010034#include "webrtc/modules/utility/include/audio_frame_operations.h"
35#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010036#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000037#include "webrtc/voice_engine/include/voe_base.h"
38#include "webrtc/voice_engine/include/voe_external_media.h"
39#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
40#include "webrtc/voice_engine/output_mixer.h"
41#include "webrtc/voice_engine/statistics.h"
42#include "webrtc/voice_engine/transmit_mixer.h"
43#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000044
andrew@webrtc.org50419b02012-11-14 19:07:54 +000045namespace webrtc {
46namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000047
namespace {

// Registers |ci| as a receive codec on |acm|. The decoder is provided lazily
// through the |rac| factory lambda (RentIsacDecoder is only invoked for
// codecs that actually need a rented iSAC instance). Returns true iff
// AudioCodingModule::RegisterReceiveCodec reported success (0).
bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
                          acm2::RentACodec* rac,
                          const CodecInst& ci) {
  const int result = (*acm)->RegisterReceiveCodec(
      ci, [&] { return rac->RentIsacDecoder(ci.plfreq); });
  return result == 0;
}

}  // namespace
59
solenberg8842c3e2016-03-11 03:06:41 -080060const int kTelephoneEventAttenuationdB = 10;
61
ivoc14d5dbe2016-07-04 07:06:55 -070062class RtcEventLogProxy final : public webrtc::RtcEventLog {
63 public:
64 RtcEventLogProxy() : event_log_(nullptr) {}
65
66 bool StartLogging(const std::string& file_name,
67 int64_t max_size_bytes) override {
68 RTC_NOTREACHED();
69 return false;
70 }
71
72 bool StartLogging(rtc::PlatformFile log_file,
73 int64_t max_size_bytes) override {
74 RTC_NOTREACHED();
75 return false;
76 }
77
78 void StopLogging() override { RTC_NOTREACHED(); }
79
80 void LogVideoReceiveStreamConfig(
81 const webrtc::VideoReceiveStream::Config& config) override {
82 rtc::CritScope lock(&crit_);
83 if (event_log_) {
84 event_log_->LogVideoReceiveStreamConfig(config);
85 }
86 }
87
88 void LogVideoSendStreamConfig(
89 const webrtc::VideoSendStream::Config& config) override {
90 rtc::CritScope lock(&crit_);
91 if (event_log_) {
92 event_log_->LogVideoSendStreamConfig(config);
93 }
94 }
95
96 void LogRtpHeader(webrtc::PacketDirection direction,
97 webrtc::MediaType media_type,
98 const uint8_t* header,
99 size_t packet_length) override {
100 rtc::CritScope lock(&crit_);
101 if (event_log_) {
102 event_log_->LogRtpHeader(direction, media_type, header, packet_length);
103 }
104 }
105
106 void LogRtcpPacket(webrtc::PacketDirection direction,
107 webrtc::MediaType media_type,
108 const uint8_t* packet,
109 size_t length) override {
110 rtc::CritScope lock(&crit_);
111 if (event_log_) {
112 event_log_->LogRtcpPacket(direction, media_type, packet, length);
113 }
114 }
115
116 void LogAudioPlayout(uint32_t ssrc) override {
117 rtc::CritScope lock(&crit_);
118 if (event_log_) {
119 event_log_->LogAudioPlayout(ssrc);
120 }
121 }
122
123 void LogBwePacketLossEvent(int32_t bitrate,
124 uint8_t fraction_loss,
125 int32_t total_packets) override {
126 rtc::CritScope lock(&crit_);
127 if (event_log_) {
128 event_log_->LogBwePacketLossEvent(bitrate, fraction_loss, total_packets);
129 }
130 }
131
132 void SetEventLog(RtcEventLog* event_log) {
133 rtc::CritScope lock(&crit_);
134 event_log_ = event_log;
135 }
136
137 private:
138 rtc::CriticalSection crit_;
139 RtcEventLog* event_log_ GUARDED_BY(crit_);
140 RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogProxy);
141};
142
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100143class TransportFeedbackProxy : public TransportFeedbackObserver {
144 public:
145 TransportFeedbackProxy() : feedback_observer_(nullptr) {
146 pacer_thread_.DetachFromThread();
147 network_thread_.DetachFromThread();
148 }
149
150 void SetTransportFeedbackObserver(
151 TransportFeedbackObserver* feedback_observer) {
152 RTC_DCHECK(thread_checker_.CalledOnValidThread());
153 rtc::CritScope lock(&crit_);
154 feedback_observer_ = feedback_observer;
155 }
156
157 // Implements TransportFeedbackObserver.
158 void AddPacket(uint16_t sequence_number,
159 size_t length,
philipela1ed0b32016-06-01 06:31:17 -0700160 int probe_cluster_id) override {
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100161 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
162 rtc::CritScope lock(&crit_);
163 if (feedback_observer_)
pbos2169d8b2016-06-20 11:53:02 -0700164 feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100165 }
166 void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
167 RTC_DCHECK(network_thread_.CalledOnValidThread());
168 rtc::CritScope lock(&crit_);
169 if (feedback_observer_)
170 feedback_observer_->OnTransportFeedback(feedback);
171 }
172
173 private:
174 rtc::CriticalSection crit_;
175 rtc::ThreadChecker thread_checker_;
176 rtc::ThreadChecker pacer_thread_;
177 rtc::ThreadChecker network_thread_;
178 TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
179};
180
181class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
182 public:
183 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
184 pacer_thread_.DetachFromThread();
185 }
186
187 void SetSequenceNumberAllocator(
188 TransportSequenceNumberAllocator* seq_num_allocator) {
189 RTC_DCHECK(thread_checker_.CalledOnValidThread());
190 rtc::CritScope lock(&crit_);
191 seq_num_allocator_ = seq_num_allocator;
192 }
193
194 // Implements TransportSequenceNumberAllocator.
195 uint16_t AllocateSequenceNumber() override {
196 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
197 rtc::CritScope lock(&crit_);
198 if (!seq_num_allocator_)
199 return 0;
200 return seq_num_allocator_->AllocateSequenceNumber();
201 }
202
203 private:
204 rtc::CriticalSection crit_;
205 rtc::ThreadChecker thread_checker_;
206 rtc::ThreadChecker pacer_thread_;
207 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
208};
209
210class RtpPacketSenderProxy : public RtpPacketSender {
211 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800212 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100213
214 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
215 RTC_DCHECK(thread_checker_.CalledOnValidThread());
216 rtc::CritScope lock(&crit_);
217 rtp_packet_sender_ = rtp_packet_sender;
218 }
219
220 // Implements RtpPacketSender.
221 void InsertPacket(Priority priority,
222 uint32_t ssrc,
223 uint16_t sequence_number,
224 int64_t capture_time_ms,
225 size_t bytes,
226 bool retransmission) override {
227 rtc::CritScope lock(&crit_);
228 if (rtp_packet_sender_) {
229 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
230 capture_time_ms, bytes, retransmission);
231 }
232 }
233
234 private:
235 rtc::ThreadChecker thread_checker_;
236 rtc::CriticalSection crit_;
237 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
238};
239
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  // Most recently reported RTCP statistics for the tracked SSRC.
  RtcpStatistics rtcp;
  // Largest jitter value observed in any report block so far.
  uint32_t max_jitter;
};
248
249// Statistics callback, called at each generation of a new RTCP report block.
250class StatisticsProxy : public RtcpStatisticsCallback {
251 public:
tommi31fc21f2016-01-21 10:37:37 -0800252 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000253 virtual ~StatisticsProxy() {}
254
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000255 void StatisticsUpdated(const RtcpStatistics& statistics,
256 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000257 if (ssrc != ssrc_)
258 return;
259
tommi31fc21f2016-01-21 10:37:37 -0800260 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000261 stats_.rtcp = statistics;
262 if (statistics.jitter > stats_.max_jitter) {
263 stats_.max_jitter = statistics.jitter;
264 }
265 }
266
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000267 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000268
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000269 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800270 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000271 return stats_;
272 }
273
274 private:
275 // StatisticsUpdated calls are triggered from threads in the RTP module,
276 // while GetStats calls can be triggered from the public voice engine API,
277 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800278 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000279 const uint32_t ssrc_;
280 ChannelStatistics stats_;
281};
282
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000283class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000284 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000285 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
286 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000287
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000288 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
289 // Not used for Voice Engine.
290 }
291
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000292 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
293 int64_t rtt,
294 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000295 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
296 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
297 // report for VoiceEngine?
298 if (report_blocks.empty())
299 return;
300
301 int fraction_lost_aggregate = 0;
302 int total_number_of_packets = 0;
303
304 // If receiving multiple report blocks, calculate the weighted average based
305 // on the number of packets a report refers to.
306 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
307 block_it != report_blocks.end(); ++block_it) {
308 // Find the previous extended high sequence number for this remote SSRC,
309 // to calculate the number of RTP packets this report refers to. Ignore if
310 // we haven't seen this SSRC before.
311 std::map<uint32_t, uint32_t>::iterator seq_num_it =
312 extended_max_sequence_number_.find(block_it->sourceSSRC);
313 int number_of_packets = 0;
314 if (seq_num_it != extended_max_sequence_number_.end()) {
315 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
316 }
317 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
318 total_number_of_packets += number_of_packets;
319
320 extended_max_sequence_number_[block_it->sourceSSRC] =
321 block_it->extendedHighSeqNum;
322 }
323 int weighted_fraction_lost = 0;
324 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800325 weighted_fraction_lost =
326 (fraction_lost_aggregate + total_number_of_packets / 2) /
327 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000328 }
329 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000330 }
331
332 private:
333 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000334 // Maps remote side ssrc to extended highest sequence number received.
335 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000336};
337
// AudioPacketizationCallback implementation: receives an encoded audio frame
// from the ACM and pushes it to the RTP/RTCP module for packetization and
// sending. Returns 0 on success, -1 if the RTP/RTCP module rejects the data
// (the error is also recorded in the engine statistics).
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  // NOTE(review): the (FrameType&) cast presumably matches a non-const
  // reference parameter in SendOutgoingData — confirm before changing.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  // Remember the last frame handed off, for later queries/diagnostics.
  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}
376
kwiberg55b97fe2016-01-28 05:22:45 -0800377int32_t Channel::InFrameType(FrameType frame_type) {
378 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
379 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000380
kwiberg55b97fe2016-01-28 05:22:45 -0800381 rtc::CritScope cs(&_callbackCritSect);
382 _sendFrameType = (frame_type == kAudioFrameSpeech);
383 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000384}
385
kwiberg55b97fe2016-01-28 05:22:45 -0800386int32_t Channel::OnRxVadDetected(int vadDecision) {
387 rtc::CritScope cs(&_callbackCritSect);
388 if (_rxVadObserverPtr) {
389 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
390 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000391
kwiberg55b97fe2016-01-28 05:22:45 -0800392 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000393}
394
stefan1d8a5062015-10-02 03:39:33 -0700395bool Channel::SendRtp(const uint8_t* data,
396 size_t len,
397 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800398 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
399 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000400
kwiberg55b97fe2016-01-28 05:22:45 -0800401 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000402
kwiberg55b97fe2016-01-28 05:22:45 -0800403 if (_transportPtr == NULL) {
404 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
405 "Channel::SendPacket() failed to send RTP packet due to"
406 " invalid transport object");
407 return false;
408 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000409
kwiberg55b97fe2016-01-28 05:22:45 -0800410 uint8_t* bufferToSendPtr = (uint8_t*)data;
411 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000412
kwiberg55b97fe2016-01-28 05:22:45 -0800413 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
414 std::string transport_name =
415 _externalTransport ? "external transport" : "WebRtc sockets";
416 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
417 "Channel::SendPacket() RTP transmission using %s failed",
418 transport_name.c_str());
419 return false;
420 }
421 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000422}
423
kwiberg55b97fe2016-01-28 05:22:45 -0800424bool Channel::SendRtcp(const uint8_t* data, size_t len) {
425 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
426 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000427
kwiberg55b97fe2016-01-28 05:22:45 -0800428 rtc::CritScope cs(&_callbackCritSect);
429 if (_transportPtr == NULL) {
430 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
431 "Channel::SendRtcp() failed to send RTCP packet"
432 " due to invalid transport object");
433 return false;
434 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000435
kwiberg55b97fe2016-01-28 05:22:45 -0800436 uint8_t* bufferToSendPtr = (uint8_t*)data;
437 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000438
kwiberg55b97fe2016-01-28 05:22:45 -0800439 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
440 if (n < 0) {
441 std::string transport_name =
442 _externalTransport ? "external transport" : "WebRtc sockets";
443 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
444 "Channel::SendRtcp() transmission using %s failed",
445 transport_name.c_str());
446 return false;
447 }
448 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000449}
450
// RTP receiver callback: the remote SSRC changed. Propagates the new SSRC to
// the RTP/RTCP module so its NTP estimation (used for A/V sync) follows the
// new stream.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
458
// RTP receiver callback: a contributing source was added to or removed from
// the incoming stream. Only traced; no state is updated.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
464
// RTP receiver callback: a new payload mapping was seen and a decoder must
// be set up. Builds a CodecInst from the callback parameters (taking the
// packet size from the ACM's database entry for the codec) and registers it
// as a receive codec. Returns 0 on success, -1 if registration fails.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  // Zero-initialization above guarantees plname stays NUL-terminated even
  // when strncpy copies RTP_PAYLOAD_NAME_SIZE - 1 characters.
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Look up the codec in the ACM database only to obtain its packet size.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
500
kwiberg55b97fe2016-01-28 05:22:45 -0800501int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
502 size_t payloadSize,
503 const WebRtcRTPHeader* rtpHeader) {
504 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
505 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
506 ","
507 " payloadType=%u, audioChannel=%" PRIuS ")",
508 payloadSize, rtpHeader->header.payloadType,
509 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000510
kwiberg55b97fe2016-01-28 05:22:45 -0800511 if (!channel_state_.Get().playing) {
512 // Avoid inserting into NetEQ when we are not playing. Count the
513 // packet as discarded.
514 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
515 "received packet is discarded since playing is not"
516 " activated");
517 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000518 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800519 }
520
521 // Push the incoming payload (parsed and ready for decoding) into the ACM
522 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
523 0) {
524 _engineStatisticsPtr->SetLastError(
525 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
526 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
527 return -1;
528 }
529
530 // Update the packet delay.
531 UpdatePacketDelay(rtpHeader->header.timestamp,
532 rtpHeader->header.sequenceNumber);
533
534 int64_t round_trip_time = 0;
535 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
536 NULL);
537
538 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
539 if (!nack_list.empty()) {
540 // Can't use nack_list.data() since it's not supported by all
541 // compilers.
542 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
543 }
544 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000545}
546
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000547bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +0000548 size_t rtp_packet_length) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000549 RTPHeader header;
550 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
551 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
552 "IncomingPacket invalid RTP header");
553 return false;
554 }
555 header.payload_type_frequency =
556 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
557 if (header.payload_type_frequency < 0)
558 return false;
559 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
560}
561
// MixerParticipant implementation: produces the next 10 ms of playout audio
// for this channel. Pulls decoded PCM from the ACM, then applies in order:
// receive-side APM processing, an optional raw audio sink tap, output
// gain/pan scaling, file mixing, external-media processing, file recording,
// output level measurement, and playout timestamp/NTP bookkeeping. Returns
// kError if decoding fails, kMuted if the frame ended up muted, kNormal
// otherwise.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  // Log this playout event against our local SSRC.
  // NOTE(review): GetLocalSSRC presumably fills |ssrc| on success (0) —
  // confirm against its declaration.
  unsigned int ssrc;
  RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
  event_log_proxy_->LogAudioPlayout(ssrc);
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  // Receive-side audio processing (e.g. AGC/NS on the playout path).
  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Snapshot the volume/pan settings under their lock so the rest of the
  // function works on a consistent set of values.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling; skipped when the gain is within ~1% of unity.
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
718
kwiberg55b97fe2016-01-28 05:22:45 -0800719int32_t Channel::NeededFrequency(int32_t id) const {
720 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
721 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000722
kwiberg55b97fe2016-01-28 05:22:45 -0800723 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000724
kwiberg55b97fe2016-01-28 05:22:45 -0800725 // Determine highest needed receive frequency
726 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000727
kwiberg55b97fe2016-01-28 05:22:45 -0800728 // Return the bigger of playout and receive frequency in the ACM.
729 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
730 highestNeeded = audio_coding_->PlayoutFrequency();
731 } else {
732 highestNeeded = receiveFrequency;
733 }
734
735 // Special case, if we're playing a file on the playout side
736 // we take that frequency into consideration as well
737 // This is not needed on sending side, since the codec will
738 // limit the spectrum anyway.
739 if (channel_state_.Get().output_file_playing) {
740 rtc::CritScope cs(&_fileCritSect);
741 if (_outputFilePlayerPtr) {
742 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
743 highestNeeded = _outputFilePlayerPtr->Frequency();
744 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000745 }
kwiberg55b97fe2016-01-28 05:22:45 -0800746 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000747
kwiberg55b97fe2016-01-28 05:22:45 -0800748 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000749}
750
ivocb04965c2015-09-09 00:09:43 -0700751int32_t Channel::CreateChannel(Channel*& channel,
752 int32_t channelId,
753 uint32_t instanceId,
ivocb04965c2015-09-09 00:09:43 -0700754 const Config& config) {
ivoc14d5dbe2016-07-04 07:06:55 -0700755 return CreateChannel(channel, channelId, instanceId, config,
ossu5f7cfa52016-05-30 08:11:28 -0700756 CreateBuiltinAudioDecoderFactory());
757}
758
759int32_t Channel::CreateChannel(
760 Channel*& channel,
761 int32_t channelId,
762 uint32_t instanceId,
ossu5f7cfa52016-05-30 08:11:28 -0700763 const Config& config,
764 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory) {
kwiberg55b97fe2016-01-28 05:22:45 -0800765 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
766 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
767 instanceId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000768
ivoc14d5dbe2016-07-04 07:06:55 -0700769 channel = new Channel(channelId, instanceId, config, decoder_factory);
kwiberg55b97fe2016-01-28 05:22:45 -0800770 if (channel == NULL) {
771 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
772 "Channel::CreateChannel() unable to allocate memory for"
773 " channel");
774 return -1;
775 }
776 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000777}
778
// FileCallback hook invoked while file playout is in progress; |durationMs|
// is the reported playout duration. Intentionally a no-op apart from tracing.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
786
// FileCallback hook invoked while file recording is in progress; |durationMs|
// is the reported recording duration. Intentionally a no-op apart from
// tracing.
void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
794
kwiberg55b97fe2016-01-28 05:22:45 -0800795void Channel::PlayFileEnded(int32_t id) {
796 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
797 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000798
kwiberg55b97fe2016-01-28 05:22:45 -0800799 if (id == _inputFilePlayerId) {
800 channel_state_.SetInputFilePlaying(false);
801 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
802 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000803 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800804 } else if (id == _outputFilePlayerId) {
805 channel_state_.SetOutputFilePlaying(false);
806 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
807 "Channel::PlayFileEnded() => output file player module is"
808 " shutdown");
809 }
810}
811
// FileCallback hook: the output file recorder identified by |id| stopped.
// Clears |_outputFileRecording| under the file lock.
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  // The channel only owns one recorder, so the id must match it.
  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
825
// Constructs a channel. The initializer list wires up the RTP receive stack
// (header parser, payload registry, receive statistics, audio RTP receiver)
// and defaults all state flags/pointers; the body then creates the ACM, the
// RTP/RTCP module and the receive-side audio processing module.
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 const Config& config,
                 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_proxy_(new RtcEventLogProxy()),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // This channel acts as both RTP data and feedback callback for the
      // receiver (hence |this| twice).
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add it's own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      // -1 marks "not yet seen"; set on the first frame with a valid RTP
      // timestamp (see capture-start handling in the playout path).
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
      decoder_factory_(decoder_factory) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  // --- Create the audio coding module with NetEq options from |config|.
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  acm_config.decoder_factory = decoder_factory;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  // --- Create the RTP/RTCP module; pacing-related callbacks are only hooked
  // up when voice pacing is enabled in |config|.
  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = &(*event_log_proxy_);

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  // --- Route RTCP statistics for this SSRC through the statistics proxy.
  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // --- Receive-side audio processing (rx AGC/NS), with experimental AGC off.
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
948
// Destroys the channel. Tear-down is order sensitive: stop send/playout and
// file players/recorder first, then de-register module callbacks, then pull
// the RTP/RTCP module out of the process thread before members are destroyed.
Channel::~Channel() {
  // Stop routing RTCP statistics into the (about to die) statistics proxy.
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  // Shut down and free any file players/recorder under the file lock.
  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
1004
// Initializes the channel after construction and SetEngineInformation():
// resets channel state, registers the RTP/RTCP module with the process
// thread, initializes the ACM receiver, enables RTCP, hooks up permanent
// callbacks, and registers every supported codec with the receive side.
// Returns 0 on success, -1 on failure (with the error recorded in the engine
// statistics where available).
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  // SetEngineInformation() must have provided these pointers first.
  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    // A registration failure for an individual codec is only logged, not
    // treated as fatal.
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // Comfort noise (CN) needs registration on both send and receive sides.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
  }

  // --- Configure receive-side audio processing defaults (NS and AGC).
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1119
kwiberg55b97fe2016-01-28 05:22:45 -08001120int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1121 OutputMixer& outputMixer,
1122 voe::TransmitMixer& transmitMixer,
1123 ProcessThread& moduleProcessThread,
1124 AudioDeviceModule& audioDeviceModule,
1125 VoiceEngineObserver* voiceEngineObserver,
1126 rtc::CriticalSection* callbackCritSect) {
1127 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1128 "Channel::SetEngineInformation()");
1129 _engineStatisticsPtr = &engineStatistics;
1130 _outputMixerPtr = &outputMixer;
1131 _transmitMixerPtr = &transmitMixer,
1132 _moduleProcessThreadPtr = &moduleProcessThread;
1133 _audioDeviceModulePtr = &audioDeviceModule;
1134 _voiceEngineObserverPtr = voiceEngineObserver;
1135 _callbackCritSectPtr = callbackCritSect;
1136 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001137}
1138
kwiberg55b97fe2016-01-28 05:22:45 -08001139int32_t Channel::UpdateLocalTimeStamp() {
1140 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1141 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001142}
1143
// Replaces the channel's audio sink under the callback lock. Takes ownership
// of |sink|; passing nullptr clears the current sink.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1148
// Returns the decoder factory this channel was constructed with.
const rtc::scoped_refptr<AudioDecoderFactory>&
Channel::GetAudioDecoderFactory() const {
  return decoder_factory_;
}
1153
kwiberg55b97fe2016-01-28 05:22:45 -08001154int32_t Channel::StartPlayout() {
1155 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1156 "Channel::StartPlayout()");
1157 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001158 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001159 }
1160
1161 if (!_externalMixing) {
1162 // Add participant as candidates for mixing.
1163 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1164 _engineStatisticsPtr->SetLastError(
1165 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1166 "StartPlayout() failed to add participant to mixer");
1167 return -1;
1168 }
1169 }
1170
1171 channel_state_.SetPlaying(true);
1172 if (RegisterFilePlayingToMixer() != 0)
1173 return -1;
1174
1175 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001176}
1177
kwiberg55b97fe2016-01-28 05:22:45 -08001178int32_t Channel::StopPlayout() {
1179 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1180 "Channel::StopPlayout()");
1181 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001182 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001183 }
1184
1185 if (!_externalMixing) {
1186 // Remove participant as candidates for mixing
1187 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1188 _engineStatisticsPtr->SetLastError(
1189 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1190 "StopPlayout() failed to remove participant from mixer");
1191 return -1;
1192 }
1193 }
1194
1195 channel_state_.SetPlaying(false);
1196 _outputAudioLevel.Clear();
1197
1198 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001199}
1200
// Starts sending on this channel. Idempotent: returns 0 if already sending.
// Restores the RTP sequence number saved by a previous StopSend() so device
// restarts do not look like replayed packets. Returns 0 on success, -1 if
// the RTP/RTCP module refuses to start sending.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    // Roll back the sending flag while holding the callback lock.
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1227
kwiberg55b97fe2016-01-28 05:22:45 -08001228int32_t Channel::StopSend() {
1229 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1230 "Channel::StopSend()");
1231 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001232 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001233 }
1234 channel_state_.SetSending(false);
1235
1236 // Store the sequence number to be able to pick up the same sequence for
1237 // the next StartSend(). This is needed for restarting device, otherwise
1238 // it might cause libSRTP to complain about packets being replayed.
1239 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1240 // CL is landed. See issue
1241 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1242 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1243
1244 // Reset sending SSRC and sequence number and triggers direct transmission
1245 // of RTCP BYE
1246 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1247 _engineStatisticsPtr->SetLastError(
1248 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1249 "StartSend() RTP/RTCP failed to stop sending");
1250 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001251 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001252
1253 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001254}
1255
kwiberg55b97fe2016-01-28 05:22:45 -08001256int32_t Channel::StartReceiving() {
1257 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1258 "Channel::StartReceiving()");
1259 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001260 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001261 }
1262 channel_state_.SetReceiving(true);
1263 _numberOfDiscardedPackets = 0;
1264 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001265}
1266
kwiberg55b97fe2016-01-28 05:22:45 -08001267int32_t Channel::StopReceiving() {
1268 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1269 "Channel::StopReceiving()");
1270 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001271 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001272 }
1273
1274 channel_state_.SetReceiving(false);
1275 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001276}
1277
kwiberg55b97fe2016-01-28 05:22:45 -08001278int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1279 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1280 "Channel::RegisterVoiceEngineObserver()");
1281 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001282
kwiberg55b97fe2016-01-28 05:22:45 -08001283 if (_voiceEngineObserverPtr) {
1284 _engineStatisticsPtr->SetLastError(
1285 VE_INVALID_OPERATION, kTraceError,
1286 "RegisterVoiceEngineObserver() observer already enabled");
1287 return -1;
1288 }
1289 _voiceEngineObserverPtr = &observer;
1290 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001291}
1292
kwiberg55b97fe2016-01-28 05:22:45 -08001293int32_t Channel::DeRegisterVoiceEngineObserver() {
1294 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1295 "Channel::DeRegisterVoiceEngineObserver()");
1296 rtc::CritScope cs(&_callbackCritSect);
1297
1298 if (!_voiceEngineObserverPtr) {
1299 _engineStatisticsPtr->SetLastError(
1300 VE_INVALID_OPERATION, kTraceWarning,
1301 "DeRegisterVoiceEngineObserver() observer already disabled");
1302 return 0;
1303 }
1304 _voiceEngineObserverPtr = NULL;
1305 return 0;
1306}
1307
1308int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001309 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001310 if (send_codec) {
1311 codec = *send_codec;
1312 return 0;
1313 }
1314 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001315}
1316
kwiberg55b97fe2016-01-28 05:22:45 -08001317int32_t Channel::GetRecCodec(CodecInst& codec) {
1318 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001319}
1320
// Configures |codec| as the send codec: registers it with the ACM encoder
// stack, then with the RTP/RTCP module, and finally sets the audio packet
// size. Returns 0 on success, -1 on any failure.
int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // First attempt can fail if the payload type is already taken;
    // de-register it and retry once before giving up.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to set audio packet size");
    return -1;
  }

  return 0;
}
1350
// Forwards the target send bitrate (in bits per second) to the ACM.
void Channel::SetBitRate(int bitrate_bps) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->SetBitRate(bitrate_bps);
}
1356
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001357void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001358 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001359 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1360
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001361 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001362 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1363 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001364 assert(false); // This should not happen.
1365 }
1366}
1367
// Enables/disables voice activity detection on the send side and applies the
// VAD aggressiveness |mode|. |disableDTX| is deprecated and must not be
// combined with |enableVAD| (DCHECKed). Returns 0 on success, -1 on failure.
int32_t Channel::SetVADStatus(bool enableVAD,
                              ACMVADMode mode,
                              bool disableDTX) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetVADStatus(mode=%d)", mode);
  RTC_DCHECK(!(disableDTX && enableVAD));  // disableDTX mode is deprecated.
  // Reconfigure the codec stack; the encoder must be rebuilt for the VAD
  // change to take effect.
  if (!codec_manager_.SetVAD(enableVAD, mode) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
                                       kTraceError,
                                       "SetVADStatus() failed to set VAD");
    return -1;
  }
  return 0;
}
1383
kwiberg55b97fe2016-01-28 05:22:45 -08001384int32_t Channel::GetVADStatus(bool& enabledVAD,
1385 ACMVADMode& mode,
1386 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001387 const auto* params = codec_manager_.GetStackParams();
1388 enabledVAD = params->use_cng;
1389 mode = params->vad_mode;
1390 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001391 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001392}
1393
kwiberg55b97fe2016-01-28 05:22:45 -08001394int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1395 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1396 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001397
kwiberg55b97fe2016-01-28 05:22:45 -08001398 if (channel_state_.Get().playing) {
1399 _engineStatisticsPtr->SetLastError(
1400 VE_ALREADY_PLAYING, kTraceError,
1401 "SetRecPayloadType() unable to set PT while playing");
1402 return -1;
1403 }
1404 if (channel_state_.Get().receiving) {
1405 _engineStatisticsPtr->SetLastError(
1406 VE_ALREADY_LISTENING, kTraceError,
1407 "SetRecPayloadType() unable to set PT while listening");
1408 return -1;
1409 }
1410
1411 if (codec.pltype == -1) {
1412 // De-register the selected codec (RTP/RTCP module and ACM)
1413
1414 int8_t pltype(-1);
1415 CodecInst rxCodec = codec;
1416
1417 // Get payload type for the given codec
1418 rtp_payload_registry_->ReceivePayloadType(
1419 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1420 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1421 rxCodec.pltype = pltype;
1422
1423 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1424 _engineStatisticsPtr->SetLastError(
1425 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1426 "SetRecPayloadType() RTP/RTCP-module deregistration "
1427 "failed");
1428 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001429 }
kwiberg55b97fe2016-01-28 05:22:45 -08001430 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1431 _engineStatisticsPtr->SetLastError(
1432 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1433 "SetRecPayloadType() ACM deregistration failed - 1");
1434 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001435 }
kwiberg55b97fe2016-01-28 05:22:45 -08001436 return 0;
1437 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001438
kwiberg55b97fe2016-01-28 05:22:45 -08001439 if (rtp_receiver_->RegisterReceivePayload(
1440 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1441 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1442 // First attempt to register failed => de-register and try again
kwibergc8d071e2016-04-06 12:22:38 -07001443 // TODO(kwiberg): Retrying is probably not necessary, since
1444 // AcmReceiver::AddCodec also retries.
kwiberg55b97fe2016-01-28 05:22:45 -08001445 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001446 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001447 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1448 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1449 _engineStatisticsPtr->SetLastError(
1450 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1451 "SetRecPayloadType() RTP/RTCP-module registration failed");
1452 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001453 }
kwiberg55b97fe2016-01-28 05:22:45 -08001454 }
kwibergc8d071e2016-04-06 12:22:38 -07001455 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001456 audio_coding_->UnregisterReceiveCodec(codec.pltype);
kwibergc8d071e2016-04-06 12:22:38 -07001457 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001458 _engineStatisticsPtr->SetLastError(
1459 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1460 "SetRecPayloadType() ACM registration failed - 1");
1461 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001462 }
kwiberg55b97fe2016-01-28 05:22:45 -08001463 }
1464 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001465}
1466
kwiberg55b97fe2016-01-28 05:22:45 -08001467int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1468 int8_t payloadType(-1);
1469 if (rtp_payload_registry_->ReceivePayloadType(
1470 codec.plname, codec.plfreq, codec.channels,
1471 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1472 _engineStatisticsPtr->SetLastError(
1473 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1474 "GetRecPayloadType() failed to retrieve RX payload type");
1475 return -1;
1476 }
1477 codec.pltype = payloadType;
1478 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001479}
1480
kwiberg55b97fe2016-01-28 05:22:45 -08001481int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1482 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1483 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001484
kwiberg55b97fe2016-01-28 05:22:45 -08001485 CodecInst codec;
1486 int32_t samplingFreqHz(-1);
1487 const size_t kMono = 1;
1488 if (frequency == kFreq32000Hz)
1489 samplingFreqHz = 32000;
1490 else if (frequency == kFreq16000Hz)
1491 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001492
kwiberg55b97fe2016-01-28 05:22:45 -08001493 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1494 _engineStatisticsPtr->SetLastError(
1495 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1496 "SetSendCNPayloadType() failed to retrieve default CN codec "
1497 "settings");
1498 return -1;
1499 }
1500
1501 // Modify the payload type (must be set to dynamic range)
1502 codec.pltype = type;
1503
kwibergc8d071e2016-04-06 12:22:38 -07001504 if (!codec_manager_.RegisterEncoder(codec) ||
1505 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001506 _engineStatisticsPtr->SetLastError(
1507 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1508 "SetSendCNPayloadType() failed to register CN to ACM");
1509 return -1;
1510 }
1511
1512 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1513 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1514 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1515 _engineStatisticsPtr->SetLastError(
1516 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1517 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1518 "module");
1519 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001520 }
kwiberg55b97fe2016-01-28 05:22:45 -08001521 }
1522 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001523}
1524
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001525int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001526 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001527 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001528
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001529 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001530 _engineStatisticsPtr->SetLastError(
1531 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001532 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001533 return -1;
1534 }
1535 return 0;
1536}
1537
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001538int Channel::SetOpusDtx(bool enable_dtx) {
1539 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1540 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001541 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001542 : audio_coding_->DisableOpusDtx();
1543 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001544 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1545 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001546 return -1;
1547 }
1548 return 0;
1549}
1550
ivoc85228d62016-07-27 04:53:47 -07001551int Channel::GetOpusDtx(bool* enabled) {
1552 int success = -1;
1553 audio_coding_->QueryEncoder([&](AudioEncoder const* encoder) {
1554 if (encoder) {
1555 *enabled = encoder->GetDtx();
1556 success = 0;
1557 }
1558 });
1559 return success;
1560}
1561
mflodman3d7db262016-04-29 00:57:13 -07001562int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001564 "Channel::RegisterExternalTransport()");
1565
kwiberg55b97fe2016-01-28 05:22:45 -08001566 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001567 if (_externalTransport) {
1568 _engineStatisticsPtr->SetLastError(
1569 VE_INVALID_OPERATION, kTraceError,
1570 "RegisterExternalTransport() external transport already enabled");
1571 return -1;
1572 }
1573 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001574 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001575 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001576}
1577
kwiberg55b97fe2016-01-28 05:22:45 -08001578int32_t Channel::DeRegisterExternalTransport() {
1579 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1580 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001581
kwiberg55b97fe2016-01-28 05:22:45 -08001582 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001583 if (_transportPtr) {
1584 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1585 "DeRegisterExternalTransport() all transport is disabled");
1586 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001587 _engineStatisticsPtr->SetLastError(
1588 VE_INVALID_OPERATION, kTraceWarning,
1589 "DeRegisterExternalTransport() external transport already "
1590 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001591 }
1592 _externalTransport = false;
1593 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001594 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001595}
1596
mflodman3d7db262016-04-29 00:57:13 -07001597int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
kwiberg55b97fe2016-01-28 05:22:45 -08001598 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001599 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001600 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001601 "Channel::ReceivedRTPPacket()");
1602
1603 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001604 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001605
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001606 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001607 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1608 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1609 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001610 return -1;
1611 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001612 header.payload_type_frequency =
1613 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001614 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001615 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001616 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001617 rtp_receive_statistics_->IncomingPacket(
1618 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001619 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001620
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001621 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001622}
1623
1624bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001625 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001626 const RTPHeader& header,
1627 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001628 if (rtp_payload_registry_->IsRtx(header)) {
1629 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001630 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001631 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001632 assert(packet_length >= header.headerLength);
1633 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001634 PayloadUnion payload_specific;
1635 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001636 &payload_specific)) {
1637 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001638 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001639 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1640 payload_specific, in_order);
1641}
1642
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001643bool Channel::HandleRtxPacket(const uint8_t* packet,
1644 size_t packet_length,
1645 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001646 if (!rtp_payload_registry_->IsRtx(header))
1647 return false;
1648
1649 // Remove the RTX header and parse the original RTP header.
1650 if (packet_length < header.headerLength)
1651 return false;
1652 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1653 return false;
1654 if (restored_packet_in_use_) {
1655 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1656 "Multiple RTX headers detected, dropping packet");
1657 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001658 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001659 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001660 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1661 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001662 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1663 "Incoming RTX packet: invalid RTP header");
1664 return false;
1665 }
1666 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001667 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001668 restored_packet_in_use_ = false;
1669 return ret;
1670}
1671
1672bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1673 StreamStatistician* statistician =
1674 rtp_receive_statistics_->GetStatistician(header.ssrc);
1675 if (!statistician)
1676 return false;
1677 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001678}
1679
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001680bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1681 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001682 // Retransmissions are handled separately if RTX is enabled.
1683 if (rtp_payload_registry_->RtxEnabled())
1684 return false;
1685 StreamStatistician* statistician =
1686 rtp_receive_statistics_->GetStatistician(header.ssrc);
1687 if (!statistician)
1688 return false;
1689 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001690 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001691 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001692 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001693}
1694
mflodman3d7db262016-04-29 00:57:13 -07001695int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001696 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001697 "Channel::ReceivedRTCPPacket()");
1698 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001699 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001700
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001701 // Deliver RTCP packet to RTP/RTCP module for parsing
mflodman3d7db262016-04-29 00:57:13 -07001702 if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001703 _engineStatisticsPtr->SetLastError(
1704 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1705 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1706 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001707
Minyue2013aec2015-05-13 14:14:42 +02001708 int64_t rtt = GetRTT(true);
1709 if (rtt == 0) {
1710 // Waiting for valid RTT.
1711 return 0;
1712 }
1713 uint32_t ntp_secs = 0;
1714 uint32_t ntp_frac = 0;
1715 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001716 if (0 !=
1717 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1718 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001719 // Waiting for RTCP.
1720 return 0;
1721 }
1722
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001723 {
tommi31fc21f2016-01-21 10:37:37 -08001724 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001725 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001726 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001727 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001728}
1729
niklase@google.com470e71d2011-07-07 08:21:25 +00001730int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001731 bool loop,
1732 FileFormats format,
1733 int startPosition,
1734 float volumeScaling,
1735 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001736 const CodecInst* codecInst) {
1737 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1738 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1739 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1740 "stopPosition=%d)",
1741 fileName, loop, format, volumeScaling, startPosition,
1742 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001743
kwiberg55b97fe2016-01-28 05:22:45 -08001744 if (channel_state_.Get().output_file_playing) {
1745 _engineStatisticsPtr->SetLastError(
1746 VE_ALREADY_PLAYING, kTraceError,
1747 "StartPlayingFileLocally() is already playing");
1748 return -1;
1749 }
1750
1751 {
1752 rtc::CritScope cs(&_fileCritSect);
1753
1754 if (_outputFilePlayerPtr) {
1755 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1756 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1757 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001758 }
1759
kwiberg55b97fe2016-01-28 05:22:45 -08001760 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1761 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001762
kwiberg55b97fe2016-01-28 05:22:45 -08001763 if (_outputFilePlayerPtr == NULL) {
1764 _engineStatisticsPtr->SetLastError(
1765 VE_INVALID_ARGUMENT, kTraceError,
1766 "StartPlayingFileLocally() filePlayer format is not correct");
1767 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001768 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001769
kwiberg55b97fe2016-01-28 05:22:45 -08001770 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001771
kwiberg55b97fe2016-01-28 05:22:45 -08001772 if (_outputFilePlayerPtr->StartPlayingFile(
1773 fileName, loop, startPosition, volumeScaling, notificationTime,
1774 stopPosition, (const CodecInst*)codecInst) != 0) {
1775 _engineStatisticsPtr->SetLastError(
1776 VE_BAD_FILE, kTraceError,
1777 "StartPlayingFile() failed to start file playout");
1778 _outputFilePlayerPtr->StopPlayingFile();
1779 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1780 _outputFilePlayerPtr = NULL;
1781 return -1;
1782 }
1783 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1784 channel_state_.SetOutputFilePlaying(true);
1785 }
1786
1787 if (RegisterFilePlayingToMixer() != 0)
1788 return -1;
1789
1790 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001791}
1792
1793int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001794 FileFormats format,
1795 int startPosition,
1796 float volumeScaling,
1797 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001798 const CodecInst* codecInst) {
1799 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1800 "Channel::StartPlayingFileLocally(format=%d,"
1801 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1802 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001803
kwiberg55b97fe2016-01-28 05:22:45 -08001804 if (stream == NULL) {
1805 _engineStatisticsPtr->SetLastError(
1806 VE_BAD_FILE, kTraceError,
1807 "StartPlayingFileLocally() NULL as input stream");
1808 return -1;
1809 }
1810
1811 if (channel_state_.Get().output_file_playing) {
1812 _engineStatisticsPtr->SetLastError(
1813 VE_ALREADY_PLAYING, kTraceError,
1814 "StartPlayingFileLocally() is already playing");
1815 return -1;
1816 }
1817
1818 {
1819 rtc::CritScope cs(&_fileCritSect);
1820
1821 // Destroy the old instance
1822 if (_outputFilePlayerPtr) {
1823 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1824 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1825 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001826 }
1827
kwiberg55b97fe2016-01-28 05:22:45 -08001828 // Create the instance
1829 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1830 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001831
kwiberg55b97fe2016-01-28 05:22:45 -08001832 if (_outputFilePlayerPtr == NULL) {
1833 _engineStatisticsPtr->SetLastError(
1834 VE_INVALID_ARGUMENT, kTraceError,
1835 "StartPlayingFileLocally() filePlayer format isnot correct");
1836 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001837 }
1838
kwiberg55b97fe2016-01-28 05:22:45 -08001839 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001840
kwiberg55b97fe2016-01-28 05:22:45 -08001841 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1842 volumeScaling, notificationTime,
1843 stopPosition, codecInst) != 0) {
1844 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1845 "StartPlayingFile() failed to "
1846 "start file playout");
1847 _outputFilePlayerPtr->StopPlayingFile();
1848 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1849 _outputFilePlayerPtr = NULL;
1850 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001851 }
kwiberg55b97fe2016-01-28 05:22:45 -08001852 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1853 channel_state_.SetOutputFilePlaying(true);
1854 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001855
kwiberg55b97fe2016-01-28 05:22:45 -08001856 if (RegisterFilePlayingToMixer() != 0)
1857 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001858
kwiberg55b97fe2016-01-28 05:22:45 -08001859 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001860}
1861
kwiberg55b97fe2016-01-28 05:22:45 -08001862int Channel::StopPlayingFileLocally() {
1863 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1864 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001865
kwiberg55b97fe2016-01-28 05:22:45 -08001866 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001867 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001868 }
1869
1870 {
1871 rtc::CritScope cs(&_fileCritSect);
1872
1873 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1874 _engineStatisticsPtr->SetLastError(
1875 VE_STOP_RECORDING_FAILED, kTraceError,
1876 "StopPlayingFile() could not stop playing");
1877 return -1;
1878 }
1879 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1880 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1881 _outputFilePlayerPtr = NULL;
1882 channel_state_.SetOutputFilePlaying(false);
1883 }
1884 // _fileCritSect cannot be taken while calling
1885 // SetAnonymousMixibilityStatus. Refer to comments in
1886 // StartPlayingFileLocally(const char* ...) for more details.
1887 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1888 _engineStatisticsPtr->SetLastError(
1889 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1890 "StopPlayingFile() failed to stop participant from playing as"
1891 "file in the mixer");
1892 return -1;
1893 }
1894
1895 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001896}
1897
kwiberg55b97fe2016-01-28 05:22:45 -08001898int Channel::IsPlayingFileLocally() const {
1899 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001900}
1901
kwiberg55b97fe2016-01-28 05:22:45 -08001902int Channel::RegisterFilePlayingToMixer() {
1903 // Return success for not registering for file playing to mixer if:
1904 // 1. playing file before playout is started on that channel.
1905 // 2. starting playout without file playing on that channel.
1906 if (!channel_state_.Get().playing ||
1907 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001908 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001909 }
1910
1911 // |_fileCritSect| cannot be taken while calling
1912 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1913 // frames can be pulled by the mixer. Since the frames are generated from
1914 // the file, _fileCritSect will be taken. This would result in a deadlock.
1915 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1916 channel_state_.SetOutputFilePlaying(false);
1917 rtc::CritScope cs(&_fileCritSect);
1918 _engineStatisticsPtr->SetLastError(
1919 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1920 "StartPlayingFile() failed to add participant as file to mixer");
1921 _outputFilePlayerPtr->StopPlayingFile();
1922 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1923 _outputFilePlayerPtr = NULL;
1924 return -1;
1925 }
1926
1927 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001928}
1929
niklase@google.com470e71d2011-07-07 08:21:25 +00001930int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001931 bool loop,
1932 FileFormats format,
1933 int startPosition,
1934 float volumeScaling,
1935 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001936 const CodecInst* codecInst) {
1937 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1938 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1939 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1940 "stopPosition=%d)",
1941 fileName, loop, format, volumeScaling, startPosition,
1942 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001943
kwiberg55b97fe2016-01-28 05:22:45 -08001944 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001945
kwiberg55b97fe2016-01-28 05:22:45 -08001946 if (channel_state_.Get().input_file_playing) {
1947 _engineStatisticsPtr->SetLastError(
1948 VE_ALREADY_PLAYING, kTraceWarning,
1949 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001950 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001951 }
1952
1953 // Destroy the old instance
1954 if (_inputFilePlayerPtr) {
1955 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1956 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1957 _inputFilePlayerPtr = NULL;
1958 }
1959
1960 // Create the instance
1961 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1962 (const FileFormats)format);
1963
1964 if (_inputFilePlayerPtr == NULL) {
1965 _engineStatisticsPtr->SetLastError(
1966 VE_INVALID_ARGUMENT, kTraceError,
1967 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1968 return -1;
1969 }
1970
1971 const uint32_t notificationTime(0);
1972
1973 if (_inputFilePlayerPtr->StartPlayingFile(
1974 fileName, loop, startPosition, volumeScaling, notificationTime,
1975 stopPosition, (const CodecInst*)codecInst) != 0) {
1976 _engineStatisticsPtr->SetLastError(
1977 VE_BAD_FILE, kTraceError,
1978 "StartPlayingFile() failed to start file playout");
1979 _inputFilePlayerPtr->StopPlayingFile();
1980 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1981 _inputFilePlayerPtr = NULL;
1982 return -1;
1983 }
1984 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1985 channel_state_.SetInputFilePlaying(true);
1986
1987 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001988}
1989
1990int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001991 FileFormats format,
1992 int startPosition,
1993 float volumeScaling,
1994 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001995 const CodecInst* codecInst) {
1996 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1997 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1998 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1999 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00002000
kwiberg55b97fe2016-01-28 05:22:45 -08002001 if (stream == NULL) {
2002 _engineStatisticsPtr->SetLastError(
2003 VE_BAD_FILE, kTraceError,
2004 "StartPlayingFileAsMicrophone NULL as input stream");
2005 return -1;
2006 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002007
kwiberg55b97fe2016-01-28 05:22:45 -08002008 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00002009
kwiberg55b97fe2016-01-28 05:22:45 -08002010 if (channel_state_.Get().input_file_playing) {
2011 _engineStatisticsPtr->SetLastError(
2012 VE_ALREADY_PLAYING, kTraceWarning,
2013 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00002014 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002015 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002016
kwiberg55b97fe2016-01-28 05:22:45 -08002017 // Destroy the old instance
2018 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002019 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2020 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2021 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002022 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002023
kwiberg55b97fe2016-01-28 05:22:45 -08002024 // Create the instance
2025 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
2026 (const FileFormats)format);
2027
2028 if (_inputFilePlayerPtr == NULL) {
2029 _engineStatisticsPtr->SetLastError(
2030 VE_INVALID_ARGUMENT, kTraceError,
2031 "StartPlayingInputFile() filePlayer format isnot correct");
2032 return -1;
2033 }
2034
2035 const uint32_t notificationTime(0);
2036
2037 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2038 volumeScaling, notificationTime,
2039 stopPosition, codecInst) != 0) {
2040 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2041 "StartPlayingFile() failed to start "
2042 "file playout");
2043 _inputFilePlayerPtr->StopPlayingFile();
2044 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2045 _inputFilePlayerPtr = NULL;
2046 return -1;
2047 }
2048
2049 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2050 channel_state_.SetInputFilePlaying(true);
2051
2052 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002053}
2054
// Stops mixing a file into the microphone (send) path. Safe to call when no
// file is playing: returns 0 immediately in that case. On success, destroys
// the file player instance and clears the input-file-playing channel state.
// Returns 0 on success, -1 if the player failed to stop.
int Channel::StopPlayingFileAsMicrophone() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileAsMicrophone()");

  // The lock protects both the player pointer and the playing flag, which
  // are always updated together.
  rtc::CritScope cs(&_fileCritSect);

  if (!channel_state_.Get().input_file_playing) {
    return 0;
  }

  // NOTE(review): the error code below is VE_STOP_RECORDING_FAILED even
  // though this is a playout-stop failure — looks like a copy-paste from the
  // recording path; confirm before changing since callers may test for it.
  if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopPlayingFile() could not stop playing");
    return -1;
  }
  // Unregister the callback before destruction so no file-end notification
  // can arrive on a dead object.
  _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
  FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
  _inputFilePlayerPtr = NULL;
  channel_state_.SetInputFilePlaying(false);

  return 0;
}
2078
2079int Channel::IsPlayingFileAsMicrophone() const {
2080 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00002081}
2082
// Starts recording this channel's playout (far-end) audio to |fileName|.
// |codecInst| selects the target format: NULL means 16 kHz L16 PCM; L16/PCMU/
// PCMA are written as WAV; anything else goes through the compressed-file
// path. Idempotent while already recording (returns 0 without restarting).
// Returns 0 on success, -1 on invalid arguments or recorder failure.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes no CodecInst.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // This overload accepts mono or stereo recordings.
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    // Detach the callback first so no notification fires mid-destruction.
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    // Roll back: stop and destroy the half-initialized recorder.
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2149
// Starts recording this channel's playout (far-end) audio into |stream|.
// Same format selection as the filename overload: NULL |codecInst| means
// 16 kHz L16 PCM; L16/PCMU/PCMA map to WAV; otherwise a compressed file.
// Idempotent while already recording. Returns 0 on success, -1 on failure.
// NOTE(review): this overload rejects stereo (channels != 1) while the
// filename overload accepts 1-2 channels — confirm whether intentional.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes no CodecInst.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    // Detach the callback first so no notification fires mid-destruction.
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
                                                      notificationTime) != 0) {
    // Roll back: stop and destroy the half-initialized recorder.
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }

  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2216
// Stops an ongoing playout recording and destroys the recorder instance.
// Returns -1 (with VE_STOP_RECORDING_FAILED or a trace error) if nothing is
// being recorded or the recorder fails to stop; 0 on success.
int Channel::StopRecordingPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
               "Channel::StopRecordingPlayout()");

  // NOTE(review): _outputFileRecording is read before _fileCritSect is
  // taken, unlike the writes in StartRecordingPlayout() which happen under
  // the lock — confirm this flag is only mutated from one thread.
  if (!_outputFileRecording) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
                 "StopRecordingPlayout() isnot recording");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (_outputFileRecorderPtr->StopRecording() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopRecording() could not stop recording");
    return (-1);
  }
  // Unregister the callback before destruction so no file notification can
  // arrive on a dead object.
  _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
  FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
  _outputFileRecorderPtr = NULL;
  _outputFileRecording = false;

  return 0;
}
2242
kwiberg55b97fe2016-01-28 05:22:45 -08002243void Channel::SetMixWithMicStatus(bool mix) {
2244 rtc::CritScope cs(&_fileCritSect);
2245 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002246}
2247
kwiberg55b97fe2016-01-28 05:22:45 -08002248int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2249 int8_t currentLevel = _outputAudioLevel.Level();
2250 level = static_cast<int32_t>(currentLevel);
2251 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002252}
2253
kwiberg55b97fe2016-01-28 05:22:45 -08002254int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2255 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2256 level = static_cast<int32_t>(currentLevel);
2257 return 0;
2258}
2259
solenberg1c2af8e2016-03-24 10:36:00 -07002260int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002261 rtc::CritScope cs(&volume_settings_critsect_);
2262 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002263 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002264 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002265 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002266}
2267
solenberg1c2af8e2016-03-24 10:36:00 -07002268bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002269 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002270 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002271}
2272
kwiberg55b97fe2016-01-28 05:22:45 -08002273int Channel::SetOutputVolumePan(float left, float right) {
2274 rtc::CritScope cs(&volume_settings_critsect_);
2275 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002276 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002277 _panLeft = left;
2278 _panRight = right;
2279 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002280}
2281
kwiberg55b97fe2016-01-28 05:22:45 -08002282int Channel::GetOutputVolumePan(float& left, float& right) const {
2283 rtc::CritScope cs(&volume_settings_critsect_);
2284 left = _panLeft;
2285 right = _panRight;
2286 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002287}
2288
kwiberg55b97fe2016-01-28 05:22:45 -08002289int Channel::SetChannelOutputVolumeScaling(float scaling) {
2290 rtc::CritScope cs(&volume_settings_critsect_);
2291 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002292 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002293 _outputGain = scaling;
2294 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002295}
2296
kwiberg55b97fe2016-01-28 05:22:45 -08002297int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2298 rtc::CritScope cs(&volume_settings_critsect_);
2299 scaling = _outputGain;
2300 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002301}
2302
solenberg8842c3e2016-03-11 03:06:41 -08002303int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002304 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002305 "Channel::SendTelephoneEventOutband(...)");
2306 RTC_DCHECK_LE(0, event);
2307 RTC_DCHECK_GE(255, event);
2308 RTC_DCHECK_LE(0, duration_ms);
2309 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002310 if (!Sending()) {
2311 return -1;
2312 }
solenberg8842c3e2016-03-11 03:06:41 -08002313 if (_rtpRtcpModule->SendTelephoneEventOutband(
2314 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002315 _engineStatisticsPtr->SetLastError(
2316 VE_SEND_DTMF_FAILED, kTraceWarning,
2317 "SendTelephoneEventOutband() failed to send event");
2318 return -1;
2319 }
2320 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002321}
2322
// Registers |payload_type| (0-127) as the RTP payload type used for
// outgoing telephone-event (DTMF) packets. Returns 0 on success, -1 if the
// RTP module refuses the registration twice.
int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  CodecInst codec = {0};
  codec.plfreq = 8000;
  codec.pltype = payload_type;
  // "telephone-event" is 15 chars + NUL = 16 bytes.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // First attempt can fail if the payload type is already taken;
    // deregister the old mapping and retry once before giving up.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2344
kwiberg55b97fe2016-01-28 05:22:45 -08002345int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2346 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2347 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002348
kwiberg55b97fe2016-01-28 05:22:45 -08002349 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002350
kwiberg55b97fe2016-01-28 05:22:45 -08002351 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002352
kwiberg55b97fe2016-01-28 05:22:45 -08002353 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2354 OnRxVadDetected(vadDecision);
2355 _oldVadDecision = vadDecision;
2356 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002357
kwiberg55b97fe2016-01-28 05:22:45 -08002358 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2359 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2360 vadDecision);
2361 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002362}
2363
kwiberg55b97fe2016-01-28 05:22:45 -08002364int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2365 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2366 "Channel::RegisterRxVadObserver()");
2367 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002368
kwiberg55b97fe2016-01-28 05:22:45 -08002369 if (_rxVadObserverPtr) {
2370 _engineStatisticsPtr->SetLastError(
2371 VE_INVALID_OPERATION, kTraceError,
2372 "RegisterRxVadObserver() observer already enabled");
2373 return -1;
2374 }
2375 _rxVadObserverPtr = &observer;
2376 _RxVadDetection = true;
2377 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002378}
2379
kwiberg55b97fe2016-01-28 05:22:45 -08002380int Channel::DeRegisterRxVadObserver() {
2381 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2382 "Channel::DeRegisterRxVadObserver()");
2383 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002384
kwiberg55b97fe2016-01-28 05:22:45 -08002385 if (!_rxVadObserverPtr) {
2386 _engineStatisticsPtr->SetLastError(
2387 VE_INVALID_OPERATION, kTraceWarning,
2388 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002389 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002390 }
2391 _rxVadObserverPtr = NULL;
2392 _RxVadDetection = false;
2393 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002394}
2395
kwiberg55b97fe2016-01-28 05:22:45 -08002396int Channel::VoiceActivityIndicator(int& activity) {
2397 activity = _sendFrameType;
2398 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002399}
2400
2401#ifdef WEBRTC_VOICE_ENGINE_AGC
2402
// Enables/disables the receive-side AGC on this channel's private APM
// instance and selects its mode. |kAgcUnchanged| keeps the mode currently
// configured in the APM; |kAgcDefault| uses kDefaultRxAgcMode. Returns 0 on
// success, -1 on an invalid mode or an APM error.
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE-level mode into the APM GainControl mode.
  GainControl::Mode agcMode = kDefaultRxAgcMode;
  switch (mode) {
    case kAgcDefault:
      break;
    case kAgcUnchanged:
      agcMode = rx_audioproc_->gain_control()->mode();
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
    default:
      _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
                                         "SetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  // Set the mode before toggling the component on/off.
  if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
    return -1;
  }

  // The channel-level "Rx APM enabled" flag is the OR of AGC and NS state,
  // so receive-side processing runs if either component is on.
  _rxAgcIsEnabled = enable;
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2443
kwiberg55b97fe2016-01-28 05:22:45 -08002444int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2445 bool enable = rx_audioproc_->gain_control()->is_enabled();
2446 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002447
kwiberg55b97fe2016-01-28 05:22:45 -08002448 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002449
kwiberg55b97fe2016-01-28 05:22:45 -08002450 switch (agcMode) {
2451 case GainControl::kFixedDigital:
2452 mode = kAgcFixedDigital;
2453 break;
2454 case GainControl::kAdaptiveDigital:
2455 mode = kAgcAdaptiveDigital;
2456 break;
2457 default:
2458 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2459 "GetRxAgcStatus() invalid Agc mode");
2460 return -1;
2461 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002462
kwiberg55b97fe2016-01-28 05:22:45 -08002463 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002464}
2465
// Applies an AGC configuration (target level, compression gain, limiter) to
// the receive-side APM. Settings are applied in order and the function
// returns -1 on the first APM failure, so a partial configuration may
// remain applied. Returns 0 when all three settings succeed.
int Channel::SetRxAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcConfig()");

  if (rx_audioproc_->gain_control()->set_target_level_dbfs(
          config.targetLeveldBOv) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set target peak |level|"
        "(or envelope) of the Agc");
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_compression_gain_db(
          config.digitalCompressionGaindB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set the range in |gain| the"
        " digital compression stage may apply");
    return -1;
  }
  if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
}
2496
kwiberg55b97fe2016-01-28 05:22:45 -08002497int Channel::GetRxAgcConfig(AgcConfig& config) {
2498 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2499 config.digitalCompressionGaindB =
2500 rx_audioproc_->gain_control()->compression_gain_db();
2501 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002502
kwiberg55b97fe2016-01-28 05:22:45 -08002503 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002504}
2505
kwiberg55b97fe2016-01-28 05:22:45 -08002506#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002507
2508#ifdef WEBRTC_VOICE_ENGINE_NR
2509
// Enables/disables receive-side noise suppression on this channel's private
// APM and selects its aggressiveness. |kNsUnchanged| keeps the level already
// configured in the APM; |kNsDefault| uses kDefaultNsMode. Note that both
// kNsConference and kNsHighSuppression map to NoiseSuppression::kHigh.
// Returns 0 on success, -1 on an APM error.
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE-level mode into the APM NoiseSuppression level.
  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      break;
    case kNsUnchanged:
      nsLevel = rx_audioproc_->noise_suppression()->level();
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  // Set the level before toggling the component on/off.
  if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
    return -1;
  }
  if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
    return -1;
  }

  // Rx APM runs if either AGC or NS is enabled.
  _rxNsIsEnabled = enable;
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2555
kwiberg55b97fe2016-01-28 05:22:45 -08002556int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2557 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2558 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002559
kwiberg55b97fe2016-01-28 05:22:45 -08002560 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002561
kwiberg55b97fe2016-01-28 05:22:45 -08002562 switch (ncLevel) {
2563 case NoiseSuppression::kLow:
2564 mode = kNsLowSuppression;
2565 break;
2566 case NoiseSuppression::kModerate:
2567 mode = kNsModerateSuppression;
2568 break;
2569 case NoiseSuppression::kHigh:
2570 mode = kNsHighSuppression;
2571 break;
2572 case NoiseSuppression::kVeryHigh:
2573 mode = kNsVeryHighSuppression;
2574 break;
2575 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002576
kwiberg55b97fe2016-01-28 05:22:45 -08002577 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002578}
2579
kwiberg55b97fe2016-01-28 05:22:45 -08002580#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002581
kwiberg55b97fe2016-01-28 05:22:45 -08002582int Channel::SetLocalSSRC(unsigned int ssrc) {
2583 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2584 "Channel::SetLocalSSRC()");
2585 if (channel_state_.Get().sending) {
2586 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2587 "SetLocalSSRC() already sending");
2588 return -1;
2589 }
2590 _rtpRtcpModule->SetSSRC(ssrc);
2591 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002592}
2593
kwiberg55b97fe2016-01-28 05:22:45 -08002594int Channel::GetLocalSSRC(unsigned int& ssrc) {
2595 ssrc = _rtpRtcpModule->SSRC();
2596 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002597}
2598
kwiberg55b97fe2016-01-28 05:22:45 -08002599int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2600 ssrc = rtp_receiver_->SSRC();
2601 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002602}
2603
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002604int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002605 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002606 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002607}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002608
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002609int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2610 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002611 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2612 if (enable &&
2613 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2614 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002615 return -1;
2616 }
2617 return 0;
2618}
2619
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002620int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2621 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2622}
2623
2624int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2625 rtp_header_parser_->DeregisterRtpHeaderExtension(
2626 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002627 if (enable &&
2628 !rtp_header_parser_->RegisterRtpHeaderExtension(
2629 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002630 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002631 }
2632 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002633}
2634
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002635void Channel::EnableSendTransportSequenceNumber(int id) {
2636 int ret =
2637 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2638 RTC_DCHECK_EQ(0, ret);
2639}
2640
stefan3313ec92016-01-21 06:32:43 -08002641void Channel::EnableReceiveTransportSequenceNumber(int id) {
2642 rtp_header_parser_->DeregisterRtpHeaderExtension(
2643 kRtpExtensionTransportSequenceNumber);
2644 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2645 kRtpExtensionTransportSequenceNumber, id);
2646 RTC_DCHECK(ret);
2647}
2648
// Wires this channel's send path into the congestion-control machinery:
// installs the pacer/feedback/sequence-number hooks via the proxy objects,
// enables RTP packet storage (needed for pacing/retransmission), and adds
// the RTP module to |packet_router|. Must be paired with
// ResetCongestionControlObjects(); may only be called while no packet
// router is attached (checked by the DCHECK).
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Store up to 600 sent packets so the pacer/RTX can access them later.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2664
stefanbba9dec2016-02-01 04:39:55 -08002665void Channel::RegisterReceiverCongestionControlObjects(
2666 PacketRouter* packet_router) {
2667 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002668 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002669 packet_router_ = packet_router;
2670}
2671
// Detaches this channel from the congestion-control machinery, reversing
// RegisterSender/ReceiverCongestionControlObjects(): disables packet
// storage, clears the proxy hooks, and removes the RTP module from the
// packet router. Requires that a packet router is currently attached.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2681
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002682void Channel::SetRTCPStatus(bool enable) {
2683 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2684 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002685 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002686}
2687
kwiberg55b97fe2016-01-28 05:22:45 -08002688int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002689 RtcpMode method = _rtpRtcpModule->RTCP();
2690 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002691 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002692}
2693
kwiberg55b97fe2016-01-28 05:22:45 -08002694int Channel::SetRTCP_CNAME(const char cName[256]) {
2695 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2696 "Channel::SetRTCP_CNAME()");
2697 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2698 _engineStatisticsPtr->SetLastError(
2699 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2700 "SetRTCP_CNAME() failed to set RTCP CNAME");
2701 return -1;
2702 }
2703 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002704}
2705
// Copies the CNAME reported by the remote side (looked up by the received
// SSRC) into |cName|. Returns -1 if |cName| is NULL or no CNAME has been
// received yet; 0 on success.
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
  if (cName == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
    return -1;
  }
  char cname[RTCP_CNAME_SIZE];
  const uint32_t remoteSSRC = rtp_receiver_->SSRC();
  if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_CNAME, kTraceError,
        "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
    return -1;
  }
  // NOTE(review): unbounded strcpy — safe only if RTCP_CNAME_SIZE <= 256 and
  // RemoteCNAME() NUL-terminates; confirm both hold.
  strcpy(cName, cname);
  return 0;
}
2724
// Collects remote RTCP statistics for this channel:
//  - NTPHigh/NTPLow/timestamp come from the last received RTCP Sender
//    Report's sender info.
//  - playoutTimestamp is the locally derived playout timestamp updated on
//    each incoming RTCP packet (0 if none received).
//  - |jitter| and |fractionLost| (both optional; pass NULL to skip) come
//    from the received report block matching the remote SSRC, falling back
//    to the first report block when no match exists.
// Returns 0 on success; -1 when no sender info is available, or when
// jitter/fractionLost were requested but no report blocks exist.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Linear search for the block whose SSRC matches the remote sender.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2793
kwiberg55b97fe2016-01-28 05:22:45 -08002794int Channel::SendApplicationDefinedRTCPPacket(
2795 unsigned char subType,
2796 unsigned int name,
2797 const char* data,
2798 unsigned short dataLengthInBytes) {
2799 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2800 "Channel::SendApplicationDefinedRTCPPacket()");
2801 if (!channel_state_.Get().sending) {
2802 _engineStatisticsPtr->SetLastError(
2803 VE_NOT_SENDING, kTraceError,
2804 "SendApplicationDefinedRTCPPacket() not sending");
2805 return -1;
2806 }
2807 if (NULL == data) {
2808 _engineStatisticsPtr->SetLastError(
2809 VE_INVALID_ARGUMENT, kTraceError,
2810 "SendApplicationDefinedRTCPPacket() invalid data value");
2811 return -1;
2812 }
2813 if (dataLengthInBytes % 4 != 0) {
2814 _engineStatisticsPtr->SetLastError(
2815 VE_INVALID_ARGUMENT, kTraceError,
2816 "SendApplicationDefinedRTCPPacket() invalid length value");
2817 return -1;
2818 }
2819 RtcpMode status = _rtpRtcpModule->RTCP();
2820 if (status == RtcpMode::kOff) {
2821 _engineStatisticsPtr->SetLastError(
2822 VE_RTCP_ERROR, kTraceError,
2823 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2824 return -1;
2825 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002826
kwiberg55b97fe2016-01-28 05:22:45 -08002827 // Create and schedule the RTCP APP packet for transmission
2828 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2829 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2830 _engineStatisticsPtr->SetLastError(
2831 VE_SEND_ERROR, kTraceError,
2832 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2833 return -1;
2834 }
2835 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002836}
2837
// Reports average/max jitter (converted from RTP timestamp units to ms) and
// the number of discarded packets. Always returns 0.
int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
                              unsigned int& maxJitterMs,
                              unsigned int& discardedPackets) {
  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
    // If RTCP is off, there is no timed thread in the RTCP module regularly
    // generating new stats, trigger the update manually here instead.
    StreamStatistician* statistician =
        rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
    if (statistician) {
      // Don't use returned statistics, use data from proxy instead so that
      // max jitter can be fetched atomically.
      RtcpStatistics s;
      statistician->GetStatistics(&s, true);
    }
  }

  ChannelStatistics stats = statistics_proxy_->GetStats();
  const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
  if (playoutFrequency > 0) {
    // Scale RTP statistics given the current playout frequency
    maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
    averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
  }
  // NOTE(review): when playoutFrequency <= 0, averageJitterMs and maxJitterMs
  // are left unmodified — callers are presumably expected to pre-initialize
  // them; confirm at call sites.

  discardedPackets = _numberOfDiscardedPackets;

  return 0;
}
2868
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002869int Channel::GetRemoteRTCPReportBlocks(
2870 std::vector<ReportBlock>* report_blocks) {
2871 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002872 _engineStatisticsPtr->SetLastError(
2873 VE_INVALID_ARGUMENT, kTraceError,
2874 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002875 return -1;
2876 }
2877
2878 // Get the report blocks from the latest received RTCP Sender or Receiver
2879 // Report. Each element in the vector contains the sender's SSRC and a
2880 // report block according to RFC 3550.
2881 std::vector<RTCPReportBlock> rtcp_report_blocks;
2882 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002883 return -1;
2884 }
2885
2886 if (rtcp_report_blocks.empty())
2887 return 0;
2888
2889 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2890 for (; it != rtcp_report_blocks.end(); ++it) {
2891 ReportBlock report_block;
2892 report_block.sender_SSRC = it->remoteSSRC;
2893 report_block.source_SSRC = it->sourceSSRC;
2894 report_block.fraction_lost = it->fractionLost;
2895 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2896 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2897 report_block.interarrival_jitter = it->jitter;
2898 report_block.last_SR_timestamp = it->lastSR;
2899 report_block.delay_since_last_SR = it->delaySinceLastSR;
2900 report_blocks->push_back(report_block);
2901 }
2902 return 0;
2903}
2904
// Aggregates RTCP receive statistics, RTT, RTP data counters and the capture
// start NTP time into |stats|. Always returns 0; missing sub-statistics leave
// the corresponding fields at the values of the default-constructed locals.
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  if (statistician) {
    // When RTCP is off there is no periodic update, so ask the statistician
    // to recompute (second argument) instead of using cached values.
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }

  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    // Non-fatal: the received-side counters above are still valid.
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // |capture_start_ntp_time_ms_| is written from another context; read it
    // under the dedicated timestamp-stats lock.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2955
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002956int Channel::SetCodecFECStatus(bool enable) {
2957 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2958 "Channel::SetCodecFECStatus()");
2959
kwibergc8d071e2016-04-06 12:22:38 -07002960 if (!codec_manager_.SetCodecFEC(enable) ||
2961 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002962 _engineStatisticsPtr->SetLastError(
2963 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2964 "SetCodecFECStatus() failed to set FEC state");
2965 return -1;
2966 }
2967 return 0;
2968}
2969
2970bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002971 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002972}
2973
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002974void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2975 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002976 // If pacing is enabled we always store packets.
2977 if (!pacing_enabled_)
2978 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002979 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002980 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002981 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002982 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002983 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002984}
2985
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002986// Called when we are missing one or more packets.
2987int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002988 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2989}
2990
kwiberg55b97fe2016-01-28 05:22:45 -08002991uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2992 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2993 "Channel::Demultiplex()");
2994 _audioFrame.CopyFrom(audioFrame);
2995 _audioFrame.id_ = _channelId;
2996 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002997}
2998
// Remixes/resamples raw capture audio into this channel's private frame,
// clamped to the send codec's rate and channel count.
void Channel::Demultiplex(const int16_t* audio_data,
                          int sample_rate,
                          size_t number_of_frames,
                          size_t number_of_channels) {
  CodecInst codec;
  // NOTE(review): the return value of GetSendCodec() is ignored; if no send
  // codec has been configured, |codec| may be read uninitialized below —
  // confirm callers guarantee a configured send codec.
  GetSendCodec(codec);

  // Never upsample or upmix the capture signal here. This should be done at the
  // end of the send chain.
  _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
  _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
  RemixAndResample(audio_data, number_of_frames, number_of_channels,
                   sample_rate, &input_resampler_, &_audioFrame);
}
3013
// Pre-encode processing of the channel's private frame: file mixing, mute
// ramping, external-media callback, and audio-level (RMS) measurement.
// Returns 0 on success, 0xFFFFFFFF if the frame is empty.
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  // Mix in (or substitute) file audio first so muting and level measurement
  // below see the final signal.
  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Passing both the previous and current mute state lets Mute() ramp the
  // transition instead of switching abruptly.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      // External processing may modify the frame data in place.
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    // Total number of samples across all channels.
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
    // Only treat the frame as fully muted when both this and the previous
    // frame were muted (otherwise the ramp produced audible samples).
    if (is_muted && previous_frame_muted_) {
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
3056
kwiberg55b97fe2016-01-28 05:22:45 -08003057uint32_t Channel::EncodeAndSend() {
3058 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3059 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003060
kwiberg55b97fe2016-01-28 05:22:45 -08003061 assert(_audioFrame.num_channels_ <= 2);
3062 if (_audioFrame.samples_per_channel_ == 0) {
3063 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3064 "Channel::EncodeAndSend() invalid audio frame");
3065 return 0xFFFFFFFF;
3066 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003067
kwiberg55b97fe2016-01-28 05:22:45 -08003068 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003069
kwiberg55b97fe2016-01-28 05:22:45 -08003070 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00003071
kwiberg55b97fe2016-01-28 05:22:45 -08003072 // The ACM resamples internally.
3073 _audioFrame.timestamp_ = _timeStamp;
3074 // This call will trigger AudioPacketizationCallback::SendData if encoding
3075 // is done and payload is ready for packetization and transmission.
3076 // Otherwise, it will return without invoking the callback.
3077 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3078 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3079 "Channel::EncodeAndSend() ACM encoding failed");
3080 return 0xFFFFFFFF;
3081 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003082
kwiberg55b97fe2016-01-28 05:22:45 -08003083 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3084 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003085}
3086
Minyue2013aec2015-05-13 14:14:42 +02003087void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003088 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003089 Channel* channel = associate_send_channel_.channel();
3090 if (channel && channel->ChannelId() == channel_id) {
3091 // If this channel is associated with a send channel of the specified
3092 // Channel ID, disassociate with it.
3093 ChannelOwner ref(NULL);
3094 associate_send_channel_ = ref;
3095 }
3096}
3097
ivoc14d5dbe2016-07-04 07:06:55 -07003098void Channel::SetRtcEventLog(RtcEventLog* event_log) {
3099 event_log_proxy_->SetEventLog(event_log);
3100}
3101
kwiberg55b97fe2016-01-28 05:22:45 -08003102int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3103 VoEMediaProcess& processObject) {
3104 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3105 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003106
kwiberg55b97fe2016-01-28 05:22:45 -08003107 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003108
kwiberg55b97fe2016-01-28 05:22:45 -08003109 if (kPlaybackPerChannel == type) {
3110 if (_outputExternalMediaCallbackPtr) {
3111 _engineStatisticsPtr->SetLastError(
3112 VE_INVALID_OPERATION, kTraceError,
3113 "Channel::RegisterExternalMediaProcessing() "
3114 "output external media already enabled");
3115 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003116 }
kwiberg55b97fe2016-01-28 05:22:45 -08003117 _outputExternalMediaCallbackPtr = &processObject;
3118 _outputExternalMedia = true;
3119 } else if (kRecordingPerChannel == type) {
3120 if (_inputExternalMediaCallbackPtr) {
3121 _engineStatisticsPtr->SetLastError(
3122 VE_INVALID_OPERATION, kTraceError,
3123 "Channel::RegisterExternalMediaProcessing() "
3124 "output external media already enabled");
3125 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003126 }
kwiberg55b97fe2016-01-28 05:22:45 -08003127 _inputExternalMediaCallbackPtr = &processObject;
3128 channel_state_.SetInputExternalMedia(true);
3129 }
3130 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003131}
3132
kwiberg55b97fe2016-01-28 05:22:45 -08003133int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3134 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3135 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003136
kwiberg55b97fe2016-01-28 05:22:45 -08003137 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003138
kwiberg55b97fe2016-01-28 05:22:45 -08003139 if (kPlaybackPerChannel == type) {
3140 if (!_outputExternalMediaCallbackPtr) {
3141 _engineStatisticsPtr->SetLastError(
3142 VE_INVALID_OPERATION, kTraceWarning,
3143 "Channel::DeRegisterExternalMediaProcessing() "
3144 "output external media already disabled");
3145 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003146 }
kwiberg55b97fe2016-01-28 05:22:45 -08003147 _outputExternalMedia = false;
3148 _outputExternalMediaCallbackPtr = NULL;
3149 } else if (kRecordingPerChannel == type) {
3150 if (!_inputExternalMediaCallbackPtr) {
3151 _engineStatisticsPtr->SetLastError(
3152 VE_INVALID_OPERATION, kTraceWarning,
3153 "Channel::DeRegisterExternalMediaProcessing() "
3154 "input external media already disabled");
3155 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003156 }
kwiberg55b97fe2016-01-28 05:22:45 -08003157 channel_state_.SetInputExternalMedia(false);
3158 _inputExternalMediaCallbackPtr = NULL;
3159 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003160
kwiberg55b97fe2016-01-28 05:22:45 -08003161 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003162}
3163
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003164int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003165 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3166 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003167
kwiberg55b97fe2016-01-28 05:22:45 -08003168 if (channel_state_.Get().playing) {
3169 _engineStatisticsPtr->SetLastError(
3170 VE_INVALID_OPERATION, kTraceError,
3171 "Channel::SetExternalMixing() "
3172 "external mixing cannot be changed while playing.");
3173 return -1;
3174 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003175
kwiberg55b97fe2016-01-28 05:22:45 -08003176 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003177
kwiberg55b97fe2016-01-28 05:22:45 -08003178 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003179}
3180
kwiberg55b97fe2016-01-28 05:22:45 -08003181int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3182 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003183}
3184
wu@webrtc.org24301a62013-12-13 19:17:43 +00003185void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3186 audio_coding_->GetDecodingCallStatistics(stats);
3187}
3188
// Reports the smoothed jitter-buffer delay and the playout-buffer delay in
// milliseconds. Returns false when no estimate is available yet; in that case
// the output parameters are left untouched.
bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
                               int* playout_buffer_delay_ms) const {
  rtc::CritScope lock(&video_sync_lock_);
  if (_average_jitter_buffer_delay_us == 0) {
    // No packets processed yet, so no estimate exists.
    return false;
  }
  // Round microseconds to milliseconds and add the fixed per-packet delay.
  *jitter_buffer_delay_ms =
      (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
  *playout_buffer_delay_ms = playout_delay_ms_;
  return true;
}
3200
solenberg358057b2015-11-27 10:46:42 -08003201uint32_t Channel::GetDelayEstimate() const {
3202 int jitter_buffer_delay_ms = 0;
3203 int playout_buffer_delay_ms = 0;
3204 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3205 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3206}
3207
deadbeef74375882015-08-13 12:09:10 -07003208int Channel::LeastRequiredDelayMs() const {
3209 return audio_coding_->LeastRequiredDelayMs();
3210}
3211
kwiberg55b97fe2016-01-28 05:22:45 -08003212int Channel::SetMinimumPlayoutDelay(int delayMs) {
3213 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3214 "Channel::SetMinimumPlayoutDelay()");
3215 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3216 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3217 _engineStatisticsPtr->SetLastError(
3218 VE_INVALID_ARGUMENT, kTraceError,
3219 "SetMinimumPlayoutDelay() invalid min delay");
3220 return -1;
3221 }
3222 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3223 _engineStatisticsPtr->SetLastError(
3224 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3225 "SetMinimumPlayoutDelay() failed to set min playout delay");
3226 return -1;
3227 }
3228 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003229}
3230
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003231int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003232 uint32_t playout_timestamp_rtp = 0;
3233 {
tommi31fc21f2016-01-21 10:37:37 -08003234 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003235 playout_timestamp_rtp = playout_timestamp_rtp_;
3236 }
kwiberg55b97fe2016-01-28 05:22:45 -08003237 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003238 _engineStatisticsPtr->SetLastError(
skvlad4c0536b2016-07-07 13:06:26 -07003239 VE_CANNOT_RETRIEVE_VALUE, kTraceStateInfo,
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003240 "GetPlayoutTimestamp() failed to retrieve timestamp");
3241 return -1;
3242 }
deadbeef74375882015-08-13 12:09:10 -07003243 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003244 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003245}
3246
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003247int Channel::SetInitTimestamp(unsigned int timestamp) {
3248 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003249 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003250 if (channel_state_.Get().sending) {
3251 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3252 "SetInitTimestamp() already sending");
3253 return -1;
3254 }
3255 _rtpRtcpModule->SetStartTimestamp(timestamp);
3256 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003257}
3258
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003259int Channel::SetInitSequenceNumber(short sequenceNumber) {
3260 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3261 "Channel::SetInitSequenceNumber()");
3262 if (channel_state_.Get().sending) {
3263 _engineStatisticsPtr->SetLastError(
3264 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3265 return -1;
3266 }
3267 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3268 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003269}
3270
kwiberg55b97fe2016-01-28 05:22:45 -08003271int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3272 RtpReceiver** rtp_receiver) const {
3273 *rtpRtcpModule = _rtpRtcpModule.get();
3274 *rtp_receiver = rtp_receiver_.get();
3275 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003276}
3277
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
// Pulls 10 ms of file audio and either mixes it with or replaces the current
// capture frame, depending on |_mixFileWithMicrophone|.
// Returns 0 on success (including normal end-of-file), -1 on failure.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // File-player access must be serialized with (de)registration.
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    if (fileSamples == 0) {
      // End of file reached: leave the capture frame untouched.
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  // The file player was asked for audio at |mixingFrequency|, so its 10 ms
  // output must match the frame's sample count.
  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3325
// Mixes 10 ms of output-file audio into |audioFrame| (playout direction).
// Fails with -1 if no file player exists, the read fails, or the sample
// counts do not match.
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  assert(mixingFrequency <= 48000);

  // 960 samples = 10 ms of mono audio at the 48 kHz maximum rate.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // File-player access must be serialized with (de)registration.
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3366
// Recomputes the playout timestamp from NetEq's current value minus the
// ADM's playout delay, and stores it in the RTCP or RTP slot depending on
// |rtcp|. Silently returns when no timestamp or delay is available.
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay.
  // Convert |delay_ms| to RTP timestamp units before subtracting.
  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  {
    // Publish the result under the video-sync lock; readers (e.g.
    // GetPlayoutTimestamp) take the same lock.
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3407
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003408// Called for incoming RTP packets after successful RTP header parsing.
3409void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3410 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003411 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003412 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3413 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003414
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003415 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003416 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003417
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003418 // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
henrik.lundin96bd5022016-04-06 04:13:56 -07003419 // every incoming packet. May be empty if no valid playout timestamp is
3420 // available.
3421 // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
3422 // resulting difference is positive and will be used. When the inverse is
3423 // true (can happen when a network glitch causes a packet to arrive late,
3424 // and during long comfort noise periods with clock drift), or when
3425 // |jitter_buffer_playout_timestamp_| has no value, the difference is not
3426 // changed from the initial 0.
3427 uint32_t timestamp_diff_ms = 0;
3428 if (jitter_buffer_playout_timestamp_ &&
3429 IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
3430 timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
3431 (rtp_receive_frequency / 1000);
3432 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3433 // Diff is too large; set it to zero instead.
3434 timestamp_diff_ms = 0;
3435 }
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003436 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003437
kwiberg55b97fe2016-01-28 05:22:45 -08003438 uint16_t packet_delay_ms =
3439 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003440
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003441 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003442
kwiberg55b97fe2016-01-28 05:22:45 -08003443 if (timestamp_diff_ms == 0)
3444 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003445
deadbeef74375882015-08-13 12:09:10 -07003446 {
tommi31fc21f2016-01-21 10:37:37 -08003447 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003448
deadbeef74375882015-08-13 12:09:10 -07003449 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3450 _recPacketDelayMs = packet_delay_ms;
3451 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003452
deadbeef74375882015-08-13 12:09:10 -07003453 if (_average_jitter_buffer_delay_us == 0) {
3454 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3455 return;
3456 }
3457
3458 // Filter average delay value using exponential filter (alpha is
3459 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3460 // risk of rounding error) and compensate for it in GetDelayEstimate()
3461 // later.
kwiberg55b97fe2016-01-28 05:22:45 -08003462 _average_jitter_buffer_delay_us =
3463 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3464 8;
deadbeef74375882015-08-13 12:09:10 -07003465 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003466}
3467
kwiberg55b97fe2016-01-28 05:22:45 -08003468void Channel::RegisterReceiveCodecsToRTPModule() {
3469 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3470 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003471
kwiberg55b97fe2016-01-28 05:22:45 -08003472 CodecInst codec;
3473 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003474
kwiberg55b97fe2016-01-28 05:22:45 -08003475 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3476 // Open up the RTP/RTCP receiver for all supported codecs
3477 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3478 (rtp_receiver_->RegisterReceivePayload(
3479 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3480 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3481 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3482 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3483 " to register %s (%d/%d/%" PRIuS
3484 "/%d) to RTP/RTCP "
3485 "receiver",
3486 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3487 codec.rate);
3488 } else {
3489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3490 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3491 "(%d/%d/%" PRIuS
3492 "/%d) has been added to the RTP/RTCP "
3493 "receiver",
3494 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3495 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003496 }
kwiberg55b97fe2016-01-28 05:22:45 -08003497 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003498}
3499
kwiberg55b97fe2016-01-28 05:22:45 -08003500int Channel::SetSendRtpHeaderExtension(bool enable,
3501 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003502 unsigned char id) {
3503 int error = 0;
3504 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3505 if (enable) {
3506 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3507 }
3508 return error;
3509}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003510
wu@webrtc.org94454b72014-06-05 20:34:08 +00003511int32_t Channel::GetPlayoutFrequency() {
3512 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3513 CodecInst current_recive_codec;
3514 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3515 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3516 // Even though the actual sampling rate for G.722 audio is
3517 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3518 // 8,000 Hz because that value was erroneously assigned in
3519 // RFC 1890 and must remain unchanged for backward compatibility.
3520 playout_frequency = 8000;
3521 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3522 // We are resampling Opus internally to 32,000 Hz until all our
3523 // DSP routines can operate at 48,000 Hz, but the RTP clock
3524 // rate for the Opus payload format is standardized to 48,000 Hz,
3525 // because that is the maximum supported decoding sampling rate.
3526 playout_frequency = 48000;
3527 }
3528 }
3529 return playout_frequency;
3530}
3531
Minyue2013aec2015-05-13 14:14:42 +02003532int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003533 RtcpMode method = _rtpRtcpModule->RTCP();
3534 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003535 return 0;
3536 }
3537 std::vector<RTCPReportBlock> report_blocks;
3538 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003539
3540 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003541 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003542 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003543 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003544 Channel* channel = associate_send_channel_.channel();
3545 // Tries to get RTT from an associated channel. This is important for
3546 // receive-only channels.
3547 if (channel) {
3548 // To prevent infinite recursion and deadlock, calling GetRTT of
3549 // associate channel should always use "false" for argument:
3550 // |allow_associate_channel|.
3551 rtt = channel->GetRTT(false);
3552 }
3553 }
3554 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003555 }
3556
3557 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3558 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3559 for (; it != report_blocks.end(); ++it) {
3560 if (it->remoteSSRC == remoteSSRC)
3561 break;
3562 }
3563 if (it == report_blocks.end()) {
3564 // We have not received packets with SSRC matching the report blocks.
3565 // To calculate RTT we try with the SSRC of the first report block.
3566 // This is very important for send-only channels where we don't know
3567 // the SSRC of the other end.
3568 remoteSSRC = report_blocks[0].remoteSSRC;
3569 }
Minyue2013aec2015-05-13 14:14:42 +02003570
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003571 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003572 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003573 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003574 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3575 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003576 return 0;
3577 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003578 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003579}
3580
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003581} // namespace voe
3582} // namespace webrtc