blob: c2b95420537f4763893d1d5cea4d188240e9d527 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000022#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000024#include "webrtc/modules/audio_device/include/audio_device.h"
25#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010026#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010027#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010028#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
29#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
30#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000031#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010032#include "webrtc/modules/utility/include/audio_frame_operations.h"
33#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010034#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000035#include "webrtc/voice_engine/include/voe_base.h"
36#include "webrtc/voice_engine/include/voe_external_media.h"
37#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
38#include "webrtc/voice_engine/output_mixer.h"
39#include "webrtc/voice_engine/statistics.h"
40#include "webrtc/voice_engine/transmit_mixer.h"
41#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000042
andrew@webrtc.org50419b02012-11-14 19:07:54 +000043namespace webrtc {
44namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000045
namespace {

// Registers |ci| as a receive codec on |acm|. The ACM is given a factory
// lambda that rents an iSAC decoder from |rac|; presumably the ACM only
// invokes it for codecs that need an externally owned decoder — TODO confirm
// against AudioCodingModule::RegisterReceiveCodec.
// Returns true iff the ACM reported success (return value 0).
bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
                          acm2::RentACodec* rac,
                          const CodecInst& ci) {
  const int result =
      (*acm)->RegisterReceiveCodec(ci, [&] { return rac->RentIsacDecoder(); });
  return result == 0;
}

}  // namespace
57
// Attenuation, in dB, applied to telephone-event (DTMF) audio. Usage is not
// visible in this chunk — presumably consumed by the DTMF send path below.
const int kTelephoneEventAttenuationdB = 10;
59
// Thread-safe forwarding proxy for TransportFeedbackObserver. The concrete
// observer can be (re)set at runtime from the construction thread, while
// AddPacket() and OnTransportFeedback() are invoked from the pacer and
// network threads respectively; |crit_| guards the observer pointer.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    // The proxy is constructed on one thread but called on the pacer and
    // network threads; detach so the first call on each thread binds its
    // checker to that thread.
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Must be called on the thread that constructed this proxy. Passing
  // nullptr disables forwarding.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  // Called on the pacer thread; forwards only if an observer is set.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  // Called on the network thread; forwards only if an observer is set.
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;   // Construction/setter thread.
  rtc::ThreadChecker pacer_thread_;     // Thread calling AddPacket().
  rtc::ThreadChecker network_thread_;   // Thread calling OnTransportFeedback().
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
97
// Thread-safe forwarding proxy for TransportSequenceNumberAllocator. The
// underlying allocator may be (re)set from the construction thread while
// AllocateSequenceNumber() is called from the pacer thread; |crit_| guards
// the allocator pointer.
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    // First AllocateSequenceNumber() call binds the pacer thread checker.
    pacer_thread_.DetachFromThread();
  }

  // Must be called on the thread that constructed this proxy. Passing
  // nullptr disables allocation (calls then return 0).
  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  // Returns 0 when no allocator has been set.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;  // Construction/setter thread.
  rtc::ThreadChecker pacer_thread_;    // Thread calling AllocateSequenceNumber().
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};
126
127class RtpPacketSenderProxy : public RtpPacketSender {
128 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800129 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100130
131 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
132 RTC_DCHECK(thread_checker_.CalledOnValidThread());
133 rtc::CritScope lock(&crit_);
134 rtp_packet_sender_ = rtp_packet_sender;
135 }
136
137 // Implements RtpPacketSender.
138 void InsertPacket(Priority priority,
139 uint32_t ssrc,
140 uint16_t sequence_number,
141 int64_t capture_time_ms,
142 size_t bytes,
143 bool retransmission) override {
144 rtc::CritScope lock(&crit_);
145 if (rtp_packet_sender_) {
146 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
147 capture_time_ms, bytes, retransmission);
148 }
149 }
150
151 private:
152 rtc::ThreadChecker thread_checker_;
153 rtc::CriticalSection crit_;
154 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
155};
156
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
// NOTE(review): this struct both derives from RtcpStatistics and embeds one
// as |rtcp|; within this file only the embedded member is written — confirm
// whether the inheritance is still needed.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;  // Latest report-block statistics.
  uint32_t max_jitter;  // Largest jitter value observed so far.
};
165
166// Statistics callback, called at each generation of a new RTCP report block.
167class StatisticsProxy : public RtcpStatisticsCallback {
168 public:
tommi31fc21f2016-01-21 10:37:37 -0800169 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000170 virtual ~StatisticsProxy() {}
171
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000172 void StatisticsUpdated(const RtcpStatistics& statistics,
173 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000174 if (ssrc != ssrc_)
175 return;
176
tommi31fc21f2016-01-21 10:37:37 -0800177 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000178 stats_.rtcp = statistics;
179 if (statistics.jitter > stats_.max_jitter) {
180 stats_.max_jitter = statistics.jitter;
181 }
182 }
183
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000184 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000185
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000186 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800187 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000188 return stats_;
189 }
190
191 private:
192 // StatisticsUpdated calls are triggered from threads in the RTP module,
193 // while GetStats calls can be triggered from the public voice engine API,
194 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800195 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000196 const uint32_t ssrc_;
197 ChannelStatistics stats_;
198};
199
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000200class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000201 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000202 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
203 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000204
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000205 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
206 // Not used for Voice Engine.
207 }
208
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000209 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
210 int64_t rtt,
211 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000212 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
213 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
214 // report for VoiceEngine?
215 if (report_blocks.empty())
216 return;
217
218 int fraction_lost_aggregate = 0;
219 int total_number_of_packets = 0;
220
221 // If receiving multiple report blocks, calculate the weighted average based
222 // on the number of packets a report refers to.
223 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
224 block_it != report_blocks.end(); ++block_it) {
225 // Find the previous extended high sequence number for this remote SSRC,
226 // to calculate the number of RTP packets this report refers to. Ignore if
227 // we haven't seen this SSRC before.
228 std::map<uint32_t, uint32_t>::iterator seq_num_it =
229 extended_max_sequence_number_.find(block_it->sourceSSRC);
230 int number_of_packets = 0;
231 if (seq_num_it != extended_max_sequence_number_.end()) {
232 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
233 }
234 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
235 total_number_of_packets += number_of_packets;
236
237 extended_max_sequence_number_[block_it->sourceSSRC] =
238 block_it->extendedHighSeqNum;
239 }
240 int weighted_fraction_lost = 0;
241 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800242 weighted_fraction_lost =
243 (fraction_lost_aggregate + total_number_of_packets / 2) /
244 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000245 }
246 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000247 }
248
249 private:
250 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000251 // Maps remote side ssrc to extended highest sequence number received.
252 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000253};
254
// Hands one encoded audio frame to the RTP/RTCP module for packetization
// and sending — presumably the ACM's send-data callback; TODO confirm
// against the AudioPacketizationCallback interface.
// Returns 0 on success, -1 if the RTP/RTCP module rejects the data.
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  // NOTE(review): the C-style (FrameType&) cast suggests SendOutgoingData
  // takes a non-const reference parameter — verify and prefer a named cast.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  // Remember the last frame sent, read elsewhere in this class.
  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}
293
kwiberg55b97fe2016-01-28 05:22:45 -0800294int32_t Channel::InFrameType(FrameType frame_type) {
295 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
296 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000297
kwiberg55b97fe2016-01-28 05:22:45 -0800298 rtc::CritScope cs(&_callbackCritSect);
299 _sendFrameType = (frame_type == kAudioFrameSpeech);
300 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000301}
302
kwiberg55b97fe2016-01-28 05:22:45 -0800303int32_t Channel::OnRxVadDetected(int vadDecision) {
304 rtc::CritScope cs(&_callbackCritSect);
305 if (_rxVadObserverPtr) {
306 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
307 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000308
kwiberg55b97fe2016-01-28 05:22:45 -0800309 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000310}
311
stefan1d8a5062015-10-02 03:39:33 -0700312bool Channel::SendRtp(const uint8_t* data,
313 size_t len,
314 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800315 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
316 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000317
kwiberg55b97fe2016-01-28 05:22:45 -0800318 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000319
kwiberg55b97fe2016-01-28 05:22:45 -0800320 if (_transportPtr == NULL) {
321 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
322 "Channel::SendPacket() failed to send RTP packet due to"
323 " invalid transport object");
324 return false;
325 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000326
kwiberg55b97fe2016-01-28 05:22:45 -0800327 uint8_t* bufferToSendPtr = (uint8_t*)data;
328 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000329
kwiberg55b97fe2016-01-28 05:22:45 -0800330 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
331 std::string transport_name =
332 _externalTransport ? "external transport" : "WebRtc sockets";
333 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
334 "Channel::SendPacket() RTP transmission using %s failed",
335 transport_name.c_str());
336 return false;
337 }
338 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000339}
340
// Transport callback: sends one RTCP packet through the registered external
// transport. Returns false if no transport is set or transmission fails.
bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    // NOTE(review): failure is logged at kTraceInfo here while the RTP path
    // (SendRtp) logs the same condition at kTraceError — confirm whether the
    // severity difference is intentional.
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}
367
// RTP-receiver callback: the remote stream's SSRC changed.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
375
// RTP-receiver callback: a contributing source was added or removed.
// Intentionally a no-op apart from tracing — no CSRC bookkeeping is kept.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
381
// Builds a CodecInst from the RTP payload parameters and registers it as a
// receive codec with the ACM — presumably invoked by the RTP receiver when a
// new payload type must be decoded; TODO confirm against the RtpFeedback
// interface. Returns 0 on success, -1 if the ACM rejects the codec.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  // Zero-initialization guarantees the codec name is NUL-terminated even
  // though strncpy() below does not append a terminator when the source
  // fills the buffer.
  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Only |pacsize| is taken from the ACM's codec database; everything else
  // comes from the RTP-signaled parameters above.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
417
kwiberg55b97fe2016-01-28 05:22:45 -0800418int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
419 size_t payloadSize,
420 const WebRtcRTPHeader* rtpHeader) {
421 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
422 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
423 ","
424 " payloadType=%u, audioChannel=%" PRIuS ")",
425 payloadSize, rtpHeader->header.payloadType,
426 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000427
kwiberg55b97fe2016-01-28 05:22:45 -0800428 if (!channel_state_.Get().playing) {
429 // Avoid inserting into NetEQ when we are not playing. Count the
430 // packet as discarded.
431 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
432 "received packet is discarded since playing is not"
433 " activated");
434 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000435 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800436 }
437
438 // Push the incoming payload (parsed and ready for decoding) into the ACM
439 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
440 0) {
441 _engineStatisticsPtr->SetLastError(
442 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
443 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
444 return -1;
445 }
446
447 // Update the packet delay.
448 UpdatePacketDelay(rtpHeader->header.timestamp,
449 rtpHeader->header.sequenceNumber);
450
451 int64_t round_trip_time = 0;
452 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
453 NULL);
454
455 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
456 if (!nack_list.empty()) {
457 // Can't use nack_list.data() since it's not supported by all
458 // compilers.
459 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
460 }
461 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000462}
463
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000464bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +0000465 size_t rtp_packet_length) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000466 RTPHeader header;
467 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
468 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
469 "IncomingPacket invalid RTP header");
470 return false;
471 }
472 header.payload_type_frequency =
473 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
474 if (header.payload_type_frequency < 0)
475 return false;
476 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
477}
478
// Produces 10 ms of playout audio for the mixer: pulls decoded PCM from the
// ACM, then runs the playout pipeline in order — RX VAD, receive-side APM,
// audio sink tap, volume scaling, panning, file mixing, external media
// callback, file recording, level measurement, and elapsed/NTP timestamp
// bookkeeping. Returns kError if decoding fails, kMuted if the frame is
// muted, kNormal otherwise.
// NOTE(review): the |id| parameter is unused in this body — presumably part
// of the MixerParticipant interface; confirm.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  // Log the playout event (keyed by our local SSRC) when event logging is on.
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; used for APM and file-mixing decisions.
  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume/pan settings under their lock, then apply lock-free.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  // (skipped when the gain is within ~1% of unity, as a no-op optimization)
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  // Latch the first valid RTP timestamp as the capture start reference.
  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
637
kwiberg55b97fe2016-01-28 05:22:45 -0800638int32_t Channel::NeededFrequency(int32_t id) const {
639 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
640 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000641
kwiberg55b97fe2016-01-28 05:22:45 -0800642 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000643
kwiberg55b97fe2016-01-28 05:22:45 -0800644 // Determine highest needed receive frequency
645 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000646
kwiberg55b97fe2016-01-28 05:22:45 -0800647 // Return the bigger of playout and receive frequency in the ACM.
648 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
649 highestNeeded = audio_coding_->PlayoutFrequency();
650 } else {
651 highestNeeded = receiveFrequency;
652 }
653
654 // Special case, if we're playing a file on the playout side
655 // we take that frequency into consideration as well
656 // This is not needed on sending side, since the codec will
657 // limit the spectrum anyway.
658 if (channel_state_.Get().output_file_playing) {
659 rtc::CritScope cs(&_fileCritSect);
660 if (_outputFilePlayerPtr) {
661 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
662 highestNeeded = _outputFilePlayerPtr->Frequency();
663 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000664 }
kwiberg55b97fe2016-01-28 05:22:45 -0800665 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000666
kwiberg55b97fe2016-01-28 05:22:45 -0800667 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000668}
669
// Factory helper: allocates a new Channel and hands it back via |channel|.
// Returns 0 on success, -1 on allocation failure.
int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  // NOTE(review): with a standard (throwing) operator new this NULL check is
  // dead code; it only matters in builds where new returns null on failure —
  // confirm the project's exception policy before removing.
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}
688
// File-player notification callback. Currently only traced; no further
// handling is implemented.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
696
kwiberg55b97fe2016-01-28 05:22:45 -0800697void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
698 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
699 "Channel::RecordNotification(id=%d, durationMs=%d)", id,
700 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000701
kwiberg55b97fe2016-01-28 05:22:45 -0800702 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000703}
704
kwiberg55b97fe2016-01-28 05:22:45 -0800705void Channel::PlayFileEnded(int32_t id) {
706 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
707 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000708
kwiberg55b97fe2016-01-28 05:22:45 -0800709 if (id == _inputFilePlayerId) {
710 channel_state_.SetInputFilePlaying(false);
711 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
712 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000713 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800714 } else if (id == _outputFilePlayerId) {
715 channel_state_.SetOutputFilePlaying(false);
716 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
717 "Channel::PlayFileEnded() => output file player module is"
718 " shutdown");
719 }
720}
721
// FileCallback: the output file recorder finished. The only recorder this
// channel owns is the output recorder, so |id| must match
// _outputFileRecorderId (enforced in debug builds only via assert).
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  // _outputFileRecording is read by other file-related code paths; guard the
  // flag update with the file lock.
  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
735
// Constructor. Initializes every member to its idle default, then builds the
// channel's processing objects: the ACM (audio coding module, with NetEq
// options taken from |config|), the RTP/RTCP module (optionally wired to the
// pacer proxies when VoicePacing is enabled), the RTCP statistics proxy, and
// the receive-side audio processing (rx_audioproc_). Engine-owned
// collaborators (mixers, process thread, ADM) are NOT set here — they arrive
// later via SetEngineInformation(), and the RTP module is registered with
// the process thread in Init().
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // |this| is passed twice: the channel implements both callback
      // interfaces CreateAudioReceiver expects — confirm the exact interface
      // pair against rtp_receiver.h.
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset; the RTP module will add its
                      // own random offset.
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine-owned collaborators; populated by SetEngineInformation().
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  // Build the ACM with NetEq behavior taken from the engine Config.
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  // Assemble the RTP/RTCP module. This channel acts as the outgoing
  // transport; the pacer/feedback proxies are only attached when pacing is
  // enabled via the VoicePacing config.
  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Media sending stays off until StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // Receive-side audio processing (AGC/NS on the far-end signal), with the
  // experimental AGC explicitly disabled.
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
856
// Destructor. Tears the channel down in dependency order: detach the RTCP
// statistics callback, de-register external media processing, stop
// sending/playout, destroy any file players/recorder under the file lock,
// then detach ACM callbacks and finally pull the RTP module off the process
// thread. The ordering comments below (1-2-3) are load-bearing — modules
// must not be destroyed while the process thread can still tick them.
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    // Destroy file players/recorder under the file lock so no concurrent
    // file operation can observe a half-destroyed object.
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
912
// Second-stage initialization, to be called after SetEngineInformation().
// Registers the RTP module with the process thread, initializes the ACM
// receiver, enables RTCP, hooks up permanent ACM callbacks, registers every
// codec the ACM supports with the RTP receiver (with special handling for
// PCMU, telephone-event, CN and RED), and configures the receive-side NS and
// AGC defaults. Returns 0 on success, -1 on any fatal failure; per-codec
// registration failures are only warnings.
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  // Both pointers are supplied by SetEngineInformation(); without them the
  // registrations below would dereference null.
  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // CN (comfort noise) must be usable in both directions: encoder side via
    // the codec manager, receive side via the ACM, and as an RTP send
    // payload.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  // Receive-side processing defaults: noise suppression level and AGC mode.
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1039
kwiberg55b97fe2016-01-28 05:22:45 -08001040int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1041 OutputMixer& outputMixer,
1042 voe::TransmitMixer& transmitMixer,
1043 ProcessThread& moduleProcessThread,
1044 AudioDeviceModule& audioDeviceModule,
1045 VoiceEngineObserver* voiceEngineObserver,
1046 rtc::CriticalSection* callbackCritSect) {
1047 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1048 "Channel::SetEngineInformation()");
1049 _engineStatisticsPtr = &engineStatistics;
1050 _outputMixerPtr = &outputMixer;
1051 _transmitMixerPtr = &transmitMixer,
1052 _moduleProcessThreadPtr = &moduleProcessThread;
1053 _audioDeviceModulePtr = &audioDeviceModule;
1054 _voiceEngineObserverPtr = voiceEngineObserver;
1055 _callbackCritSectPtr = callbackCritSect;
1056 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001057}
1058
kwiberg55b97fe2016-01-28 05:22:45 -08001059int32_t Channel::UpdateLocalTimeStamp() {
1060 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1061 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001062}
1063
// Installs (or clears, when |sink| is null) the raw-audio sink for this
// channel. Takes ownership. Guarded by _callbackCritSect since the sink is
// swapped while other threads may be using it.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1068
kwiberg55b97fe2016-01-28 05:22:45 -08001069int32_t Channel::StartPlayout() {
1070 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1071 "Channel::StartPlayout()");
1072 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001073 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001074 }
1075
1076 if (!_externalMixing) {
1077 // Add participant as candidates for mixing.
1078 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1079 _engineStatisticsPtr->SetLastError(
1080 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1081 "StartPlayout() failed to add participant to mixer");
1082 return -1;
1083 }
1084 }
1085
1086 channel_state_.SetPlaying(true);
1087 if (RegisterFilePlayingToMixer() != 0)
1088 return -1;
1089
1090 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001091}
1092
kwiberg55b97fe2016-01-28 05:22:45 -08001093int32_t Channel::StopPlayout() {
1094 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1095 "Channel::StopPlayout()");
1096 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001097 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001098 }
1099
1100 if (!_externalMixing) {
1101 // Remove participant as candidates for mixing
1102 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1103 _engineStatisticsPtr->SetLastError(
1104 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1105 "StopPlayout() failed to remove participant from mixer");
1106 return -1;
1107 }
1108 }
1109
1110 channel_state_.SetPlaying(false);
1111 _outputAudioLevel.Clear();
1112
1113 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001114}
1115
// Starts sending on this channel. Restores the RTP sequence number saved by
// the last StopSend() (libSRTP replay-protection workaround), then flips the
// sending state and enables the RTP module. On RTP failure everything is
// rolled back and -1 is returned; otherwise 0.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  // Idempotent: already sending is not an error.
  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    // Roll back: media status off, then clear the sending flag. The flag is
    // reset under _callbackCritSect — keep this lock/ordering as-is.
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1142
kwiberg55b97fe2016-01-28 05:22:45 -08001143int32_t Channel::StopSend() {
1144 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1145 "Channel::StopSend()");
1146 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001147 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001148 }
1149 channel_state_.SetSending(false);
1150
1151 // Store the sequence number to be able to pick up the same sequence for
1152 // the next StartSend(). This is needed for restarting device, otherwise
1153 // it might cause libSRTP to complain about packets being replayed.
1154 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1155 // CL is landed. See issue
1156 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1157 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1158
1159 // Reset sending SSRC and sequence number and triggers direct transmission
1160 // of RTCP BYE
1161 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1162 _engineStatisticsPtr->SetLastError(
1163 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1164 "StartSend() RTP/RTCP failed to stop sending");
1165 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001166 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001167
1168 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001169}
1170
kwiberg55b97fe2016-01-28 05:22:45 -08001171int32_t Channel::StartReceiving() {
1172 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1173 "Channel::StartReceiving()");
1174 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001175 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001176 }
1177 channel_state_.SetReceiving(true);
1178 _numberOfDiscardedPackets = 0;
1179 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001180}
1181
kwiberg55b97fe2016-01-28 05:22:45 -08001182int32_t Channel::StopReceiving() {
1183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1184 "Channel::StopReceiving()");
1185 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001186 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001187 }
1188
1189 channel_state_.SetReceiving(false);
1190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001191}
1192
kwiberg55b97fe2016-01-28 05:22:45 -08001193int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1194 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1195 "Channel::RegisterVoiceEngineObserver()");
1196 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001197
kwiberg55b97fe2016-01-28 05:22:45 -08001198 if (_voiceEngineObserverPtr) {
1199 _engineStatisticsPtr->SetLastError(
1200 VE_INVALID_OPERATION, kTraceError,
1201 "RegisterVoiceEngineObserver() observer already enabled");
1202 return -1;
1203 }
1204 _voiceEngineObserverPtr = &observer;
1205 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001206}
1207
kwiberg55b97fe2016-01-28 05:22:45 -08001208int32_t Channel::DeRegisterVoiceEngineObserver() {
1209 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1210 "Channel::DeRegisterVoiceEngineObserver()");
1211 rtc::CritScope cs(&_callbackCritSect);
1212
1213 if (!_voiceEngineObserverPtr) {
1214 _engineStatisticsPtr->SetLastError(
1215 VE_INVALID_OPERATION, kTraceWarning,
1216 "DeRegisterVoiceEngineObserver() observer already disabled");
1217 return 0;
1218 }
1219 _voiceEngineObserverPtr = NULL;
1220 return 0;
1221}
1222
1223int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001224 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001225 if (send_codec) {
1226 codec = *send_codec;
1227 return 0;
1228 }
1229 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001230}
1231
kwiberg55b97fe2016-01-28 05:22:45 -08001232int32_t Channel::GetRecCodec(CodecInst& codec) {
1233 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001234}
1235
kwiberg55b97fe2016-01-28 05:22:45 -08001236int32_t Channel::SetSendCodec(const CodecInst& codec) {
1237 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1238 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001239
kwibergc8d071e2016-04-06 12:22:38 -07001240 if (!codec_manager_.RegisterEncoder(codec) ||
1241 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001242 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1243 "SetSendCodec() failed to register codec to ACM");
1244 return -1;
1245 }
1246
1247 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1248 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1249 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1250 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1251 "SetSendCodec() failed to register codec to"
1252 " RTP/RTCP module");
1253 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001254 }
kwiberg55b97fe2016-01-28 05:22:45 -08001255 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001256
kwiberg55b97fe2016-01-28 05:22:45 -08001257 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1258 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1259 "SetSendCodec() failed to set audio packet size");
1260 return -1;
1261 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001262
kwiberg55b97fe2016-01-28 05:22:45 -08001263 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001264}
1265
Ivo Creusenadf89b72015-04-29 16:03:33 +02001266void Channel::SetBitRate(int bitrate_bps) {
1267 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1268 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1269 audio_coding_->SetBitRate(bitrate_bps);
1270}
1271
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001272void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001273 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001274 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1275
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001276 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001277 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1278 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001279 assert(false); // This should not happen.
1280 }
1281}
1282
kwiberg55b97fe2016-01-28 05:22:45 -08001283int32_t Channel::SetVADStatus(bool enableVAD,
1284 ACMVADMode mode,
1285 bool disableDTX) {
1286 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1287 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001288 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1289 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1290 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001291 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1292 kTraceError,
1293 "SetVADStatus() failed to set VAD");
1294 return -1;
1295 }
1296 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001297}
1298
kwiberg55b97fe2016-01-28 05:22:45 -08001299int32_t Channel::GetVADStatus(bool& enabledVAD,
1300 ACMVADMode& mode,
1301 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001302 const auto* params = codec_manager_.GetStackParams();
1303 enabledVAD = params->use_cng;
1304 mode = params->vad_mode;
1305 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001306 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001307}
1308
// Sets (or, when codec.pltype == -1, clears) the receive payload type for
// |codec|. Disallowed while playing or listening. Registration is attempted
// on both the RTP receiver and the ACM, each with a single
// de-register-and-retry fallback. Returns 0 on success, -1 on failure.
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }
  if (channel_state_.Get().receiving) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_LISTENING, kTraceError,
        "SetRecPayloadType() unable to set PT while listening");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    // Note: the lookup's return value is not checked here; on failure
    // |pltype| stays -1 and the de-registrations below will fail.
    rtp_payload_registry_->ReceivePayloadType(
        rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
        (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  if (rtp_receiver_->RegisterReceivePayload(
          codec.plname, codec.pltype, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(
            codec.plname, codec.pltype, codec.plfreq, codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  // Same register / de-register-and-retry pattern for the ACM side.
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1381
kwiberg55b97fe2016-01-28 05:22:45 -08001382int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1383 int8_t payloadType(-1);
1384 if (rtp_payload_registry_->ReceivePayloadType(
1385 codec.plname, codec.plfreq, codec.channels,
1386 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1387 _engineStatisticsPtr->SetLastError(
1388 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1389 "GetRecPayloadType() failed to retrieve RX payload type");
1390 return -1;
1391 }
1392 codec.pltype = payloadType;
1393 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001394}
1395
// Configures the RTP payload type used for comfort noise (CN) on the
// send side. |type| is the payload type number to use (the comment
// below says it must be in the dynamic range); |frequency| selects the
// 16 kHz or 32 kHz CN variant. Registers the codec first with ACM (via
// the codec manager) and then with the RTP/RTCP module.
// Returns 0 on success, -1 on failure.
int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCNPayloadType()");

  CodecInst codec;
  int32_t samplingFreqHz(-1);
  const size_t kMono = 1;
  // Only the 16 kHz and 32 kHz variants map to a concrete rate here; for
  // any other |frequency| value samplingFreqHz stays -1 and is passed to
  // ACM as-is (presumably letting ACM choose a default — TODO confirm).
  if (frequency == kFreq32000Hz)
    samplingFreqHz = 32000;
  else if (frequency == kFreq16000Hz)
    samplingFreqHz = 16000;

  // Fetch the default CN codec settings for the requested rate from ACM.
  if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to retrieve default CN codec "
        "settings");
    return -1;
  }

  // Modify the payload type (must be set to dynamic range)
  codec.pltype = type;

  // Register the CN codec as a send codec with ACM via the codec manager.
  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to register CN to ACM");
    return -1;
  }

  // Register the payload with the RTP/RTCP module. On failure the payload
  // type is de-registered and registration is retried once before giving
  // up (e.g. when the payload type was already registered differently).
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
          "module");
      return -1;
    }
  }
  return 0;
}
1439
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001440int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001441 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001442 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001443
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001444 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001445 _engineStatisticsPtr->SetLastError(
1446 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001447 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001448 return -1;
1449 }
1450 return 0;
1451}
1452
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001453int Channel::SetOpusDtx(bool enable_dtx) {
1454 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1455 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001456 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001457 : audio_coding_->DisableOpusDtx();
1458 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001459 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1460 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001461 return -1;
1462 }
1463 return 0;
1464}
1465
// Installs |transport| as the external transport for this channel's
// outgoing packets. Fails if an external transport is already enabled.
// The transport pointer and flag are guarded by |_callbackCritSect|.
// Returns 0 on success, -1 on failure.
int32_t Channel::RegisterExternalTransport(Transport* transport) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterExternalTransport()");

  rtc::CritScope cs(&_callbackCritSect);
  if (_externalTransport) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterExternalTransport() external transport already enabled");
    return -1;
  }
  _externalTransport = true;
  _transportPtr = transport;
  return 0;
}
1481
// Removes any registered external transport. Always succeeds: if no
// transport was registered, only a warning is recorded; in both cases
// the transport pointer and flag are cleared under |_callbackCritSect|.
int32_t Channel::DeRegisterExternalTransport() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterExternalTransport()");

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr) {
    // A transport was registered; log that it is being disabled.
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "DeRegisterExternalTransport() all transport is disabled");
  } else {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterExternalTransport() external transport already "
        "disabled");
  }
  // Clear the state unconditionally; the call returns success either way.
  _externalTransport = false;
  _transportPtr = NULL;
  return 0;
}
1500
// Entry point for an incoming RTP packet. Parses the RTP header, updates
// receive statistics, records the incoming payload type, and dispatches
// the packet via ReceivePacket(). |packet_time| is currently unused.
// Returns 0 on success, -1 if the packet could not be parsed or handled.
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // Resolve the clock rate for this payload type; packets with an
  // unknown payload type (negative frequency) are dropped.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Order matters: in-order classification feeds the retransmission
  // check, which in turn feeds the receive statistics.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1527
// Dispatches a parsed RTP packet: RTX packets are unwrapped via
// HandleRtxPacket(); ordinary packets are split into header and payload
// and handed to the RTP receiver together with their payload-specific
// parameters. Returns true if the packet was accepted.
bool Channel::ReceivePacket(const uint8_t* packet,
                            size_t packet_length,
                            const RTPHeader& header,
                            bool in_order) {
  if (rtp_payload_registry_->IsRtx(header)) {
    return HandleRtxPacket(packet, packet_length, header);
  }
  // The payload starts immediately after the RTP header; the parser has
  // already validated that the header fits in the packet.
  const uint8_t* payload = packet + header.headerLength;
  assert(packet_length >= header.headerLength);
  size_t payload_length = packet_length - header.headerLength;
  PayloadUnion payload_specific;
  if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
                                                  &payload_specific)) {
    return false;
  }
  return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
                                          payload_specific, in_order);
}
1546
// Unwraps an RTX (retransmission) packet: restores the original RTP
// packet into the shared |restored_packet_| buffer and feeds it back
// through OnRecoveredPacket(). Returns true if the recovered packet was
// handled successfully.
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  // |restored_packet_| is a single shared buffer; a nested/recursive RTX
  // unwrap would clobber it, so such packets are dropped.
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  // Note: RestoreOriginalPacket updates |packet_length| in place to the
  // restored packet's length.
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Mark the buffer busy while the restored packet is re-processed.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1575
1576bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1577 StreamStatistician* statistician =
1578 rtp_receive_statistics_->GetStatistician(header.ssrc);
1579 if (!statistician)
1580 return false;
1581 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001582}
1583
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001584bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1585 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001586 // Retransmissions are handled separately if RTX is enabled.
1587 if (rtp_payload_registry_->RtxEnabled())
1588 return false;
1589 StreamStatistician* statistician =
1590 rtp_receive_statistics_->GetStatistician(header.ssrc);
1591 if (!statistician)
1592 return false;
1593 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001594 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001595 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001596 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001597}
1598
// Entry point for an incoming RTCP packet. Feeds the packet to the
// RTP/RTCP module and, once both a valid RTT and remote NTP timing are
// available, updates the remote NTP time estimator.
// Always returns 0; an invalid RTCP packet only records a warning.
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }
  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // The estimator is shared with other threads; update it under
    // |ts_stats_lock_|.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1633
// Starts local (playout-side) playback of the file |fileName|; the file
// audio is mixed into this channel's output via the output mixer.
// Fails if output-file playback is already active.
// Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy any previous player instance before creating a new one.
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);  // No periodic notifications.

    if (_outputFilePlayerPtr->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Mixer registration happens outside the |_fileCritSect| scope above;
  // see the deadlock note in RegisterFilePlayingToMixer().
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1696
// Stream overload of StartPlayingFileLocally(): plays audio read from
// |stream| into this channel's output via the output mixer. Fails on a
// NULL stream or if output-file playback is already active.
// Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(InStream* stream,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(format=%d,"
               " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileLocally() NULL as input stream");
    return -1;
  }

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy the old instance
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    // Create the instance
    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format isnot correct");
      return -1;
    }

    const uint32_t notificationTime(0);  // No periodic notifications.

    if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                               volumeScaling, notificationTime,
                                               stopPosition, codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                         "StartPlayingFile() failed to "
                                         "start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Mixer registration happens outside the |_fileCritSect| scope above;
  // see the deadlock note in RegisterFilePlayingToMixer().
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1765
// Stops local (playout-side) file playback, destroys the file player and
// removes the channel as an anonymous participant from the output mixer.
// A no-op (returning 0) when no output file is playing.
// Returns 0 on success, -1 on failure.
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1801
kwiberg55b97fe2016-01-28 05:22:45 -08001802int Channel::IsPlayingFileLocally() const {
1803 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001804}
1805
// Adds this channel as an anonymous participant to the output mixer so
// that file audio is mixed into playout. Only does real work when both
// playout and output-file playback are active; otherwise succeeds
// trivially. On mixer failure the file player is torn down.
// Returns 0 on success, -1 on failure.
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Roll back: mark playback stopped, then destroy the player under the
    // file lock (safe here because the mixer registration failed).
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    _outputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    return -1;
  }

  return 0;
}
1833
// Starts playing the file |fileName| as if it were the microphone input
// for this channel. If a file is already playing as microphone, this is
// a no-op that records a warning and returns success.
// Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          bool loop,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
               "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() filePlayer is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
    return -1;
  }

  const uint32_t notificationTime(0);  // No periodic notifications.

  if (_inputFilePlayerPtr->StartPlayingFile(
          fileName, loop, startPosition, volumeScaling, notificationTime,
          stopPosition, (const CodecInst*)codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFile() failed to start file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }
  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1893
// Stream overload of StartPlayingFileAsMicrophone(): plays audio read
// from |stream| as if it were the microphone input. Fails on a NULL
// stream; if already playing, records a warning and returns success.
// Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  const uint32_t notificationTime(0);  // No periodic notifications.

  if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                            volumeScaling, notificationTime,
                                            stopPosition, codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }

  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1958
// Stops file-as-microphone playback and destroys the input file player.
// A no-op (returning 0) when no input file is playing.
// Returns 0 on success, -1 on failure.
int Channel::StopPlayingFileAsMicrophone() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileAsMicrophone()");

  rtc::CritScope cs(&_fileCritSect);

  if (!channel_state_.Get().input_file_playing) {
    return 0;
  }

  if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopPlayingFile() could not stop playing");
    return -1;
  }
  // Tear down the player and clear the playing flag.
  _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
  FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
  _inputFilePlayerPtr = NULL;
  channel_state_.SetInputFilePlaying(false);

  return 0;
}
1982
1983int Channel::IsPlayingFileAsMicrophone() const {
1984 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001985}
1986
// Starts recording this channel's playout audio to the file |fileName|.
// |codecInst| selects the recording codec: NULL records 16 kHz L16 PCM;
// L16/PCMU/PCMA record to WAV; anything else records to a compressed
// file. If already recording, logs a warning and returns success.
// Returns 0 on success, -1 on failure.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Only mono and stereo recording codecs are accepted.
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Map the requested codec to a file format (see function comment).
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2053
2054int Channel::StartRecordingPlayout(OutStream* stream,
2055 const CodecInst* codecInst) {
2056 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2057 "Channel::StartRecordingPlayout()");
2058
2059 if (_outputFileRecording) {
2060 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2061 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002062 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002063 }
2064
2065 FileFormats format;
2066 const uint32_t notificationTime(0); // Not supported in VoE
2067 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2068
2069 if (codecInst != NULL && codecInst->channels != 1) {
2070 _engineStatisticsPtr->SetLastError(
2071 VE_BAD_ARGUMENT, kTraceError,
2072 "StartRecordingPlayout() invalid compression");
2073 return (-1);
2074 }
2075 if (codecInst == NULL) {
2076 format = kFileFormatPcm16kHzFile;
2077 codecInst = &dummyCodec;
2078 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2079 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2080 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2081 format = kFileFormatWavFile;
2082 } else {
2083 format = kFileFormatCompressedFile;
2084 }
2085
2086 rtc::CritScope cs(&_fileCritSect);
2087
2088 // Destroy the old instance
2089 if (_outputFileRecorderPtr) {
2090 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2091 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2092 _outputFileRecorderPtr = NULL;
2093 }
2094
2095 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2096 _outputFileRecorderId, (const FileFormats)format);
2097 if (_outputFileRecorderPtr == NULL) {
2098 _engineStatisticsPtr->SetLastError(
2099 VE_INVALID_ARGUMENT, kTraceError,
2100 "StartRecordingPlayout() fileRecorder format isnot correct");
2101 return -1;
2102 }
2103
2104 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2105 notificationTime) != 0) {
2106 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2107 "StartRecordingPlayout() failed to "
2108 "start file recording");
2109 _outputFileRecorderPtr->StopRecording();
2110 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2111 _outputFileRecorderPtr = NULL;
2112 return -1;
2113 }
2114
2115 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2116 _outputFileRecording = true;
2117
2118 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002119}
2120
// Stops an ongoing playout recording and tears down the file recorder.
// Returns 0 on success, -1 if no recording is active or stopping fails.
int Channel::StopRecordingPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
               "Channel::StopRecordingPlayout()");

  if (!_outputFileRecording) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
                 "StopRecordingPlayout() isnot recording");
    return -1;
  }

  // Serialize against the playout path that feeds the recorder.
  rtc::CritScope cs(&_fileCritSect);

  if (_outputFileRecorderPtr->StopRecording() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopRecording() could not stop recording");
    return (-1);
  }
  // Detach the callback before destroying the recorder so no file events
  // are delivered to a destroyed object.
  _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
  FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
  _outputFileRecorderPtr = NULL;
  _outputFileRecording = false;

  return 0;
}
2146
kwiberg55b97fe2016-01-28 05:22:45 -08002147void Channel::SetMixWithMicStatus(bool mix) {
2148 rtc::CritScope cs(&_fileCritSect);
2149 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002150}
2151
kwiberg55b97fe2016-01-28 05:22:45 -08002152int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2153 int8_t currentLevel = _outputAudioLevel.Level();
2154 level = static_cast<int32_t>(currentLevel);
2155 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002156}
2157
kwiberg55b97fe2016-01-28 05:22:45 -08002158int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2159 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2160 level = static_cast<int32_t>(currentLevel);
2161 return 0;
2162}
2163
solenberg1c2af8e2016-03-24 10:36:00 -07002164int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002165 rtc::CritScope cs(&volume_settings_critsect_);
2166 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002167 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002168 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002169 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002170}
2171
solenberg1c2af8e2016-03-24 10:36:00 -07002172bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002173 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002174 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002175}
2176
kwiberg55b97fe2016-01-28 05:22:45 -08002177int Channel::SetOutputVolumePan(float left, float right) {
2178 rtc::CritScope cs(&volume_settings_critsect_);
2179 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002180 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002181 _panLeft = left;
2182 _panRight = right;
2183 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002184}
2185
kwiberg55b97fe2016-01-28 05:22:45 -08002186int Channel::GetOutputVolumePan(float& left, float& right) const {
2187 rtc::CritScope cs(&volume_settings_critsect_);
2188 left = _panLeft;
2189 right = _panRight;
2190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002191}
2192
kwiberg55b97fe2016-01-28 05:22:45 -08002193int Channel::SetChannelOutputVolumeScaling(float scaling) {
2194 rtc::CritScope cs(&volume_settings_critsect_);
2195 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002196 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002197 _outputGain = scaling;
2198 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002199}
2200
kwiberg55b97fe2016-01-28 05:22:45 -08002201int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2202 rtc::CritScope cs(&volume_settings_critsect_);
2203 scaling = _outputGain;
2204 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002205}
2206
// Sends an out-of-band DTMF event through the RTP module.
// |event| must be 0-255 and |duration_ms| 0-65535 (enforced by DCHECKs).
// Returns 0 on success, -1 if the channel is not sending or the RTP
// module rejects the event.
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendTelephoneEventOutband(...)");
  RTC_DCHECK_LE(0, event);
  RTC_DCHECK_GE(255, event);
  RTC_DCHECK_LE(0, duration_ms);
  RTC_DCHECK_GE(65535, duration_ms);
  // Out-of-band events can only be delivered while RTP is being sent.
  if (!Sending()) {
    return -1;
  }
  if (_rtpRtcpModule->SendTelephoneEventOutband(
          event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_DTMF_FAILED, kTraceWarning,
        "SendTelephoneEventOutband() failed to send event");
    return -1;
  }
  return 0;
}
2226
// Registers |payload_type| (0-127) as the send payload type for the
// "telephone-event" codec (8 kHz) used for out-of-band DTMF.
int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  CodecInst codec = {0};
  codec.plfreq = 8000;
  codec.pltype = payload_type;
  // "telephone-event" is 15 chars + NUL terminator = 16 bytes.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // The payload type may already be registered; free it and retry once.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2248
kwiberg55b97fe2016-01-28 05:22:45 -08002249int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2250 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2251 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002252
kwiberg55b97fe2016-01-28 05:22:45 -08002253 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002254
kwiberg55b97fe2016-01-28 05:22:45 -08002255 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002256
kwiberg55b97fe2016-01-28 05:22:45 -08002257 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2258 OnRxVadDetected(vadDecision);
2259 _oldVadDecision = vadDecision;
2260 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002261
kwiberg55b97fe2016-01-28 05:22:45 -08002262 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2263 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2264 vadDecision);
2265 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002266}
2267
kwiberg55b97fe2016-01-28 05:22:45 -08002268int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2269 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2270 "Channel::RegisterRxVadObserver()");
2271 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002272
kwiberg55b97fe2016-01-28 05:22:45 -08002273 if (_rxVadObserverPtr) {
2274 _engineStatisticsPtr->SetLastError(
2275 VE_INVALID_OPERATION, kTraceError,
2276 "RegisterRxVadObserver() observer already enabled");
2277 return -1;
2278 }
2279 _rxVadObserverPtr = &observer;
2280 _RxVadDetection = true;
2281 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002282}
2283
kwiberg55b97fe2016-01-28 05:22:45 -08002284int Channel::DeRegisterRxVadObserver() {
2285 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2286 "Channel::DeRegisterRxVadObserver()");
2287 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002288
kwiberg55b97fe2016-01-28 05:22:45 -08002289 if (!_rxVadObserverPtr) {
2290 _engineStatisticsPtr->SetLastError(
2291 VE_INVALID_OPERATION, kTraceWarning,
2292 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002293 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002294 }
2295 _rxVadObserverPtr = NULL;
2296 _RxVadDetection = false;
2297 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002298}
2299
// Reports the most recent send-side frame type as a voice activity flag.
int Channel::VoiceActivityIndicator(int& activity) {
  activity = _sendFrameType;
  return 0;
}
2304
2305#ifdef WEBRTC_VOICE_ENGINE_AGC
2306
// Enables/disables automatic gain control on the received (far-end)
// signal, mapping the VoE |mode| onto an APM GainControl mode.
// Returns 0 on success, -1 on an invalid mode or an APM failure.
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  GainControl::Mode agcMode = kDefaultRxAgcMode;
  switch (mode) {
    case kAgcDefault:
      break;
    case kAgcUnchanged:
      // Keep whatever mode the rx APM is currently using.
      agcMode = rx_audioproc_->gain_control()->mode();
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
    default:
      _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
                                         "SetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
    return -1;
  }

  _rxAgcIsEnabled = enable;
  // Rx APM processing is needed when either AGC or NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2347
kwiberg55b97fe2016-01-28 05:22:45 -08002348int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2349 bool enable = rx_audioproc_->gain_control()->is_enabled();
2350 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002351
kwiberg55b97fe2016-01-28 05:22:45 -08002352 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002353
kwiberg55b97fe2016-01-28 05:22:45 -08002354 switch (agcMode) {
2355 case GainControl::kFixedDigital:
2356 mode = kAgcFixedDigital;
2357 break;
2358 case GainControl::kAdaptiveDigital:
2359 mode = kAgcAdaptiveDigital;
2360 break;
2361 default:
2362 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2363 "GetRxAgcStatus() invalid Agc mode");
2364 return -1;
2365 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002366
kwiberg55b97fe2016-01-28 05:22:45 -08002367 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002368}
2369
// Applies |config| (target level, compression gain, limiter) to the
// rx-side APM gain control. Each setting is applied independently and
// the first failure aborts with -1.
int Channel::SetRxAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcConfig()");

  if (rx_audioproc_->gain_control()->set_target_level_dbfs(
          config.targetLeveldBOv) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set target peak |level|"
        "(or envelope) of the Agc");
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_compression_gain_db(
          config.digitalCompressionGaindB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set the range in |gain| the"
        " digital compression stage may apply");
    return -1;
  }
  if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
}
2400
kwiberg55b97fe2016-01-28 05:22:45 -08002401int Channel::GetRxAgcConfig(AgcConfig& config) {
2402 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2403 config.digitalCompressionGaindB =
2404 rx_audioproc_->gain_control()->compression_gain_db();
2405 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002406
kwiberg55b97fe2016-01-28 05:22:45 -08002407 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002408}
2409
kwiberg55b97fe2016-01-28 05:22:45 -08002410#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002411
2412#ifdef WEBRTC_VOICE_ENGINE_NR
2413
// Enables/disables noise suppression on the received signal, mapping the
// VoE |mode| onto an APM NoiseSuppression level. Note that kNsConference
// maps to the same APM level as kNsHighSuppression (kHigh).
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      break;
    case kNsUnchanged:
      // Keep whatever level the rx APM is currently using.
      nsLevel = rx_audioproc_->noise_suppression()->level();
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
    return -1;
  }
  if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
    return -1;
  }

  _rxNsIsEnabled = enable;
  // Rx APM processing is needed when either AGC or NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2459
kwiberg55b97fe2016-01-28 05:22:45 -08002460int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2461 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2462 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002463
kwiberg55b97fe2016-01-28 05:22:45 -08002464 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002465
kwiberg55b97fe2016-01-28 05:22:45 -08002466 switch (ncLevel) {
2467 case NoiseSuppression::kLow:
2468 mode = kNsLowSuppression;
2469 break;
2470 case NoiseSuppression::kModerate:
2471 mode = kNsModerateSuppression;
2472 break;
2473 case NoiseSuppression::kHigh:
2474 mode = kNsHighSuppression;
2475 break;
2476 case NoiseSuppression::kVeryHigh:
2477 mode = kNsVeryHighSuppression;
2478 break;
2479 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002480
kwiberg55b97fe2016-01-28 05:22:45 -08002481 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002482}
2483
kwiberg55b97fe2016-01-28 05:22:45 -08002484#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002485
// Sets the local SSRC; rejected while the channel is actively sending,
// since the SSRC identifies the ongoing RTP stream.
int Channel::SetLocalSSRC(unsigned int ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetLocalSSRC()");
  if (channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
                                       "SetLocalSSRC() already sending");
    return -1;
  }
  _rtpRtcpModule->SetSSRC(ssrc);
  return 0;
}
2497
// Returns the SSRC used for outgoing RTP on this channel.
int Channel::GetLocalSSRC(unsigned int& ssrc) {
  ssrc = _rtpRtcpModule->SSRC();
  return 0;
}
2502
// Returns the SSRC of the remote RTP stream received on this channel.
int Channel::GetRemoteSSRC(unsigned int& ssrc) {
  ssrc = rtp_receiver_->SSRC();
  return 0;
}
2507
// Enables/disables the audio-level RTP header extension on sent packets
// and records the setting for the send path.
int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
  _includeAudioLevelIndication = enable;
  return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002512
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002513int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2514 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002515 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2516 if (enable &&
2517 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2518 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002519 return -1;
2520 }
2521 return 0;
2522}
2523
// Enables/disables the absolute-send-time RTP header extension on sent
// packets.
int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
  return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
}
2527
2528int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2529 rtp_header_parser_->DeregisterRtpHeaderExtension(
2530 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002531 if (enable &&
2532 !rtp_header_parser_->RegisterRtpHeaderExtension(
2533 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002534 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002535 }
2536 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002537}
2538
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002539void Channel::EnableSendTransportSequenceNumber(int id) {
2540 int ret =
2541 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2542 RTC_DCHECK_EQ(0, ret);
2543}
2544
stefan3313ec92016-01-21 06:32:43 -08002545void Channel::EnableReceiveTransportSequenceNumber(int id) {
2546 rtp_header_parser_->DeregisterRtpHeaderExtension(
2547 kRtpExtensionTransportSequenceNumber);
2548 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2549 kRtpExtensionTransportSequenceNumber, id);
2550 RTC_DCHECK(ret);
2551}
2552
// Wires up the send-side congestion control objects: the transport
// feedback observer, the sequence number allocator and the pacer.
// Must be balanced by ResetCongestionControlObjects(); only one packet
// router may be registered at a time.
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Pacing requires the RTP module to keep a history of sent packets.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2568
// Attaches the RTP module to |packet_router| for a receive-only channel.
// Must be balanced by ResetCongestionControlObjects(); only one packet
// router may be registered at a time.
void Channel::RegisterReceiverCongestionControlObjects(
    PacketRouter* packet_router) {
  RTC_DCHECK(packet_router && !packet_router_);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2575
// Undoes Register{Sender,Receiver}CongestionControlObjects(): clears the
// proxies, stops the send-packet history and detaches the RTP module
// from the packet router. Requires a router to be registered.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2585
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002586void Channel::SetRTCPStatus(bool enable) {
2587 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2588 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002589 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002590}
2591
kwiberg55b97fe2016-01-28 05:22:45 -08002592int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002593 RtcpMode method = _rtpRtcpModule->RTCP();
2594 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002595 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002596}
2597
kwiberg55b97fe2016-01-28 05:22:45 -08002598int Channel::SetRTCP_CNAME(const char cName[256]) {
2599 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2600 "Channel::SetRTCP_CNAME()");
2601 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2602 _engineStatisticsPtr->SetLastError(
2603 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2604 "SetRTCP_CNAME() failed to set RTCP CNAME");
2605 return -1;
2606 }
2607 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002608}
2609
// Copies the remote side's RTCP CNAME into |cName| (caller supplies a
// 256-byte buffer). Returns -1 on a NULL buffer or when no CNAME has
// been received yet.
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
  if (cName == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
    return -1;
  }
  char cname[RTCP_CNAME_SIZE];
  const uint32_t remoteSSRC = rtp_receiver_->SSRC();
  if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_CNAME, kTraceError,
        "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
    return -1;
  }
  // NOTE(review): assumes RemoteCNAME() NUL-terminates |cname| and that
  // RTCP_CNAME_SIZE <= 256 so the strcpy cannot overflow — verify.
  strcpy(cName, cname);
  return 0;
}
2628
// Collects RTCP data about the remote side: NTP/RTP timestamps from the
// last received Sender Report, the locally derived RTCP playout
// timestamp, and (when the optional out-params are non-NULL) jitter and
// fraction-lost from the received report blocks.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Prefer the report block whose SSRC matches the remote sender.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2697
// Sends an application-defined (APP) RTCP packet with the given
// |subType|, |name| and payload. Requires the channel to be sending,
// a non-NULL payload whose length is a multiple of 4 bytes, and RTCP
// to be enabled. Returns 0 on success, -1 otherwise.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // RTCP payloads are carried in 32-bit words.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2741
// Reports receive-side jitter statistics, converted from RTP timestamp
// units to milliseconds using the current playout frequency, plus the
// number of discarded packets.
int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
                              unsigned int& maxJitterMs,
                              unsigned int& discardedPackets) {
  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
    // If RTCP is off, there is no timed thread in the RTCP module regularly
    // generating new stats, trigger the update manually here instead.
    StreamStatistician* statistician =
        rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
    if (statistician) {
      // Don't use returned statistics, use data from proxy instead so that
      // max jitter can be fetched atomically.
      RtcpStatistics s;
      statistician->GetStatistics(&s, true);
    }
  }

  ChannelStatistics stats = statistics_proxy_->GetStats();
  const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
  if (playoutFrequency > 0) {
    // Scale RTP statistics given the current playout frequency
    maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
    averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
  }

  discardedPackets = _numberOfDiscardedPackets;

  return 0;
}
2772
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002773int Channel::GetRemoteRTCPReportBlocks(
2774 std::vector<ReportBlock>* report_blocks) {
2775 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002776 _engineStatisticsPtr->SetLastError(
2777 VE_INVALID_ARGUMENT, kTraceError,
2778 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002779 return -1;
2780 }
2781
2782 // Get the report blocks from the latest received RTCP Sender or Receiver
2783 // Report. Each element in the vector contains the sender's SSRC and a
2784 // report block according to RFC 3550.
2785 std::vector<RTCPReportBlock> rtcp_report_blocks;
2786 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002787 return -1;
2788 }
2789
2790 if (rtcp_report_blocks.empty())
2791 return 0;
2792
2793 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2794 for (; it != rtcp_report_blocks.end(); ++it) {
2795 ReportBlock report_block;
2796 report_block.sender_SSRC = it->remoteSSRC;
2797 report_block.source_SSRC = it->sourceSSRC;
2798 report_block.fraction_lost = it->fractionLost;
2799 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2800 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2801 report_block.interarrival_jitter = it->jitter;
2802 report_block.last_SR_timestamp = it->lastSR;
2803 report_block.delay_since_last_SR = it->delaySinceLastSR;
2804 report_blocks->push_back(report_block);
2805 }
2806 return 0;
2807}
2808
kwiberg55b97fe2016-01-28 05:22:45 -08002809int Channel::GetRTPStatistics(CallStatistics& stats) {
2810 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002811
kwiberg55b97fe2016-01-28 05:22:45 -08002812 // The jitter statistics is updated for each received RTP packet and is
2813 // based on received packets.
2814 RtcpStatistics statistics;
2815 StreamStatistician* statistician =
2816 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002817 if (statistician) {
2818 statistician->GetStatistics(&statistics,
2819 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002820 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002821
kwiberg55b97fe2016-01-28 05:22:45 -08002822 stats.fractionLost = statistics.fraction_lost;
2823 stats.cumulativeLost = statistics.cumulative_lost;
2824 stats.extendedMax = statistics.extended_max_sequence_number;
2825 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002826
kwiberg55b97fe2016-01-28 05:22:45 -08002827 // --- RTT
2828 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002829
kwiberg55b97fe2016-01-28 05:22:45 -08002830 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002831
kwiberg55b97fe2016-01-28 05:22:45 -08002832 size_t bytesSent(0);
2833 uint32_t packetsSent(0);
2834 size_t bytesReceived(0);
2835 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002836
kwiberg55b97fe2016-01-28 05:22:45 -08002837 if (statistician) {
2838 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2839 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002840
kwiberg55b97fe2016-01-28 05:22:45 -08002841 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2842 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2843 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
2844 " output will not be complete");
2845 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002846
kwiberg55b97fe2016-01-28 05:22:45 -08002847 stats.bytesSent = bytesSent;
2848 stats.packetsSent = packetsSent;
2849 stats.bytesReceived = bytesReceived;
2850 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002851
kwiberg55b97fe2016-01-28 05:22:45 -08002852 // --- Timestamps
2853 {
2854 rtc::CritScope lock(&ts_stats_lock_);
2855 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2856 }
2857 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002858}
2859
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002860int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002861 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002862 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002863
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002864 if (enable) {
2865 if (redPayloadtype < 0 || redPayloadtype > 127) {
2866 _engineStatisticsPtr->SetLastError(
2867 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002868 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002869 return -1;
2870 }
2871
2872 if (SetRedPayloadType(redPayloadtype) < 0) {
2873 _engineStatisticsPtr->SetLastError(
2874 VE_CODEC_ERROR, kTraceError,
2875 "SetSecondarySendCodec() Failed to register RED ACM");
2876 return -1;
2877 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002878 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002879
kwibergc8d071e2016-04-06 12:22:38 -07002880 if (!codec_manager_.SetCopyRed(enable) ||
2881 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002882 _engineStatisticsPtr->SetLastError(
2883 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002884 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002885 return -1;
2886 }
2887 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002888}
2889
kwiberg55b97fe2016-01-28 05:22:45 -08002890int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
kwibergc8d071e2016-04-06 12:22:38 -07002891 enabled = codec_manager_.GetStackParams()->use_red;
kwiberg55b97fe2016-01-28 05:22:45 -08002892 if (enabled) {
2893 int8_t payloadType = 0;
2894 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2895 _engineStatisticsPtr->SetLastError(
2896 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2897 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2898 "module");
2899 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002900 }
kwiberg55b97fe2016-01-28 05:22:45 -08002901 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002902 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002903 }
2904 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002905}
2906
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002907int Channel::SetCodecFECStatus(bool enable) {
2908 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2909 "Channel::SetCodecFECStatus()");
2910
kwibergc8d071e2016-04-06 12:22:38 -07002911 if (!codec_manager_.SetCodecFEC(enable) ||
2912 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002913 _engineStatisticsPtr->SetLastError(
2914 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2915 "SetCodecFECStatus() failed to set FEC state");
2916 return -1;
2917 }
2918 return 0;
2919}
2920
2921bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002922 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002923}
2924
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002925void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2926 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002927 // If pacing is enabled we always store packets.
2928 if (!pacing_enabled_)
2929 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002930 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002931 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002932 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002933 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002934 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002935}
2936
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002937// Called when we are missing one or more packets.
2938int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002939 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2940}
2941
kwiberg55b97fe2016-01-28 05:22:45 -08002942uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2943 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2944 "Channel::Demultiplex()");
2945 _audioFrame.CopyFrom(audioFrame);
2946 _audioFrame.id_ = _channelId;
2947 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002948}
2949
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002950void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002951 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002952 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002953 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002954 CodecInst codec;
2955 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002956
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002957 // Never upsample or upmix the capture signal here. This should be done at the
2958 // end of the send chain.
2959 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2960 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2961 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2962 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002963}
2964
// Prepares |_audioFrame| for encoding: optionally mixes in / replaces with
// file audio, applies muting, runs the registered input external-media
// callback, and updates the RMS audio-level measurement.
// Returns 0 on success, 0xFFFFFFFF if the frame is empty.
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  // File playback replaces or mixes with the microphone signal first, so
  // muting and level measurement below apply to the combined signal.
  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Mute() ramps between the previous and current mute state to avoid
  // clicks on transitions.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    // Callback pointer is re-checked under the lock since deregistration
    // can clear it concurrently.
    if (_inputExternalMediaCallbackPtr) {
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    // Only skip level computation once the mute ramp has fully completed
    // (muted in both the previous and the current frame).
    if (is_muted && previous_frame_muted_) {
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
3006
kwiberg55b97fe2016-01-28 05:22:45 -08003007uint32_t Channel::EncodeAndSend() {
3008 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3009 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003010
kwiberg55b97fe2016-01-28 05:22:45 -08003011 assert(_audioFrame.num_channels_ <= 2);
3012 if (_audioFrame.samples_per_channel_ == 0) {
3013 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3014 "Channel::EncodeAndSend() invalid audio frame");
3015 return 0xFFFFFFFF;
3016 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003017
kwiberg55b97fe2016-01-28 05:22:45 -08003018 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003019
kwiberg55b97fe2016-01-28 05:22:45 -08003020 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00003021
kwiberg55b97fe2016-01-28 05:22:45 -08003022 // The ACM resamples internally.
3023 _audioFrame.timestamp_ = _timeStamp;
3024 // This call will trigger AudioPacketizationCallback::SendData if encoding
3025 // is done and payload is ready for packetization and transmission.
3026 // Otherwise, it will return without invoking the callback.
3027 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3028 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3029 "Channel::EncodeAndSend() ACM encoding failed");
3030 return 0xFFFFFFFF;
3031 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003032
kwiberg55b97fe2016-01-28 05:22:45 -08003033 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3034 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003035}
3036
Minyue2013aec2015-05-13 14:14:42 +02003037void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003038 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003039 Channel* channel = associate_send_channel_.channel();
3040 if (channel && channel->ChannelId() == channel_id) {
3041 // If this channel is associated with a send channel of the specified
3042 // Channel ID, disassociate with it.
3043 ChannelOwner ref(NULL);
3044 associate_send_channel_ = ref;
3045 }
3046}
3047
kwiberg55b97fe2016-01-28 05:22:45 -08003048int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3049 VoEMediaProcess& processObject) {
3050 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3051 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003052
kwiberg55b97fe2016-01-28 05:22:45 -08003053 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003054
kwiberg55b97fe2016-01-28 05:22:45 -08003055 if (kPlaybackPerChannel == type) {
3056 if (_outputExternalMediaCallbackPtr) {
3057 _engineStatisticsPtr->SetLastError(
3058 VE_INVALID_OPERATION, kTraceError,
3059 "Channel::RegisterExternalMediaProcessing() "
3060 "output external media already enabled");
3061 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003062 }
kwiberg55b97fe2016-01-28 05:22:45 -08003063 _outputExternalMediaCallbackPtr = &processObject;
3064 _outputExternalMedia = true;
3065 } else if (kRecordingPerChannel == type) {
3066 if (_inputExternalMediaCallbackPtr) {
3067 _engineStatisticsPtr->SetLastError(
3068 VE_INVALID_OPERATION, kTraceError,
3069 "Channel::RegisterExternalMediaProcessing() "
3070 "output external media already enabled");
3071 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003072 }
kwiberg55b97fe2016-01-28 05:22:45 -08003073 _inputExternalMediaCallbackPtr = &processObject;
3074 channel_state_.SetInputExternalMedia(true);
3075 }
3076 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003077}
3078
kwiberg55b97fe2016-01-28 05:22:45 -08003079int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3080 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3081 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003082
kwiberg55b97fe2016-01-28 05:22:45 -08003083 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003084
kwiberg55b97fe2016-01-28 05:22:45 -08003085 if (kPlaybackPerChannel == type) {
3086 if (!_outputExternalMediaCallbackPtr) {
3087 _engineStatisticsPtr->SetLastError(
3088 VE_INVALID_OPERATION, kTraceWarning,
3089 "Channel::DeRegisterExternalMediaProcessing() "
3090 "output external media already disabled");
3091 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003092 }
kwiberg55b97fe2016-01-28 05:22:45 -08003093 _outputExternalMedia = false;
3094 _outputExternalMediaCallbackPtr = NULL;
3095 } else if (kRecordingPerChannel == type) {
3096 if (!_inputExternalMediaCallbackPtr) {
3097 _engineStatisticsPtr->SetLastError(
3098 VE_INVALID_OPERATION, kTraceWarning,
3099 "Channel::DeRegisterExternalMediaProcessing() "
3100 "input external media already disabled");
3101 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003102 }
kwiberg55b97fe2016-01-28 05:22:45 -08003103 channel_state_.SetInputExternalMedia(false);
3104 _inputExternalMediaCallbackPtr = NULL;
3105 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003106
kwiberg55b97fe2016-01-28 05:22:45 -08003107 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003108}
3109
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003110int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003111 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3112 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003113
kwiberg55b97fe2016-01-28 05:22:45 -08003114 if (channel_state_.Get().playing) {
3115 _engineStatisticsPtr->SetLastError(
3116 VE_INVALID_OPERATION, kTraceError,
3117 "Channel::SetExternalMixing() "
3118 "external mixing cannot be changed while playing.");
3119 return -1;
3120 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003121
kwiberg55b97fe2016-01-28 05:22:45 -08003122 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003123
kwiberg55b97fe2016-01-28 05:22:45 -08003124 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003125}
3126
kwiberg55b97fe2016-01-28 05:22:45 -08003127int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3128 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003129}
3130
wu@webrtc.org24301a62013-12-13 19:17:43 +00003131void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3132 audio_coding_->GetDecodingCallStatistics(stats);
3133}
3134
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003135bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3136 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003137 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003138 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003139 return false;
3140 }
kwiberg55b97fe2016-01-28 05:22:45 -08003141 *jitter_buffer_delay_ms =
3142 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003143 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003144 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003145}
3146
solenberg358057b2015-11-27 10:46:42 -08003147uint32_t Channel::GetDelayEstimate() const {
3148 int jitter_buffer_delay_ms = 0;
3149 int playout_buffer_delay_ms = 0;
3150 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3151 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3152}
3153
deadbeef74375882015-08-13 12:09:10 -07003154int Channel::LeastRequiredDelayMs() const {
3155 return audio_coding_->LeastRequiredDelayMs();
3156}
3157
kwiberg55b97fe2016-01-28 05:22:45 -08003158int Channel::SetMinimumPlayoutDelay(int delayMs) {
3159 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3160 "Channel::SetMinimumPlayoutDelay()");
3161 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3162 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3163 _engineStatisticsPtr->SetLastError(
3164 VE_INVALID_ARGUMENT, kTraceError,
3165 "SetMinimumPlayoutDelay() invalid min delay");
3166 return -1;
3167 }
3168 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3169 _engineStatisticsPtr->SetLastError(
3170 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3171 "SetMinimumPlayoutDelay() failed to set min playout delay");
3172 return -1;
3173 }
3174 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003175}
3176
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003177int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003178 uint32_t playout_timestamp_rtp = 0;
3179 {
tommi31fc21f2016-01-21 10:37:37 -08003180 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003181 playout_timestamp_rtp = playout_timestamp_rtp_;
3182 }
kwiberg55b97fe2016-01-28 05:22:45 -08003183 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003184 _engineStatisticsPtr->SetLastError(
3185 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3186 "GetPlayoutTimestamp() failed to retrieve timestamp");
3187 return -1;
3188 }
deadbeef74375882015-08-13 12:09:10 -07003189 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003191}
3192
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003193int Channel::SetInitTimestamp(unsigned int timestamp) {
3194 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003195 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003196 if (channel_state_.Get().sending) {
3197 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3198 "SetInitTimestamp() already sending");
3199 return -1;
3200 }
3201 _rtpRtcpModule->SetStartTimestamp(timestamp);
3202 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003203}
3204
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003205int Channel::SetInitSequenceNumber(short sequenceNumber) {
3206 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3207 "Channel::SetInitSequenceNumber()");
3208 if (channel_state_.Get().sending) {
3209 _engineStatisticsPtr->SetLastError(
3210 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3211 return -1;
3212 }
3213 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3214 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003215}
3216
kwiberg55b97fe2016-01-28 05:22:45 -08003217int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3218 RtpReceiver** rtp_receiver) const {
3219 *rtpRtcpModule = _rtpRtcpModule.get();
3220 *rtp_receiver = rtp_receiver_.get();
3221 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003222}
3223
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003224// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3225// a shared helper.
// Reads 10 ms of audio from the input file player and either mixes it into
// |_audioFrame| (with saturation) or replaces the frame contents, depending
// on |_mixFileWithMicrophone|. Returns 0 on success (including end of file),
// -1 on failure.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // Scratch buffer for up to 640 samples of 10 ms mono file audio.
  // NOTE(review): presumably sized for the highest file playback rate the
  // FilePlayer supports — confirm against FilePlayer limits.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // The file player pointer and read are guarded by the file lock; the
    // mixing below happens outside the lock on the local buffer.
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    // Zero samples read means end of file; treat as success with no mixing.
    if (fileSamples == 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  // The file player was asked for audio at |mixingFrequency|, so the sample
  // count must match the current frame's.
  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3271
// Mixes 10 ms of audio from the output file player into |audioFrame| with
// saturation. Unlike MixOrReplaceAudioWithFile(), this never replaces the
// frame. Returns 0 on success, -1 on failure (no player, read error, or a
// sample-count mismatch).
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  assert(mixingFrequency <= 48000);

  // Scratch buffer for up to 960 samples of 10 ms file audio.
  // NOTE(review): sized above the 480 samples needed for 48 kHz mono —
  // presumably headroom; confirm against FilePlayer limits.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // Guard the file player pointer and the read with the file lock.
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    // Sample-count mismatch between the frame and the file read; report and
    // bail out rather than mixing misaligned audio.
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3312
// Refreshes the cached playout timestamp (RTP or RTCP flavor, selected by
// |rtcp|) from NetEq's current playout timestamp, compensated for the audio
// device's playout delay. Does nothing if NetEq has not produced a timestamp
// yet or the device delay cannot be read.
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay.
  // Convert the device delay (ms) to RTP timestamp ticks at the playout rate.
  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  {
    // Publish the result and the delay under the video-sync lock so readers
    // (e.g. GetPlayoutTimestamp, GetDelayEstimate) see a consistent pair.
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3353
// Called for incoming RTP packets after successful RTP header parsing.
// Updates the inter-arrival packet delay estimate and the averaged jitter
// buffer delay (|_average_jitter_buffer_delay_us|) used for audio/video sync.
void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
                                uint16_t sequence_number) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
               rtp_timestamp, sequence_number);

  // Get frequency of last received payload
  int rtp_receive_frequency = GetPlayoutFrequency();

  // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
  // every incoming packet. May be empty if no valid playout timestamp is
  // available.
  // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
  // resulting difference is positive and will be used. When the inverse is
  // true (can happen when a network glitch causes a packet to arrive late,
  // and during long comfort noise periods with clock drift), or when
  // |jitter_buffer_playout_timestamp_| has no value, the difference is not
  // changed from the initial 0.
  uint32_t timestamp_diff_ms = 0;
  if (jitter_buffer_playout_timestamp_ &&
      IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
    // Unsigned subtraction handles RTP timestamp wraparound; divide by
    // (samples per ms) to convert the timestamp delta to milliseconds.
    timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
                        (rtp_receive_frequency / 1000);
    if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
      // Diff is too large; set it to zero instead.
      timestamp_diff_ms = 0;
    }
  }

  // Inter-arrival delay in ms between this packet and the previous one,
  // measured in RTP timestamp units of the receive codec's clock rate.
  uint16_t packet_delay_ms =
      (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);

  // Remember this packet's timestamp for the next delay computation. This
  // must happen even when we early-return below.
  _previousTimestamp = rtp_timestamp;

  if (timestamp_diff_ms == 0)
    return;

  {
    rtc::CritScope lock(&video_sync_lock_);

    // Only accept inter-arrival delays inside a plausible packetization
    // window (10-60 ms); values outside it are treated as outliers.
    if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
      _recPacketDelayMs = packet_delay_ms;
    }

    // First measurement: seed the filter directly (stored in microseconds).
    if (_average_jitter_buffer_delay_us == 0) {
      _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
      return;
    }

    // Filter average delay value using exponential filter (alpha is
    // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
    // risk of rounding error) and compensate for it in GetDelayEstimate()
    // later.
    _average_jitter_buffer_delay_us =
        (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
        8;
  }
}
3413
kwiberg55b97fe2016-01-28 05:22:45 -08003414void Channel::RegisterReceiveCodecsToRTPModule() {
3415 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3416 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003417
kwiberg55b97fe2016-01-28 05:22:45 -08003418 CodecInst codec;
3419 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003420
kwiberg55b97fe2016-01-28 05:22:45 -08003421 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3422 // Open up the RTP/RTCP receiver for all supported codecs
3423 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3424 (rtp_receiver_->RegisterReceivePayload(
3425 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3426 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3427 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3428 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3429 " to register %s (%d/%d/%" PRIuS
3430 "/%d) to RTP/RTCP "
3431 "receiver",
3432 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3433 codec.rate);
3434 } else {
3435 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3436 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3437 "(%d/%d/%" PRIuS
3438 "/%d) has been added to the RTP/RTCP "
3439 "receiver",
3440 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3441 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003442 }
kwiberg55b97fe2016-01-28 05:22:45 -08003443 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003444}
3445
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003446// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003447int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003448 CodecInst codec;
3449 bool found_red = false;
3450
3451 // Get default RED settings from the ACM database
3452 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3453 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003454 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003455 if (!STR_CASE_CMP(codec.plname, "RED")) {
3456 found_red = true;
3457 break;
3458 }
3459 }
3460
3461 if (!found_red) {
3462 _engineStatisticsPtr->SetLastError(
3463 VE_CODEC_ERROR, kTraceError,
3464 "SetRedPayloadType() RED is not supported");
3465 return -1;
3466 }
3467
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003468 codec.pltype = red_payload_type;
kwibergc8d071e2016-04-06 12:22:38 -07003469 if (!codec_manager_.RegisterEncoder(codec) ||
3470 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003471 _engineStatisticsPtr->SetLastError(
3472 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3473 "SetRedPayloadType() RED registration in ACM module failed");
3474 return -1;
3475 }
3476
3477 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3478 _engineStatisticsPtr->SetLastError(
3479 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3480 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3481 return -1;
3482 }
3483 return 0;
3484}
3485
kwiberg55b97fe2016-01-28 05:22:45 -08003486int Channel::SetSendRtpHeaderExtension(bool enable,
3487 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003488 unsigned char id) {
3489 int error = 0;
3490 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3491 if (enable) {
3492 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3493 }
3494 return error;
3495}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003496
wu@webrtc.org94454b72014-06-05 20:34:08 +00003497int32_t Channel::GetPlayoutFrequency() {
3498 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3499 CodecInst current_recive_codec;
3500 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3501 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3502 // Even though the actual sampling rate for G.722 audio is
3503 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3504 // 8,000 Hz because that value was erroneously assigned in
3505 // RFC 1890 and must remain unchanged for backward compatibility.
3506 playout_frequency = 8000;
3507 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3508 // We are resampling Opus internally to 32,000 Hz until all our
3509 // DSP routines can operate at 48,000 Hz, but the RTP clock
3510 // rate for the Opus payload format is standardized to 48,000 Hz,
3511 // because that is the maximum supported decoding sampling rate.
3512 playout_frequency = 48000;
3513 }
3514 }
3515 return playout_frequency;
3516}
3517
Minyue2013aec2015-05-13 14:14:42 +02003518int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003519 RtcpMode method = _rtpRtcpModule->RTCP();
3520 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003521 return 0;
3522 }
3523 std::vector<RTCPReportBlock> report_blocks;
3524 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003525
3526 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003527 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003528 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003529 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003530 Channel* channel = associate_send_channel_.channel();
3531 // Tries to get RTT from an associated channel. This is important for
3532 // receive-only channels.
3533 if (channel) {
3534 // To prevent infinite recursion and deadlock, calling GetRTT of
3535 // associate channel should always use "false" for argument:
3536 // |allow_associate_channel|.
3537 rtt = channel->GetRTT(false);
3538 }
3539 }
3540 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003541 }
3542
3543 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3544 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3545 for (; it != report_blocks.end(); ++it) {
3546 if (it->remoteSSRC == remoteSSRC)
3547 break;
3548 }
3549 if (it == report_blocks.end()) {
3550 // We have not received packets with SSRC matching the report blocks.
3551 // To calculate RTT we try with the SSRC of the first report block.
3552 // This is very important for send-only channels where we don't know
3553 // the SSRC of the other end.
3554 remoteSSRC = report_blocks[0].remoteSSRC;
3555 }
Minyue2013aec2015-05-13 14:14:42 +02003556
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003557 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003558 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003559 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003560 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3561 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003562 return 0;
3563 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003564 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003565}
3566
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003567} // namespace voe
3568} // namespace webrtc