/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

const int kTelephoneEventAttenuationdB = 10;

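// Forwards transport-feedback callbacks to an observer that may be attached
// and replaced after construction. AddPacket() is expected on the pacer
// thread and OnTransportFeedback() on the network thread; access to the
// observer pointer is serialized with a critical section.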
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};

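// Forwards transport sequence number allocation to an allocator that may be
// attached later; returns 0 while no allocator is set. Allocation is expected
// on the pacer thread.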
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};

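// Forwards InsertPacket() calls to a pacer that may be attached later;
// packets are silently dropped while no sender is set.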
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};

// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};

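// RTCP bandwidth observer that converts incoming receiver report blocks into
// a single packet-loss figure and hands it to the owning Channel.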
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just
    // send what we get? I.e. do we ever get multiple reports bundled into one
    // RTCP report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
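    // fractionLost is expressed in 1/256 units; compute the packet-weighted
    // mean across all report blocks, rounding to the nearest integer by
    // adding half the denominator before dividing.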
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};

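// Called by the audio coding module with an encoded audio frame; the frame is
// handed to the RTP/RTCP module for packetization and transmission.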
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}

int32_t Channel::OnRxVadDetected(int vadDecision) {
  rtc::CritScope cs(&_callbackCritSect);
  if (_rxVadObserverPtr) {
    _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
  }

  return 0;
}

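// SendRtp() and SendRtcp() implement webrtc::Transport. The RTP/RTCP module
// calls them with complete packets, which are forwarded to the transport
// registered on this channel (an external transport or WebRtc sockets).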
bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnPlayTelephoneEvent(uint8_t event,
                                   uint16_t lengthMs,
                                   uint8_t volume) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnPlayTelephoneEvent(event=%u, lengthMs=%u,"
               " volume=%u)",
               event, lengthMs, volume);

  if (!_playOutbandDtmfEvent || (event > 15)) {
    // Ignore callback since feedback is disabled or event is not a
    // Dtmf tone event.
    return;
  }

  assert(_outputMixerPtr != NULL);

  // Start playing out the Dtmf tone (if playout is enabled).
  // Reduce the length of the tone by 80 ms to reduce the risk of echo.
  _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}

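// Called for each received RTP payload that the RTP receiver has parsed. The
// packet is pushed into the ACM (NetEq) for decoding, the playout delay
// estimate is updated, and ResendPackets() is invoked with any NACK list the
// ACM reports for the current round-trip time.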
int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  // Update the packet delay.
  UpdatePacketDelay(rtpHeader->header.timestamp,
                    rtpHeader->header.sequenceNumber);

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

493 if (event_log_) {
494 unsigned int ssrc;
495 RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
496 event_log_->LogAudioPlayout(ssrc);
497 }
498 // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
499 if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
500 -1) {
501 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
502 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
503 // In all likelihood, the audio in this frame is garbage. We return an
504 // error so that the audio mixer module doesn't add it to the mix. As
505 // a result, it won't be played out and the actions skipped here are
506 // irrelevant.
507 return -1;
508 }
509
510 if (_RxVadDetection) {
511 UpdateRxVadDetection(*audioFrame);
512 }
513
514 // Convert module ID to internal VoE channel ID
515 audioFrame->id_ = VoEChannelId(audioFrame->id_);
516 // Store speech type for dead-or-alive detection
517 _outputSpeechType = audioFrame->speech_type_;
518
519 ChannelState::State state = channel_state_.Get();
520
521 if (state.rx_apm_is_enabled) {
522 int err = rx_audioproc_->ProcessStream(audioFrame);
523 if (err) {
524 LOG(LS_ERROR) << "ProcessStream() error: " << err;
525 assert(false);
Ivo Creusenae856f22015-09-17 16:30:16 +0200526 }
kwiberg55b97fe2016-01-28 05:22:45 -0800527 }
528
529 {
530 // Pass the audio buffers to an optional sink callback, before applying
531 // scaling/panning, as that applies to the mix operation.
532 // External recipients of the audio (e.g. via AudioTrack), will do their
533 // own mixing/dynamic processing.
534 rtc::CritScope cs(&_callbackCritSect);
535 if (audio_sink_) {
536 AudioSinkInterface::Data data(
537 &audioFrame->data_[0], audioFrame->samples_per_channel_,
538 audioFrame->sample_rate_hz_, audioFrame->num_channels_,
539 audioFrame->timestamp_);
540 audio_sink_->OnData(data);
541 }
542 }
543
544 float output_gain = 1.0f;
545 float left_pan = 1.0f;
546 float right_pan = 1.0f;
547 {
548 rtc::CritScope cs(&volume_settings_critsect_);
549 output_gain = _outputGain;
550 left_pan = _panLeft;
551 right_pan = _panRight;
552 }
553
554 // Output volume scaling
555 if (output_gain < 0.99f || output_gain > 1.01f) {
556 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
557 }
558
559 // Scale left and/or right channel(s) if stereo and master balance is
560 // active
561
562 if (left_pan != 1.0f || right_pan != 1.0f) {
563 if (audioFrame->num_channels_ == 1) {
564 // Emulate stereo mode since panning is active.
565 // The mono signal is copied to both left and right channels here.
566 AudioFrameOperations::MonoToStereo(audioFrame);
567 }
568 // For true stereo mode (when we are receiving a stereo signal), no
569 // action is needed.
570
571 // Do the panning operation (the audio frame contains stereo at this
572 // stage)
573 AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
574 }
575
576 // Mix decoded PCM output with file if file mixing is enabled
577 if (state.output_file_playing) {
578 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
579 }
580
581 // External media
582 if (_outputExternalMedia) {
583 rtc::CritScope cs(&_callbackCritSect);
584 const bool isStereo = (audioFrame->num_channels_ == 2);
585 if (_outputExternalMediaCallbackPtr) {
586 _outputExternalMediaCallbackPtr->Process(
587 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
588 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
589 isStereo);
590 }
591 }
592
593 // Record playout if enabled
594 {
595 rtc::CritScope cs(&_fileCritSect);
596
597 if (_outputFileRecording && _outputFileRecorderPtr) {
598 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
599 }
600 }
601
602 // Measure audio level (0-9)
603 _outputAudioLevel.ComputeLevel(*audioFrame);
604
605 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
606 // The first frame with a valid rtp timestamp.
607 capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
608 }
609
610 if (capture_start_rtp_time_stamp_ >= 0) {
611 // audioFrame.timestamp_ should be valid from now on.
612
613 // Compute elapsed time.
614 int64_t unwrap_timestamp =
615 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
616 audioFrame->elapsed_time_ms_ =
617 (unwrap_timestamp - capture_start_rtp_time_stamp_) /
618 (GetPlayoutFrequency() / 1000);
619
niklase@google.com470e71d2011-07-07 08:21:25 +0000620 {
kwiberg55b97fe2016-01-28 05:22:45 -0800621 rtc::CritScope lock(&ts_stats_lock_);
622 // Compute ntp time.
623 audioFrame->ntp_time_ms_ =
624 ntp_estimator_.Estimate(audioFrame->timestamp_);
625 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
626 if (audioFrame->ntp_time_ms_ > 0) {
627 // Compute |capture_start_ntp_time_ms_| so that
628 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
629 capture_start_ntp_time_ms_ =
630 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
andrew@webrtc.org60730cf2014-01-07 17:45:09 +0000631 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000632 }
kwiberg55b97fe2016-01-28 05:22:45 -0800633 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000634
kwiberg55b97fe2016-01-28 05:22:45 -0800635 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000636}
637
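// Reports the highest sample rate this channel needs for playout: the larger
// of the ACM playout and receive frequencies, raised further if an output
// file player with a higher frequency is active.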
int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine highest needed receive frequency
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case: if we're playing a file on the playout side
  // we take that frequency into consideration as well.
  // This is not needed on the sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (_outputFilePlayerPtr) {
      if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
        highestNeeded = _outputFilePlayerPtr->Frequency();
      }
    }
  }

  return (highestNeeded);
}

int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflicts with other channels by adding 1024 - 1026;
      // we won't use as many as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
      _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, the RTP module will add its
                      // own random offset.
      _sendTelephoneEventPayloadType(106),
      ntp_estimator_(Clock::GetRealTimeClock()),
      jitter_buffer_playout_timestamp_(0),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      _mute(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _playOutbandDtmfEvent(false),
      _playInbandDtmfEvent(false),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _inbandDtmfQueue.ResetDtmf();
  _inbandDtmfGenerator.Init();
  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.audio_messages = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}

Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}

int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
          (_rtpRtcpModule->RegisterSendPayload(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (audio_coding_->RegisterReceiveCodec(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}

int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
                                      OutputMixer& outputMixer,
                                      voe::TransmitMixer& transmitMixer,
                                      ProcessThread& moduleProcessThread,
                                      AudioDeviceModule& audioDeviceModule,
                                      VoiceEngineObserver* voiceEngineObserver,
                                      rtc::CriticalSection* callbackCritSect) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetEngineInformation()");
  _engineStatisticsPtr = &engineStatistics;
  _outputMixerPtr = &outputMixer;
  _transmitMixerPtr = &transmitMixer;
  _moduleProcessThreadPtr = &moduleProcessThread;
  _audioDeviceModulePtr = &audioDeviceModule;
  _voiceEngineObserverPtr = voiceEngineObserver;
  _callbackCritSectPtr = callbackCritSect;
  return 0;
}

int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}

void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}

int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}

int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}

int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}

int32_t Channel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopSend()");
  if (!channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(false);

  // Store the sequence number to be able to pick up the same sequence for
  // the next StartSend(). This is needed for restarting the device,
  // otherwise it might cause libSRTP to complain about packets being
  // replayed.
  // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
  // CL is landed. See issue
  // https://code.google.com/p/webrtc/issues/detail?id=2111 .
  send_sequence_number_ = _rtpRtcpModule->SequenceNumber();

  // Reset sending SSRC and sequence number and trigger direct transmission
  // of RTCP BYE.
  if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "StopSend() RTP/RTCP failed to stop sending");
  }
  _rtpRtcpModule->SetSendingMediaStatus(false);

  return 0;
}

kwiberg55b97fe2016-01-28 05:22:45 -08001178int32_t Channel::StartReceiving() {
1179 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1180 "Channel::StartReceiving()");
1181 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001182 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001183 }
1184 channel_state_.SetReceiving(true);
1185 _numberOfDiscardedPackets = 0;
1186 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001187}
1188
kwiberg55b97fe2016-01-28 05:22:45 -08001189int32_t Channel::StopReceiving() {
1190 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1191 "Channel::StopReceiving()");
1192 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001193 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001194 }
1195
1196 channel_state_.SetReceiving(false);
1197 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001198}
1199
kwiberg55b97fe2016-01-28 05:22:45 -08001200int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1201 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1202 "Channel::RegisterVoiceEngineObserver()");
1203 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001204
kwiberg55b97fe2016-01-28 05:22:45 -08001205 if (_voiceEngineObserverPtr) {
1206 _engineStatisticsPtr->SetLastError(
1207 VE_INVALID_OPERATION, kTraceError,
1208 "RegisterVoiceEngineObserver() observer already enabled");
1209 return -1;
1210 }
1211 _voiceEngineObserverPtr = &observer;
1212 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001213}
1214
kwiberg55b97fe2016-01-28 05:22:45 -08001215int32_t Channel::DeRegisterVoiceEngineObserver() {
1216 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1217 "Channel::DeRegisterVoiceEngineObserver()");
1218 rtc::CritScope cs(&_callbackCritSect);
1219
1220 if (!_voiceEngineObserverPtr) {
1221 _engineStatisticsPtr->SetLastError(
1222 VE_INVALID_OPERATION, kTraceWarning,
1223 "DeRegisterVoiceEngineObserver() observer already disabled");
1224 return 0;
1225 }
1226 _voiceEngineObserverPtr = NULL;
1227 return 0;
1228}
1229
1230int32_t Channel::GetSendCodec(CodecInst& codec) {
kwiberg1fd4a4a2015-11-03 11:20:50 -08001231 auto send_codec = audio_coding_->SendCodec();
1232 if (send_codec) {
1233 codec = *send_codec;
1234 return 0;
1235 }
1236 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001237}
1238
kwiberg55b97fe2016-01-28 05:22:45 -08001239int32_t Channel::GetRecCodec(CodecInst& codec) {
1240 return (audio_coding_->ReceiveCodec(&codec));
niklase@google.com470e71d2011-07-07 08:21:25 +00001241}
1242
kwiberg55b97fe2016-01-28 05:22:45 -08001243int32_t Channel::SetSendCodec(const CodecInst& codec) {
1244 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1245 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001246
kwiberg55b97fe2016-01-28 05:22:45 -08001247 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1248 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1249 "SetSendCodec() failed to register codec to ACM");
1250 return -1;
1251 }
1252
1253 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1254 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1255 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1256 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1257 "SetSendCodec() failed to register codec to"
1258 " RTP/RTCP module");
1259 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001260 }
kwiberg55b97fe2016-01-28 05:22:45 -08001261 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001262
kwiberg55b97fe2016-01-28 05:22:45 -08001263 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1264 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1265 "SetSendCodec() failed to set audio packet size");
1266 return -1;
1267 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001268
kwiberg55b97fe2016-01-28 05:22:45 -08001269 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001270}
1271
Ivo Creusenadf89b72015-04-29 16:03:33 +02001272void Channel::SetBitRate(int bitrate_bps) {
1273 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1274 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1275 audio_coding_->SetBitRate(bitrate_bps);
1276}
1277
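// Usage sketch (illustrative, not part of this file): configuring the send
// codec and target bitrate on a channel. Assumes |channel| points to a fully
// constructed voe::Channel; the Opus payload type and bitrate below are
// example values, not defaults taken from this file.
//
//   CodecInst opus = {111, "opus", 48000, 960, 2, 64000};
//   if (channel->SetSendCodec(opus) != 0) {
//     // Registration in the ACM or the RTP/RTCP module failed.
//   }
//   channel->SetBitRate(32000);  // Ask the encoder for roughly 32 kbps.
//   CodecInst current;
//   if (channel->GetSendCodec(current) == 0) {
//     // |current| now mirrors the codec registered above.
//   }
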
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001278void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001279 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001280 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1281
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001282 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001283 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1284 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001285 assert(false); // This should not happen.
1286 }
1287}
1288
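// Worked example of the normalization above: if the smoothed loss rate from
// the network predictor is 26 (on the RTCP 0-255 scale), the ACM is handed
// 100 * 26 / 255 = 10 (integer division), i.e. roughly 10% packet loss.
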
kwiberg55b97fe2016-01-28 05:22:45 -08001289int32_t Channel::SetVADStatus(bool enableVAD,
1290 ACMVADMode mode,
1291 bool disableDTX) {
1292 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1293 "Channel::SetVADStatus(mode=%d)", mode);
1294 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1295 // To disable VAD, DTX must be disabled too
1296 disableDTX = ((enableVAD == false) ? true : disableDTX);
1297 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) {
1298 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1299 kTraceError,
1300 "SetVADStatus() failed to set VAD");
1301 return -1;
1302 }
1303 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001304}
1305
kwiberg55b97fe2016-01-28 05:22:45 -08001306int32_t Channel::GetVADStatus(bool& enabledVAD,
1307 ACMVADMode& mode,
1308 bool& disabledDTX) {
1309 if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) {
1310 _engineStatisticsPtr->SetLastError(
1311 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1312 "GetVADStatus() failed to get VAD status");
1313 return -1;
1314 }
1315 disabledDTX = !disabledDTX;
1316 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001317}
1318
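// Usage sketch (illustrative): enabling send-side VAD/DTX through this
// channel. Assumes |channel| is a valid voe::Channel pointer; VADNormal is
// used here as the default VAD aggressiveness.
//
//   channel->SetVADStatus(true, VADNormal, false);  // VAD on, keep DTX on.
//   bool vad_enabled = false;
//   bool dtx_disabled = false;
//   ACMVADMode mode = VADNormal;
//   channel->GetVADStatus(vad_enabled, mode, dtx_disabled);
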
kwiberg55b97fe2016-01-28 05:22:45 -08001319int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1320 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1321 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001322
kwiberg55b97fe2016-01-28 05:22:45 -08001323 if (channel_state_.Get().playing) {
1324 _engineStatisticsPtr->SetLastError(
1325 VE_ALREADY_PLAYING, kTraceError,
1326 "SetRecPayloadType() unable to set PT while playing");
1327 return -1;
1328 }
1329 if (channel_state_.Get().receiving) {
1330 _engineStatisticsPtr->SetLastError(
1331 VE_ALREADY_LISTENING, kTraceError,
1332 "SetRecPayloadType() unable to set PT while listening");
1333 return -1;
1334 }
1335
1336 if (codec.pltype == -1) {
1337 // De-register the selected codec (RTP/RTCP module and ACM)
1338
1339 int8_t pltype(-1);
1340 CodecInst rxCodec = codec;
1341
1342 // Get payload type for the given codec
1343 rtp_payload_registry_->ReceivePayloadType(
1344 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1345 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1346 rxCodec.pltype = pltype;
1347
1348 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1349 _engineStatisticsPtr->SetLastError(
1350 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1351 "SetRecPayloadType() RTP/RTCP-module deregistration "
1352 "failed");
1353 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001354 }
kwiberg55b97fe2016-01-28 05:22:45 -08001355 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1356 _engineStatisticsPtr->SetLastError(
1357 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1358 "SetRecPayloadType() ACM deregistration failed - 1");
1359 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001360 }
kwiberg55b97fe2016-01-28 05:22:45 -08001361 return 0;
1362 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001363
kwiberg55b97fe2016-01-28 05:22:45 -08001364 if (rtp_receiver_->RegisterReceivePayload(
1365 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1366 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1367 // First attempt to register failed => de-register and try again
1368 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001369 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001370 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1371 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1372 _engineStatisticsPtr->SetLastError(
1373 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1374 "SetRecPayloadType() RTP/RTCP-module registration failed");
1375 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001376 }
kwiberg55b97fe2016-01-28 05:22:45 -08001377 }
1378 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1379 audio_coding_->UnregisterReceiveCodec(codec.pltype);
1380 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1381 _engineStatisticsPtr->SetLastError(
1382 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1383 "SetRecPayloadType() ACM registration failed - 1");
1384 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001385 }
kwiberg55b97fe2016-01-28 05:22:45 -08001386 }
1387 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001388}
1389
kwiberg55b97fe2016-01-28 05:22:45 -08001390int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1391 int8_t payloadType(-1);
1392 if (rtp_payload_registry_->ReceivePayloadType(
1393 codec.plname, codec.plfreq, codec.channels,
1394 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1395 _engineStatisticsPtr->SetLastError(
1396 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1397 "GetRecPayloadType() failed to retrieve RX payload type");
1398 return -1;
1399 }
1400 codec.pltype = payloadType;
1401 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001402}
1403
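// Usage sketch (illustrative): mapping a receive payload type before playout
// or listening has started. Assumes |channel| is a valid voe::Channel pointer
// and that the remote side sends Opus on payload type 111 (example value).
// Passing pltype == -1 instead de-registers the codec, as handled above.
//
//   CodecInst opus = {111, "opus", 48000, 960, 2, 64000};
//   channel->SetRecPayloadType(opus);
//   CodecInst query = opus;
//   query.pltype = -1;
//   channel->GetRecPayloadType(query);  // Fills in the registered pltype.
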
kwiberg55b97fe2016-01-28 05:22:45 -08001404int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1405 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1406 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001407
kwiberg55b97fe2016-01-28 05:22:45 -08001408 CodecInst codec;
1409 int32_t samplingFreqHz(-1);
1410 const size_t kMono = 1;
1411 if (frequency == kFreq32000Hz)
1412 samplingFreqHz = 32000;
1413 else if (frequency == kFreq16000Hz)
1414 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001415
kwiberg55b97fe2016-01-28 05:22:45 -08001416 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1417 _engineStatisticsPtr->SetLastError(
1418 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1419 "SetSendCNPayloadType() failed to retrieve default CN codec "
1420 "settings");
1421 return -1;
1422 }
1423
1424 // Modify the payload type (must be set to dynamic range)
1425 codec.pltype = type;
1426
1427 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1428 _engineStatisticsPtr->SetLastError(
1429 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1430 "SetSendCNPayloadType() failed to register CN to ACM");
1431 return -1;
1432 }
1433
1434 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1435 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1436 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1437 _engineStatisticsPtr->SetLastError(
1438 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1439 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1440 "module");
1441 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001442 }
kwiberg55b97fe2016-01-28 05:22:45 -08001443 }
1444 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001445}
1446
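// Usage sketch (illustrative): moving comfort noise to a dynamic payload type
// for wideband audio. Assumes |channel| is a valid voe::Channel pointer; 105
// is an example dynamic payload type, not a value mandated by this file.
//
//   channel->SetSendCNPayloadType(105, kFreq16000Hz);
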
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001447int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001448 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001449 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001450
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001451 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001452 _engineStatisticsPtr->SetLastError(
1453 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001454 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001455 return -1;
1456 }
1457 return 0;
1458}
1459
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001460int Channel::SetOpusDtx(bool enable_dtx) {
1461 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1462 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001463 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001464 : audio_coding_->DisableOpusDtx();
1465 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001466 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1467 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001468 return -1;
1469 }
1470 return 0;
1471}
1472
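// Usage sketch (illustrative): limiting the bandwidth Opus encodes for a
// narrowband receiver and enabling Opus DTX. Assumes |channel| is a valid
// voe::Channel pointer and that Opus is the registered send codec.
//
//   channel->SetOpusMaxPlaybackRate(8000);  // Remote only plays out 8 kHz.
//   channel->SetOpusDtx(true);
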
kwiberg55b97fe2016-01-28 05:22:45 -08001473int32_t Channel::RegisterExternalTransport(Transport& transport) {
1474 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001475 "Channel::RegisterExternalTransport()");
1476
kwiberg55b97fe2016-01-28 05:22:45 -08001477 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001478
kwiberg55b97fe2016-01-28 05:22:45 -08001479 if (_externalTransport) {
1480 _engineStatisticsPtr->SetLastError(
1481 VE_INVALID_OPERATION, kTraceError,
1482 "RegisterExternalTransport() external transport already enabled");
1483 return -1;
1484 }
1485 _externalTransport = true;
1486 _transportPtr = &transport;
1487 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001488}
1489
kwiberg55b97fe2016-01-28 05:22:45 -08001490int32_t Channel::DeRegisterExternalTransport() {
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1492 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001493
kwiberg55b97fe2016-01-28 05:22:45 -08001494 rtc::CritScope cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00001495
kwiberg55b97fe2016-01-28 05:22:45 -08001496 if (!_transportPtr) {
1497 _engineStatisticsPtr->SetLastError(
1498 VE_INVALID_OPERATION, kTraceWarning,
1499 "DeRegisterExternalTransport() external transport already "
1500 "disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00001501 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001502 }
1503 _externalTransport = false;
1504 _transportPtr = NULL;
1505 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1506               "DeRegisterExternalTransport() external transport is now disabled");
1507 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001508}
1509
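// Usage sketch (illustrative): routing packets through the application's own
// transport instead of an internal socket. Assumes |channel| is a valid
// voe::Channel pointer and |transport| is some webrtc::Transport
// implementation owned by the application (its exact interface depends on
// the WebRTC revision in use).
//
//   channel->RegisterExternalTransport(transport);
//   // ... send and receive for the lifetime of the call ...
//   channel->DeRegisterExternalTransport();
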
kwiberg55b97fe2016-01-28 05:22:45 -08001510int32_t Channel::ReceivedRTPPacket(const int8_t* data,
1511 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001512 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001513 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001514 "Channel::ReceivedRTPPacket()");
1515
1516 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001517 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001518
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001519 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001520 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001521 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1522 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1523 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001524 return -1;
1525 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001526 header.payload_type_frequency =
1527 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001528 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001529 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001530 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001531 rtp_receive_statistics_->IncomingPacket(
1532 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001533 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001534
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001535 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001536}
1537
1538bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001539 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001540 const RTPHeader& header,
1541 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001542 if (rtp_payload_registry_->IsRtx(header)) {
1543 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001544 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001545 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001546 assert(packet_length >= header.headerLength);
1547 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001548 PayloadUnion payload_specific;
1549 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001550 &payload_specific)) {
1551 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001552 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001553 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1554 payload_specific, in_order);
1555}
1556
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001557bool Channel::HandleRtxPacket(const uint8_t* packet,
1558 size_t packet_length,
1559 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001560 if (!rtp_payload_registry_->IsRtx(header))
1561 return false;
1562
1563 // Remove the RTX header and parse the original RTP header.
1564 if (packet_length < header.headerLength)
1565 return false;
1566 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1567 return false;
1568 if (restored_packet_in_use_) {
1569 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1570 "Multiple RTX headers detected, dropping packet");
1571 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001572 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001573 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001574 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1575 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001576 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1577 "Incoming RTX packet: invalid RTP header");
1578 return false;
1579 }
1580 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001581 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001582 restored_packet_in_use_ = false;
1583 return ret;
1584}
1585
1586bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1587 StreamStatistician* statistician =
1588 rtp_receive_statistics_->GetStatistician(header.ssrc);
1589 if (!statistician)
1590 return false;
1591 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001592}
1593
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001594bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1595 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001596 // Retransmissions are handled separately if RTX is enabled.
1597 if (rtp_payload_registry_->RtxEnabled())
1598 return false;
1599 StreamStatistician* statistician =
1600 rtp_receive_statistics_->GetStatistician(header.ssrc);
1601 if (!statistician)
1602 return false;
1603 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001604 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001605 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001606 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001607}
1608
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001609int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001610 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001611 "Channel::ReceivedRTCPPacket()");
1612 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001613 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001614
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001615 // Deliver RTCP packet to RTP/RTCP module for parsing
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001616 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001617 _engineStatisticsPtr->SetLastError(
1618 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1619        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
1620 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001621
Minyue2013aec2015-05-13 14:14:42 +02001622 int64_t rtt = GetRTT(true);
1623 if (rtt == 0) {
1624 // Waiting for valid RTT.
1625 return 0;
1626 }
1627 uint32_t ntp_secs = 0;
1628 uint32_t ntp_frac = 0;
1629 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001630 if (0 !=
1631 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1632 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001633 // Waiting for RTCP.
1634 return 0;
1635 }
1636
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001637 {
tommi31fc21f2016-01-21 10:37:37 -08001638 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001639 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001640 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001641 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001642}
1643
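// Usage sketch (illustrative): when an external transport is registered, the
// application injects incoming packets itself. Assumes |channel| is a valid
// voe::Channel pointer and that |rtp_data|/|rtcp_data| and their lengths come
// from the application's socket layer; a default PacketTime() leaves the
// arrival time unspecified.
//
//   channel->ReceivedRTPPacket(rtp_data, rtp_length, PacketTime());
//   channel->ReceivedRTCPPacket(rtcp_data, rtcp_length);
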
niklase@google.com470e71d2011-07-07 08:21:25 +00001644int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001645 bool loop,
1646 FileFormats format,
1647 int startPosition,
1648 float volumeScaling,
1649 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001650 const CodecInst* codecInst) {
1651 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1652 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1653 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1654 "stopPosition=%d)",
1655 fileName, loop, format, volumeScaling, startPosition,
1656 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001657
kwiberg55b97fe2016-01-28 05:22:45 -08001658 if (channel_state_.Get().output_file_playing) {
1659 _engineStatisticsPtr->SetLastError(
1660 VE_ALREADY_PLAYING, kTraceError,
1661 "StartPlayingFileLocally() is already playing");
1662 return -1;
1663 }
1664
1665 {
1666 rtc::CritScope cs(&_fileCritSect);
1667
1668 if (_outputFilePlayerPtr) {
1669 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1670 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1671 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001672 }
1673
kwiberg55b97fe2016-01-28 05:22:45 -08001674 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1675 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001676
kwiberg55b97fe2016-01-28 05:22:45 -08001677 if (_outputFilePlayerPtr == NULL) {
1678 _engineStatisticsPtr->SetLastError(
1679 VE_INVALID_ARGUMENT, kTraceError,
1680 "StartPlayingFileLocally() filePlayer format is not correct");
1681 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001682 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001683
kwiberg55b97fe2016-01-28 05:22:45 -08001684 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001685
kwiberg55b97fe2016-01-28 05:22:45 -08001686 if (_outputFilePlayerPtr->StartPlayingFile(
1687 fileName, loop, startPosition, volumeScaling, notificationTime,
1688 stopPosition, (const CodecInst*)codecInst) != 0) {
1689 _engineStatisticsPtr->SetLastError(
1690 VE_BAD_FILE, kTraceError,
1691 "StartPlayingFile() failed to start file playout");
1692 _outputFilePlayerPtr->StopPlayingFile();
1693 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1694 _outputFilePlayerPtr = NULL;
1695 return -1;
1696 }
1697 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1698 channel_state_.SetOutputFilePlaying(true);
1699 }
1700
1701 if (RegisterFilePlayingToMixer() != 0)
1702 return -1;
1703
1704 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001705}
1706
1707int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001708 FileFormats format,
1709 int startPosition,
1710 float volumeScaling,
1711 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001712 const CodecInst* codecInst) {
1713 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1714 "Channel::StartPlayingFileLocally(format=%d,"
1715 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1716 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001717
kwiberg55b97fe2016-01-28 05:22:45 -08001718 if (stream == NULL) {
1719 _engineStatisticsPtr->SetLastError(
1720 VE_BAD_FILE, kTraceError,
1721 "StartPlayingFileLocally() NULL as input stream");
1722 return -1;
1723 }
1724
1725 if (channel_state_.Get().output_file_playing) {
1726 _engineStatisticsPtr->SetLastError(
1727 VE_ALREADY_PLAYING, kTraceError,
1728 "StartPlayingFileLocally() is already playing");
1729 return -1;
1730 }
1731
1732 {
1733 rtc::CritScope cs(&_fileCritSect);
1734
1735 // Destroy the old instance
1736 if (_outputFilePlayerPtr) {
1737 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1738 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1739 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001740 }
1741
kwiberg55b97fe2016-01-28 05:22:45 -08001742 // Create the instance
1743 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1744 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001745
kwiberg55b97fe2016-01-28 05:22:45 -08001746 if (_outputFilePlayerPtr == NULL) {
1747 _engineStatisticsPtr->SetLastError(
1748 VE_INVALID_ARGUMENT, kTraceError,
1749          "StartPlayingFileLocally() filePlayer format is not correct");
1750 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001751 }
1752
kwiberg55b97fe2016-01-28 05:22:45 -08001753 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001754
kwiberg55b97fe2016-01-28 05:22:45 -08001755 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1756 volumeScaling, notificationTime,
1757 stopPosition, codecInst) != 0) {
1758 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1759 "StartPlayingFile() failed to "
1760 "start file playout");
1761 _outputFilePlayerPtr->StopPlayingFile();
1762 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1763 _outputFilePlayerPtr = NULL;
1764 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001765 }
kwiberg55b97fe2016-01-28 05:22:45 -08001766 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1767 channel_state_.SetOutputFilePlaying(true);
1768 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001769
kwiberg55b97fe2016-01-28 05:22:45 -08001770 if (RegisterFilePlayingToMixer() != 0)
1771 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001772
kwiberg55b97fe2016-01-28 05:22:45 -08001773 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001774}
1775
kwiberg55b97fe2016-01-28 05:22:45 -08001776int Channel::StopPlayingFileLocally() {
1777 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1778 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001779
kwiberg55b97fe2016-01-28 05:22:45 -08001780 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001781 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001782 }
1783
1784 {
1785 rtc::CritScope cs(&_fileCritSect);
1786
1787 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1788 _engineStatisticsPtr->SetLastError(
1789 VE_STOP_RECORDING_FAILED, kTraceError,
1790 "StopPlayingFile() could not stop playing");
1791 return -1;
1792 }
1793 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1794 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1795 _outputFilePlayerPtr = NULL;
1796 channel_state_.SetOutputFilePlaying(false);
1797 }
1798 // _fileCritSect cannot be taken while calling
1799  // SetAnonymousMixabilityStatus. Refer to comments in
1800 // StartPlayingFileLocally(const char* ...) for more details.
1801 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1802 _engineStatisticsPtr->SetLastError(
1803 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1804 "StopPlayingFile() failed to stop participant from playing as"
1805        " file in the mixer");
1806 return -1;
1807 }
1808
1809 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001810}
1811
kwiberg55b97fe2016-01-28 05:22:45 -08001812int Channel::IsPlayingFileLocally() const {
1813 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001814}
1815
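// Usage sketch (illustrative): mixing a WAV file into this channel's playout.
// Assumes |channel| is a valid voe::Channel pointer; the file path is an
// example only. With codecInst == NULL the file format decides decoding.
//
//   channel->StartPlayingFileLocally("/tmp/prompt.wav", false,
//                                    kFileFormatWavFile, 0, 1.0f, 0, NULL);
//   // ...
//   if (channel->IsPlayingFileLocally())
//     channel->StopPlayingFileLocally();
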
kwiberg55b97fe2016-01-28 05:22:45 -08001816int Channel::RegisterFilePlayingToMixer() {
1817  // Return success without registering file playing to the mixer if:
1818 // 1. playing file before playout is started on that channel.
1819 // 2. starting playout without file playing on that channel.
1820 if (!channel_state_.Get().playing ||
1821 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001822 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001823 }
1824
1825 // |_fileCritSect| cannot be taken while calling
1826 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1827 // frames can be pulled by the mixer. Since the frames are generated from
1828 // the file, _fileCritSect will be taken. This would result in a deadlock.
1829 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1830 channel_state_.SetOutputFilePlaying(false);
1831 rtc::CritScope cs(&_fileCritSect);
1832 _engineStatisticsPtr->SetLastError(
1833 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1834 "StartPlayingFile() failed to add participant as file to mixer");
1835 _outputFilePlayerPtr->StopPlayingFile();
1836 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1837 _outputFilePlayerPtr = NULL;
1838 return -1;
1839 }
1840
1841 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001842}
1843
niklase@google.com470e71d2011-07-07 08:21:25 +00001844int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001845 bool loop,
1846 FileFormats format,
1847 int startPosition,
1848 float volumeScaling,
1849 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001850 const CodecInst* codecInst) {
1851 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1852 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1853 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1854 "stopPosition=%d)",
1855 fileName, loop, format, volumeScaling, startPosition,
1856 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001857
kwiberg55b97fe2016-01-28 05:22:45 -08001858 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001859
kwiberg55b97fe2016-01-28 05:22:45 -08001860 if (channel_state_.Get().input_file_playing) {
1861 _engineStatisticsPtr->SetLastError(
1862 VE_ALREADY_PLAYING, kTraceWarning,
1863 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001864 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001865 }
1866
1867 // Destroy the old instance
1868 if (_inputFilePlayerPtr) {
1869 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1870 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1871 _inputFilePlayerPtr = NULL;
1872 }
1873
1874 // Create the instance
1875 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1876 (const FileFormats)format);
1877
1878 if (_inputFilePlayerPtr == NULL) {
1879 _engineStatisticsPtr->SetLastError(
1880 VE_INVALID_ARGUMENT, kTraceError,
1881        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
1882 return -1;
1883 }
1884
1885 const uint32_t notificationTime(0);
1886
1887 if (_inputFilePlayerPtr->StartPlayingFile(
1888 fileName, loop, startPosition, volumeScaling, notificationTime,
1889 stopPosition, (const CodecInst*)codecInst) != 0) {
1890 _engineStatisticsPtr->SetLastError(
1891 VE_BAD_FILE, kTraceError,
1892 "StartPlayingFile() failed to start file playout");
1893 _inputFilePlayerPtr->StopPlayingFile();
1894 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1895 _inputFilePlayerPtr = NULL;
1896 return -1;
1897 }
1898 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1899 channel_state_.SetInputFilePlaying(true);
1900
1901 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001902}
1903
1904int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001905 FileFormats format,
1906 int startPosition,
1907 float volumeScaling,
1908 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001909 const CodecInst* codecInst) {
1910 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1911 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1912 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1913 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001914
kwiberg55b97fe2016-01-28 05:22:45 -08001915 if (stream == NULL) {
1916 _engineStatisticsPtr->SetLastError(
1917 VE_BAD_FILE, kTraceError,
1918 "StartPlayingFileAsMicrophone NULL as input stream");
1919 return -1;
1920 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001921
kwiberg55b97fe2016-01-28 05:22:45 -08001922 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001923
kwiberg55b97fe2016-01-28 05:22:45 -08001924 if (channel_state_.Get().input_file_playing) {
1925 _engineStatisticsPtr->SetLastError(
1926 VE_ALREADY_PLAYING, kTraceWarning,
1927 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001928 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001929 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001930
kwiberg55b97fe2016-01-28 05:22:45 -08001931 // Destroy the old instance
1932 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001933 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1934 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1935 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001936 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001937
kwiberg55b97fe2016-01-28 05:22:45 -08001938 // Create the instance
1939 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1940 (const FileFormats)format);
1941
1942 if (_inputFilePlayerPtr == NULL) {
1943 _engineStatisticsPtr->SetLastError(
1944 VE_INVALID_ARGUMENT, kTraceError,
1945        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
1946 return -1;
1947 }
1948
1949 const uint32_t notificationTime(0);
1950
1951 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1952 volumeScaling, notificationTime,
1953 stopPosition, codecInst) != 0) {
1954 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1955 "StartPlayingFile() failed to start "
1956 "file playout");
1957 _inputFilePlayerPtr->StopPlayingFile();
1958 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1959 _inputFilePlayerPtr = NULL;
1960 return -1;
1961 }
1962
1963 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1964 channel_state_.SetInputFilePlaying(true);
1965
1966 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001967}
1968
kwiberg55b97fe2016-01-28 05:22:45 -08001969int Channel::StopPlayingFileAsMicrophone() {
1970 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1971 "Channel::StopPlayingFileAsMicrophone()");
1972
1973 rtc::CritScope cs(&_fileCritSect);
1974
1975 if (!channel_state_.Get().input_file_playing) {
1976 return 0;
1977 }
1978
1979 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1980 _engineStatisticsPtr->SetLastError(
1981 VE_STOP_RECORDING_FAILED, kTraceError,
1982 "StopPlayingFile() could not stop playing");
1983 return -1;
1984 }
1985 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1986 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1987 _inputFilePlayerPtr = NULL;
1988 channel_state_.SetInputFilePlaying(false);
1989
1990 return 0;
1991}
1992
1993int Channel::IsPlayingFileAsMicrophone() const {
1994 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001995}
1996
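// Usage sketch (illustrative): substituting a file for the microphone on the
// send side, e.g. for testing. Assumes |channel| is a valid voe::Channel
// pointer; the path and loop flag are example values only.
//
//   channel->StartPlayingFileAsMicrophone("/tmp/speech.wav", true,
//                                         kFileFormatWavFile, 0, 1.0f, 0,
//                                         NULL);
//   // ...
//   channel->StopPlayingFileAsMicrophone();
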
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00001997int Channel::StartRecordingPlayout(const char* fileName,
kwiberg55b97fe2016-01-28 05:22:45 -08001998 const CodecInst* codecInst) {
1999 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2000 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
niklase@google.com470e71d2011-07-07 08:21:25 +00002001
kwiberg55b97fe2016-01-28 05:22:45 -08002002 if (_outputFileRecording) {
2003 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2004 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002005 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002006 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002007
kwiberg55b97fe2016-01-28 05:22:45 -08002008 FileFormats format;
2009 const uint32_t notificationTime(0); // Not supported in VoE
2010 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
niklase@google.com470e71d2011-07-07 08:21:25 +00002011
kwiberg55b97fe2016-01-28 05:22:45 -08002012 if ((codecInst != NULL) &&
2013 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2014 _engineStatisticsPtr->SetLastError(
2015 VE_BAD_ARGUMENT, kTraceError,
2016 "StartRecordingPlayout() invalid compression");
2017 return (-1);
2018 }
2019 if (codecInst == NULL) {
2020 format = kFileFormatPcm16kHzFile;
2021 codecInst = &dummyCodec;
2022 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2023 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2024 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2025 format = kFileFormatWavFile;
2026 } else {
2027 format = kFileFormatCompressedFile;
2028 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002029
kwiberg55b97fe2016-01-28 05:22:45 -08002030 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002031
kwiberg55b97fe2016-01-28 05:22:45 -08002032 // Destroy the old instance
2033 if (_outputFileRecorderPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002034 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2035 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2036 _outputFileRecorderPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002037 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002038
kwiberg55b97fe2016-01-28 05:22:45 -08002039 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2040 _outputFileRecorderId, (const FileFormats)format);
2041 if (_outputFileRecorderPtr == NULL) {
2042 _engineStatisticsPtr->SetLastError(
2043 VE_INVALID_ARGUMENT, kTraceError,
2044        "StartRecordingPlayout() fileRecorder format is not correct");
2045 return -1;
2046 }
2047
2048 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2049 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2050 _engineStatisticsPtr->SetLastError(
2051 VE_BAD_FILE, kTraceError,
2052 "StartRecordingAudioFile() failed to start file recording");
2053 _outputFileRecorderPtr->StopRecording();
2054 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2055 _outputFileRecorderPtr = NULL;
2056 return -1;
2057 }
2058 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2059 _outputFileRecording = true;
2060
2061 return 0;
2062}
2063
2064int Channel::StartRecordingPlayout(OutStream* stream,
2065 const CodecInst* codecInst) {
2066 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2067 "Channel::StartRecordingPlayout()");
2068
2069 if (_outputFileRecording) {
2070 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2071 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002072 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002073 }
2074
2075 FileFormats format;
2076 const uint32_t notificationTime(0); // Not supported in VoE
2077 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2078
2079 if (codecInst != NULL && codecInst->channels != 1) {
2080 _engineStatisticsPtr->SetLastError(
2081 VE_BAD_ARGUMENT, kTraceError,
2082 "StartRecordingPlayout() invalid compression");
2083 return (-1);
2084 }
2085 if (codecInst == NULL) {
2086 format = kFileFormatPcm16kHzFile;
2087 codecInst = &dummyCodec;
2088 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2089 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2090 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2091 format = kFileFormatWavFile;
2092 } else {
2093 format = kFileFormatCompressedFile;
2094 }
2095
2096 rtc::CritScope cs(&_fileCritSect);
2097
2098 // Destroy the old instance
2099 if (_outputFileRecorderPtr) {
2100 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2101 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2102 _outputFileRecorderPtr = NULL;
2103 }
2104
2105 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2106 _outputFileRecorderId, (const FileFormats)format);
2107 if (_outputFileRecorderPtr == NULL) {
2108 _engineStatisticsPtr->SetLastError(
2109 VE_INVALID_ARGUMENT, kTraceError,
2110        "StartRecordingPlayout() fileRecorder format is not correct");
2111 return -1;
2112 }
2113
2114 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2115 notificationTime) != 0) {
2116 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2117 "StartRecordingPlayout() failed to "
2118 "start file recording");
2119 _outputFileRecorderPtr->StopRecording();
2120 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2121 _outputFileRecorderPtr = NULL;
2122 return -1;
2123 }
2124
2125 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2126 _outputFileRecording = true;
2127
2128 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002129}
2130
kwiberg55b97fe2016-01-28 05:22:45 -08002131int Channel::StopRecordingPlayout() {
2132 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2133 "Channel::StopRecordingPlayout()");
2134
2135 if (!_outputFileRecording) {
2136 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2137                 "StopRecordingPlayout() is not recording");
2138 return -1;
2139 }
2140
2141 rtc::CritScope cs(&_fileCritSect);
2142
2143 if (_outputFileRecorderPtr->StopRecording() != 0) {
2144 _engineStatisticsPtr->SetLastError(
2145 VE_STOP_RECORDING_FAILED, kTraceError,
2146 "StopRecording() could not stop recording");
2147 return (-1);
2148 }
2149 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2150 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2151 _outputFileRecorderPtr = NULL;
2152 _outputFileRecording = false;
2153
2154 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002155}
2156
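// Usage sketch (illustrative): recording this channel's playout to a 16 kHz
// WAV file. Assumes |channel| is a valid voe::Channel pointer; the L16
// CodecInst mirrors the dummy codec used above and selects the WAV branch.
//
//   CodecInst l16 = {100, "L16", 16000, 320, 1, 320000};
//   channel->StartRecordingPlayout("/tmp/playout.wav", &l16);
//   // ...
//   channel->StopRecordingPlayout();
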
kwiberg55b97fe2016-01-28 05:22:45 -08002157void Channel::SetMixWithMicStatus(bool mix) {
2158 rtc::CritScope cs(&_fileCritSect);
2159 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002160}
2161
kwiberg55b97fe2016-01-28 05:22:45 -08002162int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2163 int8_t currentLevel = _outputAudioLevel.Level();
2164 level = static_cast<int32_t>(currentLevel);
2165 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002166}
2167
kwiberg55b97fe2016-01-28 05:22:45 -08002168int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2169 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2170 level = static_cast<int32_t>(currentLevel);
2171 return 0;
2172}
2173
2174int Channel::SetMute(bool enable) {
2175 rtc::CritScope cs(&volume_settings_critsect_);
2176 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002177 "Channel::SetMute(enable=%d)", enable);
kwiberg55b97fe2016-01-28 05:22:45 -08002178 _mute = enable;
2179 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002180}
2181
kwiberg55b97fe2016-01-28 05:22:45 -08002182bool Channel::Mute() const {
2183 rtc::CritScope cs(&volume_settings_critsect_);
2184 return _mute;
niklase@google.com470e71d2011-07-07 08:21:25 +00002185}
2186
kwiberg55b97fe2016-01-28 05:22:45 -08002187int Channel::SetOutputVolumePan(float left, float right) {
2188 rtc::CritScope cs(&volume_settings_critsect_);
2189 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002190 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002191 _panLeft = left;
2192 _panRight = right;
2193 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002194}
2195
kwiberg55b97fe2016-01-28 05:22:45 -08002196int Channel::GetOutputVolumePan(float& left, float& right) const {
2197 rtc::CritScope cs(&volume_settings_critsect_);
2198 left = _panLeft;
2199 right = _panRight;
2200 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002201}
2202
kwiberg55b97fe2016-01-28 05:22:45 -08002203int Channel::SetChannelOutputVolumeScaling(float scaling) {
2204 rtc::CritScope cs(&volume_settings_critsect_);
2205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002206 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002207 _outputGain = scaling;
2208 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002209}
2210
kwiberg55b97fe2016-01-28 05:22:45 -08002211int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2212 rtc::CritScope cs(&volume_settings_critsect_);
2213 scaling = _outputGain;
2214 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002215}
2216
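// Usage sketch (illustrative): per-channel output shaping. Assumes |channel|
// is a valid voe::Channel pointer; the pan and scaling values are examples.
//
//   channel->SetMute(false);
//   channel->SetOutputVolumePan(1.0f, 0.3f);       // Favor the left side.
//   channel->SetChannelOutputVolumeScaling(0.5f);  // Halve the output gain.
//   uint32_t level = 0;
//   channel->GetSpeechOutputLevel(level);          // Current speech level.
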
solenberg8842c3e2016-03-11 03:06:41 -08002217int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002218 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002219 "Channel::SendTelephoneEventOutband(...)");
2220 RTC_DCHECK_LE(0, event);
2221 RTC_DCHECK_GE(255, event);
2222 RTC_DCHECK_LE(0, duration_ms);
2223 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002224 if (!Sending()) {
2225 return -1;
2226 }
solenberg3ecb5c82016-03-09 07:31:58 -08002227
solenberg8842c3e2016-03-11 03:06:41 -08002228 _playOutbandDtmfEvent = false;
solenberg3ecb5c82016-03-09 07:31:58 -08002229
solenberg8842c3e2016-03-11 03:06:41 -08002230 if (_rtpRtcpModule->SendTelephoneEventOutband(
2231 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002232 _engineStatisticsPtr->SetLastError(
2233 VE_SEND_DTMF_FAILED, kTraceWarning,
2234 "SendTelephoneEventOutband() failed to send event");
2235 return -1;
2236 }
2237 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002238}
2239
solenberg3ecb5c82016-03-09 07:31:58 -08002240int Channel::SendTelephoneEventInband(unsigned char eventCode,
2241 int lengthMs,
2242 int attenuationDb,
2243 bool playDtmfEvent) {
2244 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2245 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2246 playDtmfEvent);
2247
2248 _playInbandDtmfEvent = playDtmfEvent;
2249 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2250
2251 return 0;
2252}
2253
2254int Channel::SetSendTelephoneEventPayloadType(unsigned char type) {
kwiberg55b97fe2016-01-28 05:22:45 -08002255 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002256 "Channel::SetSendTelephoneEventPayloadType()");
solenberg3ecb5c82016-03-09 07:31:58 -08002257 if (type > 127) {
2258 _engineStatisticsPtr->SetLastError(
2259 VE_INVALID_ARGUMENT, kTraceError,
2260 "SetSendTelephoneEventPayloadType() invalid type");
2261 return -1;
2262 }
2263 CodecInst codec = {};
kwiberg55b97fe2016-01-28 05:22:45 -08002264 codec.plfreq = 8000;
solenberg3ecb5c82016-03-09 07:31:58 -08002265 codec.pltype = type;
kwiberg55b97fe2016-01-28 05:22:45 -08002266 memcpy(codec.plname, "telephone-event", 16);
2267 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2268 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2269 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2270 _engineStatisticsPtr->SetLastError(
2271 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2272 "SetSendTelephoneEventPayloadType() failed to register send"
2273          " payload type");
2274 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002275 }
kwiberg55b97fe2016-01-28 05:22:45 -08002276 }
solenberg3ecb5c82016-03-09 07:31:58 -08002277 _sendTelephoneEventPayloadType = type;
2278 return 0;
2279}
2280
2281int Channel::GetSendTelephoneEventPayloadType(unsigned char& type) {
2282 type = _sendTelephoneEventPayloadType;
kwiberg55b97fe2016-01-28 05:22:45 -08002283 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002284}
2285
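// Usage sketch (illustrative): sending an RFC 4733 DTMF digit out-of-band.
// Assumes |channel| is a valid voe::Channel pointer, the channel is sending,
// and 106 is the negotiated telephone-event payload type (example value).
//
//   channel->SetSendTelephoneEventPayloadType(106);
//   channel->SendTelephoneEventOutband(1, 160);  // Digit "1" for 160 ms.
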
kwiberg55b97fe2016-01-28 05:22:45 -08002286int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2287 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2288 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002289
kwiberg55b97fe2016-01-28 05:22:45 -08002290 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002291
kwiberg55b97fe2016-01-28 05:22:45 -08002292 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002293
kwiberg55b97fe2016-01-28 05:22:45 -08002294 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2295 OnRxVadDetected(vadDecision);
2296 _oldVadDecision = vadDecision;
2297 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002298
kwiberg55b97fe2016-01-28 05:22:45 -08002299 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2300 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2301 vadDecision);
2302 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002303}
2304
kwiberg55b97fe2016-01-28 05:22:45 -08002305int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2306 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2307 "Channel::RegisterRxVadObserver()");
2308 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002309
kwiberg55b97fe2016-01-28 05:22:45 -08002310 if (_rxVadObserverPtr) {
2311 _engineStatisticsPtr->SetLastError(
2312 VE_INVALID_OPERATION, kTraceError,
2313 "RegisterRxVadObserver() observer already enabled");
2314 return -1;
2315 }
2316 _rxVadObserverPtr = &observer;
2317 _RxVadDetection = true;
2318 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002319}
2320
kwiberg55b97fe2016-01-28 05:22:45 -08002321int Channel::DeRegisterRxVadObserver() {
2322 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2323 "Channel::DeRegisterRxVadObserver()");
2324 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002325
kwiberg55b97fe2016-01-28 05:22:45 -08002326 if (!_rxVadObserverPtr) {
2327 _engineStatisticsPtr->SetLastError(
2328 VE_INVALID_OPERATION, kTraceWarning,
2329 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002330 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002331 }
2332 _rxVadObserverPtr = NULL;
2333 _RxVadDetection = false;
2334 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002335}
2336
kwiberg55b97fe2016-01-28 05:22:45 -08002337int Channel::VoiceActivityIndicator(int& activity) {
2338 activity = _sendFrameType;
2339 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002340}
2341
2342#ifdef WEBRTC_VOICE_ENGINE_AGC
2343
kwiberg55b97fe2016-01-28 05:22:45 -08002344int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2345 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2346 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2347 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002348
kwiberg55b97fe2016-01-28 05:22:45 -08002349 GainControl::Mode agcMode = kDefaultRxAgcMode;
2350 switch (mode) {
2351 case kAgcDefault:
2352 break;
2353 case kAgcUnchanged:
2354 agcMode = rx_audioproc_->gain_control()->mode();
2355 break;
2356 case kAgcFixedDigital:
2357 agcMode = GainControl::kFixedDigital;
2358 break;
2359 case kAgcAdaptiveDigital:
2360 agcMode = GainControl::kAdaptiveDigital;
2361 break;
2362 default:
2363 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2364 "SetRxAgcStatus() invalid Agc mode");
2365 return -1;
2366 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002367
kwiberg55b97fe2016-01-28 05:22:45 -08002368 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2369 _engineStatisticsPtr->SetLastError(
2370 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2371 return -1;
2372 }
2373 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2374 _engineStatisticsPtr->SetLastError(
2375 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2376 return -1;
2377 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002378
kwiberg55b97fe2016-01-28 05:22:45 -08002379 _rxAgcIsEnabled = enable;
2380 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002381
kwiberg55b97fe2016-01-28 05:22:45 -08002382 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002383}
2384
kwiberg55b97fe2016-01-28 05:22:45 -08002385int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2386 bool enable = rx_audioproc_->gain_control()->is_enabled();
2387 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002388
kwiberg55b97fe2016-01-28 05:22:45 -08002389 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002390
kwiberg55b97fe2016-01-28 05:22:45 -08002391 switch (agcMode) {
2392 case GainControl::kFixedDigital:
2393 mode = kAgcFixedDigital;
2394 break;
2395 case GainControl::kAdaptiveDigital:
2396 mode = kAgcAdaptiveDigital;
2397 break;
2398 default:
2399 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2400 "GetRxAgcStatus() invalid Agc mode");
2401 return -1;
2402 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002403
kwiberg55b97fe2016-01-28 05:22:45 -08002404 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002405}
2406
kwiberg55b97fe2016-01-28 05:22:45 -08002407int Channel::SetRxAgcConfig(AgcConfig config) {
2408 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2409 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002410
kwiberg55b97fe2016-01-28 05:22:45 -08002411 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2412 config.targetLeveldBOv) != 0) {
2413 _engineStatisticsPtr->SetLastError(
2414 VE_APM_ERROR, kTraceError,
2415 "SetRxAgcConfig() failed to set target peak |level|"
2416        " (or envelope) of the Agc");
2417 return -1;
2418 }
2419 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2420 config.digitalCompressionGaindB) != 0) {
2421 _engineStatisticsPtr->SetLastError(
2422 VE_APM_ERROR, kTraceError,
2423 "SetRxAgcConfig() failed to set the range in |gain| the"
2424 " digital compression stage may apply");
2425 return -1;
2426 }
2427 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2428 0) {
2429 _engineStatisticsPtr->SetLastError(
2430 VE_APM_ERROR, kTraceError,
2431 "SetRxAgcConfig() failed to set hard limiter to the signal");
2432 return -1;
2433 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002434
kwiberg55b97fe2016-01-28 05:22:45 -08002435 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002436}
2437
kwiberg55b97fe2016-01-28 05:22:45 -08002438int Channel::GetRxAgcConfig(AgcConfig& config) {
2439 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2440 config.digitalCompressionGaindB =
2441 rx_audioproc_->gain_control()->compression_gain_db();
2442 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002443
kwiberg55b97fe2016-01-28 05:22:45 -08002444 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002445}
2446
kwiberg55b97fe2016-01-28 05:22:45 -08002447#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002448
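// Usage sketch (illustrative): enabling receive-side AGC, available only when
// WEBRTC_VOICE_ENGINE_AGC is defined. Assumes |channel| is a valid
// voe::Channel pointer; the compression gain is an example value.
//
//   channel->SetRxAgcStatus(true, kAgcAdaptiveDigital);
//   AgcConfig config;
//   channel->GetRxAgcConfig(config);
//   config.digitalCompressionGaindB = 9;
//   channel->SetRxAgcConfig(config);
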
2449#ifdef WEBRTC_VOICE_ENGINE_NR
2450
kwiberg55b97fe2016-01-28 05:22:45 -08002451int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2452 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2453 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2454 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002455
kwiberg55b97fe2016-01-28 05:22:45 -08002456 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2457 switch (mode) {
2458 case kNsDefault:
2459 break;
2460 case kNsUnchanged:
2461 nsLevel = rx_audioproc_->noise_suppression()->level();
2462 break;
2463 case kNsConference:
2464 nsLevel = NoiseSuppression::kHigh;
2465 break;
2466 case kNsLowSuppression:
2467 nsLevel = NoiseSuppression::kLow;
2468 break;
2469 case kNsModerateSuppression:
2470 nsLevel = NoiseSuppression::kModerate;
2471 break;
2472 case kNsHighSuppression:
2473 nsLevel = NoiseSuppression::kHigh;
2474 break;
2475 case kNsVeryHighSuppression:
2476 nsLevel = NoiseSuppression::kVeryHigh;
2477 break;
2478 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002479
kwiberg55b97fe2016-01-28 05:22:45 -08002480 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2481 _engineStatisticsPtr->SetLastError(
2482 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2483 return -1;
2484 }
2485 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2486 _engineStatisticsPtr->SetLastError(
2487 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2488 return -1;
2489 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002490
kwiberg55b97fe2016-01-28 05:22:45 -08002491 _rxNsIsEnabled = enable;
2492 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002493
kwiberg55b97fe2016-01-28 05:22:45 -08002494 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002495}
2496
kwiberg55b97fe2016-01-28 05:22:45 -08002497int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2498 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2499 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002500
kwiberg55b97fe2016-01-28 05:22:45 -08002501 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002502
kwiberg55b97fe2016-01-28 05:22:45 -08002503 switch (ncLevel) {
2504 case NoiseSuppression::kLow:
2505 mode = kNsLowSuppression;
2506 break;
2507 case NoiseSuppression::kModerate:
2508 mode = kNsModerateSuppression;
2509 break;
2510 case NoiseSuppression::kHigh:
2511 mode = kNsHighSuppression;
2512 break;
2513 case NoiseSuppression::kVeryHigh:
2514 mode = kNsVeryHighSuppression;
2515 break;
2516 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002517
kwiberg55b97fe2016-01-28 05:22:45 -08002518 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002519}
2520
kwiberg55b97fe2016-01-28 05:22:45 -08002521#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002522
kwiberg55b97fe2016-01-28 05:22:45 -08002523int Channel::SetLocalSSRC(unsigned int ssrc) {
2524 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2525 "Channel::SetLocalSSRC()");
2526 if (channel_state_.Get().sending) {
2527 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2528 "SetLocalSSRC() already sending");
2529 return -1;
2530 }
2531 _rtpRtcpModule->SetSSRC(ssrc);
2532 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002533}
2534
kwiberg55b97fe2016-01-28 05:22:45 -08002535int Channel::GetLocalSSRC(unsigned int& ssrc) {
2536 ssrc = _rtpRtcpModule->SSRC();
2537 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002538}
2539
kwiberg55b97fe2016-01-28 05:22:45 -08002540int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2541 ssrc = rtp_receiver_->SSRC();
2542 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002543}
2544
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002545int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002546 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002547 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002548}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002549
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002550int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2551 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002552 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2553 if (enable &&
2554 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2555 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002556 return -1;
2557 }
2558 return 0;
2559}
2560
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002561int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2562 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2563}
2564
2565int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2566 rtp_header_parser_->DeregisterRtpHeaderExtension(
2567 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002568 if (enable &&
2569 !rtp_header_parser_->RegisterRtpHeaderExtension(
2570 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002571 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002572 }
2573 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002574}
2575
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002576void Channel::EnableSendTransportSequenceNumber(int id) {
2577 int ret =
2578 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2579 RTC_DCHECK_EQ(0, ret);
2580}
2581
stefan3313ec92016-01-21 06:32:43 -08002582void Channel::EnableReceiveTransportSequenceNumber(int id) {
2583 rtp_header_parser_->DeregisterRtpHeaderExtension(
2584 kRtpExtensionTransportSequenceNumber);
2585 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2586 kRtpExtensionTransportSequenceNumber, id);
2587 RTC_DCHECK(ret);
2588}
2589
stefanbba9dec2016-02-01 04:39:55 -08002590void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002591 RtpPacketSender* rtp_packet_sender,
2592 TransportFeedbackObserver* transport_feedback_observer,
2593 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002594 RTC_DCHECK(rtp_packet_sender);
2595 RTC_DCHECK(transport_feedback_observer);
2596 RTC_DCHECK(packet_router && !packet_router_);
2597 feedback_observer_proxy_->SetTransportFeedbackObserver(
2598 transport_feedback_observer);
2599 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2600 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
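  // Sent packets are stored (the 600 most recent here) so that the pacer can
  // send them at their scheduled time and NACKed packets can be retransmitted.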
2601 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002602 packet_router->AddRtpModule(_rtpRtcpModule.get());
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002603 packet_router_ = packet_router;
2604}
2605
stefanbba9dec2016-02-01 04:39:55 -08002606void Channel::RegisterReceiverCongestionControlObjects(
2607 PacketRouter* packet_router) {
2608 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002609 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002610 packet_router_ = packet_router;
2611}
2612
2613void Channel::ResetCongestionControlObjects() {
2614 RTC_DCHECK(packet_router_);
2615 _rtpRtcpModule->SetStorePacketsStatus(false, 600);
2616 feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
2617 seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002618 packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002619 packet_router_ = nullptr;
2620 rtp_packet_sender_proxy_->SetPacketSender(nullptr);
2621}
2622
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002623void Channel::SetRTCPStatus(bool enable) {
2624 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2625 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002626 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002627}
2628
kwiberg55b97fe2016-01-28 05:22:45 -08002629int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002630 RtcpMode method = _rtpRtcpModule->RTCP();
2631 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002632 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002633}
2634
kwiberg55b97fe2016-01-28 05:22:45 -08002635int Channel::SetRTCP_CNAME(const char cName[256]) {
2636 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2637 "Channel::SetRTCP_CNAME()");
2638 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2639 _engineStatisticsPtr->SetLastError(
2640 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2641 "SetRTCP_CNAME() failed to set RTCP CNAME");
2642 return -1;
2643 }
2644 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002645}
2646
kwiberg55b97fe2016-01-28 05:22:45 -08002647int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2648 if (cName == NULL) {
2649 _engineStatisticsPtr->SetLastError(
2650 VE_INVALID_ARGUMENT, kTraceError,
2651 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2652 return -1;
2653 }
2654 char cname[RTCP_CNAME_SIZE];
2655 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2656 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2657 _engineStatisticsPtr->SetLastError(
2658 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2659 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2660 return -1;
2661 }
2662 strcpy(cName, cname);
2663 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002664}
2665
kwiberg55b97fe2016-01-28 05:22:45 -08002666int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2667 unsigned int& NTPLow,
2668 unsigned int& timestamp,
2669 unsigned int& playoutTimestamp,
2670 unsigned int* jitter,
2671 unsigned short* fractionLost) {
2672 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002673
kwiberg55b97fe2016-01-28 05:22:45 -08002674 RTCPSenderInfo senderInfo;
2675 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2676 _engineStatisticsPtr->SetLastError(
2677 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2678 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2679 "side");
2680 return -1;
2681 }
2682
2683 // We only utilize 12 of the 20 bytes in the sender info (ignoring the
2684 // packet and octet counts).
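  // (An RFC 3550 sender info block is 20 bytes: NTP seconds, NTP fraction,
  // RTP timestamp, sender packet count and sender octet count, 4 bytes each.)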
2685 NTPHigh = senderInfo.NTPseconds;
2686 NTPLow = senderInfo.NTPfraction;
2687 timestamp = senderInfo.RTPtimeStamp;
2688
2689 // --- Locally derived information
2690
2691 // This value is updated on each incoming RTCP packet (0 when no packet
2692 // has been received)
2693 playoutTimestamp = playout_timestamp_rtcp_;
2694
2695 if (NULL != jitter || NULL != fractionLost) {
2696 // Get all RTCP receiver report blocks that have been received on this
2697 // channel. If we receive RTP packets from a remote source, we know the
2698 // remote SSRC and use the report block from that source.
2699 // Otherwise use the first report block.
2700 std::vector<RTCPReportBlock> remote_stats;
2701 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2702 remote_stats.empty()) {
2703 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2704 "GetRemoteRTCPData() failed to measure statistics due"
2705 " to lack of received RTP and/or RTCP packets");
2706 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002707 }
2708
kwiberg55b97fe2016-01-28 05:22:45 -08002709 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2710 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2711 for (; it != remote_stats.end(); ++it) {
2712 if (it->remoteSSRC == remoteSSRC)
2713 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002714 }
kwiberg55b97fe2016-01-28 05:22:45 -08002715
2716 if (it == remote_stats.end()) {
2717 // If we have not received any RTCP packets from this SSRC it probably
2718 // means that we have not received any RTP packets.
2719 // Use the first received report block instead.
2720 it = remote_stats.begin();
2721 remoteSSRC = it->remoteSSRC;
2722 }
2723
2724 if (jitter) {
2725 *jitter = it->jitter;
2726 }
2727
2728 if (fractionLost) {
2729 *fractionLost = it->fractionLost;
2730 }
2731 }
2732 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002733}
2734
kwiberg55b97fe2016-01-28 05:22:45 -08002735int Channel::SendApplicationDefinedRTCPPacket(
2736 unsigned char subType,
2737 unsigned int name,
2738 const char* data,
2739 unsigned short dataLengthInBytes) {
2740 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2741 "Channel::SendApplicationDefinedRTCPPacket()");
2742 if (!channel_state_.Get().sending) {
2743 _engineStatisticsPtr->SetLastError(
2744 VE_NOT_SENDING, kTraceError,
2745 "SendApplicationDefinedRTCPPacket() not sending");
2746 return -1;
2747 }
2748 if (NULL == data) {
2749 _engineStatisticsPtr->SetLastError(
2750 VE_INVALID_ARGUMENT, kTraceError,
2751 "SendApplicationDefinedRTCPPacket() invalid data value");
2752 return -1;
2753 }
2754 if (dataLengthInBytes % 4 != 0) {
2755 _engineStatisticsPtr->SetLastError(
2756 VE_INVALID_ARGUMENT, kTraceError,
2757 "SendApplicationDefinedRTCPPacket() invalid length value");
2758 return -1;
2759 }
2760 RtcpMode status = _rtpRtcpModule->RTCP();
2761 if (status == RtcpMode::kOff) {
2762 _engineStatisticsPtr->SetLastError(
2763 VE_RTCP_ERROR, kTraceError,
2764 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2765 return -1;
2766 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002767
kwiberg55b97fe2016-01-28 05:22:45 -08002768 // Create and schedule the RTCP APP packet for transmission
2769 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2770 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2771 _engineStatisticsPtr->SetLastError(
2772 VE_SEND_ERROR, kTraceError,
2773 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2774 return -1;
2775 }
2776 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002777}
2778
kwiberg55b97fe2016-01-28 05:22:45 -08002779int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2780 unsigned int& maxJitterMs,
2781 unsigned int& discardedPackets) {
2782 // The jitter statistics is updated for each received RTP packet and is
2783 // based on received packets.
2784 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2785 // If RTCP is off, there is no timed thread in the RTCP module regularly
2786 // generating new stats, so trigger the update manually here instead.
2787 StreamStatistician* statistician =
2788 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2789 if (statistician) {
2790 // Don't use returned statistics, use data from proxy instead so that
2791 // max jitter can be fetched atomically.
2792 RtcpStatistics s;
2793 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002794 }
kwiberg55b97fe2016-01-28 05:22:45 -08002795 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002796
kwiberg55b97fe2016-01-28 05:22:45 -08002797 ChannelStatistics stats = statistics_proxy_->GetStats();
2798 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2799 if (playoutFrequency > 0) {
2800 // Scale RTP statistics given the current playout frequency
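    // The jitter values are in RTP timestamp units, so dividing by
    // (playoutFrequency / 1000) converts them to milliseconds; e.g. a jitter
    // of 480 timestamp units at 48000 Hz corresponds to 10 ms.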
2801 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2802 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2803 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002804
kwiberg55b97fe2016-01-28 05:22:45 -08002805 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002806
kwiberg55b97fe2016-01-28 05:22:45 -08002807 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002808}
2809
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002810int Channel::GetRemoteRTCPReportBlocks(
2811 std::vector<ReportBlock>* report_blocks) {
2812 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002813 _engineStatisticsPtr->SetLastError(
2814 VE_INVALID_ARGUMENT, kTraceError,
2815 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002816 return -1;
2817 }
2818
2819 // Get the report blocks from the latest received RTCP Sender or Receiver
2820 // Report. Each element in the vector contains the sender's SSRC and a
2821 // report block according to RFC 3550.
2822 std::vector<RTCPReportBlock> rtcp_report_blocks;
2823 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002824 return -1;
2825 }
2826
2827 if (rtcp_report_blocks.empty())
2828 return 0;
2829
2830 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2831 for (; it != rtcp_report_blocks.end(); ++it) {
2832 ReportBlock report_block;
2833 report_block.sender_SSRC = it->remoteSSRC;
2834 report_block.source_SSRC = it->sourceSSRC;
2835 report_block.fraction_lost = it->fractionLost;
2836 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2837 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2838 report_block.interarrival_jitter = it->jitter;
2839 report_block.last_SR_timestamp = it->lastSR;
2840 report_block.delay_since_last_SR = it->delaySinceLastSR;
2841 report_blocks->push_back(report_block);
2842 }
2843 return 0;
2844}
2845
kwiberg55b97fe2016-01-28 05:22:45 -08002846int Channel::GetRTPStatistics(CallStatistics& stats) {
2847 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002848
kwiberg55b97fe2016-01-28 05:22:45 -08002849 // The jitter statistics are updated for each received RTP packet.
2851 RtcpStatistics statistics;
2852 StreamStatistician* statistician =
2853 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002854 if (statistician) {
2855 statistician->GetStatistics(&statistics,
2856 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002857 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002858
kwiberg55b97fe2016-01-28 05:22:45 -08002859 stats.fractionLost = statistics.fraction_lost;
2860 stats.cumulativeLost = statistics.cumulative_lost;
2861 stats.extendedMax = statistics.extended_max_sequence_number;
2862 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002863
kwiberg55b97fe2016-01-28 05:22:45 -08002864 // --- RTT
2865 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002866
kwiberg55b97fe2016-01-28 05:22:45 -08002867 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002868
kwiberg55b97fe2016-01-28 05:22:45 -08002869 size_t bytesSent(0);
2870 uint32_t packetsSent(0);
2871 size_t bytesReceived(0);
2872 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002873
kwiberg55b97fe2016-01-28 05:22:45 -08002874 if (statistician) {
2875 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2876 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002877
kwiberg55b97fe2016-01-28 05:22:45 -08002878 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2879 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2880 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
2881 " output will not be complete");
2882 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002883
kwiberg55b97fe2016-01-28 05:22:45 -08002884 stats.bytesSent = bytesSent;
2885 stats.packetsSent = packetsSent;
2886 stats.bytesReceived = bytesReceived;
2887 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002888
kwiberg55b97fe2016-01-28 05:22:45 -08002889 // --- Timestamps
2890 {
2891 rtc::CritScope lock(&ts_stats_lock_);
2892 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2893 }
2894 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002895}
2896
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002897int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002898 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002899 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002900
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002901 if (enable) {
2902 if (redPayloadtype < 0 || redPayloadtype > 127) {
2903 _engineStatisticsPtr->SetLastError(
2904 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002905 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002906 return -1;
2907 }
2908
2909 if (SetRedPayloadType(redPayloadtype) < 0) {
2910 _engineStatisticsPtr->SetLastError(
2911 VE_CODEC_ERROR, kTraceError,
2912 "SetSecondarySendCodec() Failed to register RED ACM");
2913 return -1;
2914 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002915 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002916
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002917 if (audio_coding_->SetREDStatus(enable) != 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002918 _engineStatisticsPtr->SetLastError(
2919 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002920 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002921 return -1;
2922 }
2923 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002924}
2925
kwiberg55b97fe2016-01-28 05:22:45 -08002926int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
2927 enabled = audio_coding_->REDStatus();
2928 if (enabled) {
2929 int8_t payloadType = 0;
2930 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2931 _engineStatisticsPtr->SetLastError(
2932 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2933 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2934 "module");
2935 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002936 }
kwiberg55b97fe2016-01-28 05:22:45 -08002937 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002938 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002939 }
2940 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002941}
2942
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002943int Channel::SetCodecFECStatus(bool enable) {
2944 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2945 "Channel::SetCodecFECStatus()");
2946
2947 if (audio_coding_->SetCodecFEC(enable) != 0) {
2948 _engineStatisticsPtr->SetLastError(
2949 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2950 "SetCodecFECStatus() failed to set FEC state");
2951 return -1;
2952 }
2953 return 0;
2954}
2955
2956bool Channel::GetCodecFECStatus() {
2957 bool enabled = audio_coding_->CodecFEC();
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002958 return enabled;
2959}
2960
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002961void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2962 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002963 // If pacing is enabled we always store packets.
2964 if (!pacing_enabled_)
2965 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002966 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
2967 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002968 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002969 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002970 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002971 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002972}
2973
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002974// Called when we are missing one or more packets.
2975int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002976 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2977}
2978
kwiberg55b97fe2016-01-28 05:22:45 -08002979uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2980 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2981 "Channel::Demultiplex()");
2982 _audioFrame.CopyFrom(audioFrame);
2983 _audioFrame.id_ = _channelId;
2984 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002985}
2986
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002987void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002988 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002989 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002990 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002991 CodecInst codec;
2992 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002993
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002994 // Never upsample or upmix the capture signal here. This should be done at the
2995 // end of the send chain.
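  // For example, 48 kHz stereo capture with a 16 kHz mono send codec is
  // remixed and resampled down to 16 kHz mono here, while an 8 kHz capture is
  // never upsampled, even for a 48 kHz codec.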
2996 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2997 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2998 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2999 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003000}
3001
kwiberg55b97fe2016-01-28 05:22:45 -08003002uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
3003 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3004 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003005
kwiberg55b97fe2016-01-28 05:22:45 -08003006 if (_audioFrame.samples_per_channel_ == 0) {
3007 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3008 "Channel::PrepareEncodeAndSend() invalid audio frame");
3009 return 0xFFFFFFFF;
3010 }
3011
3012 if (channel_state_.Get().input_file_playing) {
3013 MixOrReplaceAudioWithFile(mixingFrequency);
3014 }
3015
3016 bool is_muted = Mute(); // Cache locally as Mute() takes a lock.
3017 if (is_muted) {
3018 AudioFrameOperations::Mute(_audioFrame);
3019 }
3020
3021 if (channel_state_.Get().input_external_media) {
3022 rtc::CritScope cs(&_callbackCritSect);
3023 const bool isStereo = (_audioFrame.num_channels_ == 2);
3024 if (_inputExternalMediaCallbackPtr) {
3025 _inputExternalMediaCallbackPtr->Process(
3026 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
3027 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
3028 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00003029 }
kwiberg55b97fe2016-01-28 05:22:45 -08003030 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003031
solenberg3ecb5c82016-03-09 07:31:58 -08003032 InsertInbandDtmfTone();
3033
kwiberg55b97fe2016-01-28 05:22:45 -08003034 if (_includeAudioLevelIndication) {
3035 size_t length =
3036 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
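    // The measured RMS level is later used to fill in the audio level RTP
    // header extension (registered via SetSendAudioLevelIndicationStatus())
    // when the encoded packet is sent.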
andrew@webrtc.org21299d42014-05-14 19:00:59 +00003037 if (is_muted) {
kwiberg55b97fe2016-01-28 05:22:45 -08003038 rms_level_.ProcessMuted(length);
3039 } else {
3040 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00003041 }
kwiberg55b97fe2016-01-28 05:22:45 -08003042 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003043
kwiberg55b97fe2016-01-28 05:22:45 -08003044 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003045}
3046
kwiberg55b97fe2016-01-28 05:22:45 -08003047uint32_t Channel::EncodeAndSend() {
3048 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3049 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003050
kwiberg55b97fe2016-01-28 05:22:45 -08003051 assert(_audioFrame.num_channels_ <= 2);
3052 if (_audioFrame.samples_per_channel_ == 0) {
3053 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3054 "Channel::EncodeAndSend() invalid audio frame");
3055 return 0xFFFFFFFF;
3056 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003057
kwiberg55b97fe2016-01-28 05:22:45 -08003058 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003059
kwiberg55b97fe2016-01-28 05:22:45 -08003060 // --- Add 10ms of raw (PCM) audio data to the encoder.
niklase@google.com470e71d2011-07-07 08:21:25 +00003061
kwiberg55b97fe2016-01-28 05:22:45 -08003062 // The ACM resamples internally.
3063 _audioFrame.timestamp_ = _timeStamp;
3064 // This call will trigger AudioPacketizationCallback::SendData if encoding
3065 // is done and payload is ready for packetization and transmission.
3066 // Otherwise, it will return without invoking the callback.
3067 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3068 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3069 "Channel::EncodeAndSend() ACM encoding failed");
3070 return 0xFFFFFFFF;
3071 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003072
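  // Advance the RTP timestamp by one 10 ms frame of samples, e.g. 480 ticks
  // at 48 kHz.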
kwiberg55b97fe2016-01-28 05:22:45 -08003073 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3074 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003075}
3076
Minyue2013aec2015-05-13 14:14:42 +02003077void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003078 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003079 Channel* channel = associate_send_channel_.channel();
3080 if (channel && channel->ChannelId() == channel_id) {
3081 // If this channel is associated with a send channel of the specified
3082 // Channel ID, disassociate with it.
3083 ChannelOwner ref(NULL);
3084 associate_send_channel_ = ref;
3085 }
3086}
3087
kwiberg55b97fe2016-01-28 05:22:45 -08003088int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3089 VoEMediaProcess& processObject) {
3090 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3091 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003092
kwiberg55b97fe2016-01-28 05:22:45 -08003093 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003094
kwiberg55b97fe2016-01-28 05:22:45 -08003095 if (kPlaybackPerChannel == type) {
3096 if (_outputExternalMediaCallbackPtr) {
3097 _engineStatisticsPtr->SetLastError(
3098 VE_INVALID_OPERATION, kTraceError,
3099 "Channel::RegisterExternalMediaProcessing() "
3100 "output external media already enabled");
3101 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003102 }
kwiberg55b97fe2016-01-28 05:22:45 -08003103 _outputExternalMediaCallbackPtr = &processObject;
3104 _outputExternalMedia = true;
3105 } else if (kRecordingPerChannel == type) {
3106 if (_inputExternalMediaCallbackPtr) {
3107 _engineStatisticsPtr->SetLastError(
3108 VE_INVALID_OPERATION, kTraceError,
3109 "Channel::RegisterExternalMediaProcessing() "
3110 "output external media already enabled");
3111 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003112 }
kwiberg55b97fe2016-01-28 05:22:45 -08003113 _inputExternalMediaCallbackPtr = &processObject;
3114 channel_state_.SetInputExternalMedia(true);
3115 }
3116 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003117}
3118
kwiberg55b97fe2016-01-28 05:22:45 -08003119int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3120 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3121 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003122
kwiberg55b97fe2016-01-28 05:22:45 -08003123 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003124
kwiberg55b97fe2016-01-28 05:22:45 -08003125 if (kPlaybackPerChannel == type) {
3126 if (!_outputExternalMediaCallbackPtr) {
3127 _engineStatisticsPtr->SetLastError(
3128 VE_INVALID_OPERATION, kTraceWarning,
3129 "Channel::DeRegisterExternalMediaProcessing() "
3130 "output external media already disabled");
3131 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003132 }
kwiberg55b97fe2016-01-28 05:22:45 -08003133 _outputExternalMedia = false;
3134 _outputExternalMediaCallbackPtr = NULL;
3135 } else if (kRecordingPerChannel == type) {
3136 if (!_inputExternalMediaCallbackPtr) {
3137 _engineStatisticsPtr->SetLastError(
3138 VE_INVALID_OPERATION, kTraceWarning,
3139 "Channel::DeRegisterExternalMediaProcessing() "
3140 "input external media already disabled");
3141 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003142 }
kwiberg55b97fe2016-01-28 05:22:45 -08003143 channel_state_.SetInputExternalMedia(false);
3144 _inputExternalMediaCallbackPtr = NULL;
3145 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003146
kwiberg55b97fe2016-01-28 05:22:45 -08003147 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003148}
3149
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003150int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003151 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3152 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003153
kwiberg55b97fe2016-01-28 05:22:45 -08003154 if (channel_state_.Get().playing) {
3155 _engineStatisticsPtr->SetLastError(
3156 VE_INVALID_OPERATION, kTraceError,
3157 "Channel::SetExternalMixing() "
3158 "external mixing cannot be changed while playing.");
3159 return -1;
3160 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003161
kwiberg55b97fe2016-01-28 05:22:45 -08003162 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003163
kwiberg55b97fe2016-01-28 05:22:45 -08003164 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003165}
3166
kwiberg55b97fe2016-01-28 05:22:45 -08003167int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3168 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003169}
3170
wu@webrtc.org24301a62013-12-13 19:17:43 +00003171void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3172 audio_coding_->GetDecodingCallStatistics(stats);
3173}
3174
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003175bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3176 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003177 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003178 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003179 return false;
3180 }
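  // |_average_jitter_buffer_delay_us| is kept in microseconds (see
  // UpdatePacketDelay()); adding 500 before dividing by 1000 rounds it to the
  // nearest millisecond, e.g. 42062 us becomes 42 ms.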
kwiberg55b97fe2016-01-28 05:22:45 -08003181 *jitter_buffer_delay_ms =
3182 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003183 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003184 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003185}
3186
solenberg358057b2015-11-27 10:46:42 -08003187uint32_t Channel::GetDelayEstimate() const {
3188 int jitter_buffer_delay_ms = 0;
3189 int playout_buffer_delay_ms = 0;
3190 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3191 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3192}
3193
deadbeef74375882015-08-13 12:09:10 -07003194int Channel::LeastRequiredDelayMs() const {
3195 return audio_coding_->LeastRequiredDelayMs();
3196}
3197
kwiberg55b97fe2016-01-28 05:22:45 -08003198int Channel::SetMinimumPlayoutDelay(int delayMs) {
3199 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3200 "Channel::SetMinimumPlayoutDelay()");
3201 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3202 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3203 _engineStatisticsPtr->SetLastError(
3204 VE_INVALID_ARGUMENT, kTraceError,
3205 "SetMinimumPlayoutDelay() invalid min delay");
3206 return -1;
3207 }
3208 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3209 _engineStatisticsPtr->SetLastError(
3210 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3211 "SetMinimumPlayoutDelay() failed to set min playout delay");
3212 return -1;
3213 }
3214 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003215}
3216
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003217int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003218 uint32_t playout_timestamp_rtp = 0;
3219 {
tommi31fc21f2016-01-21 10:37:37 -08003220 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003221 playout_timestamp_rtp = playout_timestamp_rtp_;
3222 }
kwiberg55b97fe2016-01-28 05:22:45 -08003223 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003224 _engineStatisticsPtr->SetLastError(
3225 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3226 "GetPlayoutTimestamp() failed to retrieve timestamp");
3227 return -1;
3228 }
deadbeef74375882015-08-13 12:09:10 -07003229 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003230 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003231}
3232
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003233int Channel::SetInitTimestamp(unsigned int timestamp) {
3234 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003235 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003236 if (channel_state_.Get().sending) {
3237 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3238 "SetInitTimestamp() already sending");
3239 return -1;
3240 }
3241 _rtpRtcpModule->SetStartTimestamp(timestamp);
3242 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003243}
3244
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003245int Channel::SetInitSequenceNumber(short sequenceNumber) {
3246 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3247 "Channel::SetInitSequenceNumber()");
3248 if (channel_state_.Get().sending) {
3249 _engineStatisticsPtr->SetLastError(
3250 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3251 return -1;
3252 }
3253 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3254 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003255}
3256
kwiberg55b97fe2016-01-28 05:22:45 -08003257int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3258 RtpReceiver** rtp_receiver) const {
3259 *rtpRtcpModule = _rtpRtcpModule.get();
3260 *rtp_receiver = rtp_receiver_.get();
3261 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003262}
3263
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003264// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3265// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003266int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwibergb7f89d62016-02-17 10:04:18 -08003267 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003268 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003269
kwiberg55b97fe2016-01-28 05:22:45 -08003270 {
3271 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003272
kwiberg55b97fe2016-01-28 05:22:45 -08003273 if (_inputFilePlayerPtr == NULL) {
3274 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3275 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3276 " doesnt exist");
3277 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003278 }
3279
kwiberg55b97fe2016-01-28 05:22:45 -08003280 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3281 mixingFrequency) == -1) {
3282 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3283 "Channel::MixOrReplaceAudioWithFile() file mixing "
3284 "failed");
3285 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003286 }
kwiberg55b97fe2016-01-28 05:22:45 -08003287 if (fileSamples == 0) {
3288 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3289 "Channel::MixOrReplaceAudioWithFile() file is ended");
3290 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003291 }
kwiberg55b97fe2016-01-28 05:22:45 -08003292 }
3293
3294 assert(_audioFrame.samples_per_channel_ == fileSamples);
3295
3296 if (_mixFileWithMicrophone) {
3297 // Currently file stream is always mono.
3298 // TODO(xians): Change the code when FilePlayer supports real stereo.
3299 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3300 1, fileSamples);
3301 } else {
3302 // Replace ACM audio with file.
3303 // Currently file stream is always mono.
3304 // TODO(xians): Change the code when FilePlayer supports real stereo.
3305 _audioFrame.UpdateFrame(
3306 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3307 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3308 }
3309 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003310}
3311
kwiberg55b97fe2016-01-28 05:22:45 -08003312int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3313 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003314
kwibergb7f89d62016-02-17 10:04:18 -08003315 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
kwiberg55b97fe2016-01-28 05:22:45 -08003316 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003317
kwiberg55b97fe2016-01-28 05:22:45 -08003318 {
3319 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003320
kwiberg55b97fe2016-01-28 05:22:45 -08003321 if (_outputFilePlayerPtr == NULL) {
3322 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3323 "Channel::MixAudioWithFile() file mixing failed");
3324 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003325 }
3326
kwiberg55b97fe2016-01-28 05:22:45 -08003327 // We should get the frequency we ask for.
3328 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3329 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3330 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3331 "Channel::MixAudioWithFile() file mixing failed");
3332 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003333 }
kwiberg55b97fe2016-01-28 05:22:45 -08003334 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003335
kwiberg55b97fe2016-01-28 05:22:45 -08003336 if (audioFrame.samples_per_channel_ == fileSamples) {
3337 // Currently file stream is always mono.
3338 // TODO(xians): Change the code when FilePlayer supports real stereo.
3339 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3340 fileSamples);
3341 } else {
3342 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3343 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3344 ") != "
3345 "fileSamples(%" PRIuS ")",
3346 audioFrame.samples_per_channel_, fileSamples);
3347 return -1;
3348 }
3349
3350 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003351}
3352
solenberg3ecb5c82016-03-09 07:31:58 -08003353int Channel::InsertInbandDtmfTone() {
3354 // Check if we should start a new tone.
3355 if (_inbandDtmfQueue.PendingDtmf() && !_inbandDtmfGenerator.IsAddingTone() &&
3356 _inbandDtmfGenerator.DelaySinceLastTone() >
3357 kMinTelephoneEventSeparationMs) {
3358 int8_t eventCode(0);
3359 uint16_t lengthMs(0);
3360 uint8_t attenuationDb(0);
3361
3362 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
3363 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
3364 if (_playInbandDtmfEvent) {
3365 // Add tone to output mixer using a reduced length to minimize
3366 // risk of echo.
3367 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80, attenuationDb);
3368 }
3369 }
3370
3371 if (_inbandDtmfGenerator.IsAddingTone()) {
3372 uint16_t frequency(0);
3373 _inbandDtmfGenerator.GetSampleRate(frequency);
3374
3375 if (frequency != _audioFrame.sample_rate_hz_) {
3376 // Update sample rate of Dtmf tone since the mixing frequency
3377 // has changed.
3378 _inbandDtmfGenerator.SetSampleRate(
3379 (uint16_t)(_audioFrame.sample_rate_hz_));
3380 // Reset the tone to be added taking the new sample rate into
3381 // account.
3382 _inbandDtmfGenerator.ResetTone();
3383 }
3384
3385 int16_t toneBuffer[320];
3386 uint16_t toneSamples(0);
3387 // Get 10ms tone segment and set time since last tone to zero
3388 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1) {
3389 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3390 "Channel::EncodeAndSend() inserting Dtmf failed");
3391 return -1;
3392 }
3393
3394 // Replace mixed audio with DTMF tone.
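    // The generated tone is mono; the same sample is written to every channel
    // of the frame.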
3395 for (size_t sample = 0; sample < _audioFrame.samples_per_channel_;
3396 sample++) {
3397 for (size_t channel = 0; channel < _audioFrame.num_channels_; channel++) {
3398 const size_t index = sample * _audioFrame.num_channels_ + channel;
3399 _audioFrame.data_[index] = toneBuffer[sample];
3400 }
3401 }
3402
3403 assert(_audioFrame.samples_per_channel_ == toneSamples);
3404 } else {
3405 // Add 10ms to "delay-since-last-tone" counter
3406 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
3407 }
3408 return 0;
3409}
3410
deadbeef74375882015-08-13 12:09:10 -07003411void Channel::UpdatePlayoutTimestamp(bool rtcp) {
3412 uint32_t playout_timestamp = 0;
3413
kwiberg55b97fe2016-01-28 05:22:45 -08003414 if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) {
deadbeef74375882015-08-13 12:09:10 -07003415 // This can happen if this channel has not received any RTP packets. In
3416 // that case, NetEq cannot compute a playout timestamp.
3417 return;
3418 }
3419
3420 uint16_t delay_ms = 0;
3421 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003422 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003423 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3424 " delay from the ADM");
3425 _engineStatisticsPtr->SetLastError(
3426 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3427 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3428 return;
3429 }
3430
3431 jitter_buffer_playout_timestamp_ = playout_timestamp;
3432
3433 // Remove the playout delay.
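  // For example, a 60 ms device delay at a 48000 Hz playout clock moves the
  // timestamp back by 60 * 48 = 2880 RTP ticks.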
3434 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
3435
kwiberg55b97fe2016-01-28 05:22:45 -08003436 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003437 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
3438 playout_timestamp);
3439
3440 {
tommi31fc21f2016-01-21 10:37:37 -08003441 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003442 if (rtcp) {
3443 playout_timestamp_rtcp_ = playout_timestamp;
3444 } else {
3445 playout_timestamp_rtp_ = playout_timestamp;
3446 }
3447 playout_delay_ms_ = delay_ms;
3448 }
3449}
3450
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003451// Called for incoming RTP packets after successful RTP header parsing.
3452void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3453 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003454 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003455 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3456 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003457
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003458 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003459 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003460
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003461 // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
3462 // for every incoming packet.
kwiberg55b97fe2016-01-28 05:22:45 -08003463 uint32_t timestamp_diff_ms =
3464 (rtp_timestamp - jitter_buffer_playout_timestamp_) /
3465 (rtp_receive_frequency / 1000);
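  // E.g. at a 48000 Hz receive frequency, a difference of 4800 RTP ticks
  // corresponds to 100 ms.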
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003466 if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
3467 timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3468 // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
3469 // timestamp, the resulting difference is negative, but is set to zero.
3470 // This can happen when a network glitch causes a packet to arrive late,
3471 // and during long comfort noise periods with clock drift.
3472 timestamp_diff_ms = 0;
3473 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003474
kwiberg55b97fe2016-01-28 05:22:45 -08003475 uint16_t packet_delay_ms =
3476 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003477
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003478 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003479
kwiberg55b97fe2016-01-28 05:22:45 -08003480 if (timestamp_diff_ms == 0)
3481 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003482
deadbeef74375882015-08-13 12:09:10 -07003483 {
tommi31fc21f2016-01-21 10:37:37 -08003484 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003485
deadbeef74375882015-08-13 12:09:10 -07003486 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3487 _recPacketDelayMs = packet_delay_ms;
3488 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003489
deadbeef74375882015-08-13 12:09:10 -07003490 if (_average_jitter_buffer_delay_us == 0) {
3491 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3492 return;
3493 }
3494
3495 // Filter average delay value using exponential filter (alpha is
3496 // 7/8). We derive 1000 * _average_jitter_buffer_delay_us here (reduces
3497 // risk of rounding error) and compensate for it in GetDelayEstimate()
3498 // later.
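    // For example, with a previous average of 40000 us and a new sample of
    // 56 ms, the update below yields (40000 * 7 + 1000 * 56 + 500) / 8, i.e.
    // 42062 us, moving roughly 1/8 of the way toward the new sample.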
kwiberg55b97fe2016-01-28 05:22:45 -08003499 _average_jitter_buffer_delay_us =
3500 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3501 8;
deadbeef74375882015-08-13 12:09:10 -07003502 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003503}
3504
kwiberg55b97fe2016-01-28 05:22:45 -08003505void Channel::RegisterReceiveCodecsToRTPModule() {
3506 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3507 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003508
kwiberg55b97fe2016-01-28 05:22:45 -08003509 CodecInst codec;
3510 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003511
kwiberg55b97fe2016-01-28 05:22:45 -08003512 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3513 // Open up the RTP/RTCP receiver for all supported codecs
3514 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3515 (rtp_receiver_->RegisterReceivePayload(
3516 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3517 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3518 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3519 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3520 " to register %s (%d/%d/%" PRIuS
3521 "/%d) to RTP/RTCP "
3522 "receiver",
3523 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3524 codec.rate);
3525 } else {
3526 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3527 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3528 "(%d/%d/%" PRIuS
3529 "/%d) has been added to the RTP/RTCP "
3530 "receiver",
3531 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3532 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003533 }
kwiberg55b97fe2016-01-28 05:22:45 -08003534 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003535}
3536
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003537// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003538int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003539 CodecInst codec;
3540 bool found_red = false;
3541
3542 // Get default RED settings from the ACM database
3543 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3544 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003545 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003546 if (!STR_CASE_CMP(codec.plname, "RED")) {
3547 found_red = true;
3548 break;
3549 }
3550 }
3551
3552 if (!found_red) {
3553 _engineStatisticsPtr->SetLastError(
3554 VE_CODEC_ERROR, kTraceError,
3555 "SetRedPayloadType() RED is not supported");
3556 return -1;
3557 }
3558
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003559 codec.pltype = red_payload_type;
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003560 if (audio_coding_->RegisterSendCodec(codec) < 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003561 _engineStatisticsPtr->SetLastError(
3562 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3563 "SetRedPayloadType() RED registration in ACM module failed");
3564 return -1;
3565 }
3566
3567 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3568 _engineStatisticsPtr->SetLastError(
3569 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3570 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3571 return -1;
3572 }
3573 return 0;
3574}
3575
kwiberg55b97fe2016-01-28 05:22:45 -08003576int Channel::SetSendRtpHeaderExtension(bool enable,
3577 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003578 unsigned char id) {
3579 int error = 0;
3580 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3581 if (enable) {
3582 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3583 }
3584 return error;
3585}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003586
wu@webrtc.org94454b72014-06-05 20:34:08 +00003587int32_t Channel::GetPlayoutFrequency() {
3588 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3589 CodecInst current_receive_codec;
3590 if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
3591 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
3592 // Even though the actual sampling rate for G.722 audio is
3593 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3594 // 8,000 Hz because that value was erroneously assigned in
3595 // RFC 1890 and must remain unchanged for backward compatibility.
3596 playout_frequency = 8000;
3597 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
3598 // We are resampling Opus internally to 32,000 Hz until all our
3599 // DSP routines can operate at 48,000 Hz, but the RTP clock
3600 // rate for the Opus payload format is standardized to 48,000 Hz,
3601 // because that is the maximum supported decoding sampling rate.
3602 playout_frequency = 48000;
3603 }
3604 }
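  // The returned value is the RTP clock rate of the received payload, not
  // necessarily the decoder's sampling rate; UpdatePlayoutTimestamp() and
  // UpdatePacketDelay() use it to convert between RTP ticks and milliseconds.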
3605 return playout_frequency;
3606}
3607
Minyue2013aec2015-05-13 14:14:42 +02003608int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003609 RtcpMode method = _rtpRtcpModule->RTCP();
3610 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003611 return 0;
3612 }
3613 std::vector<RTCPReportBlock> report_blocks;
3614 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003615
3616 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003617 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003618 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003619 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003620 Channel* channel = associate_send_channel_.channel();
3621 // Tries to get RTT from an associated channel. This is important for
3622 // receive-only channels.
3623 if (channel) {
3624 // To prevent infinite recursion and deadlock, calls to GetRTT on the
3625 // associated channel must always pass "false" for
3626 // |allow_associate_channel|.
3627 rtt = channel->GetRTT(false);
3628 }
3629 }
3630 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003631 }
3632
3633 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3634 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3635 for (; it != report_blocks.end(); ++it) {
3636 if (it->remoteSSRC == remoteSSRC)
3637 break;
3638 }
3639 if (it == report_blocks.end()) {
3640 // We have not received packets with SSRC matching the report blocks.
3641 // To calculate RTT we try with the SSRC of the first report block.
3642 // This is very important for send-only channels where we don't know
3643 // the SSRC of the other end.
3644 remoteSSRC = report_blocks[0].remoteSSRC;
3645 }
Minyue2013aec2015-05-13 14:14:42 +02003646
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003647 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003648 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003649 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003650 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3651 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003652 return 0;
3653 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003654 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003655}
3656
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003657} // namespace voe
3658} // namespace webrtc