/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

#if defined(_WIN32)
#include <Qos.h>
#endif

namespace webrtc {
namespace voe {

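// Proxy that forwards transport feedback from the RTP/RTCP module to an
// observer that can be installed or cleared at runtime. AddPacket() is
// expected on the pacer thread and OnTransportFeedback() on the network
// thread; the observer pointer itself is protected by a lock.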
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};

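// Proxy that hands out transport-wide sequence numbers on the pacer thread,
// delegating to an allocator that can be installed later. Returns 0 when no
// allocator has been set.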
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};

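// Proxy that forwards InsertPacket() calls to a pacer (RtpPacketSender) that
// can be attached or detached while the channel is running.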
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  bool HasPacketSender() const {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    return rtp_packet_sender_ != nullptr;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};

// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};

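// Bandwidth observer that turns incoming RTCP receiver reports into a single
// packet-loss figure for the owning Channel, weighting each report block by
// the number of packets it covers.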
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just send
    // what we get? I.e. do we ever get multiple reports bundled into one RTCP
    // report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};

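// Called by the ACM with an encoded audio frame; forwards the payload to the
// RTP/RTCP module for packetization and transmission.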
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}

int32_t Channel::OnRxVadDetected(int vadDecision) {
  rtc::CritScope cs(&_callbackCritSect);
  if (_rxVadObserverPtr) {
    _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
  }

  return 0;
}

bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnPlayTelephoneEvent(uint8_t event,
                                   uint16_t lengthMs,
                                   uint8_t volume) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnPlayTelephoneEvent(event=%u, lengthMs=%u,"
               " volume=%u)",
               event, lengthMs, volume);

  if (!_playOutbandDtmfEvent || (event > 15)) {
    // Ignore callback since feedback is disabled or event is not a
    // Dtmf tone event.
    return;
  }

  assert(_outputMixerPtr != NULL);

  // Start playing out the Dtmf tone (if playout is enabled).
  // Reduce the length of the tone by 80 ms to reduce the risk of echo.
  _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}

int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  // Update the packet delay.
  UpdatePacketDelay(rtpHeader->header.timestamp,
                    rtpHeader->header.sequenceNumber);

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

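// Pulls 10 ms of decoded audio from the ACM and runs the per-channel playout
// pipeline: optional receive-side APM, sink callback, gain/panning, file
// mixing, external media processing, recording and level measurement, plus
// elapsed/NTP timestamp bookkeeping.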
int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
      -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return -1;
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return 0;
}

int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine highest needed receive frequency
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case, if we're playing a file on the playout side
  // we take that frequency into consideration as well
  // This is not needed on sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (_outputFilePlayerPtr) {
      if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
        highestNeeded = _outputFilePlayerPtr->Frequency();
      }
    }
  }

  return (highestNeeded);
}

int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}

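// The constructor wires up the receive-side RTP components, creates the ACM
// (with NetEq capacity and fast-accelerate settings taken from |config|), the
// RTP/RTCP module and the receive-side AudioProcessing instance.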
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026;
      // we will never use as many as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
      _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add its own
                      // random offset
      _sendTelephoneEventPayloadType(106),
      ntp_estimator_(Clock::GetRealTimeClock()),
      jitter_buffer_playout_timestamp_(0),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      _mute(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _playOutbandDtmfEvent(false),
      _playInbandDtmfEvent(false),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _inbandDtmfQueue.ResetDtmf();
  _inbandDtmfGenerator.Init();
  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.audio_messages = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}

Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}

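// Init() registers the RTP/RTCP module with the process thread, initializes
// the ACM receiver, hooks up the permanent callbacks and registers all
// supported codecs on the receiving side.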
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
          (_rtpRtcpModule->RegisterSendPayload(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (audio_coding_->RegisterReceiveCodec(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}

int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
                                      OutputMixer& outputMixer,
                                      voe::TransmitMixer& transmitMixer,
                                      ProcessThread& moduleProcessThread,
                                      AudioDeviceModule& audioDeviceModule,
                                      VoiceEngineObserver* voiceEngineObserver,
                                      rtc::CriticalSection* callbackCritSect) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetEngineInformation()");
  _engineStatisticsPtr = &engineStatistics;
  _outputMixerPtr = &outputMixer;
  _transmitMixerPtr = &transmitMixer;
  _moduleProcessThreadPtr = &moduleProcessThread;
  _audioDeviceModulePtr = &audioDeviceModule;
  _voiceEngineObserverPtr = voiceEngineObserver;
  _callbackCritSectPtr = callbackCritSect;
  return 0;
}

int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}

void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}

int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}

int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}

int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}

int32_t Channel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopSend()");
  if (!channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(false);

  // Store the sequence number to be able to pick up the same sequence for
  // the next StartSend(). This is needed for restarting device, otherwise
  // it might cause libSRTP to complain about packets being replayed.
  // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
  // CL is landed. See issue
  // https://code.google.com/p/webrtc/issues/detail?id=2111 .
  send_sequence_number_ = _rtpRtcpModule->SequenceNumber();

  // Reset sending SSRC and sequence number and trigger direct transmission
  // of RTCP BYE
  if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "StopSend() RTP/RTCP failed to stop sending");
  }

  return 0;
}

int32_t Channel::StartReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartReceiving()");
  if (channel_state_.Get().receiving) {
    return 0;
  }
  channel_state_.SetReceiving(true);
  _numberOfDiscardedPackets = 0;
  return 0;
}

int32_t Channel::StopReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopReceiving()");
  if (!channel_state_.Get().receiving) {
    return 0;
  }

  channel_state_.SetReceiving(false);
  return 0;
}

int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}

int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}

int32_t Channel::GetSendCodec(CodecInst& codec) {
  auto send_codec = audio_coding_->SendCodec();
  if (send_codec) {
    codec = *send_codec;
    return 0;
  }
  return -1;
}

int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}

kwiberg55b97fe2016-01-28 05:22:45 -08001243int32_t Channel::SetSendCodec(const CodecInst& codec) {
1244 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1245 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001246
kwiberg55b97fe2016-01-28 05:22:45 -08001247 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1248 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1249 "SetSendCodec() failed to register codec to ACM");
1250 return -1;
1251 }
1252
1253 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1254 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1255 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1256 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1257 "SetSendCodec() failed to register codec to"
1258 " RTP/RTCP module");
1259 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001260 }
kwiberg55b97fe2016-01-28 05:22:45 -08001261 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001262
kwiberg55b97fe2016-01-28 05:22:45 -08001263 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1264 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1265 "SetSendCodec() failed to set audio packet size");
1266 return -1;
1267 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001268
kwiberg55b97fe2016-01-28 05:22:45 -08001269 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001270}
1271
Ivo Creusenadf89b72015-04-29 16:03:33 +02001272void Channel::SetBitRate(int bitrate_bps) {
1273 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1274 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1275 audio_coding_->SetBitRate(bitrate_bps);
1276}
1277
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001278void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001279 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001280 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1281
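  // fraction_lost is an 8-bit quantity (0-255) taken from RTCP receiver
  // reports; the scaling below converts the predictor's averaged value to an
  // approximate percentage expected by the ACM.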
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001282 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001283 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1284 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001285 assert(false); // This should not happen.
1286 }
1287}
1288
kwiberg55b97fe2016-01-28 05:22:45 -08001289int32_t Channel::SetVADStatus(bool enableVAD,
1290 ACMVADMode mode,
1291 bool disableDTX) {
1292 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1293 "Channel::SetVADStatus(mode=%d)", mode);
1294 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1295  // Disabling VAD requires that DTX is disabled as well.
1296 disableDTX = ((enableVAD == false) ? true : disableDTX);
1297 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) {
1298 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1299 kTraceError,
1300 "SetVADStatus() failed to set VAD");
1301 return -1;
1302 }
1303 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001304}
1305
kwiberg55b97fe2016-01-28 05:22:45 -08001306int32_t Channel::GetVADStatus(bool& enabledVAD,
1307 ACMVADMode& mode,
1308 bool& disabledDTX) {
1309 if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) {
1310 _engineStatisticsPtr->SetLastError(
1311 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1312 "GetVADStatus() failed to get VAD status");
1313 return -1;
1314 }
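  // audio_coding_->VAD() reports whether DTX is currently enabled; flip the
  // flag so the out-parameter matches the "disabled DTX" sense of this API.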
1315 disabledDTX = !disabledDTX;
1316 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001317}
1318
kwiberg55b97fe2016-01-28 05:22:45 -08001319int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1320 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1321 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001322
kwiberg55b97fe2016-01-28 05:22:45 -08001323 if (channel_state_.Get().playing) {
1324 _engineStatisticsPtr->SetLastError(
1325 VE_ALREADY_PLAYING, kTraceError,
1326 "SetRecPayloadType() unable to set PT while playing");
1327 return -1;
1328 }
1329 if (channel_state_.Get().receiving) {
1330 _engineStatisticsPtr->SetLastError(
1331 VE_ALREADY_LISTENING, kTraceError,
1332 "SetRecPayloadType() unable to set PT while listening");
1333 return -1;
1334 }
1335
1336 if (codec.pltype == -1) {
1337 // De-register the selected codec (RTP/RTCP module and ACM)
1338
1339 int8_t pltype(-1);
1340 CodecInst rxCodec = codec;
1341
1342 // Get payload type for the given codec
1343 rtp_payload_registry_->ReceivePayloadType(
1344 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1345 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1346 rxCodec.pltype = pltype;
1347
1348 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1349 _engineStatisticsPtr->SetLastError(
1350 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1351 "SetRecPayloadType() RTP/RTCP-module deregistration "
1352 "failed");
1353 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001354 }
kwiberg55b97fe2016-01-28 05:22:45 -08001355 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1356 _engineStatisticsPtr->SetLastError(
1357 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1358 "SetRecPayloadType() ACM deregistration failed - 1");
1359 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001360 }
kwiberg55b97fe2016-01-28 05:22:45 -08001361 return 0;
1362 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001363
kwiberg55b97fe2016-01-28 05:22:45 -08001364 if (rtp_receiver_->RegisterReceivePayload(
1365 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1366 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1367 // First attempt to register failed => de-register and try again
1368 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001369 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001370 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1371 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1372 _engineStatisticsPtr->SetLastError(
1373 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1374 "SetRecPayloadType() RTP/RTCP-module registration failed");
1375 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001376 }
kwiberg55b97fe2016-01-28 05:22:45 -08001377 }
1378 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1379 audio_coding_->UnregisterReceiveCodec(codec.pltype);
1380 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1381 _engineStatisticsPtr->SetLastError(
1382 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1383 "SetRecPayloadType() ACM registration failed - 1");
1384 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001385 }
kwiberg55b97fe2016-01-28 05:22:45 -08001386 }
1387 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001388}
1389
kwiberg55b97fe2016-01-28 05:22:45 -08001390int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1391 int8_t payloadType(-1);
1392 if (rtp_payload_registry_->ReceivePayloadType(
1393 codec.plname, codec.plfreq, codec.channels,
1394 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1395 _engineStatisticsPtr->SetLastError(
1396 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1397 "GetRecPayloadType() failed to retrieve RX payload type");
1398 return -1;
1399 }
1400 codec.pltype = payloadType;
1401 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001402}
1403
kwiberg55b97fe2016-01-28 05:22:45 -08001404int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1405 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1406 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001407
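  // Comfort noise (CN, RFC 3389): only 16 and 32 kHz are handled here; any
  // other frequency leaves samplingFreqHz at -1 so the codec lookup below
  // fails. The default CN codec description is fetched for the requested rate
  // and re-registered under the caller-supplied dynamic payload type.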
kwiberg55b97fe2016-01-28 05:22:45 -08001408 CodecInst codec;
1409 int32_t samplingFreqHz(-1);
1410 const size_t kMono = 1;
1411 if (frequency == kFreq32000Hz)
1412 samplingFreqHz = 32000;
1413 else if (frequency == kFreq16000Hz)
1414 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001415
kwiberg55b97fe2016-01-28 05:22:45 -08001416 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1417 _engineStatisticsPtr->SetLastError(
1418 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1419 "SetSendCNPayloadType() failed to retrieve default CN codec "
1420 "settings");
1421 return -1;
1422 }
1423
1424 // Modify the payload type (must be set to dynamic range)
1425 codec.pltype = type;
1426
1427 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1428 _engineStatisticsPtr->SetLastError(
1429 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1430 "SetSendCNPayloadType() failed to register CN to ACM");
1431 return -1;
1432 }
1433
1434 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1435 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1436 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1437 _engineStatisticsPtr->SetLastError(
1438 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1439 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1440 "module");
1441 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001442 }
kwiberg55b97fe2016-01-28 05:22:45 -08001443 }
1444 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001445}
1446
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001447int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001448 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001449 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001450
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001451 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001452 _engineStatisticsPtr->SetLastError(
1453 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001454 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001455 return -1;
1456 }
1457 return 0;
1458}
1459
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001460int Channel::SetOpusDtx(bool enable_dtx) {
1461 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1462 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001463 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001464 : audio_coding_->DisableOpusDtx();
1465 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001466 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1467 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001468 return -1;
1469 }
1470 return 0;
1471}
1472
kwiberg55b97fe2016-01-28 05:22:45 -08001473int32_t Channel::RegisterExternalTransport(Transport& transport) {
1474 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001475 "Channel::RegisterExternalTransport()");
1476
kwiberg55b97fe2016-01-28 05:22:45 -08001477 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001478
kwiberg55b97fe2016-01-28 05:22:45 -08001479 if (_externalTransport) {
1480 _engineStatisticsPtr->SetLastError(
1481 VE_INVALID_OPERATION, kTraceError,
1482 "RegisterExternalTransport() external transport already enabled");
1483 return -1;
1484 }
1485 _externalTransport = true;
1486 _transportPtr = &transport;
1487 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001488}
1489
kwiberg55b97fe2016-01-28 05:22:45 -08001490int32_t Channel::DeRegisterExternalTransport() {
1491 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1492 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001493
kwiberg55b97fe2016-01-28 05:22:45 -08001494 rtc::CritScope cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00001495
kwiberg55b97fe2016-01-28 05:22:45 -08001496 if (!_transportPtr) {
1497 _engineStatisticsPtr->SetLastError(
1498 VE_INVALID_OPERATION, kTraceWarning,
1499 "DeRegisterExternalTransport() external transport already "
1500 "disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00001501 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001502 }
1503 _externalTransport = false;
1504 _transportPtr = NULL;
1505 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1506               "DeRegisterExternalTransport() external transport is now disabled");
1507 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001508}
1509
kwiberg55b97fe2016-01-28 05:22:45 -08001510int32_t Channel::ReceivedRTPPacket(const int8_t* data,
1511 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001512 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001513 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001514 "Channel::ReceivedRTPPacket()");
1515
1516 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001517 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001518
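  // Parse the RTP header, update receive statistics and the payload type
  // registry, then hand the packet to ReceivePacket() (which also takes care
  // of RTX de-encapsulation when needed).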
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001519 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001520 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001521 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1522 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1523 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001524 return -1;
1525 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001526 header.payload_type_frequency =
1527 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001528 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001529 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001530 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001531 rtp_receive_statistics_->IncomingPacket(
1532 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001533 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001534
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001535 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001536}
1537
1538bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001539 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001540 const RTPHeader& header,
1541 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001542 if (rtp_payload_registry_->IsRtx(header)) {
1543 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001544 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001545 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001546 assert(packet_length >= header.headerLength);
1547 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001548 PayloadUnion payload_specific;
1549 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001550 &payload_specific)) {
1551 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001552 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001553 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1554 payload_specific, in_order);
1555}
1556
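// Restores the original media packet from an RTX (RFC 4588) retransmission and
// feeds it back through OnRecoveredPacket(). restored_packet_in_use_ guards
// against nested RTX headers.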
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001557bool Channel::HandleRtxPacket(const uint8_t* packet,
1558 size_t packet_length,
1559 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001560 if (!rtp_payload_registry_->IsRtx(header))
1561 return false;
1562
1563 // Remove the RTX header and parse the original RTP header.
1564 if (packet_length < header.headerLength)
1565 return false;
1566 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1567 return false;
1568 if (restored_packet_in_use_) {
1569 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1570 "Multiple RTX headers detected, dropping packet");
1571 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001572 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001573 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001574 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1575 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001576 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1577 "Incoming RTX packet: invalid RTP header");
1578 return false;
1579 }
1580 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001581 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001582 restored_packet_in_use_ = false;
1583 return ret;
1584}
1585
1586bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1587 StreamStatistician* statistician =
1588 rtp_receive_statistics_->GetStatistician(header.ssrc);
1589 if (!statistician)
1590 return false;
1591 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001592}
1593
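// Heuristic used for receive statistics: a packet counts as a retransmission
// if it arrives out of order and is older than the current minimum RTT allows.
// When RTX is enabled, retransmissions arrive on the RTX stream and this check
// is skipped.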
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001594bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1595 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001596 // Retransmissions are handled separately if RTX is enabled.
1597 if (rtp_payload_registry_->RtxEnabled())
1598 return false;
1599 StreamStatistician* statistician =
1600 rtp_receive_statistics_->GetStatistician(header.ssrc);
1601 if (!statistician)
1602 return false;
1603 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001604 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001605 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001606 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001607}
1608
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001609int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001610 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001611 "Channel::ReceivedRTCPPacket()");
1612 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001613 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001614
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001615 // Deliver RTCP packet to RTP/RTCP module for parsing
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001616 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001617 _engineStatisticsPtr->SetLastError(
1618 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1619        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
1620 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001621
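  // Once a valid RTT estimate and a sender-report NTP/RTP timestamp pair are
  // available, update the NTP estimator; it is used to translate remote RTP
  // timestamps into NTP time (e.g. for capture-time estimation and A/V sync).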
Minyue2013aec2015-05-13 14:14:42 +02001622 int64_t rtt = GetRTT(true);
1623 if (rtt == 0) {
1624 // Waiting for valid RTT.
1625 return 0;
1626 }
1627 uint32_t ntp_secs = 0;
1628 uint32_t ntp_frac = 0;
1629 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001630 if (0 !=
1631 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1632 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001633 // Waiting for RTCP.
1634 return 0;
1635 }
1636
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001637 {
tommi31fc21f2016-01-21 10:37:37 -08001638 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001639 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001640 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001641 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001642}
1643
niklase@google.com470e71d2011-07-07 08:21:25 +00001644int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001645 bool loop,
1646 FileFormats format,
1647 int startPosition,
1648 float volumeScaling,
1649 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001650 const CodecInst* codecInst) {
1651 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1652 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1653 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1654 "stopPosition=%d)",
1655 fileName, loop, format, volumeScaling, startPosition,
1656 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001657
kwiberg55b97fe2016-01-28 05:22:45 -08001658 if (channel_state_.Get().output_file_playing) {
1659 _engineStatisticsPtr->SetLastError(
1660 VE_ALREADY_PLAYING, kTraceError,
1661 "StartPlayingFileLocally() is already playing");
1662 return -1;
1663 }
1664
1665 {
1666 rtc::CritScope cs(&_fileCritSect);
1667
1668 if (_outputFilePlayerPtr) {
1669 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1670 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1671 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001672 }
1673
kwiberg55b97fe2016-01-28 05:22:45 -08001674 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1675 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001676
kwiberg55b97fe2016-01-28 05:22:45 -08001677 if (_outputFilePlayerPtr == NULL) {
1678 _engineStatisticsPtr->SetLastError(
1679 VE_INVALID_ARGUMENT, kTraceError,
1680 "StartPlayingFileLocally() filePlayer format is not correct");
1681 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001682 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001683
kwiberg55b97fe2016-01-28 05:22:45 -08001684 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001685
kwiberg55b97fe2016-01-28 05:22:45 -08001686 if (_outputFilePlayerPtr->StartPlayingFile(
1687 fileName, loop, startPosition, volumeScaling, notificationTime,
1688 stopPosition, (const CodecInst*)codecInst) != 0) {
1689 _engineStatisticsPtr->SetLastError(
1690 VE_BAD_FILE, kTraceError,
1691 "StartPlayingFile() failed to start file playout");
1692 _outputFilePlayerPtr->StopPlayingFile();
1693 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1694 _outputFilePlayerPtr = NULL;
1695 return -1;
1696 }
1697 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1698 channel_state_.SetOutputFilePlaying(true);
1699 }
1700
1701 if (RegisterFilePlayingToMixer() != 0)
1702 return -1;
1703
1704 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001705}
1706
1707int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001708 FileFormats format,
1709 int startPosition,
1710 float volumeScaling,
1711 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001712 const CodecInst* codecInst) {
1713 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1714 "Channel::StartPlayingFileLocally(format=%d,"
1715 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1716 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001717
kwiberg55b97fe2016-01-28 05:22:45 -08001718 if (stream == NULL) {
1719 _engineStatisticsPtr->SetLastError(
1720 VE_BAD_FILE, kTraceError,
1721 "StartPlayingFileLocally() NULL as input stream");
1722 return -1;
1723 }
1724
1725 if (channel_state_.Get().output_file_playing) {
1726 _engineStatisticsPtr->SetLastError(
1727 VE_ALREADY_PLAYING, kTraceError,
1728 "StartPlayingFileLocally() is already playing");
1729 return -1;
1730 }
1731
1732 {
1733 rtc::CritScope cs(&_fileCritSect);
1734
1735 // Destroy the old instance
1736 if (_outputFilePlayerPtr) {
1737 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1738 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1739 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001740 }
1741
kwiberg55b97fe2016-01-28 05:22:45 -08001742 // Create the instance
1743 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1744 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001745
kwiberg55b97fe2016-01-28 05:22:45 -08001746 if (_outputFilePlayerPtr == NULL) {
1747 _engineStatisticsPtr->SetLastError(
1748 VE_INVALID_ARGUMENT, kTraceError,
1749          "StartPlayingFileLocally() filePlayer format is not correct");
1750 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001751 }
1752
kwiberg55b97fe2016-01-28 05:22:45 -08001753 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001754
kwiberg55b97fe2016-01-28 05:22:45 -08001755 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1756 volumeScaling, notificationTime,
1757 stopPosition, codecInst) != 0) {
1758 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1759 "StartPlayingFile() failed to "
1760 "start file playout");
1761 _outputFilePlayerPtr->StopPlayingFile();
1762 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1763 _outputFilePlayerPtr = NULL;
1764 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001765 }
kwiberg55b97fe2016-01-28 05:22:45 -08001766 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1767 channel_state_.SetOutputFilePlaying(true);
1768 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001769
kwiberg55b97fe2016-01-28 05:22:45 -08001770 if (RegisterFilePlayingToMixer() != 0)
1771 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001772
kwiberg55b97fe2016-01-28 05:22:45 -08001773 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001774}
1775
kwiberg55b97fe2016-01-28 05:22:45 -08001776int Channel::StopPlayingFileLocally() {
1777 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1778 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001779
kwiberg55b97fe2016-01-28 05:22:45 -08001780 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001781 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001782 }
1783
1784 {
1785 rtc::CritScope cs(&_fileCritSect);
1786
1787 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1788 _engineStatisticsPtr->SetLastError(
1789 VE_STOP_RECORDING_FAILED, kTraceError,
1790 "StopPlayingFile() could not stop playing");
1791 return -1;
1792 }
1793 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1794 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1795 _outputFilePlayerPtr = NULL;
1796 channel_state_.SetOutputFilePlaying(false);
1797 }
1798 // _fileCritSect cannot be taken while calling
1799  // SetAnonymousMixabilityStatus. Refer to comments in
1800 // StartPlayingFileLocally(const char* ...) for more details.
1801 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1802 _engineStatisticsPtr->SetLastError(
1803 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1804        "StopPlayingFile() failed to stop participant from playing as "
1805        "file in the mixer");
1806 return -1;
1807 }
1808
1809 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001810}
1811
kwiberg55b97fe2016-01-28 05:22:45 -08001812int Channel::IsPlayingFileLocally() const {
1813 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001814}
1815
kwiberg55b97fe2016-01-28 05:22:45 -08001816int Channel::RegisterFilePlayingToMixer() {
1817  // Return success without registering file playing with the mixer if:
1818 // 1. playing file before playout is started on that channel.
1819 // 2. starting playout without file playing on that channel.
1820 if (!channel_state_.Get().playing ||
1821 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001822 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001823 }
1824
1825 // |_fileCritSect| cannot be taken while calling
1826 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1827 // frames can be pulled by the mixer. Since the frames are generated from
1828 // the file, _fileCritSect will be taken. This would result in a deadlock.
1829 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1830 channel_state_.SetOutputFilePlaying(false);
1831 rtc::CritScope cs(&_fileCritSect);
1832 _engineStatisticsPtr->SetLastError(
1833 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1834 "StartPlayingFile() failed to add participant as file to mixer");
1835 _outputFilePlayerPtr->StopPlayingFile();
1836 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1837 _outputFilePlayerPtr = NULL;
1838 return -1;
1839 }
1840
1841 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001842}
1843
niklase@google.com470e71d2011-07-07 08:21:25 +00001844int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001845 bool loop,
1846 FileFormats format,
1847 int startPosition,
1848 float volumeScaling,
1849 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001850 const CodecInst* codecInst) {
1851 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1852 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1853 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1854 "stopPosition=%d)",
1855 fileName, loop, format, volumeScaling, startPosition,
1856 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001857
kwiberg55b97fe2016-01-28 05:22:45 -08001858 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001859
kwiberg55b97fe2016-01-28 05:22:45 -08001860 if (channel_state_.Get().input_file_playing) {
1861 _engineStatisticsPtr->SetLastError(
1862 VE_ALREADY_PLAYING, kTraceWarning,
1863 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001864 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001865 }
1866
1867 // Destroy the old instance
1868 if (_inputFilePlayerPtr) {
1869 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1870 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1871 _inputFilePlayerPtr = NULL;
1872 }
1873
1874 // Create the instance
1875 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1876 (const FileFormats)format);
1877
1878 if (_inputFilePlayerPtr == NULL) {
1879 _engineStatisticsPtr->SetLastError(
1880 VE_INVALID_ARGUMENT, kTraceError,
1881        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
1882 return -1;
1883 }
1884
1885 const uint32_t notificationTime(0);
1886
1887 if (_inputFilePlayerPtr->StartPlayingFile(
1888 fileName, loop, startPosition, volumeScaling, notificationTime,
1889 stopPosition, (const CodecInst*)codecInst) != 0) {
1890 _engineStatisticsPtr->SetLastError(
1891 VE_BAD_FILE, kTraceError,
1892 "StartPlayingFile() failed to start file playout");
1893 _inputFilePlayerPtr->StopPlayingFile();
1894 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1895 _inputFilePlayerPtr = NULL;
1896 return -1;
1897 }
1898 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1899 channel_state_.SetInputFilePlaying(true);
1900
1901 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001902}
1903
1904int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001905 FileFormats format,
1906 int startPosition,
1907 float volumeScaling,
1908 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001909 const CodecInst* codecInst) {
1910 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1911 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1912 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1913 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001914
kwiberg55b97fe2016-01-28 05:22:45 -08001915 if (stream == NULL) {
1916 _engineStatisticsPtr->SetLastError(
1917 VE_BAD_FILE, kTraceError,
1918 "StartPlayingFileAsMicrophone NULL as input stream");
1919 return -1;
1920 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001921
kwiberg55b97fe2016-01-28 05:22:45 -08001922 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001923
kwiberg55b97fe2016-01-28 05:22:45 -08001924 if (channel_state_.Get().input_file_playing) {
1925 _engineStatisticsPtr->SetLastError(
1926 VE_ALREADY_PLAYING, kTraceWarning,
1927 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001928 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001929 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001930
kwiberg55b97fe2016-01-28 05:22:45 -08001931 // Destroy the old instance
1932 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001933 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1934 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1935 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001936 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001937
kwiberg55b97fe2016-01-28 05:22:45 -08001938 // Create the instance
1939 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1940 (const FileFormats)format);
1941
1942 if (_inputFilePlayerPtr == NULL) {
1943 _engineStatisticsPtr->SetLastError(
1944 VE_INVALID_ARGUMENT, kTraceError,
1945        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
1946 return -1;
1947 }
1948
1949 const uint32_t notificationTime(0);
1950
1951 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1952 volumeScaling, notificationTime,
1953 stopPosition, codecInst) != 0) {
1954 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1955 "StartPlayingFile() failed to start "
1956 "file playout");
1957 _inputFilePlayerPtr->StopPlayingFile();
1958 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1959 _inputFilePlayerPtr = NULL;
1960 return -1;
1961 }
1962
1963 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1964 channel_state_.SetInputFilePlaying(true);
1965
1966 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001967}
1968
kwiberg55b97fe2016-01-28 05:22:45 -08001969int Channel::StopPlayingFileAsMicrophone() {
1970 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1971 "Channel::StopPlayingFileAsMicrophone()");
1972
1973 rtc::CritScope cs(&_fileCritSect);
1974
1975 if (!channel_state_.Get().input_file_playing) {
1976 return 0;
1977 }
1978
1979 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1980 _engineStatisticsPtr->SetLastError(
1981 VE_STOP_RECORDING_FAILED, kTraceError,
1982 "StopPlayingFile() could not stop playing");
1983 return -1;
1984 }
1985 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1986 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1987 _inputFilePlayerPtr = NULL;
1988 channel_state_.SetInputFilePlaying(false);
1989
1990 return 0;
1991}
1992
1993int Channel::IsPlayingFileAsMicrophone() const {
1994 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001995}
1996
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00001997int Channel::StartRecordingPlayout(const char* fileName,
kwiberg55b97fe2016-01-28 05:22:45 -08001998 const CodecInst* codecInst) {
1999 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2000 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
niklase@google.com470e71d2011-07-07 08:21:25 +00002001
kwiberg55b97fe2016-01-28 05:22:45 -08002002 if (_outputFileRecording) {
2003 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2004 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002005 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002006 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002007
kwiberg55b97fe2016-01-28 05:22:45 -08002008 FileFormats format;
2009 const uint32_t notificationTime(0); // Not supported in VoE
2010 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
niklase@google.com470e71d2011-07-07 08:21:25 +00002011
kwiberg55b97fe2016-01-28 05:22:45 -08002012 if ((codecInst != NULL) &&
2013 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2014 _engineStatisticsPtr->SetLastError(
2015 VE_BAD_ARGUMENT, kTraceError,
2016 "StartRecordingPlayout() invalid compression");
2017 return (-1);
2018 }
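  // Pick the output container from the codec name: L16/PCMU/PCMA are written
  // as WAV, anything else goes into a "compressed" file. When no codec is
  // given, a 16 kHz L16 dummy codec and a raw PCM file format are used.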
2019 if (codecInst == NULL) {
2020 format = kFileFormatPcm16kHzFile;
2021 codecInst = &dummyCodec;
2022 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2023 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2024 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2025 format = kFileFormatWavFile;
2026 } else {
2027 format = kFileFormatCompressedFile;
2028 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002029
kwiberg55b97fe2016-01-28 05:22:45 -08002030 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002031
kwiberg55b97fe2016-01-28 05:22:45 -08002032 // Destroy the old instance
2033 if (_outputFileRecorderPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002034 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2035 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2036 _outputFileRecorderPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002037 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002038
kwiberg55b97fe2016-01-28 05:22:45 -08002039 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2040 _outputFileRecorderId, (const FileFormats)format);
2041 if (_outputFileRecorderPtr == NULL) {
2042 _engineStatisticsPtr->SetLastError(
2043 VE_INVALID_ARGUMENT, kTraceError,
2044        "StartRecordingPlayout() fileRecorder format is not correct");
2045 return -1;
2046 }
2047
2048 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2049 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2050 _engineStatisticsPtr->SetLastError(
2051 VE_BAD_FILE, kTraceError,
2052 "StartRecordingAudioFile() failed to start file recording");
2053 _outputFileRecorderPtr->StopRecording();
2054 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2055 _outputFileRecorderPtr = NULL;
2056 return -1;
2057 }
2058 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2059 _outputFileRecording = true;
2060
2061 return 0;
2062}
2063
2064int Channel::StartRecordingPlayout(OutStream* stream,
2065 const CodecInst* codecInst) {
2066 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2067 "Channel::StartRecordingPlayout()");
2068
2069 if (_outputFileRecording) {
2070 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2071 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002072 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002073 }
2074
2075 FileFormats format;
2076 const uint32_t notificationTime(0); // Not supported in VoE
2077 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2078
2079 if (codecInst != NULL && codecInst->channels != 1) {
2080 _engineStatisticsPtr->SetLastError(
2081 VE_BAD_ARGUMENT, kTraceError,
2082 "StartRecordingPlayout() invalid compression");
2083 return (-1);
2084 }
2085 if (codecInst == NULL) {
2086 format = kFileFormatPcm16kHzFile;
2087 codecInst = &dummyCodec;
2088 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2089 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2090 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2091 format = kFileFormatWavFile;
2092 } else {
2093 format = kFileFormatCompressedFile;
2094 }
2095
2096 rtc::CritScope cs(&_fileCritSect);
2097
2098 // Destroy the old instance
2099 if (_outputFileRecorderPtr) {
2100 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2101 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2102 _outputFileRecorderPtr = NULL;
2103 }
2104
2105 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2106 _outputFileRecorderId, (const FileFormats)format);
2107 if (_outputFileRecorderPtr == NULL) {
2108 _engineStatisticsPtr->SetLastError(
2109 VE_INVALID_ARGUMENT, kTraceError,
2110        "StartRecordingPlayout() fileRecorder format is not correct");
2111 return -1;
2112 }
2113
2114 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2115 notificationTime) != 0) {
2116 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2117 "StartRecordingPlayout() failed to "
2118 "start file recording");
2119 _outputFileRecorderPtr->StopRecording();
2120 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2121 _outputFileRecorderPtr = NULL;
2122 return -1;
2123 }
2124
2125 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2126 _outputFileRecording = true;
2127
2128 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002129}
2130
kwiberg55b97fe2016-01-28 05:22:45 -08002131int Channel::StopRecordingPlayout() {
2132 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2133 "Channel::StopRecordingPlayout()");
2134
2135 if (!_outputFileRecording) {
2136 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2137                 "StopRecordingPlayout() is not recording");
2138 return -1;
2139 }
2140
2141 rtc::CritScope cs(&_fileCritSect);
2142
2143 if (_outputFileRecorderPtr->StopRecording() != 0) {
2144 _engineStatisticsPtr->SetLastError(
2145 VE_STOP_RECORDING_FAILED, kTraceError,
2146 "StopRecording() could not stop recording");
2147 return (-1);
2148 }
2149 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2150 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2151 _outputFileRecorderPtr = NULL;
2152 _outputFileRecording = false;
2153
2154 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002155}
2156
kwiberg55b97fe2016-01-28 05:22:45 -08002157void Channel::SetMixWithMicStatus(bool mix) {
2158 rtc::CritScope cs(&_fileCritSect);
2159 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002160}
2161
kwiberg55b97fe2016-01-28 05:22:45 -08002162int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2163 int8_t currentLevel = _outputAudioLevel.Level();
2164 level = static_cast<int32_t>(currentLevel);
2165 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002166}
2167
kwiberg55b97fe2016-01-28 05:22:45 -08002168int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2169 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2170 level = static_cast<int32_t>(currentLevel);
2171 return 0;
2172}
2173
2174int Channel::SetMute(bool enable) {
2175 rtc::CritScope cs(&volume_settings_critsect_);
2176 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002177 "Channel::SetMute(enable=%d)", enable);
kwiberg55b97fe2016-01-28 05:22:45 -08002178 _mute = enable;
2179 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002180}
2181
kwiberg55b97fe2016-01-28 05:22:45 -08002182bool Channel::Mute() const {
2183 rtc::CritScope cs(&volume_settings_critsect_);
2184 return _mute;
niklase@google.com470e71d2011-07-07 08:21:25 +00002185}
2186
kwiberg55b97fe2016-01-28 05:22:45 -08002187int Channel::SetOutputVolumePan(float left, float right) {
2188 rtc::CritScope cs(&volume_settings_critsect_);
2189 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002190 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002191 _panLeft = left;
2192 _panRight = right;
2193 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002194}
2195
kwiberg55b97fe2016-01-28 05:22:45 -08002196int Channel::GetOutputVolumePan(float& left, float& right) const {
2197 rtc::CritScope cs(&volume_settings_critsect_);
2198 left = _panLeft;
2199 right = _panRight;
2200 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002201}
2202
kwiberg55b97fe2016-01-28 05:22:45 -08002203int Channel::SetChannelOutputVolumeScaling(float scaling) {
2204 rtc::CritScope cs(&volume_settings_critsect_);
2205 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002206 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002207 _outputGain = scaling;
2208 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002209}
2210
kwiberg55b97fe2016-01-28 05:22:45 -08002211int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2212 rtc::CritScope cs(&volume_settings_critsect_);
2213 scaling = _outputGain;
2214 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002215}
2216
niklase@google.com470e71d2011-07-07 08:21:25 +00002217int Channel::SendTelephoneEventOutband(unsigned char eventCode,
kwiberg55b97fe2016-01-28 05:22:45 -08002218 int lengthMs,
2219 int attenuationDb,
2220 bool playDtmfEvent) {
2221 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002222 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2223 playDtmfEvent);
kwiberg55b97fe2016-01-28 05:22:45 -08002224 if (!Sending()) {
2225 return -1;
2226 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002227
kwiberg55b97fe2016-01-28 05:22:45 -08002228 _playOutbandDtmfEvent = playDtmfEvent;
niklase@google.com470e71d2011-07-07 08:21:25 +00002229
kwiberg55b97fe2016-01-28 05:22:45 -08002230 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2231 attenuationDb) != 0) {
2232 _engineStatisticsPtr->SetLastError(
2233 VE_SEND_DTMF_FAILED, kTraceWarning,
2234 "SendTelephoneEventOutband() failed to send event");
2235 return -1;
2236 }
2237 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002238}
2239
2240int Channel::SendTelephoneEventInband(unsigned char eventCode,
kwiberg55b97fe2016-01-28 05:22:45 -08002241 int lengthMs,
2242 int attenuationDb,
2243 bool playDtmfEvent) {
2244 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002245 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2246 playDtmfEvent);
2247
kwiberg55b97fe2016-01-28 05:22:45 -08002248 _playInbandDtmfEvent = playDtmfEvent;
2249 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
niklase@google.com470e71d2011-07-07 08:21:25 +00002250
kwiberg55b97fe2016-01-28 05:22:45 -08002251 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002252}
2253
kwiberg55b97fe2016-01-28 05:22:45 -08002254int Channel::SetSendTelephoneEventPayloadType(unsigned char type) {
2255 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002256 "Channel::SetSendTelephoneEventPayloadType()");
kwiberg55b97fe2016-01-28 05:22:45 -08002257 if (type > 127) {
2258 _engineStatisticsPtr->SetLastError(
2259 VE_INVALID_ARGUMENT, kTraceError,
2260 "SetSendTelephoneEventPayloadType() invalid type");
2261 return -1;
2262 }
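  // telephone-event (RFC 4733) always uses an 8 kHz RTP clock rate; only the
  // payload type number is configurable.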
2263 CodecInst codec = {};
2264 codec.plfreq = 8000;
2265 codec.pltype = type;
2266 memcpy(codec.plname, "telephone-event", 16);
2267 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2268 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2269 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2270 _engineStatisticsPtr->SetLastError(
2271 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2272          "SetSendTelephoneEventPayloadType() failed to register send "
2273 "payload type");
2274 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002275 }
kwiberg55b97fe2016-01-28 05:22:45 -08002276 }
2277 _sendTelephoneEventPayloadType = type;
2278 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002279}
2280
kwiberg55b97fe2016-01-28 05:22:45 -08002281int Channel::GetSendTelephoneEventPayloadType(unsigned char& type) {
2282 type = _sendTelephoneEventPayloadType;
2283 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002284}
2285
kwiberg55b97fe2016-01-28 05:22:45 -08002286int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2287 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2288 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002289
kwiberg55b97fe2016-01-28 05:22:45 -08002290 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002291
kwiberg55b97fe2016-01-28 05:22:45 -08002292 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002293
kwiberg55b97fe2016-01-28 05:22:45 -08002294 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2295 OnRxVadDetected(vadDecision);
2296 _oldVadDecision = vadDecision;
2297 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002298
kwiberg55b97fe2016-01-28 05:22:45 -08002299 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2300 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2301 vadDecision);
2302 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002303}
2304
kwiberg55b97fe2016-01-28 05:22:45 -08002305int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2306 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2307 "Channel::RegisterRxVadObserver()");
2308 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002309
kwiberg55b97fe2016-01-28 05:22:45 -08002310 if (_rxVadObserverPtr) {
2311 _engineStatisticsPtr->SetLastError(
2312 VE_INVALID_OPERATION, kTraceError,
2313 "RegisterRxVadObserver() observer already enabled");
2314 return -1;
2315 }
2316 _rxVadObserverPtr = &observer;
2317 _RxVadDetection = true;
2318 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002319}
2320
kwiberg55b97fe2016-01-28 05:22:45 -08002321int Channel::DeRegisterRxVadObserver() {
2322 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2323 "Channel::DeRegisterRxVadObserver()");
2324 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002325
kwiberg55b97fe2016-01-28 05:22:45 -08002326 if (!_rxVadObserverPtr) {
2327 _engineStatisticsPtr->SetLastError(
2328 VE_INVALID_OPERATION, kTraceWarning,
2329 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002330 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002331 }
2332 _rxVadObserverPtr = NULL;
2333 _RxVadDetection = false;
2334 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002335}
2336
kwiberg55b97fe2016-01-28 05:22:45 -08002337int Channel::VoiceActivityIndicator(int& activity) {
2338 activity = _sendFrameType;
2339 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002340}
2341
2342#ifdef WEBRTC_VOICE_ENGINE_AGC
2343
kwiberg55b97fe2016-01-28 05:22:45 -08002344int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2345 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2346 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2347 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002348
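  // Map the VoE AGC mode onto the corresponding AudioProcessing GainControl
  // mode; kAgcUnchanged keeps whatever mode the receive-side APM already uses.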
kwiberg55b97fe2016-01-28 05:22:45 -08002349 GainControl::Mode agcMode = kDefaultRxAgcMode;
2350 switch (mode) {
2351 case kAgcDefault:
2352 break;
2353 case kAgcUnchanged:
2354 agcMode = rx_audioproc_->gain_control()->mode();
2355 break;
2356 case kAgcFixedDigital:
2357 agcMode = GainControl::kFixedDigital;
2358 break;
2359 case kAgcAdaptiveDigital:
2360 agcMode = GainControl::kAdaptiveDigital;
2361 break;
2362 default:
2363 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2364 "SetRxAgcStatus() invalid Agc mode");
2365 return -1;
2366 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002367
kwiberg55b97fe2016-01-28 05:22:45 -08002368 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2369 _engineStatisticsPtr->SetLastError(
2370 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2371 return -1;
2372 }
2373 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2374 _engineStatisticsPtr->SetLastError(
2375 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2376 return -1;
2377 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002378
kwiberg55b97fe2016-01-28 05:22:45 -08002379 _rxAgcIsEnabled = enable;
2380 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002381
kwiberg55b97fe2016-01-28 05:22:45 -08002382 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002383}
2384
kwiberg55b97fe2016-01-28 05:22:45 -08002385int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2386 bool enable = rx_audioproc_->gain_control()->is_enabled();
2387 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002388
kwiberg55b97fe2016-01-28 05:22:45 -08002389 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002390
kwiberg55b97fe2016-01-28 05:22:45 -08002391 switch (agcMode) {
2392 case GainControl::kFixedDigital:
2393 mode = kAgcFixedDigital;
2394 break;
2395 case GainControl::kAdaptiveDigital:
2396 mode = kAgcAdaptiveDigital;
2397 break;
2398 default:
2399 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2400 "GetRxAgcStatus() invalid Agc mode");
2401 return -1;
2402 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002403
kwiberg55b97fe2016-01-28 05:22:45 -08002404 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002405}
2406
kwiberg55b97fe2016-01-28 05:22:45 -08002407int Channel::SetRxAgcConfig(AgcConfig config) {
2408 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2409 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002410
kwiberg55b97fe2016-01-28 05:22:45 -08002411 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2412 config.targetLeveldBOv) != 0) {
2413 _engineStatisticsPtr->SetLastError(
2414 VE_APM_ERROR, kTraceError,
2415        "SetRxAgcConfig() failed to set target peak |level| "
2416 "(or envelope) of the Agc");
2417 return -1;
2418 }
2419 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2420 config.digitalCompressionGaindB) != 0) {
2421 _engineStatisticsPtr->SetLastError(
2422 VE_APM_ERROR, kTraceError,
2423 "SetRxAgcConfig() failed to set the range in |gain| the"
2424 " digital compression stage may apply");
2425 return -1;
2426 }
2427 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2428 0) {
2429 _engineStatisticsPtr->SetLastError(
2430 VE_APM_ERROR, kTraceError,
2431        "SetRxAgcConfig() failed to enable the hard limiter on the signal");
2432 return -1;
2433 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002434
kwiberg55b97fe2016-01-28 05:22:45 -08002435 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002436}
2437
kwiberg55b97fe2016-01-28 05:22:45 -08002438int Channel::GetRxAgcConfig(AgcConfig& config) {
2439 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2440 config.digitalCompressionGaindB =
2441 rx_audioproc_->gain_control()->compression_gain_db();
2442 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002443
kwiberg55b97fe2016-01-28 05:22:45 -08002444 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002445}
2446
kwiberg55b97fe2016-01-28 05:22:45 -08002447#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002448
2449#ifdef WEBRTC_VOICE_ENGINE_NR
2450
kwiberg55b97fe2016-01-28 05:22:45 -08002451int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2452 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2453 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2454 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002455
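  // Map the VoE NS mode onto an AudioProcessing NoiseSuppression level;
  // kNsConference is treated the same as kNsHighSuppression here.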
kwiberg55b97fe2016-01-28 05:22:45 -08002456 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2457 switch (mode) {
2458 case kNsDefault:
2459 break;
2460 case kNsUnchanged:
2461 nsLevel = rx_audioproc_->noise_suppression()->level();
2462 break;
2463 case kNsConference:
2464 nsLevel = NoiseSuppression::kHigh;
2465 break;
2466 case kNsLowSuppression:
2467 nsLevel = NoiseSuppression::kLow;
2468 break;
2469 case kNsModerateSuppression:
2470 nsLevel = NoiseSuppression::kModerate;
2471 break;
2472 case kNsHighSuppression:
2473 nsLevel = NoiseSuppression::kHigh;
2474 break;
2475 case kNsVeryHighSuppression:
2476 nsLevel = NoiseSuppression::kVeryHigh;
2477 break;
2478 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002479
kwiberg55b97fe2016-01-28 05:22:45 -08002480 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2481 _engineStatisticsPtr->SetLastError(
2482 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2483 return -1;
2484 }
2485 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2486 _engineStatisticsPtr->SetLastError(
2487 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2488 return -1;
2489 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002490
kwiberg55b97fe2016-01-28 05:22:45 -08002491 _rxNsIsEnabled = enable;
2492 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002493
kwiberg55b97fe2016-01-28 05:22:45 -08002494 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002495}
2496
kwiberg55b97fe2016-01-28 05:22:45 -08002497int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2498 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2499 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002500
kwiberg55b97fe2016-01-28 05:22:45 -08002501 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002502
kwiberg55b97fe2016-01-28 05:22:45 -08002503 switch (ncLevel) {
2504 case NoiseSuppression::kLow:
2505 mode = kNsLowSuppression;
2506 break;
2507 case NoiseSuppression::kModerate:
2508 mode = kNsModerateSuppression;
2509 break;
2510 case NoiseSuppression::kHigh:
2511 mode = kNsHighSuppression;
2512 break;
2513 case NoiseSuppression::kVeryHigh:
2514 mode = kNsVeryHighSuppression;
2515 break;
2516 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002517
kwiberg55b97fe2016-01-28 05:22:45 -08002518 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002519}
2520
kwiberg55b97fe2016-01-28 05:22:45 -08002521#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002522
kwiberg55b97fe2016-01-28 05:22:45 -08002523int Channel::SetLocalSSRC(unsigned int ssrc) {
2524 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2525 "Channel::SetLocalSSRC()");
2526 if (channel_state_.Get().sending) {
2527 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2528 "SetLocalSSRC() already sending");
2529 return -1;
2530 }
2531 _rtpRtcpModule->SetSSRC(ssrc);
2532 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002533}
2534
kwiberg55b97fe2016-01-28 05:22:45 -08002535int Channel::GetLocalSSRC(unsigned int& ssrc) {
2536 ssrc = _rtpRtcpModule->SSRC();
2537 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002538}
2539
kwiberg55b97fe2016-01-28 05:22:45 -08002540int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2541 ssrc = rtp_receiver_->SSRC();
2542 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002543}
2544
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002545int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002546 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002547 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002548}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002549
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002550int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2551 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002552 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2553 if (enable &&
2554 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2555 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002556 return -1;
2557 }
2558 return 0;
2559}
2560
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002561int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2562 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2563}
2564
2565int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2566 rtp_header_parser_->DeregisterRtpHeaderExtension(
2567 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002568 if (enable &&
2569 !rtp_header_parser_->RegisterRtpHeaderExtension(
2570 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002571 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002572 }
2573 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002574}
2575
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002576void Channel::EnableSendTransportSequenceNumber(int id) {
2577 int ret =
2578 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2579 RTC_DCHECK_EQ(0, ret);
2580}
2581
stefan3313ec92016-01-21 06:32:43 -08002582void Channel::EnableReceiveTransportSequenceNumber(int id) {
2583 rtp_header_parser_->DeregisterRtpHeaderExtension(
2584 kRtpExtensionTransportSequenceNumber);
2585 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2586 kRtpExtensionTransportSequenceNumber, id);
2587 RTC_DCHECK(ret);
2588}
2589
stefanbba9dec2016-02-01 04:39:55 -08002590void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002591 RtpPacketSender* rtp_packet_sender,
2592 TransportFeedbackObserver* transport_feedback_observer,
2593 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002594 RTC_DCHECK(rtp_packet_sender);
2595 RTC_DCHECK(transport_feedback_observer);
2596 RTC_DCHECK(packet_router && !packet_router_);
2597 feedback_observer_proxy_->SetTransportFeedbackObserver(
2598 transport_feedback_observer);
2599 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2600 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
2601 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
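  // Storing sent packets (a history of 600 here) keeps them available for
  // retransmission, e.g. when a NACK arrives for one of them.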
2602 packet_router->AddRtpModule(_rtpRtcpModule.get(), true);
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002603 packet_router_ = packet_router;
2604}
2605
stefanbba9dec2016-02-01 04:39:55 -08002606void Channel::RegisterReceiverCongestionControlObjects(
2607 PacketRouter* packet_router) {
2608 RTC_DCHECK(packet_router && !packet_router_);
2609 packet_router->AddRtpModule(_rtpRtcpModule.get(), false);
2610 packet_router_ = packet_router;
2611}
2612
2613void Channel::ResetCongestionControlObjects() {
2614 RTC_DCHECK(packet_router_);
2615 _rtpRtcpModule->SetStorePacketsStatus(false, 600);
2616 feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
2617 seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
2618 const bool sender = rtp_packet_sender_proxy_->HasPacketSender();
2619 packet_router_->RemoveRtpModule(_rtpRtcpModule.get(), sender);
2620 packet_router_ = nullptr;
2621 rtp_packet_sender_proxy_->SetPacketSender(nullptr);
2622}
2623
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002624void Channel::SetRTCPStatus(bool enable) {
2625 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2626 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002627 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002628}
2629
kwiberg55b97fe2016-01-28 05:22:45 -08002630int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002631 RtcpMode method = _rtpRtcpModule->RTCP();
2632 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002633 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002634}
2635
kwiberg55b97fe2016-01-28 05:22:45 -08002636int Channel::SetRTCP_CNAME(const char cName[256]) {
2637 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2638 "Channel::SetRTCP_CNAME()");
2639 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2640 _engineStatisticsPtr->SetLastError(
2641 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2642 "SetRTCP_CNAME() failed to set RTCP CNAME");
2643 return -1;
2644 }
2645 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002646}
2647
kwiberg55b97fe2016-01-28 05:22:45 -08002648int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2649 if (cName == NULL) {
2650 _engineStatisticsPtr->SetLastError(
2651 VE_INVALID_ARGUMENT, kTraceError,
2652 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2653 return -1;
2654 }
2655 char cname[RTCP_CNAME_SIZE];
2656 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2657 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2658 _engineStatisticsPtr->SetLastError(
2659 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2660 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2661 return -1;
2662 }
2663 strcpy(cName, cname);
2664 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002665}
2666
kwiberg55b97fe2016-01-28 05:22:45 -08002667int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2668 unsigned int& NTPLow,
2669 unsigned int& timestamp,
2670 unsigned int& playoutTimestamp,
2671 unsigned int* jitter,
2672 unsigned short* fractionLost) {
2673 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002674
kwiberg55b97fe2016-01-28 05:22:45 -08002675 RTCPSenderInfo senderInfo;
2676 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2677 _engineStatisticsPtr->SetLastError(
2678 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2679 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2680 "side");
2681 return -1;
2682 }
2683
2684 // We only use 12 of the 20 bytes in the sender info (the packet and
2685 // octet counts are ignored).
2686 NTPHigh = senderInfo.NTPseconds;
2687 NTPLow = senderInfo.NTPfraction;
2688 timestamp = senderInfo.RTPtimeStamp;
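  // NTPseconds/NTPfraction together form the 64-bit NTP timestamp from the
  // sender report: whole seconds plus a fraction in units of 1/2^32 seconds
  // (RFC 3550, section 6.4.1).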
2689
2690 // --- Locally derived information
2691
2692 // This value is updated on each incoming RTCP packet (0 when no packet
2693 // has been received)
2694 playoutTimestamp = playout_timestamp_rtcp_;
2695
2696 if (NULL != jitter || NULL != fractionLost) {
2697 // Get all RTCP receiver report blocks that have been received on this
2698 // channel. If we receive RTP packets from a remote source we know the
2699 // remote SSRC and use the report block from that source.
2700 // Otherwise use the first report block.
2701 std::vector<RTCPReportBlock> remote_stats;
2702 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2703 remote_stats.empty()) {
2704 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2705 "GetRemoteRTCPData() failed to measure statistics due"
2706 " to lack of received RTP and/or RTCP packets");
2707 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002708 }
2709
kwiberg55b97fe2016-01-28 05:22:45 -08002710 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2711 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2712 for (; it != remote_stats.end(); ++it) {
2713 if (it->remoteSSRC == remoteSSRC)
2714 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002715 }
kwiberg55b97fe2016-01-28 05:22:45 -08002716
2717 if (it == remote_stats.end()) {
2718 // If we have not received any RTCP packets from this SSRC it probably
2719 // means that we have not received any RTP packets.
2720 // Use the first received report block instead.
2721 it = remote_stats.begin();
2722 remoteSSRC = it->remoteSSRC;
2723 }
2724
2725 if (jitter) {
2726 *jitter = it->jitter;
2727 }
2728
2729 if (fractionLost) {
2730 *fractionLost = it->fractionLost;
2731 }
2732 }
2733 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002734}
2735
kwiberg55b97fe2016-01-28 05:22:45 -08002736int Channel::SendApplicationDefinedRTCPPacket(
2737 unsigned char subType,
2738 unsigned int name,
2739 const char* data,
2740 unsigned short dataLengthInBytes) {
2741 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2742 "Channel::SendApplicationDefinedRTCPPacket()");
2743 if (!channel_state_.Get().sending) {
2744 _engineStatisticsPtr->SetLastError(
2745 VE_NOT_SENDING, kTraceError,
2746 "SendApplicationDefinedRTCPPacket() not sending");
2747 return -1;
2748 }
2749 if (NULL == data) {
2750 _engineStatisticsPtr->SetLastError(
2751 VE_INVALID_ARGUMENT, kTraceError,
2752 "SendApplicationDefinedRTCPPacket() invalid data value");
2753 return -1;
2754 }
2755 if (dataLengthInBytes % 4 != 0) {
2756 _engineStatisticsPtr->SetLastError(
2757 VE_INVALID_ARGUMENT, kTraceError,
2758 "SendApplicationDefinedRTCPPacket() invalid length value");
2759 return -1;
2760 }
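  // RTCP APP packets carry their application-dependent data in 32-bit words
  // (RFC 3550, section 6.7), hence the multiple-of-four length check above.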
2761 RtcpMode status = _rtpRtcpModule->RTCP();
2762 if (status == RtcpMode::kOff) {
2763 _engineStatisticsPtr->SetLastError(
2764 VE_RTCP_ERROR, kTraceError,
2765 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2766 return -1;
2767 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002768
kwiberg55b97fe2016-01-28 05:22:45 -08002769 // Create and schedule the RTCP APP packet for transmission
2770 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2771 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2772 _engineStatisticsPtr->SetLastError(
2773 VE_SEND_ERROR, kTraceError,
2774 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2775 return -1;
2776 }
2777 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002778}
2779
kwiberg55b97fe2016-01-28 05:22:45 -08002780int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2781 unsigned int& maxJitterMs,
2782 unsigned int& discardedPackets) {
2783 // The jitter statistics is updated for each received RTP packet and is
2784 // based on received packets.
2785 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2786 // If RTCP is off, there is no timed thread in the RTCP module regularly
2787 // generating new stats, trigger the update manually here instead.
2788 StreamStatistician* statistician =
2789 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2790 if (statistician) {
2791 // Don't use the returned statistics; use data from the proxy instead so
2792 // that max jitter can be fetched atomically.
2793 RtcpStatistics s;
2794 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002795 }
kwiberg55b97fe2016-01-28 05:22:45 -08002796 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002797
kwiberg55b97fe2016-01-28 05:22:45 -08002798 ChannelStatistics stats = statistics_proxy_->GetStats();
2799 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2800 if (playoutFrequency > 0) {
2801 // Scale RTP statistics given the current playout frequency
2802 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2803 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2804 }
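  // Example (illustrative numbers): at a 48000 Hz playout frequency a max
  // jitter of 960 timestamp units maps to 960 / (48000 / 1000) = 20 ms.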
niklase@google.com470e71d2011-07-07 08:21:25 +00002805
kwiberg55b97fe2016-01-28 05:22:45 -08002806 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002807
kwiberg55b97fe2016-01-28 05:22:45 -08002808 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002809}
2810
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002811int Channel::GetRemoteRTCPReportBlocks(
2812 std::vector<ReportBlock>* report_blocks) {
2813 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002814 _engineStatisticsPtr->SetLastError(
2815 VE_INVALID_ARGUMENT, kTraceError,
2816 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002817 return -1;
2818 }
2819
2820 // Get the report blocks from the latest received RTCP Sender or Receiver
2821 // Report. Each element in the vector contains the sender's SSRC and a
2822 // report block according to RFC 3550.
2823 std::vector<RTCPReportBlock> rtcp_report_blocks;
2824 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002825 return -1;
2826 }
2827
2828 if (rtcp_report_blocks.empty())
2829 return 0;
2830
2831 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2832 for (; it != rtcp_report_blocks.end(); ++it) {
2833 ReportBlock report_block;
2834 report_block.sender_SSRC = it->remoteSSRC;
2835 report_block.source_SSRC = it->sourceSSRC;
2836 report_block.fraction_lost = it->fractionLost;
2837 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2838 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2839 report_block.interarrival_jitter = it->jitter;
2840 report_block.last_SR_timestamp = it->lastSR;
2841 report_block.delay_since_last_SR = it->delaySinceLastSR;
2842 report_blocks->push_back(report_block);
2843 }
2844 return 0;
2845}
2846
kwiberg55b97fe2016-01-28 05:22:45 -08002847int Channel::GetRTPStatistics(CallStatistics& stats) {
2848 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002849
kwiberg55b97fe2016-01-28 05:22:45 -08002850 // The jitter statistics are updated for each received RTP packet and are
2851 // based on received packets.
2852 RtcpStatistics statistics;
2853 StreamStatistician* statistician =
2854 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2855 if (!statistician ||
2856 !statistician->GetStatistics(&statistics,
2857 _rtpRtcpModule->RTCP() == RtcpMode::kOff)) {
2858 _engineStatisticsPtr->SetLastError(
2859 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
2860 "GetRTPStatistics() failed to read RTP statistics from the "
2861 "RTP/RTCP module");
2862 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002863
kwiberg55b97fe2016-01-28 05:22:45 -08002864 stats.fractionLost = statistics.fraction_lost;
2865 stats.cumulativeLost = statistics.cumulative_lost;
2866 stats.extendedMax = statistics.extended_max_sequence_number;
2867 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002868
kwiberg55b97fe2016-01-28 05:22:45 -08002869 // --- RTT
2870 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002871
kwiberg55b97fe2016-01-28 05:22:45 -08002872 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002873
kwiberg55b97fe2016-01-28 05:22:45 -08002874 size_t bytesSent(0);
2875 uint32_t packetsSent(0);
2876 size_t bytesReceived(0);
2877 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002878
kwiberg55b97fe2016-01-28 05:22:45 -08002879 if (statistician) {
2880 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2881 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002882
kwiberg55b97fe2016-01-28 05:22:45 -08002883 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2884 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2885 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
2886 " output will not be complete");
2887 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002888
kwiberg55b97fe2016-01-28 05:22:45 -08002889 stats.bytesSent = bytesSent;
2890 stats.packetsSent = packetsSent;
2891 stats.bytesReceived = bytesReceived;
2892 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002893
kwiberg55b97fe2016-01-28 05:22:45 -08002894 // --- Timestamps
2895 {
2896 rtc::CritScope lock(&ts_stats_lock_);
2897 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2898 }
2899 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002900}
2901
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002902int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002903 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002904 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002905
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002906 if (enable) {
2907 if (redPayloadtype < 0 || redPayloadtype > 127) {
2908 _engineStatisticsPtr->SetLastError(
2909 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002910 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002911 return -1;
2912 }
2913
2914 if (SetRedPayloadType(redPayloadtype) < 0) {
2915 _engineStatisticsPtr->SetLastError(
2916 VE_CODEC_ERROR, kTraceError,
2917 "SetSecondarySendCodec() Failed to register RED ACM");
2918 return -1;
2919 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002920 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002921
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002922 if (audio_coding_->SetREDStatus(enable) != 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002923 _engineStatisticsPtr->SetLastError(
2924 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002925 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002926 return -1;
2927 }
2928 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002929}
2930
kwiberg55b97fe2016-01-28 05:22:45 -08002931int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
2932 enabled = audio_coding_->REDStatus();
2933 if (enabled) {
2934 int8_t payloadType = 0;
2935 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2936 _engineStatisticsPtr->SetLastError(
2937 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2938 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2939 "module");
2940 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002941 }
kwiberg55b97fe2016-01-28 05:22:45 -08002942 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002943 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002944 }
2945 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002946}
2947
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002948int Channel::SetCodecFECStatus(bool enable) {
2949 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2950 "Channel::SetCodecFECStatus()");
2951
2952 if (audio_coding_->SetCodecFEC(enable) != 0) {
2953 _engineStatisticsPtr->SetLastError(
2954 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2955 "SetCodecFECStatus() failed to set FEC state");
2956 return -1;
2957 }
2958 return 0;
2959}
2960
2961bool Channel::GetCodecFECStatus() {
2962 bool enabled = audio_coding_->CodecFEC();
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002963 return enabled;
2964}
2965
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002966void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2967 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002968 // If pacing is enabled we always store packets.
2969 if (!pacing_enabled_)
2970 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002971 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
2972 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002973 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002974 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002975 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002976 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002977}
2978
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002979// Called when we are missing one or more packets.
2980int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002981 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2982}
2983
kwiberg55b97fe2016-01-28 05:22:45 -08002984uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2985 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2986 "Channel::Demultiplex()");
2987 _audioFrame.CopyFrom(audioFrame);
2988 _audioFrame.id_ = _channelId;
2989 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002990}
2991
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002992void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002993 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002994 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002995 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002996 CodecInst codec;
2997 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002998
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002999 // Never upsample or upmix the capture signal here. This should be done at the
3000 // end of the send chain.
3001 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
3002 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
3003 RemixAndResample(audio_data, number_of_frames, number_of_channels,
3004 sample_rate, &input_resampler_, &_audioFrame);
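  // E.g. (hypothetical case): 48 kHz stereo capture with a 32 kHz mono send
  // codec is downmixed and resampled to a 32 kHz mono frame here.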
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00003005}
3006
kwiberg55b97fe2016-01-28 05:22:45 -08003007uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
3008 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3009 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003010
kwiberg55b97fe2016-01-28 05:22:45 -08003011 if (_audioFrame.samples_per_channel_ == 0) {
3012 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3013 "Channel::PrepareEncodeAndSend() invalid audio frame");
3014 return 0xFFFFFFFF;
3015 }
3016
3017 if (channel_state_.Get().input_file_playing) {
3018 MixOrReplaceAudioWithFile(mixingFrequency);
3019 }
3020
3021 bool is_muted = Mute(); // Cache locally as Mute() takes a lock.
3022 if (is_muted) {
3023 AudioFrameOperations::Mute(_audioFrame);
3024 }
3025
3026 if (channel_state_.Get().input_external_media) {
3027 rtc::CritScope cs(&_callbackCritSect);
3028 const bool isStereo = (_audioFrame.num_channels_ == 2);
3029 if (_inputExternalMediaCallbackPtr) {
3030 _inputExternalMediaCallbackPtr->Process(
3031 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
3032 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
3033 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00003034 }
kwiberg55b97fe2016-01-28 05:22:45 -08003035 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003036
kwiberg55b97fe2016-01-28 05:22:45 -08003037 InsertInbandDtmfTone();
niklase@google.com470e71d2011-07-07 08:21:25 +00003038
kwiberg55b97fe2016-01-28 05:22:45 -08003039 if (_includeAudioLevelIndication) {
3040 size_t length =
3041 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
andrew@webrtc.org21299d42014-05-14 19:00:59 +00003042 if (is_muted) {
kwiberg55b97fe2016-01-28 05:22:45 -08003043 rms_level_.ProcessMuted(length);
3044 } else {
3045 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00003046 }
kwiberg55b97fe2016-01-28 05:22:45 -08003047 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003048
kwiberg55b97fe2016-01-28 05:22:45 -08003049 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003050}
3051
kwiberg55b97fe2016-01-28 05:22:45 -08003052uint32_t Channel::EncodeAndSend() {
3053 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3054 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003055
kwiberg55b97fe2016-01-28 05:22:45 -08003056 assert(_audioFrame.num_channels_ <= 2);
3057 if (_audioFrame.samples_per_channel_ == 0) {
3058 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3059 "Channel::EncodeAndSend() invalid audio frame");
3060 return 0xFFFFFFFF;
3061 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003062
kwiberg55b97fe2016-01-28 05:22:45 -08003063 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003064
kwiberg55b97fe2016-01-28 05:22:45 -08003065 // --- Add 10 ms of raw (PCM) audio data to the encoder.
niklase@google.com470e71d2011-07-07 08:21:25 +00003066
kwiberg55b97fe2016-01-28 05:22:45 -08003067 // The ACM resamples internally.
3068 _audioFrame.timestamp_ = _timeStamp;
3069 // This call will trigger AudioPacketizationCallback::SendData if encoding
3070 // is done and payload is ready for packetization and transmission.
3071 // Otherwise, it will return without invoking the callback.
3072 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3073 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3074 "Channel::EncodeAndSend() ACM encoding failed");
3075 return 0xFFFFFFFF;
3076 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003077
kwiberg55b97fe2016-01-28 05:22:45 -08003078 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3079 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003080}
3081
Minyue2013aec2015-05-13 14:14:42 +02003082void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003083 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003084 Channel* channel = associate_send_channel_.channel();
3085 if (channel && channel->ChannelId() == channel_id) {
3086 // If this channel is associated with a send channel of the specified
3087 // Channel ID, disassociate with it.
3088 ChannelOwner ref(NULL);
3089 associate_send_channel_ = ref;
3090 }
3091}
3092
kwiberg55b97fe2016-01-28 05:22:45 -08003093int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3094 VoEMediaProcess& processObject) {
3095 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3096 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003097
kwiberg55b97fe2016-01-28 05:22:45 -08003098 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003099
kwiberg55b97fe2016-01-28 05:22:45 -08003100 if (kPlaybackPerChannel == type) {
3101 if (_outputExternalMediaCallbackPtr) {
3102 _engineStatisticsPtr->SetLastError(
3103 VE_INVALID_OPERATION, kTraceError,
3104 "Channel::RegisterExternalMediaProcessing() "
3105 "output external media already enabled");
3106 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003107 }
kwiberg55b97fe2016-01-28 05:22:45 -08003108 _outputExternalMediaCallbackPtr = &processObject;
3109 _outputExternalMedia = true;
3110 } else if (kRecordingPerChannel == type) {
3111 if (_inputExternalMediaCallbackPtr) {
3112 _engineStatisticsPtr->SetLastError(
3113 VE_INVALID_OPERATION, kTraceError,
3114 "Channel::RegisterExternalMediaProcessing() "
3115 "output external media already enabled");
3116 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003117 }
kwiberg55b97fe2016-01-28 05:22:45 -08003118 _inputExternalMediaCallbackPtr = &processObject;
3119 channel_state_.SetInputExternalMedia(true);
3120 }
3121 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003122}
3123
kwiberg55b97fe2016-01-28 05:22:45 -08003124int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3125 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3126 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003127
kwiberg55b97fe2016-01-28 05:22:45 -08003128 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003129
kwiberg55b97fe2016-01-28 05:22:45 -08003130 if (kPlaybackPerChannel == type) {
3131 if (!_outputExternalMediaCallbackPtr) {
3132 _engineStatisticsPtr->SetLastError(
3133 VE_INVALID_OPERATION, kTraceWarning,
3134 "Channel::DeRegisterExternalMediaProcessing() "
3135 "output external media already disabled");
3136 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003137 }
kwiberg55b97fe2016-01-28 05:22:45 -08003138 _outputExternalMedia = false;
3139 _outputExternalMediaCallbackPtr = NULL;
3140 } else if (kRecordingPerChannel == type) {
3141 if (!_inputExternalMediaCallbackPtr) {
3142 _engineStatisticsPtr->SetLastError(
3143 VE_INVALID_OPERATION, kTraceWarning,
3144 "Channel::DeRegisterExternalMediaProcessing() "
3145 "input external media already disabled");
3146 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003147 }
kwiberg55b97fe2016-01-28 05:22:45 -08003148 channel_state_.SetInputExternalMedia(false);
3149 _inputExternalMediaCallbackPtr = NULL;
3150 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003151
kwiberg55b97fe2016-01-28 05:22:45 -08003152 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003153}
3154
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003155int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003156 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3157 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003158
kwiberg55b97fe2016-01-28 05:22:45 -08003159 if (channel_state_.Get().playing) {
3160 _engineStatisticsPtr->SetLastError(
3161 VE_INVALID_OPERATION, kTraceError,
3162 "Channel::SetExternalMixing() "
3163 "external mixing cannot be changed while playing.");
3164 return -1;
3165 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003166
kwiberg55b97fe2016-01-28 05:22:45 -08003167 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003168
kwiberg55b97fe2016-01-28 05:22:45 -08003169 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003170}
3171
kwiberg55b97fe2016-01-28 05:22:45 -08003172int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3173 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003174}
3175
wu@webrtc.org24301a62013-12-13 19:17:43 +00003176void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3177 audio_coding_->GetDecodingCallStatistics(stats);
3178}
3179
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003180bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3181 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003182 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003183 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003184 return false;
3185 }
kwiberg55b97fe2016-01-28 05:22:45 -08003186 *jitter_buffer_delay_ms =
3187 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
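  // The "+ 500" rounds the microsecond-scaled average to the nearest
  // millisecond before the per-packet delay is added.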
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003188 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003189 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003190}
3191
solenberg358057b2015-11-27 10:46:42 -08003192uint32_t Channel::GetDelayEstimate() const {
3193 int jitter_buffer_delay_ms = 0;
3194 int playout_buffer_delay_ms = 0;
3195 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3196 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3197}
3198
deadbeef74375882015-08-13 12:09:10 -07003199int Channel::LeastRequiredDelayMs() const {
3200 return audio_coding_->LeastRequiredDelayMs();
3201}
3202
kwiberg55b97fe2016-01-28 05:22:45 -08003203int Channel::SetMinimumPlayoutDelay(int delayMs) {
3204 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3205 "Channel::SetMinimumPlayoutDelay()");
3206 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3207 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3208 _engineStatisticsPtr->SetLastError(
3209 VE_INVALID_ARGUMENT, kTraceError,
3210 "SetMinimumPlayoutDelay() invalid min delay");
3211 return -1;
3212 }
3213 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3214 _engineStatisticsPtr->SetLastError(
3215 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3216 "SetMinimumPlayoutDelay() failed to set min playout delay");
3217 return -1;
3218 }
3219 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003220}
3221
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003222int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003223 uint32_t playout_timestamp_rtp = 0;
3224 {
tommi31fc21f2016-01-21 10:37:37 -08003225 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003226 playout_timestamp_rtp = playout_timestamp_rtp_;
3227 }
kwiberg55b97fe2016-01-28 05:22:45 -08003228 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003229 _engineStatisticsPtr->SetLastError(
3230 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3231 "GetPlayoutTimestamp() failed to retrieve timestamp");
3232 return -1;
3233 }
deadbeef74375882015-08-13 12:09:10 -07003234 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003235 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003236}
3237
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003238int Channel::SetInitTimestamp(unsigned int timestamp) {
3239 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003240 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003241 if (channel_state_.Get().sending) {
3242 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3243 "SetInitTimestamp() already sending");
3244 return -1;
3245 }
3246 _rtpRtcpModule->SetStartTimestamp(timestamp);
3247 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003248}
3249
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003250int Channel::SetInitSequenceNumber(short sequenceNumber) {
3251 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3252 "Channel::SetInitSequenceNumber()");
3253 if (channel_state_.Get().sending) {
3254 _engineStatisticsPtr->SetLastError(
3255 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3256 return -1;
3257 }
3258 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3259 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003260}
3261
kwiberg55b97fe2016-01-28 05:22:45 -08003262int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3263 RtpReceiver** rtp_receiver) const {
3264 *rtpRtcpModule = _rtpRtcpModule.get();
3265 *rtp_receiver = rtp_receiver_.get();
3266 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003267}
3268
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003269// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3270// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003271int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwiberg@webrtc.org00b8f6b2015-02-26 14:34:55 +00003272 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003273 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003274
kwiberg55b97fe2016-01-28 05:22:45 -08003275 {
3276 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003277
kwiberg55b97fe2016-01-28 05:22:45 -08003278 if (_inputFilePlayerPtr == NULL) {
3279 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3280 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3281 " doesnt exist");
3282 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003283 }
3284
kwiberg55b97fe2016-01-28 05:22:45 -08003285 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3286 mixingFrequency) == -1) {
3287 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3288 "Channel::MixOrReplaceAudioWithFile() file mixing "
3289 "failed");
3290 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003291 }
kwiberg55b97fe2016-01-28 05:22:45 -08003292 if (fileSamples == 0) {
3293 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3294 "Channel::MixOrReplaceAudioWithFile() file is ended");
3295 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003296 }
kwiberg55b97fe2016-01-28 05:22:45 -08003297 }
3298
3299 assert(_audioFrame.samples_per_channel_ == fileSamples);
3300
3301 if (_mixFileWithMicrophone) {
3302 // Currently file stream is always mono.
3303 // TODO(xians): Change the code when FilePlayer supports real stereo.
3304 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3305 1, fileSamples);
3306 } else {
3307 // Replace ACM audio with file.
3308 // Currently file stream is always mono.
3309 // TODO(xians): Change the code when FilePlayer supports real stereo.
3310 _audioFrame.UpdateFrame(
3311 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3312 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3313 }
3314 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003315}
3316
kwiberg55b97fe2016-01-28 05:22:45 -08003317int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3318 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003319
kwiberg55b97fe2016-01-28 05:22:45 -08003320 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]);
3321 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003322
kwiberg55b97fe2016-01-28 05:22:45 -08003323 {
3324 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003325
kwiberg55b97fe2016-01-28 05:22:45 -08003326 if (_outputFilePlayerPtr == NULL) {
3327 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3328 "Channel::MixAudioWithFile() file mixing failed");
3329 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003330 }
3331
kwiberg55b97fe2016-01-28 05:22:45 -08003332 // We should get the frequency we ask for.
3333 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3334 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3335 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3336 "Channel::MixAudioWithFile() file mixing failed");
3337 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003338 }
kwiberg55b97fe2016-01-28 05:22:45 -08003339 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003340
kwiberg55b97fe2016-01-28 05:22:45 -08003341 if (audioFrame.samples_per_channel_ == fileSamples) {
3342 // Currently file stream is always mono.
3343 // TODO(xians): Change the code when FilePlayer supports real stereo.
3344 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3345 fileSamples);
3346 } else {
3347 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3348 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3349 ") != "
3350 "fileSamples(%" PRIuS ")",
3351 audioFrame.samples_per_channel_, fileSamples);
3352 return -1;
3353 }
3354
3355 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003356}
3357
kwiberg55b97fe2016-01-28 05:22:45 -08003358int Channel::InsertInbandDtmfTone() {
3359 // Check if we should start a new tone.
3360 if (_inbandDtmfQueue.PendingDtmf() && !_inbandDtmfGenerator.IsAddingTone() &&
3361 _inbandDtmfGenerator.DelaySinceLastTone() >
3362 kMinTelephoneEventSeparationMs) {
3363 int8_t eventCode(0);
3364 uint16_t lengthMs(0);
3365 uint8_t attenuationDb(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003366
kwiberg55b97fe2016-01-28 05:22:45 -08003367 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
3368 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
3369 if (_playInbandDtmfEvent) {
3370 // Add tone to output mixer using a reduced length to minimize
3371 // risk of echo.
3372 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80, attenuationDb);
3373 }
3374 }
3375
3376 if (_inbandDtmfGenerator.IsAddingTone()) {
3377 uint16_t frequency(0);
3378 _inbandDtmfGenerator.GetSampleRate(frequency);
3379
3380 if (frequency != _audioFrame.sample_rate_hz_) {
3381 // Update sample rate of Dtmf tone since the mixing frequency
3382 // has changed.
3383 _inbandDtmfGenerator.SetSampleRate(
3384 (uint16_t)(_audioFrame.sample_rate_hz_));
3385 // Reset the tone to be added taking the new sample rate into
3386 // account.
3387 _inbandDtmfGenerator.ResetTone();
niklase@google.com470e71d2011-07-07 08:21:25 +00003388 }
3389
kwiberg55b97fe2016-01-28 05:22:45 -08003390 int16_t toneBuffer[320];
3391 uint16_t toneSamples(0);
3392 // Get 10ms tone segment and set time since last tone to zero
3393 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1) {
3394 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3395 "Channel::EncodeAndSend() inserting Dtmf failed");
3396 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003397 }
kwiberg55b97fe2016-01-28 05:22:45 -08003398
3399 // Replace mixed audio with DTMF tone.
3400 for (size_t sample = 0; sample < _audioFrame.samples_per_channel_;
3401 sample++) {
3402 for (size_t channel = 0; channel < _audioFrame.num_channels_; channel++) {
3403 const size_t index = sample * _audioFrame.num_channels_ + channel;
3404 _audioFrame.data_[index] = toneBuffer[sample];
3405 }
3406 }
3407
3408 assert(_audioFrame.samples_per_channel_ == toneSamples);
3409 } else {
3410 // Add 10ms to "delay-since-last-tone" counter
3411 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
3412 }
3413 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003414}
3415
deadbeef74375882015-08-13 12:09:10 -07003416void Channel::UpdatePlayoutTimestamp(bool rtcp) {
3417 uint32_t playout_timestamp = 0;
3418
kwiberg55b97fe2016-01-28 05:22:45 -08003419 if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) {
deadbeef74375882015-08-13 12:09:10 -07003420 // This can happen if this channel has not received any RTP packets. In
3421 // that case, NetEq cannot compute a playout timestamp.
3422 return;
3423 }
3424
3425 uint16_t delay_ms = 0;
3426 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003427 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003428 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3429 " delay from the ADM");
3430 _engineStatisticsPtr->SetLastError(
3431 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3432 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3433 return;
3434 }
3435
3436 jitter_buffer_playout_timestamp_ = playout_timestamp;
3437
3438 // Remove the playout delay.
3439 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
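  // Example (illustrative numbers): a 100 ms playout delay at a 48000 Hz
  // clock subtracts 100 * 48 = 4800 timestamp units.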
3440
kwiberg55b97fe2016-01-28 05:22:45 -08003441 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003442 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
3443 playout_timestamp);
3444
3445 {
tommi31fc21f2016-01-21 10:37:37 -08003446 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003447 if (rtcp) {
3448 playout_timestamp_rtcp_ = playout_timestamp;
3449 } else {
3450 playout_timestamp_rtp_ = playout_timestamp;
3451 }
3452 playout_delay_ms_ = delay_ms;
3453 }
3454}
3455
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003456// Called for incoming RTP packets after successful RTP header parsing.
3457void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3458 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003459 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003460 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3461 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003462
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003463 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003464 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003465
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003466 // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
3467 // for every incoming packet.
kwiberg55b97fe2016-01-28 05:22:45 -08003468 uint32_t timestamp_diff_ms =
3469 (rtp_timestamp - jitter_buffer_playout_timestamp_) /
3470 (rtp_receive_frequency / 1000);
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003471 if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
3472 timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3473 // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
3474 // timestamp, the resulting difference is negative, but is set to zero.
3475 // This can happen when a network glitch causes a packet to arrive late,
3476 // and during long comfort noise periods with clock drift.
3477 timestamp_diff_ms = 0;
3478 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003479
kwiberg55b97fe2016-01-28 05:22:45 -08003480 uint16_t packet_delay_ms =
3481 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
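  // Example (illustrative numbers): at a 16000 Hz RTP clock, consecutive
  // 20 ms packets differ by 320 timestamp units, so packet_delay_ms is
  // 320 / 16 = 20.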
niklase@google.com470e71d2011-07-07 08:21:25 +00003482
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003483 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003484
kwiberg55b97fe2016-01-28 05:22:45 -08003485 if (timestamp_diff_ms == 0)
3486 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003487
deadbeef74375882015-08-13 12:09:10 -07003488 {
tommi31fc21f2016-01-21 10:37:37 -08003489 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003490
deadbeef74375882015-08-13 12:09:10 -07003491 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3492 _recPacketDelayMs = packet_delay_ms;
3493 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003494
deadbeef74375882015-08-13 12:09:10 -07003495 if (_average_jitter_buffer_delay_us == 0) {
3496 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3497 return;
3498 }
3499
3500 // Filter average delay value using exponential filter (alpha is
3501 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3502 // risk of rounding error) and compensate for it in GetDelayEstimate()
3503 // later.
kwiberg55b97fe2016-01-28 05:22:45 -08003504 _average_jitter_buffer_delay_us =
3505 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3506 8;
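  // Example (illustrative numbers): with the previous average at 40000 us
  // and timestamp_diff_ms = 60, the new value is
  // (7 * 40000 + 1000 * 60 + 500) / 8 = 42562 us, i.e. roughly 42.6 ms.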
deadbeef74375882015-08-13 12:09:10 -07003507 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003508}
3509
kwiberg55b97fe2016-01-28 05:22:45 -08003510void Channel::RegisterReceiveCodecsToRTPModule() {
3511 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3512 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003513
kwiberg55b97fe2016-01-28 05:22:45 -08003514 CodecInst codec;
3515 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003516
kwiberg55b97fe2016-01-28 05:22:45 -08003517 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3518 // Open up the RTP/RTCP receiver for all supported codecs
3519 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3520 (rtp_receiver_->RegisterReceivePayload(
3521 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3522 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3523 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3524 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3525 " to register %s (%d/%d/%" PRIuS
3526 "/%d) to RTP/RTCP "
3527 "receiver",
3528 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3529 codec.rate);
3530 } else {
3531 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3532 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3533 "(%d/%d/%" PRIuS
3534 "/%d) has been added to the RTP/RTCP "
3535 "receiver",
3536 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3537 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003538 }
kwiberg55b97fe2016-01-28 05:22:45 -08003539 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003540}
3541
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003542// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003543int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003544 CodecInst codec;
3545 bool found_red = false;
3546
3547 // Get default RED settings from the ACM database
3548 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3549 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003550 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003551 if (!STR_CASE_CMP(codec.plname, "RED")) {
3552 found_red = true;
3553 break;
3554 }
3555 }
3556
3557 if (!found_red) {
3558 _engineStatisticsPtr->SetLastError(
3559 VE_CODEC_ERROR, kTraceError,
3560 "SetRedPayloadType() RED is not supported");
3561 return -1;
3562 }
3563
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003564 codec.pltype = red_payload_type;
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003565 if (audio_coding_->RegisterSendCodec(codec) < 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003566 _engineStatisticsPtr->SetLastError(
3567 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3568 "SetRedPayloadType() RED registration in ACM module failed");
3569 return -1;
3570 }
3571
3572 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3573 _engineStatisticsPtr->SetLastError(
3574 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3575 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3576 return -1;
3577 }
3578 return 0;
3579}
3580
kwiberg55b97fe2016-01-28 05:22:45 -08003581int Channel::SetSendRtpHeaderExtension(bool enable,
3582 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003583 unsigned char id) {
3584 int error = 0;
3585 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3586 if (enable) {
3587 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3588 }
3589 return error;
3590}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003591
wu@webrtc.org94454b72014-06-05 20:34:08 +00003592int32_t Channel::GetPlayoutFrequency() {
3593 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3594 CodecInst current_receive_codec;
3595 if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
3596 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
3597 // Even though the actual sampling rate for G.722 audio is
3598 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3599 // 8,000 Hz because that value was erroneously assigned in
3600 // RFC 1890 and must remain unchanged for backward compatibility.
3601 playout_frequency = 8000;
3602 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3603 // We are resampling Opus internally to 32,000 Hz until all our
3604 // DSP routines can operate at 48,000 Hz, but the RTP clock
3605 // rate for the Opus payload format is standardized to 48,000 Hz,
3606 // because that is the maximum supported decoding sampling rate.
3607 playout_frequency = 48000;
3608 }
3609 }
3610 return playout_frequency;
3611}
3612
Minyue2013aec2015-05-13 14:14:42 +02003613int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003614 RtcpMode method = _rtpRtcpModule->RTCP();
3615 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003616 return 0;
3617 }
3618 std::vector<RTCPReportBlock> report_blocks;
3619 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003620
3621 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003622 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003623 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003624 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003625 Channel* channel = associate_send_channel_.channel();
3626 // Tries to get RTT from an associated channel. This is important for
3627 // receive-only channels.
3628 if (channel) {
3629 // To prevent infinite recursion and deadlock, calling GetRTT of
3630 // associate channel should always use "false" for argument:
3631 // |allow_associate_channel|.
3632 rtt = channel->GetRTT(false);
3633 }
3634 }
3635 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003636 }
3637
3638 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3639 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3640 for (; it != report_blocks.end(); ++it) {
3641 if (it->remoteSSRC == remoteSSRC)
3642 break;
3643 }
3644 if (it == report_blocks.end()) {
3645 // We have not received packets with SSRC matching the report blocks.
3646 // To calculate RTT we try with the SSRC of the first report block.
3647 // This is very important for send-only channels where we don't know
3648 // the SSRC of the other end.
3649 remoteSSRC = report_blocks[0].remoteSSRC;
3650 }
Minyue2013aec2015-05-13 14:14:42 +02003651
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003652 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003653 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003654 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003655 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3656 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003657 return 0;
3658 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003659 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003660}
3661
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003662} // namespace voe
3663} // namespace webrtc