blob: abc843aef316205e74fca5fb4b4f4b1ffd29c49b [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000022#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000024#include "webrtc/modules/audio_device/include/audio_device.h"
25#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010026#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010027#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010028#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
29#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
30#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000031#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010032#include "webrtc/modules/utility/include/audio_frame_operations.h"
33#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010034#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000035#include "webrtc/voice_engine/include/voe_base.h"
36#include "webrtc/voice_engine/include/voe_external_media.h"
37#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
38#include "webrtc/voice_engine/output_mixer.h"
39#include "webrtc/voice_engine/statistics.h"
40#include "webrtc/voice_engine/transmit_mixer.h"
41#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000042
andrew@webrtc.org50419b02012-11-14 19:07:54 +000043namespace webrtc {
44namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000045
kwibergc8d071e2016-04-06 12:22:38 -070046namespace {
47
48bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
49 acm2::RentACodec* rac,
50 const CodecInst& ci) {
51 const int result =
52 (*acm)->RegisterReceiveCodec(ci, [&] { return rac->RentIsacDecoder(); });
53 return result == 0;
54}
55
56} // namespace
57
// Attenuation, in dB, applied to telephone-event (DTMF) tones — presumably
// consumed where outgoing telephone events are configured; confirm at use
// sites.
const int kTelephoneEventAttenuationdB = 10;
59
// Thread-safe proxy that forwards transport-feedback callbacks to an
// observer which may be installed or replaced after construction. The pacer
// and network thread checkers are detached in the constructor because the
// proxy itself is created on a third (construction) thread.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Installs (or clears, with nullptr) the downstream observer.
  // Must be called on the construction thread.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  // Called on the pacer thread; drops the call if no observer is set.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  // Called on the network thread when a transport-feedback RTCP message
  // arrives; drops the call if no observer is set.
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
97
// Thread-safe proxy that lets the pacer allocate transport-wide sequence
// numbers through an allocator that can be installed or replaced after
// construction. The pacer thread checker is detached in the constructor
// because the proxy is created on a different (construction) thread.
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  // Installs (or clears, with nullptr) the downstream allocator.
  // Must be called on the construction thread.
  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  // Called on the pacer thread; returns 0 when no allocator is installed.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};
126
// Thread-safe proxy that forwards InsertPacket calls to an RtpPacketSender
// that can be installed or replaced after construction. Calls made before a
// sender is set are silently dropped.
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  // Installs (or clears, with nullptr) the downstream packet sender.
  // Must be called on the construction thread.
  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};
156
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  // NOTE(review): the inherited RtcpStatistics base subobject appears
  // unused; current stats are stored in the |rtcp| member instead. Looks
  // like a historical artifact — confirm before relying on base fields.
  RtcpStatistics rtcp;        // Most recently reported statistics.
  uint32_t max_jitter;        // Largest jitter value seen so far.
};
165
166// Statistics callback, called at each generation of a new RTCP report block.
167class StatisticsProxy : public RtcpStatisticsCallback {
168 public:
tommi31fc21f2016-01-21 10:37:37 -0800169 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000170 virtual ~StatisticsProxy() {}
171
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000172 void StatisticsUpdated(const RtcpStatistics& statistics,
173 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000174 if (ssrc != ssrc_)
175 return;
176
tommi31fc21f2016-01-21 10:37:37 -0800177 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000178 stats_.rtcp = statistics;
179 if (statistics.jitter > stats_.max_jitter) {
180 stats_.max_jitter = statistics.jitter;
181 }
182 }
183
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000184 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000185
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000186 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800187 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000188 return stats_;
189 }
190
191 private:
192 // StatisticsUpdated calls are triggered from threads in the RTP module,
193 // while GetStats calls can be triggered from the public voice engine API,
194 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800195 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000196 const uint32_t ssrc_;
197 ChannelStatistics stats_;
198};
199
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000200class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000201 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000202 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
203 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000204
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000205 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
206 // Not used for Voice Engine.
207 }
208
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000209 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
210 int64_t rtt,
211 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000212 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
213 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
214 // report for VoiceEngine?
215 if (report_blocks.empty())
216 return;
217
218 int fraction_lost_aggregate = 0;
219 int total_number_of_packets = 0;
220
221 // If receiving multiple report blocks, calculate the weighted average based
222 // on the number of packets a report refers to.
223 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
224 block_it != report_blocks.end(); ++block_it) {
225 // Find the previous extended high sequence number for this remote SSRC,
226 // to calculate the number of RTP packets this report refers to. Ignore if
227 // we haven't seen this SSRC before.
228 std::map<uint32_t, uint32_t>::iterator seq_num_it =
229 extended_max_sequence_number_.find(block_it->sourceSSRC);
230 int number_of_packets = 0;
231 if (seq_num_it != extended_max_sequence_number_.end()) {
232 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
233 }
234 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
235 total_number_of_packets += number_of_packets;
236
237 extended_max_sequence_number_[block_it->sourceSSRC] =
238 block_it->extendedHighSeqNum;
239 }
240 int weighted_fraction_lost = 0;
241 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800242 weighted_fraction_lost =
243 (fraction_lost_aggregate + total_number_of_packets / 2) /
244 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000245 }
246 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000247 }
248
249 private:
250 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000251 // Maps remote side ssrc to extended highest sequence number received.
252 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000253};
254
// Callback from the ACM with an encoded audio frame ready for packetization.
// Forwards the payload to the RTP/RTCP module (which in turn triggers
// Transport::SendPacket()) and caches the timestamp/payload type of the
// frame just sent. Returns 0 on success, -1 on failure.
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  // NOTE(review): the (FrameType&) C-style cast looks like a legacy artifact
  // — presumably unnecessary if SendOutgoingData takes FrameType by value;
  // confirm against the module interface before cleaning up.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  // Remember what was last handed to the RTP module.
  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}
293
kwiberg55b97fe2016-01-28 05:22:45 -0800294int32_t Channel::InFrameType(FrameType frame_type) {
295 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
296 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000297
kwiberg55b97fe2016-01-28 05:22:45 -0800298 rtc::CritScope cs(&_callbackCritSect);
299 _sendFrameType = (frame_type == kAudioFrameSpeech);
300 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000301}
302
kwiberg55b97fe2016-01-28 05:22:45 -0800303int32_t Channel::OnRxVadDetected(int vadDecision) {
304 rtc::CritScope cs(&_callbackCritSect);
305 if (_rxVadObserverPtr) {
306 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
307 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000308
kwiberg55b97fe2016-01-28 05:22:45 -0800309 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000310}
311
stefan1d8a5062015-10-02 03:39:33 -0700312bool Channel::SendRtp(const uint8_t* data,
313 size_t len,
314 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800315 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
316 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000317
kwiberg55b97fe2016-01-28 05:22:45 -0800318 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000319
kwiberg55b97fe2016-01-28 05:22:45 -0800320 if (_transportPtr == NULL) {
321 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
322 "Channel::SendPacket() failed to send RTP packet due to"
323 " invalid transport object");
324 return false;
325 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000326
kwiberg55b97fe2016-01-28 05:22:45 -0800327 uint8_t* bufferToSendPtr = (uint8_t*)data;
328 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000329
kwiberg55b97fe2016-01-28 05:22:45 -0800330 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
331 std::string transport_name =
332 _externalTransport ? "external transport" : "WebRtc sockets";
333 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
334 "Channel::SendPacket() RTP transmission using %s failed",
335 transport_name.c_str());
336 return false;
337 }
338 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000339}
340
kwiberg55b97fe2016-01-28 05:22:45 -0800341bool Channel::SendRtcp(const uint8_t* data, size_t len) {
342 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
343 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000344
kwiberg55b97fe2016-01-28 05:22:45 -0800345 rtc::CritScope cs(&_callbackCritSect);
346 if (_transportPtr == NULL) {
347 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
348 "Channel::SendRtcp() failed to send RTCP packet"
349 " due to invalid transport object");
350 return false;
351 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000352
kwiberg55b97fe2016-01-28 05:22:45 -0800353 uint8_t* bufferToSendPtr = (uint8_t*)data;
354 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000355
kwiberg55b97fe2016-01-28 05:22:45 -0800356 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
357 if (n < 0) {
358 std::string transport_name =
359 _externalTransport ? "external transport" : "WebRtc sockets";
360 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
361 "Channel::SendRtcp() transmission using %s failed",
362 transport_name.c_str());
363 return false;
364 }
365 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000366}
367
// Called by the RTP receiver when the remote SSRC changes. Propagates the
// new SSRC to the RTP/RTCP module so NTP estimation for A/V sync stays
// consistent.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
375
// Called by the RTP receiver when a contributing source is added or removed.
// Trace-only: CSRC changes are not otherwise acted upon here.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
381
// Called by the RTP receiver when a payload type unknown to the decoder side
// arrives. Builds a CodecInst from the RTP parameters and registers it as a
// receive codec in the ACM. Returns 0 on success, -1 on failure.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  // Copying NAME_SIZE-1 bytes into the zero-initialized struct keeps the
  // name null-terminated even if |payloadName| is at maximum length.
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Look up the codec's default packet size via a scratch CodecInst.
  // NOTE(review): the return value of Codec() is ignored; on lookup failure
  // pacsize presumably stays 0 and registration below fails — consider
  // checking explicitly.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
417
kwiberg55b97fe2016-01-28 05:22:45 -0800418int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
419 size_t payloadSize,
420 const WebRtcRTPHeader* rtpHeader) {
421 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
422 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
423 ","
424 " payloadType=%u, audioChannel=%" PRIuS ")",
425 payloadSize, rtpHeader->header.payloadType,
426 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000427
kwiberg55b97fe2016-01-28 05:22:45 -0800428 if (!channel_state_.Get().playing) {
429 // Avoid inserting into NetEQ when we are not playing. Count the
430 // packet as discarded.
431 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
432 "received packet is discarded since playing is not"
433 " activated");
434 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000435 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800436 }
437
438 // Push the incoming payload (parsed and ready for decoding) into the ACM
439 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
440 0) {
441 _engineStatisticsPtr->SetLastError(
442 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
443 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
444 return -1;
445 }
446
447 // Update the packet delay.
448 UpdatePacketDelay(rtpHeader->header.timestamp,
449 rtpHeader->header.sequenceNumber);
450
451 int64_t round_trip_time = 0;
452 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
453 NULL);
454
455 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
456 if (!nack_list.empty()) {
457 // Can't use nack_list.data() since it's not supported by all
458 // compilers.
459 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
460 }
461 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000462}
463
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000464bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +0000465 size_t rtp_packet_length) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000466 RTPHeader header;
467 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
468 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
469 "IncomingPacket invalid RTP header");
470 return false;
471 }
472 header.payload_type_frequency =
473 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
474 if (header.payload_type_frequency < 0)
475 return false;
476 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
477}
478
// Mixer callback: produces the next 10 ms of decoded audio for this channel.
// Pulls PCM from the ACM, then runs the playout pipeline in order: RX VAD,
// receive-side APM, sink callback, gain/pan scaling, file mixing, external
// media processing, file recording, level measurement, and timestamp/NTP
// bookkeeping. Returns kError when decoding fails so the mixer skips this
// participant; otherwise kMuted/kNormal depending on the ACM's muted flag.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }
  // NOTE(review): this asserts frames are never muted here, yet the final
  // return maps |muted| to kMuted — presumably future-proofing for when the
  // ACM starts producing muted frames; confirm.
  RTC_DCHECK(!muted);

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot of the channel state for this frame.
  ChannelState::State state = channel_state_.Get();

  // Receive-side audio processing on the decoded stream, if enabled.
  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume/pan settings under their lock, then apply them outside
  // it to keep the critical section short.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling (skipped when gain is within ~1% of unity).
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
629
kwiberg55b97fe2016-01-28 05:22:45 -0800630int32_t Channel::NeededFrequency(int32_t id) const {
631 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
632 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000633
kwiberg55b97fe2016-01-28 05:22:45 -0800634 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000635
kwiberg55b97fe2016-01-28 05:22:45 -0800636 // Determine highest needed receive frequency
637 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000638
kwiberg55b97fe2016-01-28 05:22:45 -0800639 // Return the bigger of playout and receive frequency in the ACM.
640 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
641 highestNeeded = audio_coding_->PlayoutFrequency();
642 } else {
643 highestNeeded = receiveFrequency;
644 }
645
646 // Special case, if we're playing a file on the playout side
647 // we take that frequency into consideration as well
648 // This is not needed on sending side, since the codec will
649 // limit the spectrum anyway.
650 if (channel_state_.Get().output_file_playing) {
651 rtc::CritScope cs(&_fileCritSect);
652 if (_outputFilePlayerPtr) {
653 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
654 highestNeeded = _outputFilePlayerPtr->Frequency();
655 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000656 }
kwiberg55b97fe2016-01-28 05:22:45 -0800657 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000658
kwiberg55b97fe2016-01-28 05:22:45 -0800659 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000660}
661
ivocb04965c2015-09-09 00:09:43 -0700662int32_t Channel::CreateChannel(Channel*& channel,
663 int32_t channelId,
664 uint32_t instanceId,
665 RtcEventLog* const event_log,
666 const Config& config) {
kwiberg55b97fe2016-01-28 05:22:45 -0800667 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
668 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
669 instanceId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000670
kwiberg55b97fe2016-01-28 05:22:45 -0800671 channel = new Channel(channelId, instanceId, event_log, config);
672 if (channel == NULL) {
673 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
674 "Channel::CreateChannel() unable to allocate memory for"
675 " channel");
676 return -1;
677 }
678 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000679}
680
// FileCallback: periodic notification that file playout has progressed by
// |durationMs|. Trace-only; intentionally unimplemented.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
688
// FileCallback: periodic notification that file recording has progressed by
// |durationMs|. Trace-only; intentionally unimplemented.
void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
696
kwiberg55b97fe2016-01-28 05:22:45 -0800697void Channel::PlayFileEnded(int32_t id) {
698 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
699 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000700
kwiberg55b97fe2016-01-28 05:22:45 -0800701 if (id == _inputFilePlayerId) {
702 channel_state_.SetInputFilePlaying(false);
703 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
704 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000705 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800706 } else if (id == _outputFilePlayerId) {
707 channel_state_.SetOutputFilePlaying(false);
708 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
709 "Channel::PlayFileEnded() => output file player module is"
710 " shutdown");
711 }
712}
713
// FileCallback: the output file recorder attached to this channel has
// stopped. Clears the recording flag under the file lock.
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  // Only the output file recorder is expected to trigger this callback.
  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
727
// Constructs a voice channel. Wires up the RTP/RTCP stack (receiver,
// statistics, module), the audio coding module (ACM/NetEq), and the
// receive-side audio processing. Engine-level collaborators are injected
// later via SetEngineInformation(); Init() must be called before use.
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // The channel itself acts as both RTP data callback and feedback
      // observer for the receiver (hence the two |this| arguments).
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add it's own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      // -1 marks "not yet captured" for the capture-start timestamps.
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine-level collaborators are injected via SetEngineInformation().
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      // Pan defaults to centered, gain to unity.
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  // --- ACM / NetEq configuration.
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = false;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  // --- RTP/RTCP module configuration. The channel is the outgoing
  // transport; pacing-related hooks are installed only when enabled.
  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Sending is off until StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // --- Receive-side audio processing (for RX AGC/NS), with the
  // experimental AGC explicitly disabled.
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
848
// Tears the channel down in a safe order: detach external callbacks, stop
// send/playout, destroy any file players/recorder under the file lock, then
// de-register module callbacks and remove the RTP/RTCP module from the
// process thread before members are destroyed.
Channel::~Channel() {
  // Detach the statistics proxy first so no RTCP callbacks arrive during
  // teardown.
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    // Destroy file players/recorder under the file lock.
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
904
// Initializes the channel after SetEngineInformation() has supplied the
// engine-level collaborators: registers the RTP/RTCP module with the
// process thread, initializes the ACM receiver, registers all supported
// codecs on the receive side, selects PCMU as the default send codec, and
// configures receive-side NS/AGC defaults. Returns 0 on success, -1 on
// failure (the specific error is reported via _engineStatisticsPtr).
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    // (registration failures are logged as warnings but not fatal).
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // Comfort noise: needs encoder-side registration too so DTX can work.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  // --- Receive-side audio processing defaults (NS and AGC modes).
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1031
kwiberg55b97fe2016-01-28 05:22:45 -08001032int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1033 OutputMixer& outputMixer,
1034 voe::TransmitMixer& transmitMixer,
1035 ProcessThread& moduleProcessThread,
1036 AudioDeviceModule& audioDeviceModule,
1037 VoiceEngineObserver* voiceEngineObserver,
1038 rtc::CriticalSection* callbackCritSect) {
1039 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1040 "Channel::SetEngineInformation()");
1041 _engineStatisticsPtr = &engineStatistics;
1042 _outputMixerPtr = &outputMixer;
1043 _transmitMixerPtr = &transmitMixer,
1044 _moduleProcessThreadPtr = &moduleProcessThread;
1045 _audioDeviceModulePtr = &audioDeviceModule;
1046 _voiceEngineObserverPtr = voiceEngineObserver;
1047 _callbackCritSectPtr = callbackCritSect;
1048 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001049}
1050
// Advances the local send timestamp by one frame's worth of samples.
// _timeStamp is only an offset; the RTP module adds its own random base.
int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;  // Always succeeds.
}
1055
// Installs the audio sink for this channel, taking ownership; passing a
// null |sink| clears it. Access to |audio_sink_| is guarded by
// |_callbackCritSect|.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1060
kwiberg55b97fe2016-01-28 05:22:45 -08001061int32_t Channel::StartPlayout() {
1062 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1063 "Channel::StartPlayout()");
1064 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001065 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001066 }
1067
1068 if (!_externalMixing) {
1069 // Add participant as candidates for mixing.
1070 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1071 _engineStatisticsPtr->SetLastError(
1072 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1073 "StartPlayout() failed to add participant to mixer");
1074 return -1;
1075 }
1076 }
1077
1078 channel_state_.SetPlaying(true);
1079 if (RegisterFilePlayingToMixer() != 0)
1080 return -1;
1081
1082 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001083}
1084
kwiberg55b97fe2016-01-28 05:22:45 -08001085int32_t Channel::StopPlayout() {
1086 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1087 "Channel::StopPlayout()");
1088 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001089 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001090 }
1091
1092 if (!_externalMixing) {
1093 // Remove participant as candidates for mixing
1094 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1095 _engineStatisticsPtr->SetLastError(
1096 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1097 "StopPlayout() failed to remove participant from mixer");
1098 return -1;
1099 }
1100 }
1101
1102 channel_state_.SetPlaying(false);
1103 _outputAudioLevel.Clear();
1104
1105 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001106}
1107
// Starts sending on this channel: restores the RTP sequence number saved by
// the previous StopSend() and enables sending in the RTP/RTCP module.
// Idempotent: returns 0 if already sending; -1 if the RTP/RTCP module
// refuses to start (state is rolled back in that case).
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    // Roll back: media status and the |sending| flag are cleared again.
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1134
kwiberg55b97fe2016-01-28 05:22:45 -08001135int32_t Channel::StopSend() {
1136 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1137 "Channel::StopSend()");
1138 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001139 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001140 }
1141 channel_state_.SetSending(false);
1142
1143 // Store the sequence number to be able to pick up the same sequence for
1144 // the next StartSend(). This is needed for restarting device, otherwise
1145 // it might cause libSRTP to complain about packets being replayed.
1146 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1147 // CL is landed. See issue
1148 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1149 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1150
1151 // Reset sending SSRC and sequence number and triggers direct transmission
1152 // of RTCP BYE
1153 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1154 _engineStatisticsPtr->SetLastError(
1155 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1156 "StartSend() RTP/RTCP failed to stop sending");
1157 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001158 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001159
1160 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001161}
1162
kwiberg55b97fe2016-01-28 05:22:45 -08001163int32_t Channel::StartReceiving() {
1164 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1165 "Channel::StartReceiving()");
1166 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001167 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001168 }
1169 channel_state_.SetReceiving(true);
1170 _numberOfDiscardedPackets = 0;
1171 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001172}
1173
kwiberg55b97fe2016-01-28 05:22:45 -08001174int32_t Channel::StopReceiving() {
1175 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1176 "Channel::StopReceiving()");
1177 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001178 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001179 }
1180
1181 channel_state_.SetReceiving(false);
1182 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001183}
1184
kwiberg55b97fe2016-01-28 05:22:45 -08001185int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1186 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1187 "Channel::RegisterVoiceEngineObserver()");
1188 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001189
kwiberg55b97fe2016-01-28 05:22:45 -08001190 if (_voiceEngineObserverPtr) {
1191 _engineStatisticsPtr->SetLastError(
1192 VE_INVALID_OPERATION, kTraceError,
1193 "RegisterVoiceEngineObserver() observer already enabled");
1194 return -1;
1195 }
1196 _voiceEngineObserverPtr = &observer;
1197 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001198}
1199
kwiberg55b97fe2016-01-28 05:22:45 -08001200int32_t Channel::DeRegisterVoiceEngineObserver() {
1201 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1202 "Channel::DeRegisterVoiceEngineObserver()");
1203 rtc::CritScope cs(&_callbackCritSect);
1204
1205 if (!_voiceEngineObserverPtr) {
1206 _engineStatisticsPtr->SetLastError(
1207 VE_INVALID_OPERATION, kTraceWarning,
1208 "DeRegisterVoiceEngineObserver() observer already disabled");
1209 return 0;
1210 }
1211 _voiceEngineObserverPtr = NULL;
1212 return 0;
1213}
1214
1215int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001216 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001217 if (send_codec) {
1218 codec = *send_codec;
1219 return 0;
1220 }
1221 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001222}
1223
// Fetches the current receive codec from the ACM into |codec|; forwards the
// ACM's return value (0 on success, -1 on failure).
int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}
1227
kwiberg55b97fe2016-01-28 05:22:45 -08001228int32_t Channel::SetSendCodec(const CodecInst& codec) {
1229 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1230 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001231
kwibergc8d071e2016-04-06 12:22:38 -07001232 if (!codec_manager_.RegisterEncoder(codec) ||
1233 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001234 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1235 "SetSendCodec() failed to register codec to ACM");
1236 return -1;
1237 }
1238
1239 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1240 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1241 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1242 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1243 "SetSendCodec() failed to register codec to"
1244 " RTP/RTCP module");
1245 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001246 }
kwiberg55b97fe2016-01-28 05:22:45 -08001247 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001248
kwiberg55b97fe2016-01-28 05:22:45 -08001249 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1250 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1251 "SetSendCodec() failed to set audio packet size");
1252 return -1;
1253 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001254
kwiberg55b97fe2016-01-28 05:22:45 -08001255 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001256}
1257
// Sets the target encoder bit rate.
// @param bitrate_bps Target rate in bits per second; forwarded to the ACM.
void Channel::SetBitRate(int bitrate_bps) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->SetBitRate(bitrate_bps);
}
1263
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001264void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001265 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001266 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1267
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001268 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001269 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1270 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001271 assert(false); // This should not happen.
1272 }
1273}
1274
// Enables/disables VAD (and thereby CNG/DTX) on the send side.
// @param enableVAD  True to enable VAD/CNG.
// @param mode       Aggressiveness of the VAD.
// @param disableDTX Deprecated; must not be true together with enableVAD.
// Returns 0 on success, -1 if the codec manager rejects the configuration.
int32_t Channel::SetVADStatus(bool enableVAD,
                              ACMVADMode mode,
                              bool disableDTX) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetVADStatus(mode=%d)", mode);
  RTC_DCHECK(!(disableDTX && enableVAD));  // disableDTX mode is deprecated.
  // Applying the VAD setting requires re-creating the encoder stack.
  if (!codec_manager_.SetVAD(enableVAD, mode) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
                                       kTraceError,
                                       "SetVADStatus() failed to set VAD");
    return -1;
  }
  return 0;
}
1290
kwiberg55b97fe2016-01-28 05:22:45 -08001291int32_t Channel::GetVADStatus(bool& enabledVAD,
1292 ACMVADMode& mode,
1293 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001294 const auto* params = codec_manager_.GetStackParams();
1295 enabledVAD = params->use_cng;
1296 mode = params->vad_mode;
1297 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001298 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001299}
1300
// Sets (or, when codec.pltype == -1, removes) the receive payload type for
// the given codec in both the RTP receiver and the ACM. Must not be called
// while the channel is playing or receiving. Returns 0 on success, -1 on
// failure (details via _engineStatisticsPtr).
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }
  if (channel_state_.Get().receiving) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_LISTENING, kTraceError,
        "SetRecPayloadType() unable to set PT while listening");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    rtp_payload_registry_->ReceivePayloadType(
        rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
        (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  // Register in the RTP receiver; on failure, de-register the payload type
  // and retry once.
  if (rtp_receiver_->RegisterReceivePayload(
          codec.plname, codec.pltype, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(
            codec.plname, codec.pltype, codec.plfreq, codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  // Register in the ACM, with the same deregister-and-retry pattern.
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1373
kwiberg55b97fe2016-01-28 05:22:45 -08001374int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1375 int8_t payloadType(-1);
1376 if (rtp_payload_registry_->ReceivePayloadType(
1377 codec.plname, codec.plfreq, codec.channels,
1378 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1379 _engineStatisticsPtr->SetLastError(
1380 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1381 "GetRecPayloadType() failed to retrieve RX payload type");
1382 return -1;
1383 }
1384 codec.pltype = payloadType;
1385 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001386}
1387
kwiberg55b97fe2016-01-28 05:22:45 -08001388int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1389 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1390 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001391
kwiberg55b97fe2016-01-28 05:22:45 -08001392 CodecInst codec;
1393 int32_t samplingFreqHz(-1);
1394 const size_t kMono = 1;
1395 if (frequency == kFreq32000Hz)
1396 samplingFreqHz = 32000;
1397 else if (frequency == kFreq16000Hz)
1398 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001399
kwiberg55b97fe2016-01-28 05:22:45 -08001400 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1401 _engineStatisticsPtr->SetLastError(
1402 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1403 "SetSendCNPayloadType() failed to retrieve default CN codec "
1404 "settings");
1405 return -1;
1406 }
1407
1408 // Modify the payload type (must be set to dynamic range)
1409 codec.pltype = type;
1410
kwibergc8d071e2016-04-06 12:22:38 -07001411 if (!codec_manager_.RegisterEncoder(codec) ||
1412 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001413 _engineStatisticsPtr->SetLastError(
1414 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1415 "SetSendCNPayloadType() failed to register CN to ACM");
1416 return -1;
1417 }
1418
1419 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1420 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1421 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1422 _engineStatisticsPtr->SetLastError(
1423 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1424 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1425 "module");
1426 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001427 }
kwiberg55b97fe2016-01-28 05:22:45 -08001428 }
1429 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001430}
1431
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001432int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001433 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001434 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001435
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001436 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001437 _engineStatisticsPtr->SetLastError(
1438 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001439 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001440 return -1;
1441 }
1442 return 0;
1443}
1444
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001445int Channel::SetOpusDtx(bool enable_dtx) {
1446 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1447 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001448 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001449 : audio_coding_->DisableOpusDtx();
1450 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001451 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1452 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001453 return -1;
1454 }
1455 return 0;
1456}
1457
mflodman3d7db262016-04-29 00:57:13 -07001458int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001459 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001460 "Channel::RegisterExternalTransport()");
1461
kwiberg55b97fe2016-01-28 05:22:45 -08001462 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001463 if (_externalTransport) {
1464 _engineStatisticsPtr->SetLastError(
1465 VE_INVALID_OPERATION, kTraceError,
1466 "RegisterExternalTransport() external transport already enabled");
1467 return -1;
1468 }
1469 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001470 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001471 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001472}
1473
kwiberg55b97fe2016-01-28 05:22:45 -08001474int32_t Channel::DeRegisterExternalTransport() {
1475 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1476 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001477
kwiberg55b97fe2016-01-28 05:22:45 -08001478 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001479 if (_transportPtr) {
1480 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1481 "DeRegisterExternalTransport() all transport is disabled");
1482 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001483 _engineStatisticsPtr->SetLastError(
1484 VE_INVALID_OPERATION, kTraceWarning,
1485 "DeRegisterExternalTransport() external transport already "
1486 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001487 }
1488 _externalTransport = false;
1489 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001490 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001491}
1492
// Entry point for an incoming RTP packet. Parses the header, updates
// receive statistics, and hands the packet on to ReceivePacket().
// Returns 0 on success, -1 if the header is invalid, the payload type is
// unknown, or dispatch fails. Note: |packet_time| is currently unused here.
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // Resolve the clock rate for this payload type; a negative value means
  // the payload type was never registered, so the packet is dropped.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Ordering/retransmission classification must happen before the packet
  // is counted in the receive statistics.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1519
1520bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001521 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001522 const RTPHeader& header,
1523 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001524 if (rtp_payload_registry_->IsRtx(header)) {
1525 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001526 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001527 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001528 assert(packet_length >= header.headerLength);
1529 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001530 PayloadUnion payload_specific;
1531 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001532 &payload_specific)) {
1533 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001534 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001535 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1536 payload_specific, in_order);
1537}
1538
// Unwraps an RTX (retransmission) packet: strips the RTX header, restores
// the original RTP packet into |restored_packet_|, and re-dispatches it via
// OnRecoveredPacket(). Returns true if the recovered packet was handled.
// The |restored_packet_in_use_| flag guards the shared restore buffer
// against nested RTX unwrapping (an RTX packet inside an RTX packet).
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  // The restore buffer is already in use further up the call stack, which
  // means this packet carries a second RTX layer: drop it.
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Mark the buffer busy for the duration of the recursive dispatch; the
  // flag must be cleared again even though OnRecoveredPacket may fail.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1567
1568bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1569 StreamStatistician* statistician =
1570 rtp_receive_statistics_->GetStatistician(header.ssrc);
1571 if (!statistician)
1572 return false;
1573 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001574}
1575
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001576bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1577 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001578 // Retransmissions are handled separately if RTX is enabled.
1579 if (rtp_payload_registry_->RtxEnabled())
1580 return false;
1581 StreamStatistician* statistician =
1582 rtp_receive_statistics_->GetStatistician(header.ssrc);
1583 if (!statistician)
1584 return false;
1585 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001586 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001587 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001588 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001589}
1590
// Entry point for an incoming RTCP packet. Feeds the packet to the RTP/RTCP
// module, then — once a valid RTT and remote NTP mapping are available —
// updates the NTP estimator used for timestamp alignment. Returns 0 in all
// cases except (never) -1; an invalid RTCP packet only sets a warning.
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  // An RTT of 0 means no valid measurement yet; defer the NTP update.
  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }
  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // ts_stats_lock_ is held only for the estimator update itself.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1625
// Starts playing a file (by name) into this channel's playout path.
// Replaces any existing (stopped) file player, starts playback, then
// registers the channel with the output mixer. Returns 0 on success,
// -1 on any failure. Fails if file playout is already active.
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  // _fileCritSect is deliberately released before RegisterFilePlayingToMixer()
  // below; holding it across the mixer call could deadlock (see
  // RegisterFilePlayingToMixer for details).
  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy any previous player instance before creating a new one.
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      // Roll back: stop and destroy the half-initialized player.
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1688
// Stream-based overload of StartPlayingFileLocally(): plays audio from an
// application-supplied InStream into the channel's playout path. Same
// replace-player / start / register-with-mixer sequence as the file-name
// overload. Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(InStream* stream,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(format=%d,"
               " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileLocally() NULL as input stream");
    return -1;
  }

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  // _fileCritSect must be released before RegisterFilePlayingToMixer();
  // see RegisterFilePlayingToMixer for the deadlock rationale.
  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy the old instance
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    // Create the instance
    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format isnot correct");
      return -1;
    }

    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                               volumeScaling, notificationTime,
                                               stopPosition, codecInst) != 0) {
      // Roll back the half-initialized player on failure.
      _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                         "StartPlayingFile() failed to "
                                         "start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1757
// Stops local file playout and removes this channel from the output mixer.
// No-op (returns 0) if file playout is not active. Returns -1 if the
// player cannot be stopped or the mixer deregistration fails.
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    // Tear down the player and clear the state flag under the lock.
    _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1793
kwiberg55b97fe2016-01-28 05:22:45 -08001794int Channel::IsPlayingFileLocally() const {
1795 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001796}
1797
// Adds this channel to the output mixer as an anonymous participant so the
// file being played can be heard. Only does real work when both playout and
// file playing are active; otherwise returns 0 immediately. On mixer
// failure the file player is torn down and -1 is returned.
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Roll back: clear the flag first (so the mixer stops pulling), then
    // take the lock and destroy the player.
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    _outputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    return -1;
  }

  return 0;
}
1825
// Starts playing a file (by name) into this channel's capture/send path,
// substituting for the microphone. If a file is already playing this is a
// warning-level no-op returning 0. Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          bool loop,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
               "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  // Unlike the playout-side variant, no mixer call follows, so the lock can
  // be held for the whole function.
  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() filePlayer is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
    return -1;
  }

  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(
          fileName, loop, startPosition, volumeScaling, notificationTime,
          stopPosition, (const CodecInst*)codecInst) != 0) {
    // Roll back the half-initialized player on failure.
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFile() failed to start file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }
  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1885
// Stream-based overload: plays audio from an application-supplied InStream
// into the capture/send path in place of the microphone. Warning-level
// no-op (returns 0) if a file is already playing. Returns -1 on failure.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                            volumeScaling, notificationTime,
                                            stopPosition, codecInst) != 0) {
    // Roll back the half-initialized player on failure.
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }

  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1950
kwiberg55b97fe2016-01-28 05:22:45 -08001951int Channel::StopPlayingFileAsMicrophone() {
1952 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1953 "Channel::StopPlayingFileAsMicrophone()");
1954
1955 rtc::CritScope cs(&_fileCritSect);
1956
1957 if (!channel_state_.Get().input_file_playing) {
1958 return 0;
1959 }
1960
1961 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1962 _engineStatisticsPtr->SetLastError(
1963 VE_STOP_RECORDING_FAILED, kTraceError,
1964 "StopPlayingFile() could not stop playing");
1965 return -1;
1966 }
1967 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1968 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1969 _inputFilePlayerPtr = NULL;
1970 channel_state_.SetInputFilePlaying(false);
1971
1972 return 0;
1973}
1974
1975int Channel::IsPlayingFileAsMicrophone() const {
1976 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001977}
1978
// Starts recording this channel's playout to a file. The file format is
// derived from |codecInst|: NULL selects 16 kHz L16 PCM, linear/G.711
// codecs select WAV, anything else a compressed container. A no-op
// (returns 0) if recording is already active; -1 on failure.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes NULL; |codecInst| may be
  // re-pointed at this local below.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    // Roll back the half-initialized recorder on failure.
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2045
2046int Channel::StartRecordingPlayout(OutStream* stream,
2047 const CodecInst* codecInst) {
2048 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2049 "Channel::StartRecordingPlayout()");
2050
2051 if (_outputFileRecording) {
2052 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2053 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002054 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002055 }
2056
2057 FileFormats format;
2058 const uint32_t notificationTime(0); // Not supported in VoE
2059 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2060
2061 if (codecInst != NULL && codecInst->channels != 1) {
2062 _engineStatisticsPtr->SetLastError(
2063 VE_BAD_ARGUMENT, kTraceError,
2064 "StartRecordingPlayout() invalid compression");
2065 return (-1);
2066 }
2067 if (codecInst == NULL) {
2068 format = kFileFormatPcm16kHzFile;
2069 codecInst = &dummyCodec;
2070 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2071 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2072 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2073 format = kFileFormatWavFile;
2074 } else {
2075 format = kFileFormatCompressedFile;
2076 }
2077
2078 rtc::CritScope cs(&_fileCritSect);
2079
2080 // Destroy the old instance
2081 if (_outputFileRecorderPtr) {
2082 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2083 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2084 _outputFileRecorderPtr = NULL;
2085 }
2086
2087 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2088 _outputFileRecorderId, (const FileFormats)format);
2089 if (_outputFileRecorderPtr == NULL) {
2090 _engineStatisticsPtr->SetLastError(
2091 VE_INVALID_ARGUMENT, kTraceError,
2092 "StartRecordingPlayout() fileRecorder format isnot correct");
2093 return -1;
2094 }
2095
2096 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2097 notificationTime) != 0) {
2098 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2099 "StartRecordingPlayout() failed to "
2100 "start file recording");
2101 _outputFileRecorderPtr->StopRecording();
2102 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2103 _outputFileRecorderPtr = NULL;
2104 return -1;
2105 }
2106
2107 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2108 _outputFileRecording = true;
2109
2110 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002111}
2112
kwiberg55b97fe2016-01-28 05:22:45 -08002113int Channel::StopRecordingPlayout() {
2114 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2115 "Channel::StopRecordingPlayout()");
2116
2117 if (!_outputFileRecording) {
2118 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2119 "StopRecordingPlayout() isnot recording");
2120 return -1;
2121 }
2122
2123 rtc::CritScope cs(&_fileCritSect);
2124
2125 if (_outputFileRecorderPtr->StopRecording() != 0) {
2126 _engineStatisticsPtr->SetLastError(
2127 VE_STOP_RECORDING_FAILED, kTraceError,
2128 "StopRecording() could not stop recording");
2129 return (-1);
2130 }
2131 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2132 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2133 _outputFileRecorderPtr = NULL;
2134 _outputFileRecording = false;
2135
2136 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002137}
2138
kwiberg55b97fe2016-01-28 05:22:45 -08002139void Channel::SetMixWithMicStatus(bool mix) {
2140 rtc::CritScope cs(&_fileCritSect);
2141 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002142}
2143
kwiberg55b97fe2016-01-28 05:22:45 -08002144int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2145 int8_t currentLevel = _outputAudioLevel.Level();
2146 level = static_cast<int32_t>(currentLevel);
2147 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002148}
2149
kwiberg55b97fe2016-01-28 05:22:45 -08002150int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2151 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2152 level = static_cast<int32_t>(currentLevel);
2153 return 0;
2154}
2155
solenberg1c2af8e2016-03-24 10:36:00 -07002156int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002157 rtc::CritScope cs(&volume_settings_critsect_);
2158 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002159 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002160 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002161 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002162}
2163
solenberg1c2af8e2016-03-24 10:36:00 -07002164bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002165 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002166 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002167}
2168
kwiberg55b97fe2016-01-28 05:22:45 -08002169int Channel::SetOutputVolumePan(float left, float right) {
2170 rtc::CritScope cs(&volume_settings_critsect_);
2171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002172 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002173 _panLeft = left;
2174 _panRight = right;
2175 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002176}
2177
kwiberg55b97fe2016-01-28 05:22:45 -08002178int Channel::GetOutputVolumePan(float& left, float& right) const {
2179 rtc::CritScope cs(&volume_settings_critsect_);
2180 left = _panLeft;
2181 right = _panRight;
2182 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002183}
2184
kwiberg55b97fe2016-01-28 05:22:45 -08002185int Channel::SetChannelOutputVolumeScaling(float scaling) {
2186 rtc::CritScope cs(&volume_settings_critsect_);
2187 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002188 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002189 _outputGain = scaling;
2190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002191}
2192
kwiberg55b97fe2016-01-28 05:22:45 -08002193int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2194 rtc::CritScope cs(&volume_settings_critsect_);
2195 scaling = _outputGain;
2196 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002197}
2198
solenberg8842c3e2016-03-11 03:06:41 -08002199int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002200 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002201 "Channel::SendTelephoneEventOutband(...)");
2202 RTC_DCHECK_LE(0, event);
2203 RTC_DCHECK_GE(255, event);
2204 RTC_DCHECK_LE(0, duration_ms);
2205 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002206 if (!Sending()) {
2207 return -1;
2208 }
solenberg8842c3e2016-03-11 03:06:41 -08002209 if (_rtpRtcpModule->SendTelephoneEventOutband(
2210 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002211 _engineStatisticsPtr->SetLastError(
2212 VE_SEND_DTMF_FAILED, kTraceWarning,
2213 "SendTelephoneEventOutband() failed to send event");
2214 return -1;
2215 }
2216 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002217}
2218
solenberg31642aa2016-03-14 08:00:37 -07002219int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
kwiberg55b97fe2016-01-28 05:22:45 -08002220 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002221 "Channel::SetSendTelephoneEventPayloadType()");
solenberg31642aa2016-03-14 08:00:37 -07002222 RTC_DCHECK_LE(0, payload_type);
2223 RTC_DCHECK_GE(127, payload_type);
2224 CodecInst codec = {0};
kwiberg55b97fe2016-01-28 05:22:45 -08002225 codec.plfreq = 8000;
solenberg31642aa2016-03-14 08:00:37 -07002226 codec.pltype = payload_type;
kwiberg55b97fe2016-01-28 05:22:45 -08002227 memcpy(codec.plname, "telephone-event", 16);
2228 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2229 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2230 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2231 _engineStatisticsPtr->SetLastError(
2232 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2233 "SetSendTelephoneEventPayloadType() failed to register send"
2234 "payload type");
2235 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002236 }
kwiberg55b97fe2016-01-28 05:22:45 -08002237 }
kwiberg55b97fe2016-01-28 05:22:45 -08002238 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002239}
2240
kwiberg55b97fe2016-01-28 05:22:45 -08002241int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2242 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2243 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002244
kwiberg55b97fe2016-01-28 05:22:45 -08002245 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002246
kwiberg55b97fe2016-01-28 05:22:45 -08002247 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002248
kwiberg55b97fe2016-01-28 05:22:45 -08002249 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2250 OnRxVadDetected(vadDecision);
2251 _oldVadDecision = vadDecision;
2252 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002253
kwiberg55b97fe2016-01-28 05:22:45 -08002254 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2255 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2256 vadDecision);
2257 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002258}
2259
kwiberg55b97fe2016-01-28 05:22:45 -08002260int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2261 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2262 "Channel::RegisterRxVadObserver()");
2263 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002264
kwiberg55b97fe2016-01-28 05:22:45 -08002265 if (_rxVadObserverPtr) {
2266 _engineStatisticsPtr->SetLastError(
2267 VE_INVALID_OPERATION, kTraceError,
2268 "RegisterRxVadObserver() observer already enabled");
2269 return -1;
2270 }
2271 _rxVadObserverPtr = &observer;
2272 _RxVadDetection = true;
2273 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002274}
2275
kwiberg55b97fe2016-01-28 05:22:45 -08002276int Channel::DeRegisterRxVadObserver() {
2277 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2278 "Channel::DeRegisterRxVadObserver()");
2279 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002280
kwiberg55b97fe2016-01-28 05:22:45 -08002281 if (!_rxVadObserverPtr) {
2282 _engineStatisticsPtr->SetLastError(
2283 VE_INVALID_OPERATION, kTraceWarning,
2284 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002285 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002286 }
2287 _rxVadObserverPtr = NULL;
2288 _RxVadDetection = false;
2289 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002290}
2291
kwiberg55b97fe2016-01-28 05:22:45 -08002292int Channel::VoiceActivityIndicator(int& activity) {
2293 activity = _sendFrameType;
2294 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002295}
2296
2297#ifdef WEBRTC_VOICE_ENGINE_AGC
2298
kwiberg55b97fe2016-01-28 05:22:45 -08002299int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2300 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2301 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2302 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002303
kwiberg55b97fe2016-01-28 05:22:45 -08002304 GainControl::Mode agcMode = kDefaultRxAgcMode;
2305 switch (mode) {
2306 case kAgcDefault:
2307 break;
2308 case kAgcUnchanged:
2309 agcMode = rx_audioproc_->gain_control()->mode();
2310 break;
2311 case kAgcFixedDigital:
2312 agcMode = GainControl::kFixedDigital;
2313 break;
2314 case kAgcAdaptiveDigital:
2315 agcMode = GainControl::kAdaptiveDigital;
2316 break;
2317 default:
2318 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2319 "SetRxAgcStatus() invalid Agc mode");
2320 return -1;
2321 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002322
kwiberg55b97fe2016-01-28 05:22:45 -08002323 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2324 _engineStatisticsPtr->SetLastError(
2325 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2326 return -1;
2327 }
2328 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2329 _engineStatisticsPtr->SetLastError(
2330 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2331 return -1;
2332 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002333
kwiberg55b97fe2016-01-28 05:22:45 -08002334 _rxAgcIsEnabled = enable;
2335 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002336
kwiberg55b97fe2016-01-28 05:22:45 -08002337 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002338}
2339
kwiberg55b97fe2016-01-28 05:22:45 -08002340int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2341 bool enable = rx_audioproc_->gain_control()->is_enabled();
2342 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002343
kwiberg55b97fe2016-01-28 05:22:45 -08002344 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002345
kwiberg55b97fe2016-01-28 05:22:45 -08002346 switch (agcMode) {
2347 case GainControl::kFixedDigital:
2348 mode = kAgcFixedDigital;
2349 break;
2350 case GainControl::kAdaptiveDigital:
2351 mode = kAgcAdaptiveDigital;
2352 break;
2353 default:
2354 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2355 "GetRxAgcStatus() invalid Agc mode");
2356 return -1;
2357 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002358
kwiberg55b97fe2016-01-28 05:22:45 -08002359 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002360}
2361
kwiberg55b97fe2016-01-28 05:22:45 -08002362int Channel::SetRxAgcConfig(AgcConfig config) {
2363 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2364 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002365
kwiberg55b97fe2016-01-28 05:22:45 -08002366 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2367 config.targetLeveldBOv) != 0) {
2368 _engineStatisticsPtr->SetLastError(
2369 VE_APM_ERROR, kTraceError,
2370 "SetRxAgcConfig() failed to set target peak |level|"
2371 "(or envelope) of the Agc");
2372 return -1;
2373 }
2374 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2375 config.digitalCompressionGaindB) != 0) {
2376 _engineStatisticsPtr->SetLastError(
2377 VE_APM_ERROR, kTraceError,
2378 "SetRxAgcConfig() failed to set the range in |gain| the"
2379 " digital compression stage may apply");
2380 return -1;
2381 }
2382 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2383 0) {
2384 _engineStatisticsPtr->SetLastError(
2385 VE_APM_ERROR, kTraceError,
2386 "SetRxAgcConfig() failed to set hard limiter to the signal");
2387 return -1;
2388 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002389
kwiberg55b97fe2016-01-28 05:22:45 -08002390 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002391}
2392
kwiberg55b97fe2016-01-28 05:22:45 -08002393int Channel::GetRxAgcConfig(AgcConfig& config) {
2394 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2395 config.digitalCompressionGaindB =
2396 rx_audioproc_->gain_control()->compression_gain_db();
2397 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002398
kwiberg55b97fe2016-01-28 05:22:45 -08002399 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002400}
2401
kwiberg55b97fe2016-01-28 05:22:45 -08002402#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002403
2404#ifdef WEBRTC_VOICE_ENGINE_NR
2405
kwiberg55b97fe2016-01-28 05:22:45 -08002406int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2407 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2408 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2409 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002410
kwiberg55b97fe2016-01-28 05:22:45 -08002411 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2412 switch (mode) {
2413 case kNsDefault:
2414 break;
2415 case kNsUnchanged:
2416 nsLevel = rx_audioproc_->noise_suppression()->level();
2417 break;
2418 case kNsConference:
2419 nsLevel = NoiseSuppression::kHigh;
2420 break;
2421 case kNsLowSuppression:
2422 nsLevel = NoiseSuppression::kLow;
2423 break;
2424 case kNsModerateSuppression:
2425 nsLevel = NoiseSuppression::kModerate;
2426 break;
2427 case kNsHighSuppression:
2428 nsLevel = NoiseSuppression::kHigh;
2429 break;
2430 case kNsVeryHighSuppression:
2431 nsLevel = NoiseSuppression::kVeryHigh;
2432 break;
2433 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002434
kwiberg55b97fe2016-01-28 05:22:45 -08002435 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2436 _engineStatisticsPtr->SetLastError(
2437 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2438 return -1;
2439 }
2440 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2441 _engineStatisticsPtr->SetLastError(
2442 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2443 return -1;
2444 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002445
kwiberg55b97fe2016-01-28 05:22:45 -08002446 _rxNsIsEnabled = enable;
2447 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002448
kwiberg55b97fe2016-01-28 05:22:45 -08002449 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002450}
2451
kwiberg55b97fe2016-01-28 05:22:45 -08002452int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2453 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2454 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002455
kwiberg55b97fe2016-01-28 05:22:45 -08002456 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002457
kwiberg55b97fe2016-01-28 05:22:45 -08002458 switch (ncLevel) {
2459 case NoiseSuppression::kLow:
2460 mode = kNsLowSuppression;
2461 break;
2462 case NoiseSuppression::kModerate:
2463 mode = kNsModerateSuppression;
2464 break;
2465 case NoiseSuppression::kHigh:
2466 mode = kNsHighSuppression;
2467 break;
2468 case NoiseSuppression::kVeryHigh:
2469 mode = kNsVeryHighSuppression;
2470 break;
2471 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002472
kwiberg55b97fe2016-01-28 05:22:45 -08002473 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002474}
2475
kwiberg55b97fe2016-01-28 05:22:45 -08002476#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002477
kwiberg55b97fe2016-01-28 05:22:45 -08002478int Channel::SetLocalSSRC(unsigned int ssrc) {
2479 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2480 "Channel::SetLocalSSRC()");
2481 if (channel_state_.Get().sending) {
2482 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2483 "SetLocalSSRC() already sending");
2484 return -1;
2485 }
2486 _rtpRtcpModule->SetSSRC(ssrc);
2487 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002488}
2489
kwiberg55b97fe2016-01-28 05:22:45 -08002490int Channel::GetLocalSSRC(unsigned int& ssrc) {
2491 ssrc = _rtpRtcpModule->SSRC();
2492 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002493}
2494
kwiberg55b97fe2016-01-28 05:22:45 -08002495int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2496 ssrc = rtp_receiver_->SSRC();
2497 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002498}
2499
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002500int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002501 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002502 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002503}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002504
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002505int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2506 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002507 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2508 if (enable &&
2509 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2510 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002511 return -1;
2512 }
2513 return 0;
2514}
2515
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002516int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2517 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2518}
2519
2520int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2521 rtp_header_parser_->DeregisterRtpHeaderExtension(
2522 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002523 if (enable &&
2524 !rtp_header_parser_->RegisterRtpHeaderExtension(
2525 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002526 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002527 }
2528 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002529}
2530
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002531void Channel::EnableSendTransportSequenceNumber(int id) {
2532 int ret =
2533 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2534 RTC_DCHECK_EQ(0, ret);
2535}
2536
stefan3313ec92016-01-21 06:32:43 -08002537void Channel::EnableReceiveTransportSequenceNumber(int id) {
2538 rtp_header_parser_->DeregisterRtpHeaderExtension(
2539 kRtpExtensionTransportSequenceNumber);
2540 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2541 kRtpExtensionTransportSequenceNumber, id);
2542 RTC_DCHECK(ret);
2543}
2544
stefanbba9dec2016-02-01 04:39:55 -08002545void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002546 RtpPacketSender* rtp_packet_sender,
2547 TransportFeedbackObserver* transport_feedback_observer,
2548 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002549 RTC_DCHECK(rtp_packet_sender);
2550 RTC_DCHECK(transport_feedback_observer);
2551 RTC_DCHECK(packet_router && !packet_router_);
2552 feedback_observer_proxy_->SetTransportFeedbackObserver(
2553 transport_feedback_observer);
2554 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2555 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
2556 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002557 packet_router->AddRtpModule(_rtpRtcpModule.get());
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002558 packet_router_ = packet_router;
2559}
2560
stefanbba9dec2016-02-01 04:39:55 -08002561void Channel::RegisterReceiverCongestionControlObjects(
2562 PacketRouter* packet_router) {
2563 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002564 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002565 packet_router_ = packet_router;
2566}
2567
// Undoes RegisterSender/ReceiverCongestionControlObjects(): disables packet
// storage, detaches the proxies, and removes the RTP module from the packet
// router. Requires that a router was previously registered.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  // Clear the proxies before/while unhooking the router; preserve this
  // order — it mirrors the setup sequence in reverse.
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2577
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002578void Channel::SetRTCPStatus(bool enable) {
2579 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2580 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002581 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002582}
2583
kwiberg55b97fe2016-01-28 05:22:45 -08002584int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002585 RtcpMode method = _rtpRtcpModule->RTCP();
2586 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002587 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002588}
2589
kwiberg55b97fe2016-01-28 05:22:45 -08002590int Channel::SetRTCP_CNAME(const char cName[256]) {
2591 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2592 "Channel::SetRTCP_CNAME()");
2593 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2594 _engineStatisticsPtr->SetLastError(
2595 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2596 "SetRTCP_CNAME() failed to set RTCP CNAME");
2597 return -1;
2598 }
2599 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002600}
2601
kwiberg55b97fe2016-01-28 05:22:45 -08002602int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2603 if (cName == NULL) {
2604 _engineStatisticsPtr->SetLastError(
2605 VE_INVALID_ARGUMENT, kTraceError,
2606 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2607 return -1;
2608 }
2609 char cname[RTCP_CNAME_SIZE];
2610 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2611 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2612 _engineStatisticsPtr->SetLastError(
2613 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2614 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2615 return -1;
2616 }
2617 strcpy(cName, cname);
2618 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002619}
2620
kwiberg55b97fe2016-01-28 05:22:45 -08002621int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2622 unsigned int& NTPLow,
2623 unsigned int& timestamp,
2624 unsigned int& playoutTimestamp,
2625 unsigned int* jitter,
2626 unsigned short* fractionLost) {
2627 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002628
kwiberg55b97fe2016-01-28 05:22:45 -08002629 RTCPSenderInfo senderInfo;
2630 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2631 _engineStatisticsPtr->SetLastError(
2632 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2633 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2634 "side");
2635 return -1;
2636 }
2637
2638 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
2639 // and octet count)
2640 NTPHigh = senderInfo.NTPseconds;
2641 NTPLow = senderInfo.NTPfraction;
2642 timestamp = senderInfo.RTPtimeStamp;
2643
2644 // --- Locally derived information
2645
2646 // This value is updated on each incoming RTCP packet (0 when no packet
2647 // has been received)
2648 playoutTimestamp = playout_timestamp_rtcp_;
2649
2650 if (NULL != jitter || NULL != fractionLost) {
2651 // Get all RTCP receiver report blocks that have been received on this
2652 // channel. If we receive RTP packets from a remote source we know the
2653 // remote SSRC and use the report block from him.
2654 // Otherwise use the first report block.
2655 std::vector<RTCPReportBlock> remote_stats;
2656 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2657 remote_stats.empty()) {
2658 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2659 "GetRemoteRTCPData() failed to measure statistics due"
2660 " to lack of received RTP and/or RTCP packets");
2661 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002662 }
2663
kwiberg55b97fe2016-01-28 05:22:45 -08002664 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2665 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2666 for (; it != remote_stats.end(); ++it) {
2667 if (it->remoteSSRC == remoteSSRC)
2668 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002669 }
kwiberg55b97fe2016-01-28 05:22:45 -08002670
2671 if (it == remote_stats.end()) {
2672 // If we have not received any RTCP packets from this SSRC it probably
2673 // means that we have not received any RTP packets.
2674 // Use the first received report block instead.
2675 it = remote_stats.begin();
2676 remoteSSRC = it->remoteSSRC;
2677 }
2678
2679 if (jitter) {
2680 *jitter = it->jitter;
2681 }
2682
2683 if (fractionLost) {
2684 *fractionLost = it->fractionLost;
2685 }
2686 }
2687 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002688}
2689
kwiberg55b97fe2016-01-28 05:22:45 -08002690int Channel::SendApplicationDefinedRTCPPacket(
2691 unsigned char subType,
2692 unsigned int name,
2693 const char* data,
2694 unsigned short dataLengthInBytes) {
2695 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2696 "Channel::SendApplicationDefinedRTCPPacket()");
2697 if (!channel_state_.Get().sending) {
2698 _engineStatisticsPtr->SetLastError(
2699 VE_NOT_SENDING, kTraceError,
2700 "SendApplicationDefinedRTCPPacket() not sending");
2701 return -1;
2702 }
2703 if (NULL == data) {
2704 _engineStatisticsPtr->SetLastError(
2705 VE_INVALID_ARGUMENT, kTraceError,
2706 "SendApplicationDefinedRTCPPacket() invalid data value");
2707 return -1;
2708 }
2709 if (dataLengthInBytes % 4 != 0) {
2710 _engineStatisticsPtr->SetLastError(
2711 VE_INVALID_ARGUMENT, kTraceError,
2712 "SendApplicationDefinedRTCPPacket() invalid length value");
2713 return -1;
2714 }
2715 RtcpMode status = _rtpRtcpModule->RTCP();
2716 if (status == RtcpMode::kOff) {
2717 _engineStatisticsPtr->SetLastError(
2718 VE_RTCP_ERROR, kTraceError,
2719 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2720 return -1;
2721 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002722
kwiberg55b97fe2016-01-28 05:22:45 -08002723 // Create and schedule the RTCP APP packet for transmission
2724 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2725 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2726 _engineStatisticsPtr->SetLastError(
2727 VE_SEND_ERROR, kTraceError,
2728 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2729 return -1;
2730 }
2731 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002732}
2733
kwiberg55b97fe2016-01-28 05:22:45 -08002734int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2735 unsigned int& maxJitterMs,
2736 unsigned int& discardedPackets) {
2737 // The jitter statistics is updated for each received RTP packet and is
2738 // based on received packets.
2739 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2740 // If RTCP is off, there is no timed thread in the RTCP module regularly
2741 // generating new stats, trigger the update manually here instead.
2742 StreamStatistician* statistician =
2743 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2744 if (statistician) {
2745 // Don't use returned statistics, use data from proxy instead so that
2746 // max jitter can be fetched atomically.
2747 RtcpStatistics s;
2748 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002749 }
kwiberg55b97fe2016-01-28 05:22:45 -08002750 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002751
kwiberg55b97fe2016-01-28 05:22:45 -08002752 ChannelStatistics stats = statistics_proxy_->GetStats();
2753 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2754 if (playoutFrequency > 0) {
2755 // Scale RTP statistics given the current playout frequency
2756 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2757 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2758 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002759
kwiberg55b97fe2016-01-28 05:22:45 -08002760 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002761
kwiberg55b97fe2016-01-28 05:22:45 -08002762 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002763}
2764
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002765int Channel::GetRemoteRTCPReportBlocks(
2766 std::vector<ReportBlock>* report_blocks) {
2767 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002768 _engineStatisticsPtr->SetLastError(
2769 VE_INVALID_ARGUMENT, kTraceError,
2770 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002771 return -1;
2772 }
2773
2774 // Get the report blocks from the latest received RTCP Sender or Receiver
2775 // Report. Each element in the vector contains the sender's SSRC and a
2776 // report block according to RFC 3550.
2777 std::vector<RTCPReportBlock> rtcp_report_blocks;
2778 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002779 return -1;
2780 }
2781
2782 if (rtcp_report_blocks.empty())
2783 return 0;
2784
2785 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2786 for (; it != rtcp_report_blocks.end(); ++it) {
2787 ReportBlock report_block;
2788 report_block.sender_SSRC = it->remoteSSRC;
2789 report_block.source_SSRC = it->sourceSSRC;
2790 report_block.fraction_lost = it->fractionLost;
2791 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2792 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2793 report_block.interarrival_jitter = it->jitter;
2794 report_block.last_SR_timestamp = it->lastSR;
2795 report_block.delay_since_last_SR = it->delaySinceLastSR;
2796 report_blocks->push_back(report_block);
2797 }
2798 return 0;
2799}
2800
// Fills |stats| with combined call statistics: RTCP receive statistics
// (loss, jitter), round-trip time, send/receive data counters and the
// capture-start NTP timestamp. Always returns 0; if send-side data counters
// cannot be retrieved, those fields stay zero and a warning is traced.
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  if (statistician) {
    // When RTCP is off there is no periodic update, so ask the statistician
    // to recompute the stats on this call (second argument true).
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }

  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // |capture_start_ntp_time_ms_| is written elsewhere; guard the read.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2851
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002852int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002853 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002854 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002855
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002856 if (enable) {
2857 if (redPayloadtype < 0 || redPayloadtype > 127) {
2858 _engineStatisticsPtr->SetLastError(
2859 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002860 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002861 return -1;
2862 }
2863
2864 if (SetRedPayloadType(redPayloadtype) < 0) {
2865 _engineStatisticsPtr->SetLastError(
2866 VE_CODEC_ERROR, kTraceError,
2867 "SetSecondarySendCodec() Failed to register RED ACM");
2868 return -1;
2869 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002870 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002871
kwibergc8d071e2016-04-06 12:22:38 -07002872 if (!codec_manager_.SetCopyRed(enable) ||
2873 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002874 _engineStatisticsPtr->SetLastError(
2875 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002876 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002877 return -1;
2878 }
2879 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002880}
2881
kwiberg55b97fe2016-01-28 05:22:45 -08002882int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
kwibergc8d071e2016-04-06 12:22:38 -07002883 enabled = codec_manager_.GetStackParams()->use_red;
kwiberg55b97fe2016-01-28 05:22:45 -08002884 if (enabled) {
2885 int8_t payloadType = 0;
2886 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2887 _engineStatisticsPtr->SetLastError(
2888 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2889 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2890 "module");
2891 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002892 }
kwiberg55b97fe2016-01-28 05:22:45 -08002893 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002894 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002895 }
2896 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002897}
2898
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002899int Channel::SetCodecFECStatus(bool enable) {
2900 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2901 "Channel::SetCodecFECStatus()");
2902
kwibergc8d071e2016-04-06 12:22:38 -07002903 if (!codec_manager_.SetCodecFEC(enable) ||
2904 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002905 _engineStatisticsPtr->SetLastError(
2906 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2907 "SetCodecFECStatus() failed to set FEC state");
2908 return -1;
2909 }
2910 return 0;
2911}
2912
2913bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002914 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002915}
2916
// Enables or disables NACK-based loss handling. |maxNumberOfPackets| bounds
// the send-side retransmission buffer, the receive-side reordering
// threshold and the NACK list size in the ACM.
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
  // None of these functions can fail.
  // When pacing is enabled, packet storage is always on, so the store flag
  // is only toggled here when pacing is disabled.
  if (!pacing_enabled_)
    _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
  rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
  if (enable)
    audio_coding_->EnableNack(maxNumberOfPackets);
  else
    audio_coding_->DisableNack();
}
2928
// Called when we are missing one or more packets.
// Forwards a NACK for |length| sequence numbers to the RTP/RTCP module.
// Returns the result of RtpRtcp::SendNACK.
int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
  return _rtpRtcpModule->SendNACK(sequence_numbers, length);
}
2933
// Copies |audioFrame| into this channel's private |_audioFrame| and stamps
// it with the channel id, preparing it for PrepareEncodeAndSend().
// Always returns 0.
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Demultiplex()");
  _audioFrame.CopyFrom(audioFrame);
  _audioFrame.id_ = _channelId;
  return 0;
}
2941
// Remixes/resamples raw capture audio into this channel's |_audioFrame|,
// capping the rate and channel count at the send codec's values so the
// capture signal is never upsampled or upmixed here.
// NOTE(review): the return value of GetSendCodec() is ignored; presumably a
// send codec is always registered by the time this runs -- confirm.
void Channel::Demultiplex(const int16_t* audio_data,
                          int sample_rate,
                          size_t number_of_frames,
                          size_t number_of_channels) {
  CodecInst codec;
  GetSendCodec(codec);

  // Never upsample or upmix the capture signal here. This should be done at the
  // end of the send chain.
  _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
  _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
  RemixAndResample(audio_data, number_of_frames, number_of_channels,
                   sample_rate, &input_resampler_, &_audioFrame);
}
2956
// Pre-processes the frame in |_audioFrame| before encoding: optionally
// mixes/replaces it with file audio, applies (ramped) muting, feeds it to a
// registered external-media callback, and updates the RMS level used for
// the audio-level RTP extension.
// Returns 0 on success, 0xFFFFFFFF if the frame is empty.
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Mute() is given both the previous and current mute state so it can ramp
  // the transition instead of hard-switching.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    // Only count the frame as fully muted when both this frame and the
    // previous one were muted (i.e. no ramping samples remain audible).
    if (is_muted && previous_frame_muted_) {
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
2998
kwiberg55b97fe2016-01-28 05:22:45 -08002999uint32_t Channel::EncodeAndSend() {
3000 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3001 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003002
kwiberg55b97fe2016-01-28 05:22:45 -08003003 assert(_audioFrame.num_channels_ <= 2);
3004 if (_audioFrame.samples_per_channel_ == 0) {
3005 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3006 "Channel::EncodeAndSend() invalid audio frame");
3007 return 0xFFFFFFFF;
3008 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003009
kwiberg55b97fe2016-01-28 05:22:45 -08003010 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003011
kwiberg55b97fe2016-01-28 05:22:45 -08003012 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00003013
kwiberg55b97fe2016-01-28 05:22:45 -08003014 // The ACM resamples internally.
3015 _audioFrame.timestamp_ = _timeStamp;
3016 // This call will trigger AudioPacketizationCallback::SendData if encoding
3017 // is done and payload is ready for packetization and transmission.
3018 // Otherwise, it will return without invoking the callback.
3019 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3020 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3021 "Channel::EncodeAndSend() ACM encoding failed");
3022 return 0xFFFFFFFF;
3023 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003024
kwiberg55b97fe2016-01-28 05:22:45 -08003025 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3026 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003027}
3028
Minyue2013aec2015-05-13 14:14:42 +02003029void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003030 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003031 Channel* channel = associate_send_channel_.channel();
3032 if (channel && channel->ChannelId() == channel_id) {
3033 // If this channel is associated with a send channel of the specified
3034 // Channel ID, disassociate with it.
3035 ChannelOwner ref(NULL);
3036 associate_send_channel_ = ref;
3037 }
3038}
3039
kwiberg55b97fe2016-01-28 05:22:45 -08003040int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3041 VoEMediaProcess& processObject) {
3042 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3043 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003044
kwiberg55b97fe2016-01-28 05:22:45 -08003045 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003046
kwiberg55b97fe2016-01-28 05:22:45 -08003047 if (kPlaybackPerChannel == type) {
3048 if (_outputExternalMediaCallbackPtr) {
3049 _engineStatisticsPtr->SetLastError(
3050 VE_INVALID_OPERATION, kTraceError,
3051 "Channel::RegisterExternalMediaProcessing() "
3052 "output external media already enabled");
3053 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003054 }
kwiberg55b97fe2016-01-28 05:22:45 -08003055 _outputExternalMediaCallbackPtr = &processObject;
3056 _outputExternalMedia = true;
3057 } else if (kRecordingPerChannel == type) {
3058 if (_inputExternalMediaCallbackPtr) {
3059 _engineStatisticsPtr->SetLastError(
3060 VE_INVALID_OPERATION, kTraceError,
3061 "Channel::RegisterExternalMediaProcessing() "
3062 "output external media already enabled");
3063 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003064 }
kwiberg55b97fe2016-01-28 05:22:45 -08003065 _inputExternalMediaCallbackPtr = &processObject;
3066 channel_state_.SetInputExternalMedia(true);
3067 }
3068 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003069}
3070
// Unregisters the external media-processing callback for the given
// direction. Deregistering when nothing is registered is not an error: a
// warning is recorded and 0 is returned.
// Always returns 0.
int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterExternalMediaProcessing()");

  rtc::CritScope cs(&_callbackCritSect);

  if (kPlaybackPerChannel == type) {
    if (!_outputExternalMediaCallbackPtr) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_OPERATION, kTraceWarning,
          "Channel::DeRegisterExternalMediaProcessing() "
          "output external media already disabled");
      return 0;
    }
    _outputExternalMedia = false;
    _outputExternalMediaCallbackPtr = NULL;
  } else if (kRecordingPerChannel == type) {
    if (!_inputExternalMediaCallbackPtr) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_OPERATION, kTraceWarning,
          "Channel::DeRegisterExternalMediaProcessing() "
          "input external media already disabled");
      return 0;
    }
    channel_state_.SetInputExternalMedia(false);
    _inputExternalMediaCallbackPtr = NULL;
  }

  return 0;
}
3101
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003102int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003103 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3104 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003105
kwiberg55b97fe2016-01-28 05:22:45 -08003106 if (channel_state_.Get().playing) {
3107 _engineStatisticsPtr->SetLastError(
3108 VE_INVALID_OPERATION, kTraceError,
3109 "Channel::SetExternalMixing() "
3110 "external mixing cannot be changed while playing.");
3111 return -1;
3112 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003113
kwiberg55b97fe2016-01-28 05:22:45 -08003114 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003115
kwiberg55b97fe2016-01-28 05:22:45 -08003116 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003117}
3118
// Fetches NetEq/jitter-buffer statistics from the ACM into |stats|.
// Returns the ACM's result code (0 on success).
int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
  return audio_coding_->GetNetworkStatistics(&stats);
}
3122
// Fetches the ACM's decoding-call statistics (counts of normal/PLC/CNG
// decode operations) into |stats|.
void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
  audio_coding_->GetDecodingCallStatistics(stats);
}
3126
// Reports the filtered jitter-buffer delay plus packet delay, and the
// current playout buffer delay, both in milliseconds.
// Returns false (leaving both outputs untouched) until at least one packet
// has initialized |_average_jitter_buffer_delay_us|.
bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
                               int* playout_buffer_delay_ms) const {
  rtc::CritScope lock(&video_sync_lock_);
  if (_average_jitter_buffer_delay_us == 0) {
    // No packets received yet; no estimate available.
    return false;
  }
  // |_average_jitter_buffer_delay_us| stores 1000x the filtered delay in ms
  // (see UpdatePacketDelay()); +500 rounds to the nearest millisecond.
  *jitter_buffer_delay_ms =
      (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
  *playout_buffer_delay_ms = playout_delay_ms_;
  return true;
}
3138
solenberg358057b2015-11-27 10:46:42 -08003139uint32_t Channel::GetDelayEstimate() const {
3140 int jitter_buffer_delay_ms = 0;
3141 int playout_buffer_delay_ms = 0;
3142 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3143 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3144}
3145
// Returns the ACM's least required jitter-buffer delay in milliseconds.
int Channel::LeastRequiredDelayMs() const {
  return audio_coding_->LeastRequiredDelayMs();
}
3149
kwiberg55b97fe2016-01-28 05:22:45 -08003150int Channel::SetMinimumPlayoutDelay(int delayMs) {
3151 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3152 "Channel::SetMinimumPlayoutDelay()");
3153 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3154 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3155 _engineStatisticsPtr->SetLastError(
3156 VE_INVALID_ARGUMENT, kTraceError,
3157 "SetMinimumPlayoutDelay() invalid min delay");
3158 return -1;
3159 }
3160 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3161 _engineStatisticsPtr->SetLastError(
3162 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3163 "SetMinimumPlayoutDelay() failed to set min playout delay");
3164 return -1;
3165 }
3166 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003167}
3168
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003169int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003170 uint32_t playout_timestamp_rtp = 0;
3171 {
tommi31fc21f2016-01-21 10:37:37 -08003172 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003173 playout_timestamp_rtp = playout_timestamp_rtp_;
3174 }
kwiberg55b97fe2016-01-28 05:22:45 -08003175 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003176 _engineStatisticsPtr->SetLastError(
3177 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3178 "GetPlayoutTimestamp() failed to retrieve timestamp");
3179 return -1;
3180 }
deadbeef74375882015-08-13 12:09:10 -07003181 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003182 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003183}
3184
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003185int Channel::SetInitTimestamp(unsigned int timestamp) {
3186 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003187 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003188 if (channel_state_.Get().sending) {
3189 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3190 "SetInitTimestamp() already sending");
3191 return -1;
3192 }
3193 _rtpRtcpModule->SetStartTimestamp(timestamp);
3194 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003195}
3196
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003197int Channel::SetInitSequenceNumber(short sequenceNumber) {
3198 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3199 "Channel::SetInitSequenceNumber()");
3200 if (channel_state_.Get().sending) {
3201 _engineStatisticsPtr->SetLastError(
3202 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3203 return -1;
3204 }
3205 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3206 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003207}
3208
// Exposes non-owning pointers to the channel's RTP/RTCP module and RTP
// receiver. Ownership stays with the channel; callers must not delete them.
// Always returns 0.
int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
                        RtpReceiver** rtp_receiver) const {
  *rtpRtcpModule = _rtpRtcpModule.get();
  *rtp_receiver = rtp_receiver_.get();
  return 0;
}
3215
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003216// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3217// a shared helper.
// Reads 10 ms of file audio and either mixes it into |_audioFrame| (when
// |_mixFileWithMicrophone| is set) or replaces the frame contents entirely.
// The file player is accessed under |_fileCritSect|; the mixing itself
// happens after the lock is released.
// Returns 0 on success (including end-of-file), -1 on error.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // 640 samples = 10 ms of mono audio at up to 64 kHz.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    if (fileSamples == 0) {
      // End of file reached; nothing to mix.
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3263
// Mixes 10 ms of output-file audio into |audioFrame|. The file player is
// accessed under |_fileCritSect|; the frame is mixed after the lock is
// released. Fails if the sample counts do not match.
// Returns 0 on success, -1 on error.
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  assert(mixingFrequency <= 48000);

  // 960 samples = 10 ms of mono audio at up to 96 kHz.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3304
// Recomputes the current playout timestamp: takes NetEq's playout timestamp
// and subtracts the audio device's playout delay converted to RTP ticks.
// The result is stored (under |video_sync_lock_|) in
// |playout_timestamp_rtcp_| when |rtcp| is true, otherwise in
// |playout_timestamp_rtp_|, together with the playout delay in ms.
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay: convert |delay_ms| to RTP timestamp ticks
  // using the current playout frequency.
  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  {
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3345
// Called for incoming RTP packets after successful RTP header parsing.
// Updates the per-packet delay and the exponentially filtered jitter-buffer
// delay estimate read by GetDelayEstimate(). |sequence_number| is only used
// for tracing here.
void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
                                uint16_t sequence_number) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
               rtp_timestamp, sequence_number);

  // Get frequency of last received payload
  int rtp_receive_frequency = GetPlayoutFrequency();

  // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
  // every incoming packet. May be empty if no valid playout timestamp is
  // available.
  // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
  // resulting difference is positive and will be used. When the inverse is
  // true (can happen when a network glitch causes a packet to arrive late,
  // and during long comfort noise periods with clock drift), or when
  // |jitter_buffer_playout_timestamp_| has no value, the difference is not
  // changed from the initial 0.
  uint32_t timestamp_diff_ms = 0;
  if (jitter_buffer_playout_timestamp_ &&
      IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
    // Unsigned subtraction is wraparound-safe here because IsNewerTimestamp
    // guarantees |rtp_timestamp| is ahead.
    timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
                        (rtp_receive_frequency / 1000);
    if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
      // Diff is too large; set it to zero instead.
      timestamp_diff_ms = 0;
    }
  }

  // Inter-packet spacing in ms, derived from consecutive RTP timestamps.
  uint16_t packet_delay_ms =
      (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);

  _previousTimestamp = rtp_timestamp;

  if (timestamp_diff_ms == 0)
    return;

  {
    rtc::CritScope lock(&video_sync_lock_);

    // Only accept plausible packet spacings (10-60 ms) to filter out bursts
    // and gaps.
    if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
      _recPacketDelayMs = packet_delay_ms;
    }

    if (_average_jitter_buffer_delay_us == 0) {
      // First sample initializes the filter directly.
      _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
      return;
    }

    // Filter average delay value using exponential filter (alpha is
    // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
    // risk of rounding error) and compensate for it in GetDelayEstimate()
    // later.
    _average_jitter_buffer_delay_us =
        (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
        8;
  }
}
3405
kwiberg55b97fe2016-01-28 05:22:45 -08003406void Channel::RegisterReceiveCodecsToRTPModule() {
3407 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3408 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003409
kwiberg55b97fe2016-01-28 05:22:45 -08003410 CodecInst codec;
3411 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003412
kwiberg55b97fe2016-01-28 05:22:45 -08003413 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3414 // Open up the RTP/RTCP receiver for all supported codecs
3415 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3416 (rtp_receiver_->RegisterReceivePayload(
3417 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3418 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3419 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3420 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3421 " to register %s (%d/%d/%" PRIuS
3422 "/%d) to RTP/RTCP "
3423 "receiver",
3424 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3425 codec.rate);
3426 } else {
3427 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3428 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3429 "(%d/%d/%" PRIuS
3430 "/%d) has been added to the RTP/RTCP "
3431 "receiver",
3432 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3433 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003434 }
kwiberg55b97fe2016-01-28 05:22:45 -08003435 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003436}
3437
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003438// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003439int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003440 CodecInst codec;
3441 bool found_red = false;
3442
3443 // Get default RED settings from the ACM database
3444 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3445 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003446 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003447 if (!STR_CASE_CMP(codec.plname, "RED")) {
3448 found_red = true;
3449 break;
3450 }
3451 }
3452
3453 if (!found_red) {
3454 _engineStatisticsPtr->SetLastError(
3455 VE_CODEC_ERROR, kTraceError,
3456 "SetRedPayloadType() RED is not supported");
3457 return -1;
3458 }
3459
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003460 codec.pltype = red_payload_type;
kwibergc8d071e2016-04-06 12:22:38 -07003461 if (!codec_manager_.RegisterEncoder(codec) ||
3462 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003463 _engineStatisticsPtr->SetLastError(
3464 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3465 "SetRedPayloadType() RED registration in ACM module failed");
3466 return -1;
3467 }
3468
3469 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3470 _engineStatisticsPtr->SetLastError(
3471 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3472 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3473 return -1;
3474 }
3475 return 0;
3476}
3477
kwiberg55b97fe2016-01-28 05:22:45 -08003478int Channel::SetSendRtpHeaderExtension(bool enable,
3479 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003480 unsigned char id) {
3481 int error = 0;
3482 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3483 if (enable) {
3484 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3485 }
3486 return error;
3487}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003488
wu@webrtc.org94454b72014-06-05 20:34:08 +00003489int32_t Channel::GetPlayoutFrequency() {
3490 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3491 CodecInst current_recive_codec;
3492 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3493 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3494 // Even though the actual sampling rate for G.722 audio is
3495 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3496 // 8,000 Hz because that value was erroneously assigned in
3497 // RFC 1890 and must remain unchanged for backward compatibility.
3498 playout_frequency = 8000;
3499 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3500 // We are resampling Opus internally to 32,000 Hz until all our
3501 // DSP routines can operate at 48,000 Hz, but the RTP clock
3502 // rate for the Opus payload format is standardized to 48,000 Hz,
3503 // because that is the maximum supported decoding sampling rate.
3504 playout_frequency = 48000;
3505 }
3506 }
3507 return playout_frequency;
3508}
3509
Minyue2013aec2015-05-13 14:14:42 +02003510int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003511 RtcpMode method = _rtpRtcpModule->RTCP();
3512 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003513 return 0;
3514 }
3515 std::vector<RTCPReportBlock> report_blocks;
3516 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003517
3518 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003519 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003520 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003521 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003522 Channel* channel = associate_send_channel_.channel();
3523 // Tries to get RTT from an associated channel. This is important for
3524 // receive-only channels.
3525 if (channel) {
3526 // To prevent infinite recursion and deadlock, calling GetRTT of
3527 // associate channel should always use "false" for argument:
3528 // |allow_associate_channel|.
3529 rtt = channel->GetRTT(false);
3530 }
3531 }
3532 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003533 }
3534
3535 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3536 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3537 for (; it != report_blocks.end(); ++it) {
3538 if (it->remoteSSRC == remoteSSRC)
3539 break;
3540 }
3541 if (it == report_blocks.end()) {
3542 // We have not received packets with SSRC matching the report blocks.
3543 // To calculate RTT we try with the SSRC of the first report block.
3544 // This is very important for send-only channels where we don't know
3545 // the SSRC of the other end.
3546 remoteSSRC = report_blocks[0].remoteSSRC;
3547 }
Minyue2013aec2015-05-13 14:14:42 +02003548
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003549 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003550 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003551 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003552 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3553 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003554 return 0;
3555 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003556 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003557}
3558
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003559} // namespace voe
3560} // namespace webrtc