blob: 742e53e836c5e933b9ae49533b9ee128494d5590 [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000022#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
ossue3525782016-05-25 07:37:43 -070024#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000025#include "webrtc/modules/audio_device/include/audio_device.h"
26#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010027#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010028#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010029#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
30#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
31#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000032#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010033#include "webrtc/modules/utility/include/audio_frame_operations.h"
34#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010035#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000036#include "webrtc/voice_engine/include/voe_base.h"
37#include "webrtc/voice_engine/include/voe_external_media.h"
38#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
39#include "webrtc/voice_engine/output_mixer.h"
40#include "webrtc/voice_engine/statistics.h"
41#include "webrtc/voice_engine/transmit_mixer.h"
42#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000043
andrew@webrtc.org50419b02012-11-14 19:07:54 +000044namespace webrtc {
45namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000046
kwibergc8d071e2016-04-06 12:22:38 -070047namespace {
48
49bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
50 acm2::RentACodec* rac,
51 const CodecInst& ci) {
52 const int result =
53 (*acm)->RegisterReceiveCodec(ci, [&] { return rac->RentIsacDecoder(); });
54 return result == 0;
55}
56
57} // namespace
58
// Attenuation, in dB, applied to outgoing telephone events (DTMF tones).
const int kTelephoneEventAttenuationdB = 10;
60
// Thread-safe proxy that forwards TransportFeedbackObserver calls to an
// observer that may be installed (or cleared) after construction. The setter
// runs on the construction thread, AddPacket() on the pacer thread and
// OnTransportFeedback() on the network thread; |crit_| guards the pointer.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    // The pacer and network threads are not known at construction time;
    // detaching lets each checker bind to the first thread that calls in.
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Installs (or clears, with nullptr) the observer to forward to. Must be
  // called on the thread that constructed this proxy.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  // Calls arriving before an observer is set are silently dropped.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
98
99class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
100 public:
101 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
102 pacer_thread_.DetachFromThread();
103 }
104
105 void SetSequenceNumberAllocator(
106 TransportSequenceNumberAllocator* seq_num_allocator) {
107 RTC_DCHECK(thread_checker_.CalledOnValidThread());
108 rtc::CritScope lock(&crit_);
109 seq_num_allocator_ = seq_num_allocator;
110 }
111
112 // Implements TransportSequenceNumberAllocator.
113 uint16_t AllocateSequenceNumber() override {
114 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
115 rtc::CritScope lock(&crit_);
116 if (!seq_num_allocator_)
117 return 0;
118 return seq_num_allocator_->AllocateSequenceNumber();
119 }
120
121 private:
122 rtc::CriticalSection crit_;
123 rtc::ThreadChecker thread_checker_;
124 rtc::ThreadChecker pacer_thread_;
125 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
126};
127
128class RtpPacketSenderProxy : public RtpPacketSender {
129 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800130 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100131
132 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
133 RTC_DCHECK(thread_checker_.CalledOnValidThread());
134 rtc::CritScope lock(&crit_);
135 rtp_packet_sender_ = rtp_packet_sender;
136 }
137
138 // Implements RtpPacketSender.
139 void InsertPacket(Priority priority,
140 uint32_t ssrc,
141 uint16_t sequence_number,
142 int64_t capture_time_ms,
143 size_t bytes,
144 bool retransmission) override {
145 rtc::CritScope lock(&crit_);
146 if (rtp_packet_sender_) {
147 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
148 capture_time_ms, bytes, retransmission);
149 }
150 }
151
152 private:
153 rtc::ThreadChecker thread_checker_;
154 rtc::CriticalSection crit_;
155 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
156};
157
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
// NOTE(review): this struct both derives from RtcpStatistics and embeds an
// RtcpStatistics member |rtcp|; only the member is read/written by
// StatisticsProxy in this file — the inherited fields appear unused here.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  // Most recent report-block statistics received for the tracked SSRC.
  RtcpStatistics rtcp;
  // Largest jitter value observed in any report block so far.
  uint32_t max_jitter;
};
166
167// Statistics callback, called at each generation of a new RTCP report block.
168class StatisticsProxy : public RtcpStatisticsCallback {
169 public:
tommi31fc21f2016-01-21 10:37:37 -0800170 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000171 virtual ~StatisticsProxy() {}
172
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000173 void StatisticsUpdated(const RtcpStatistics& statistics,
174 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000175 if (ssrc != ssrc_)
176 return;
177
tommi31fc21f2016-01-21 10:37:37 -0800178 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000179 stats_.rtcp = statistics;
180 if (statistics.jitter > stats_.max_jitter) {
181 stats_.max_jitter = statistics.jitter;
182 }
183 }
184
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000185 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000186
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000187 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800188 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000189 return stats_;
190 }
191
192 private:
193 // StatisticsUpdated calls are triggered from threads in the RTP module,
194 // while GetStats calls can be triggered from the public voice engine API,
195 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800196 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000197 const uint32_t ssrc_;
198 ChannelStatistics stats_;
199};
200
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000201class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000202 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000203 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
204 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000205
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000206 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
207 // Not used for Voice Engine.
208 }
209
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000210 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
211 int64_t rtt,
212 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000213 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
214 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
215 // report for VoiceEngine?
216 if (report_blocks.empty())
217 return;
218
219 int fraction_lost_aggregate = 0;
220 int total_number_of_packets = 0;
221
222 // If receiving multiple report blocks, calculate the weighted average based
223 // on the number of packets a report refers to.
224 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
225 block_it != report_blocks.end(); ++block_it) {
226 // Find the previous extended high sequence number for this remote SSRC,
227 // to calculate the number of RTP packets this report refers to. Ignore if
228 // we haven't seen this SSRC before.
229 std::map<uint32_t, uint32_t>::iterator seq_num_it =
230 extended_max_sequence_number_.find(block_it->sourceSSRC);
231 int number_of_packets = 0;
232 if (seq_num_it != extended_max_sequence_number_.end()) {
233 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
234 }
235 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
236 total_number_of_packets += number_of_packets;
237
238 extended_max_sequence_number_[block_it->sourceSSRC] =
239 block_it->extendedHighSeqNum;
240 }
241 int weighted_fraction_lost = 0;
242 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800243 weighted_fraction_lost =
244 (fraction_lost_aggregate + total_number_of_packets / 2) /
245 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000246 }
247 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000248 }
249
250 private:
251 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000252 // Maps remote side ssrc to extended highest sequence number received.
253 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000254};
255
// Receives an encoded audio frame (presumably from the ACM's packetization
// callback — confirm against channel.h) and pushes it to the RTP/RTCP module
// for packetization and sending. Returns 0 on success, -1 if the RTP/RTCP
// module rejects the data.
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  // NOTE(review): the (FrameType&) C-style cast looks unnecessary — confirm
  // SendOutgoingData()'s parameter type before removing it.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  // Remember the last frame's timing/payload info for later queries.
  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}
294
kwiberg55b97fe2016-01-28 05:22:45 -0800295int32_t Channel::InFrameType(FrameType frame_type) {
296 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
297 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000298
kwiberg55b97fe2016-01-28 05:22:45 -0800299 rtc::CritScope cs(&_callbackCritSect);
300 _sendFrameType = (frame_type == kAudioFrameSpeech);
301 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000302}
303
kwiberg55b97fe2016-01-28 05:22:45 -0800304int32_t Channel::OnRxVadDetected(int vadDecision) {
305 rtc::CritScope cs(&_callbackCritSect);
306 if (_rxVadObserverPtr) {
307 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
308 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000309
kwiberg55b97fe2016-01-28 05:22:45 -0800310 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000311}
312
stefan1d8a5062015-10-02 03:39:33 -0700313bool Channel::SendRtp(const uint8_t* data,
314 size_t len,
315 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800316 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
317 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000318
kwiberg55b97fe2016-01-28 05:22:45 -0800319 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000320
kwiberg55b97fe2016-01-28 05:22:45 -0800321 if (_transportPtr == NULL) {
322 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
323 "Channel::SendPacket() failed to send RTP packet due to"
324 " invalid transport object");
325 return false;
326 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000327
kwiberg55b97fe2016-01-28 05:22:45 -0800328 uint8_t* bufferToSendPtr = (uint8_t*)data;
329 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000330
kwiberg55b97fe2016-01-28 05:22:45 -0800331 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
332 std::string transport_name =
333 _externalTransport ? "external transport" : "WebRtc sockets";
334 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
335 "Channel::SendPacket() RTP transmission using %s failed",
336 transport_name.c_str());
337 return false;
338 }
339 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000340}
341
kwiberg55b97fe2016-01-28 05:22:45 -0800342bool Channel::SendRtcp(const uint8_t* data, size_t len) {
343 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
344 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000345
kwiberg55b97fe2016-01-28 05:22:45 -0800346 rtc::CritScope cs(&_callbackCritSect);
347 if (_transportPtr == NULL) {
348 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
349 "Channel::SendRtcp() failed to send RTCP packet"
350 " due to invalid transport object");
351 return false;
352 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000353
kwiberg55b97fe2016-01-28 05:22:45 -0800354 uint8_t* bufferToSendPtr = (uint8_t*)data;
355 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000356
kwiberg55b97fe2016-01-28 05:22:45 -0800357 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
358 if (n < 0) {
359 std::string transport_name =
360 _externalTransport ? "external transport" : "WebRtc sockets";
361 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
362 "Channel::SendRtcp() transmission using %s failed",
363 transport_name.c_str());
364 return false;
365 }
366 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000367}
368
// Called when the remote stream's SSRC changes; propagates the new SSRC to
// the RTP/RTCP module so NTP estimation for A/V sync follows the new stream.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
376
// Called when a contributing source (CSRC) is added or removed. Only traced;
// no further action is taken.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
382
// Called when an incoming stream needs a decoder for |payloadType|. Builds a
// CodecInst from the RTP parameters and registers it as a receive codec with
// the ACM. Returns 0 on success, -1 if registration fails.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  // The zero-initialization above keeps plname NUL-terminated even if
  // strncpy copies the full RTP_PAYLOAD_NAME_SIZE - 1 bytes.
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Look up the codec's default packet size via the ACM.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
418
kwiberg55b97fe2016-01-28 05:22:45 -0800419int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
420 size_t payloadSize,
421 const WebRtcRTPHeader* rtpHeader) {
422 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
423 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
424 ","
425 " payloadType=%u, audioChannel=%" PRIuS ")",
426 payloadSize, rtpHeader->header.payloadType,
427 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000428
kwiberg55b97fe2016-01-28 05:22:45 -0800429 if (!channel_state_.Get().playing) {
430 // Avoid inserting into NetEQ when we are not playing. Count the
431 // packet as discarded.
432 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
433 "received packet is discarded since playing is not"
434 " activated");
435 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000436 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800437 }
438
439 // Push the incoming payload (parsed and ready for decoding) into the ACM
440 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
441 0) {
442 _engineStatisticsPtr->SetLastError(
443 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
444 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
445 return -1;
446 }
447
448 // Update the packet delay.
449 UpdatePacketDelay(rtpHeader->header.timestamp,
450 rtpHeader->header.sequenceNumber);
451
452 int64_t round_trip_time = 0;
453 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
454 NULL);
455
456 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
457 if (!nack_list.empty()) {
458 // Can't use nack_list.data() since it's not supported by all
459 // compilers.
460 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
461 }
462 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000463}
464
// Handles an RTP packet recovered out-of-band (name suggests FEC recovery —
// confirm against the caller): re-parses its header, restores the
// payload-type frequency from the payload registry, and feeds it through the
// normal receive path. Returns false on an invalid header or unknown payload
// type.
bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  // NOTE(review): the trailing |false| flag's meaning is defined by
  // ReceivePacket() — verify before changing.
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
479
// Pulls one 10 ms audio frame from the ACM into |audioFrame| and runs the
// channel's receive-side pipeline on it, in order: event logging, decode,
// mute handling, RX VAD, RX APM, sink callback, volume scaling, panning,
// file mixing, external media processing, file recording, level measurement
// and timestamp/NTP bookkeeping. Returns kError if decoding fails, kMuted if
// the frame is (still) muted at the end, kNormal otherwise.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  // Record the playout event for diagnostics, if an event log is attached.
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  // Run receive-side voice-activity detection, if enabled.
  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; later checks use this copy.
  ChannelState::State state = channel_state_.Get();

  // Receive-side audio processing (APM), if enabled.
  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Read the volume/pan settings under their dedicated lock, then apply
  // them outside it.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling (skipped when the gain is within ~1% of unity).
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
638
kwiberg55b97fe2016-01-28 05:22:45 -0800639int32_t Channel::NeededFrequency(int32_t id) const {
640 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
641 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000642
kwiberg55b97fe2016-01-28 05:22:45 -0800643 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000644
kwiberg55b97fe2016-01-28 05:22:45 -0800645 // Determine highest needed receive frequency
646 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000647
kwiberg55b97fe2016-01-28 05:22:45 -0800648 // Return the bigger of playout and receive frequency in the ACM.
649 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
650 highestNeeded = audio_coding_->PlayoutFrequency();
651 } else {
652 highestNeeded = receiveFrequency;
653 }
654
655 // Special case, if we're playing a file on the playout side
656 // we take that frequency into consideration as well
657 // This is not needed on sending side, since the codec will
658 // limit the spectrum anyway.
659 if (channel_state_.Get().output_file_playing) {
660 rtc::CritScope cs(&_fileCritSect);
661 if (_outputFilePlayerPtr) {
662 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
663 highestNeeded = _outputFilePlayerPtr->Frequency();
664 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000665 }
kwiberg55b97fe2016-01-28 05:22:45 -0800666 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000667
kwiberg55b97fe2016-01-28 05:22:45 -0800668 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000669}
670
// Factory: allocates a new Channel into |channel|. Returns 0 on success,
// -1 if allocation yields NULL.
// NOTE(review): with a standard (throwing) operator new the NULL check is
// dead code; it only matters under a non-throwing allocation policy —
// confirm the project's build settings before removing it.
int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}
689
// Playout-notification callback (presumably from the file player module —
// confirm against channel.h). Currently only traced; no-op otherwise.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
697
// FileCallback implementation. Invoked by a file recorder to report recording
// progress (duration recorded so far, in ms). Apart from tracing this is a
// deliberate no-op.
void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
705
// FileCallback implementation. Called when a file player reaches end of
// file. |id| identifies which player finished (the ids are assigned in the
// constructor as VoEModuleId(...) + 1024/1025); the matching "file playing"
// flag in channel_state_ is cleared. Unknown ids are ignored.
void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    // Microphone-side (input) file playback finished.
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    // Speaker-side (output) file playback finished.
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}
722
// FileCallback implementation. Called when the output file recorder stops.
// Clears the recording flag under the file lock. Only the output recorder
// is expected to report here (asserted below).
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  // _outputFileRecording is shared with the audio path, hence the lock.
  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
736
// Constructs a channel. The initializer list is plain member setup; the body
// then wires up the channel's modules in order:
//   1. ACM with a NetEq configuration derived from |config| (buffer capacity,
//      fast-accelerate, muted state) and the built-in decoder factory.
//   2. RTP/RTCP module, optionally connected to the pacer and transport
//      feedback proxies when voice pacing is enabled in |config|.
//   3. An RTCP statistics proxy registered on the receive statistics.
//   4. A receive-side AudioProcessing instance (ExperimentalAgc disabled).
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // The channel itself acts as both RTP data and feedback callback.
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add its own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      // -1 marks "no capture started yet" for both trackers.
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine-owned services; filled in later by SetEngineInformation().
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      // Pan defaults to centered, gain to unity.
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  acm_config.decoder_factory = CreateBuiltinAudioDecoderFactory();
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    // Route outgoing packets through the pacer and enable transport-wide
    // sequence numbering / feedback via the proxy objects.
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Sending is off until StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // Receive-side audio processing (for RX AGC/NS); ExperimentalAgc is
  // explicitly disabled here.
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
858
// Destructor. Tears the channel down in the reverse order of setup:
// disconnect statistics, external media, stop send/playout, destroy file
// players/recorder under the file lock, then de-register ACM callbacks and
// finally remove the RTP/RTCP module from the process thread. The ordering
// matters (see the numbered comment below).
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    // Destroy any file players/recorder while holding the file lock so the
    // audio path cannot race with the teardown.
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
914
// One-time channel setup; requires SetEngineInformation() to have been
// called first. Registers the RTP/RTCP module with the process thread,
// initializes the ACM receiver, enables compound RTCP, registers permanent
// ACM callbacks, then walks the full ACM codec list to prime the RTP
// receiver and set up defaults (PCMU as send codec, telephone-event, CN,
// and -- when compiled in -- RED). Finally configures the receive-side
// noise suppression and AGC. Returns 0 on success, -1 on fatal failure.
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    // Note: registration failures here are logged but not fatal.
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // Comfort noise: needs encoder, decoder and RTP send payload set up.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  // Receive-side processing defaults (NS level and RX AGC mode).
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1041
kwiberg55b97fe2016-01-28 05:22:45 -08001042int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1043 OutputMixer& outputMixer,
1044 voe::TransmitMixer& transmitMixer,
1045 ProcessThread& moduleProcessThread,
1046 AudioDeviceModule& audioDeviceModule,
1047 VoiceEngineObserver* voiceEngineObserver,
1048 rtc::CriticalSection* callbackCritSect) {
1049 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1050 "Channel::SetEngineInformation()");
1051 _engineStatisticsPtr = &engineStatistics;
1052 _outputMixerPtr = &outputMixer;
1053 _transmitMixerPtr = &transmitMixer,
1054 _moduleProcessThreadPtr = &moduleProcessThread;
1055 _audioDeviceModulePtr = &audioDeviceModule;
1056 _voiceEngineObserverPtr = voiceEngineObserver;
1057 _callbackCritSectPtr = callbackCritSect;
1058 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001059}
1060
// Advances the local RTP timestamp by the sample count of the current audio
// frame. _timeStamp is only an offset; the RTP module adds its own random
// base (see the constructor). Always returns 0.
int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}
1065
// Installs (or clears, when |sink| is null) the audio sink that receives a
// copy of this channel's audio. Takes ownership. Guarded by the callback
// lock -- presumably because the sink is read from the audio path; confirm
// against the callers of audio_sink_.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1070
// Starts playout for this channel. No-op (returns 0) if already playing.
// Unless external mixing is used, the channel is added as a mixing
// candidate to the output mixer; any active file players are then
// (re)registered with the mixer. Returns 0 on success, -1 on failure.
int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as candidates for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1094
// Stops playout for this channel. No-op (returns 0) if not playing. Unless
// external mixing is used, the channel is removed as a mixing candidate from
// the output mixer, and the output audio level meter is reset.
// Returns 0 on success, -1 on failure.
int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as candidates for mixing
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}
1117
// Starts sending on this channel. Restores the RTP sequence number saved by
// a previous StopSend() (must happen before the sending flag is set), then
// enables media sending on the RTP/RTCP module. On failure the sending
// state is rolled back. Returns 0 on success (or if already sending),
// -1 on failure.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    // NOTE(review): the callback lock is held only for this rollback; the
    // earlier SetSending(true) above runs without it -- confirm intentional.
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1144
kwiberg55b97fe2016-01-28 05:22:45 -08001145int32_t Channel::StopSend() {
1146 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1147 "Channel::StopSend()");
1148 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001149 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001150 }
1151 channel_state_.SetSending(false);
1152
1153 // Store the sequence number to be able to pick up the same sequence for
1154 // the next StartSend(). This is needed for restarting device, otherwise
1155 // it might cause libSRTP to complain about packets being replayed.
1156 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1157 // CL is landed. See issue
1158 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1159 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1160
1161 // Reset sending SSRC and sequence number and triggers direct transmission
1162 // of RTCP BYE
1163 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1164 _engineStatisticsPtr->SetLastError(
1165 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1166 "StartSend() RTP/RTCP failed to stop sending");
1167 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001168 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001169
1170 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001171}
1172
kwiberg55b97fe2016-01-28 05:22:45 -08001173int32_t Channel::StartReceiving() {
1174 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1175 "Channel::StartReceiving()");
1176 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001177 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001178 }
1179 channel_state_.SetReceiving(true);
1180 _numberOfDiscardedPackets = 0;
1181 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001182}
1183
kwiberg55b97fe2016-01-28 05:22:45 -08001184int32_t Channel::StopReceiving() {
1185 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1186 "Channel::StopReceiving()");
1187 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001188 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001189 }
1190
1191 channel_state_.SetReceiving(false);
1192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001193}
1194
// Registers |observer| as the channel's VoiceEngine observer (stored, not
// owned) under the callback lock. Fails with -1 if an observer is already
// registered; returns 0 on success.
int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}
1209
// Clears the registered VoiceEngine observer under the callback lock.
// Always returns 0; de-registering when none is set only logs a warning.
int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}
1224
1225int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001226 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001227 if (send_codec) {
1228 codec = *send_codec;
1229 return 0;
1230 }
1231 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001232}
1233
// Fetches the codec of the received audio stream from the ACM into |codec|.
// Forwards the ACM's return value (0 on success, -1 on failure).
int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}
1237
// Selects |codec| as the channel's send codec: registers the encoder with
// the codec manager / ACM, registers the payload type with the RTP/RTCP
// module (with one de-register-and-retry attempt), and sets the audio
// packet size. Returns 0 on success, -1 on any failure.
int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // The payload type may already be taken; de-register it and retry once.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to set audio packet size");
    return -1;
  }

  return 0;
}
1267
// Sets the target send bitrate (in bits per second) on the audio encoder via
// the ACM. The ACM call's result is intentionally ignored here.
void Channel::SetBitRate(int bitrate_bps) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->SetBitRate(bitrate_bps);
}
1273
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001274void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001275 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001276 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1277
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001278 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001279 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1280 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001281 assert(false); // This should not happen.
1282 }
1283}
1284
// Enables/disables voice activity detection (and CNG) on the send side.
// |disableDTX| is deprecated and must not be combined with |enableVAD|
// (DCHECKed). Returns 0 on success, -1 if the codec manager rejects the
// configuration or re-creating the encoder fails.
int32_t Channel::SetVADStatus(bool enableVAD,
                              ACMVADMode mode,
                              bool disableDTX) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetVADStatus(mode=%d)", mode);
  RTC_DCHECK(!(disableDTX && enableVAD));  // disableDTX mode is deprecated.
  if (!codec_manager_.SetVAD(enableVAD, mode) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
                                       kTraceError,
                                       "SetVADStatus() failed to set VAD");
    return -1;
  }
  return 0;
}
1300
kwiberg55b97fe2016-01-28 05:22:45 -08001301int32_t Channel::GetVADStatus(bool& enabledVAD,
1302 ACMVADMode& mode,
1303 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001304 const auto* params = codec_manager_.GetStackParams();
1305 enabledVAD = params->use_cng;
1306 mode = params->vad_mode;
1307 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001308 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001309}
1310
// Registers |codec| as a receive payload mapping in both the RTP receiver
// and the ACM. Disallowed while the channel is playing or listening. A
// payload type of -1 means "de-register this codec" instead: the registered
// payload type is looked up by codec description and removed from both
// modules. Returns 0 on success, -1 on failure.
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }
  if (channel_state_.Get().receiving) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_LISTENING, kTraceError,
        "SetRecPayloadType() unable to set PT while listening");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    rtp_payload_registry_->ReceivePayloadType(
        rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
        (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  // Registration path: try once, then de-register the payload type and
  // retry once before giving up (same pattern for RTP receiver and ACM).
  if (rtp_receiver_->RegisterReceivePayload(
          codec.plname, codec.pltype, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(
            codec.plname, codec.pltype, codec.plfreq, codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1383
kwiberg55b97fe2016-01-28 05:22:45 -08001384int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1385 int8_t payloadType(-1);
1386 if (rtp_payload_registry_->ReceivePayloadType(
1387 codec.plname, codec.plfreq, codec.channels,
1388 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1389 _engineStatisticsPtr->SetLastError(
1390 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1391 "GetRecPayloadType() failed to retrieve RX payload type");
1392 return -1;
1393 }
1394 codec.pltype = payloadType;
1395 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001396}
1397
kwiberg55b97fe2016-01-28 05:22:45 -08001398int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1399 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1400 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001401
kwiberg55b97fe2016-01-28 05:22:45 -08001402 CodecInst codec;
1403 int32_t samplingFreqHz(-1);
1404 const size_t kMono = 1;
1405 if (frequency == kFreq32000Hz)
1406 samplingFreqHz = 32000;
1407 else if (frequency == kFreq16000Hz)
1408 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001409
kwiberg55b97fe2016-01-28 05:22:45 -08001410 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1411 _engineStatisticsPtr->SetLastError(
1412 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1413 "SetSendCNPayloadType() failed to retrieve default CN codec "
1414 "settings");
1415 return -1;
1416 }
1417
1418 // Modify the payload type (must be set to dynamic range)
1419 codec.pltype = type;
1420
kwibergc8d071e2016-04-06 12:22:38 -07001421 if (!codec_manager_.RegisterEncoder(codec) ||
1422 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001423 _engineStatisticsPtr->SetLastError(
1424 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1425 "SetSendCNPayloadType() failed to register CN to ACM");
1426 return -1;
1427 }
1428
1429 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1430 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1431 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1432 _engineStatisticsPtr->SetLastError(
1433 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1434 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1435 "module");
1436 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001437 }
kwiberg55b97fe2016-01-28 05:22:45 -08001438 }
1439 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001440}
1441
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001442int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001443 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001444 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001445
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001446 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001447 _engineStatisticsPtr->SetLastError(
1448 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001449 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001450 return -1;
1451 }
1452 return 0;
1453}
1454
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001455int Channel::SetOpusDtx(bool enable_dtx) {
1456 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1457 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001458 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001459 : audio_coding_->DisableOpusDtx();
1460 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001461 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1462 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001463 return -1;
1464 }
1465 return 0;
1466}
1467
mflodman3d7db262016-04-29 00:57:13 -07001468int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001469 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001470 "Channel::RegisterExternalTransport()");
1471
kwiberg55b97fe2016-01-28 05:22:45 -08001472 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001473 if (_externalTransport) {
1474 _engineStatisticsPtr->SetLastError(
1475 VE_INVALID_OPERATION, kTraceError,
1476 "RegisterExternalTransport() external transport already enabled");
1477 return -1;
1478 }
1479 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001480 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001481 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001482}
1483
kwiberg55b97fe2016-01-28 05:22:45 -08001484int32_t Channel::DeRegisterExternalTransport() {
1485 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1486 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001487
kwiberg55b97fe2016-01-28 05:22:45 -08001488 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001489 if (_transportPtr) {
1490 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1491 "DeRegisterExternalTransport() all transport is disabled");
1492 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001493 _engineStatisticsPtr->SetLastError(
1494 VE_INVALID_OPERATION, kTraceWarning,
1495 "DeRegisterExternalTransport() external transport already "
1496 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001497 }
1498 _externalTransport = false;
1499 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001500 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001501}
1502
// Entry point for an incoming RTP packet. Parses the header, feeds the
// receive statistics, then dispatches the payload via ReceivePacket().
// |packet_time| is currently unused here.
// Returns 0 on success, -1 if the header is invalid, the payload type is
// unknown, or dispatch fails.
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // Resolve the clock rate for this payload type; a negative value means
  // the payload type is not registered, so the packet is dropped.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Update receive statistics before dispatch, flagging retransmissions so
  // they are accounted for separately.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1529
1530bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001531 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001532 const RTPHeader& header,
1533 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001534 if (rtp_payload_registry_->IsRtx(header)) {
1535 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001536 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001537 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001538 assert(packet_length >= header.headerLength);
1539 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001540 PayloadUnion payload_specific;
1541 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001542 &payload_specific)) {
1543 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001544 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001545 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1546 payload_specific, in_order);
1547}
1548
// Restores the original media packet from an RTX (retransmission) packet
// and re-injects it through OnRecoveredPacket(). Returns false for
// non-RTX packets, malformed/oversized packets, or if the shared restore
// buffer is already in use.
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  // |restored_packet_| has a fixed capacity; refuse anything larger.
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  // |restored_packet_| is a single shared buffer; a nested RTX packet
  // would overwrite it mid-restore, so drop it instead.
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  // Writes the de-RTX'd packet into |restored_packet_| and shrinks
  // |packet_length| to the restored size.
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Mark the buffer busy while the restored packet is re-processed, so a
  // re-entrant RTX packet is rejected above.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1577
1578bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1579 StreamStatistician* statistician =
1580 rtp_receive_statistics_->GetStatistician(header.ssrc);
1581 if (!statistician)
1582 return false;
1583 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001584}
1585
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001586bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1587 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001588 // Retransmissions are handled separately if RTX is enabled.
1589 if (rtp_payload_registry_->RtxEnabled())
1590 return false;
1591 StreamStatistician* statistician =
1592 rtp_receive_statistics_->GetStatistician(header.ssrc);
1593 if (!statistician)
1594 return false;
1595 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001596 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001597 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001598 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001599}
1600
// Entry point for an incoming RTCP packet. Feeds the packet to the
// RTP/RTCP module and, once both an RTT estimate and a sender report are
// available, updates the NTP<->RTP timestamp estimator used for A/V sync.
// Returns 0 (a malformed RTCP packet only sets a warning).
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }
  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // |ts_stats_lock_| guards the estimator against concurrent access.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1635
// Starts playing a file into this channel's output (playout) path.
// A FilePlayer owned by the channel decodes the file; |codecInst| supplies
// a decoder hint for formats that need one. Fails if output-file playout
// is already active. Returns 0 on success, -1 on failure.
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  // Scoped so that |_fileCritSect| is released before
  // RegisterFilePlayingToMixer() below; the mixer may immediately pull
  // frames that are produced under the same lock (see
  // RegisterFilePlayingToMixer() for the deadlock rationale).
  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy any previous player before creating a new one.
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    // Notification callbacks are not used (0 disables them).
    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      // Roll back the partially-started player.
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Must be called without holding |_fileCritSect|.
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1698
// Stream-based overload of StartPlayingFileLocally(): plays audio read
// from |stream| into this channel's output path. Fails on a NULL stream
// or if output-file playout is already active. Returns 0 on success,
// -1 on failure.
int Channel::StartPlayingFileLocally(InStream* stream,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(format=%d,"
               " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileLocally() NULL as input stream");
    return -1;
  }

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  // Scoped so that |_fileCritSect| is released before
  // RegisterFilePlayingToMixer() below (see RegisterFilePlayingToMixer()).
  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy the old instance
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    // Create the instance
    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format isnot correct");
      return -1;
    }

    // Notification callbacks are not used (0 disables them).
    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                               volumeScaling, notificationTime,
                                               stopPosition, codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                         "StartPlayingFile() failed to "
                                         "start file playout");
      // Roll back the partially-started player.
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Must be called without holding |_fileCritSect|.
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1767
// Stops output-file playout: stops and destroys the FilePlayer, then
// removes this channel's anonymous-participant registration from the
// output mixer. Returns 0 if nothing was playing or on success, -1 on
// failure.
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  // Scoped so the lock is released before the mixer call below.
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1803
kwiberg55b97fe2016-01-28 05:22:45 -08001804int Channel::IsPlayingFileLocally() const {
1805 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001806}
1807
// Adds this channel to the output mixer as an anonymous participant so
// the playing file is mixed into playout. A no-op success unless both
// playout and output-file playing are active. Returns 0 on success, -1 on
// failure (in which case the file player is torn down).
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Mixer registration failed: undo the "playing" state and destroy the
    // player (the lock is safe to take now that registration failed).
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    _outputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    return -1;
  }

  return 0;
}
1835
// Starts playing a file into this channel's input (microphone) path, i.e.
// the file content is sent as if it were captured audio. If a file is
// already playing this returns 0 with a warning. Returns 0 on success,
// -1 on failure.
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          bool loop,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
               "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  // Unlike the local-playout variant, the lock can be held for the whole
  // call: no mixer registration happens here.
  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() filePlayer is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
    return -1;
  }

  // Notification callbacks are not used (0 disables them).
  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(
          fileName, loop, startPosition, volumeScaling, notificationTime,
          stopPosition, (const CodecInst*)codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFile() failed to start file playout");
    // Roll back the partially-started player.
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }
  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1895
// Stream-based overload of StartPlayingFileAsMicrophone(): plays audio
// read from |stream| as this channel's microphone input. Fails on a NULL
// stream; returns 0 with a warning if already playing. Returns 0 on
// success, -1 on failure.
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  // The lock is held for the whole call; no mixer interaction here.
  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  // Notification callbacks are not used (0 disables them).
  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                            volumeScaling, notificationTime,
                                            stopPosition, codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    // Roll back the partially-started player.
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }

  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1960
kwiberg55b97fe2016-01-28 05:22:45 -08001961int Channel::StopPlayingFileAsMicrophone() {
1962 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1963 "Channel::StopPlayingFileAsMicrophone()");
1964
1965 rtc::CritScope cs(&_fileCritSect);
1966
1967 if (!channel_state_.Get().input_file_playing) {
1968 return 0;
1969 }
1970
1971 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1972 _engineStatisticsPtr->SetLastError(
1973 VE_STOP_RECORDING_FAILED, kTraceError,
1974 "StopPlayingFile() could not stop playing");
1975 return -1;
1976 }
1977 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1978 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1979 _inputFilePlayerPtr = NULL;
1980 channel_state_.SetInputFilePlaying(false);
1981
1982 return 0;
1983}
1984
1985int Channel::IsPlayingFileAsMicrophone() const {
1986 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001987}
1988
// Starts recording this channel's playout to |fileName|. The container
// format is derived from |codecInst|: NULL selects 16 kHz PCM, L16/PCMU/
// PCMA select WAV, anything else a compressed file. If already recording
// this returns 0 with a warning. Returns 0 on success, -1 on failure.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes no codecInst.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Only mono and stereo recordings are supported.
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Pick the file container based on the (possibly defaulted) codec.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    // Roll back the partially-started recorder.
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2055
// Starts recording the channel's playout (far-end) audio into a
// caller-provided output stream. Returns 0 on success (and also when a
// recording is already in progress), -1 on failure.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  // A second start request while already recording is a benign no-op.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Only mono recording is supported for the stream variant.
  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Derive the file format from the requested codec; a NULL codec means the
  // default 16 kHz linear PCM.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
                                                      notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    // Roll back: never keep a half-initialized recorder around.
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }

  // Success: start receiving file callbacks and mark recording as active.
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2122
kwiberg55b97fe2016-01-28 05:22:45 -08002123int Channel::StopRecordingPlayout() {
2124 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2125 "Channel::StopRecordingPlayout()");
2126
2127 if (!_outputFileRecording) {
2128 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2129 "StopRecordingPlayout() isnot recording");
2130 return -1;
2131 }
2132
2133 rtc::CritScope cs(&_fileCritSect);
2134
2135 if (_outputFileRecorderPtr->StopRecording() != 0) {
2136 _engineStatisticsPtr->SetLastError(
2137 VE_STOP_RECORDING_FAILED, kTraceError,
2138 "StopRecording() could not stop recording");
2139 return (-1);
2140 }
2141 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2142 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2143 _outputFileRecorderPtr = NULL;
2144 _outputFileRecording = false;
2145
2146 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002147}
2148
kwiberg55b97fe2016-01-28 05:22:45 -08002149void Channel::SetMixWithMicStatus(bool mix) {
2150 rtc::CritScope cs(&_fileCritSect);
2151 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002152}
2153
kwiberg55b97fe2016-01-28 05:22:45 -08002154int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2155 int8_t currentLevel = _outputAudioLevel.Level();
2156 level = static_cast<int32_t>(currentLevel);
2157 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002158}
2159
kwiberg55b97fe2016-01-28 05:22:45 -08002160int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2161 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2162 level = static_cast<int32_t>(currentLevel);
2163 return 0;
2164}
2165
solenberg1c2af8e2016-03-24 10:36:00 -07002166int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002167 rtc::CritScope cs(&volume_settings_critsect_);
2168 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002169 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002170 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002171 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002172}
2173
solenberg1c2af8e2016-03-24 10:36:00 -07002174bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002175 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002176 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002177}
2178
kwiberg55b97fe2016-01-28 05:22:45 -08002179int Channel::SetOutputVolumePan(float left, float right) {
2180 rtc::CritScope cs(&volume_settings_critsect_);
2181 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002182 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002183 _panLeft = left;
2184 _panRight = right;
2185 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002186}
2187
kwiberg55b97fe2016-01-28 05:22:45 -08002188int Channel::GetOutputVolumePan(float& left, float& right) const {
2189 rtc::CritScope cs(&volume_settings_critsect_);
2190 left = _panLeft;
2191 right = _panRight;
2192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002193}
2194
kwiberg55b97fe2016-01-28 05:22:45 -08002195int Channel::SetChannelOutputVolumeScaling(float scaling) {
2196 rtc::CritScope cs(&volume_settings_critsect_);
2197 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002198 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002199 _outputGain = scaling;
2200 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002201}
2202
kwiberg55b97fe2016-01-28 05:22:45 -08002203int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2204 rtc::CritScope cs(&volume_settings_critsect_);
2205 scaling = _outputGain;
2206 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002207}
2208
// Sends a telephone-event (DTMF-style) packet out-of-band via the RTP
// module. |event| must be in [0, 255], |duration_ms| in [0, 65535].
// Returns 0 on success, -1 if the channel is not sending or sending fails.
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendTelephoneEventOutband(...)");
  RTC_DCHECK_LE(0, event);
  RTC_DCHECK_GE(255, event);
  RTC_DCHECK_LE(0, duration_ms);
  RTC_DCHECK_GE(65535, duration_ms);
  // Outband events can only be transmitted while the channel is sending.
  if (!Sending()) {
    return -1;
  }
  if (_rtpRtcpModule->SendTelephoneEventOutband(
          event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_DTMF_FAILED, kTraceWarning,
        "SendTelephoneEventOutband() failed to send event");
    return -1;
  }
  return 0;
}
2228
// Registers |payload_type| (0..127) as the send payload type for the
// "telephone-event" codec at 8 kHz.
int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  CodecInst codec = {0};
  codec.plfreq = 8000;
  codec.pltype = payload_type;
  // "telephone-event" is 15 characters plus the terminating NUL = 16 bytes.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // Registration can fail if the payload type is already taken; deregister
    // it and retry once before reporting an error.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2250
kwiberg55b97fe2016-01-28 05:22:45 -08002251int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2252 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2253 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002254
kwiberg55b97fe2016-01-28 05:22:45 -08002255 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002256
kwiberg55b97fe2016-01-28 05:22:45 -08002257 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002258
kwiberg55b97fe2016-01-28 05:22:45 -08002259 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2260 OnRxVadDetected(vadDecision);
2261 _oldVadDecision = vadDecision;
2262 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002263
kwiberg55b97fe2016-01-28 05:22:45 -08002264 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2265 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2266 vadDecision);
2267 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002268}
2269
kwiberg55b97fe2016-01-28 05:22:45 -08002270int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2271 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2272 "Channel::RegisterRxVadObserver()");
2273 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002274
kwiberg55b97fe2016-01-28 05:22:45 -08002275 if (_rxVadObserverPtr) {
2276 _engineStatisticsPtr->SetLastError(
2277 VE_INVALID_OPERATION, kTraceError,
2278 "RegisterRxVadObserver() observer already enabled");
2279 return -1;
2280 }
2281 _rxVadObserverPtr = &observer;
2282 _RxVadDetection = true;
2283 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002284}
2285
kwiberg55b97fe2016-01-28 05:22:45 -08002286int Channel::DeRegisterRxVadObserver() {
2287 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2288 "Channel::DeRegisterRxVadObserver()");
2289 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002290
kwiberg55b97fe2016-01-28 05:22:45 -08002291 if (!_rxVadObserverPtr) {
2292 _engineStatisticsPtr->SetLastError(
2293 VE_INVALID_OPERATION, kTraceWarning,
2294 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002295 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002296 }
2297 _rxVadObserverPtr = NULL;
2298 _RxVadDetection = false;
2299 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002300}
2301
kwiberg55b97fe2016-01-28 05:22:45 -08002302int Channel::VoiceActivityIndicator(int& activity) {
2303 activity = _sendFrameType;
2304 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002305}
2306
2307#ifdef WEBRTC_VOICE_ENGINE_AGC
2308
// Enables/disables the receive-side AGC on the dedicated rx APM instance
// and selects its mode. Only digital modes (fixed/adaptive) are accepted.
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Map the VoE AgcModes value onto the APM GainControl mode.
  GainControl::Mode agcMode = kDefaultRxAgcMode;
  switch (mode) {
    case kAgcDefault:
      break;  // Keep the default mode.
    case kAgcUnchanged:
      agcMode = rx_audioproc_->gain_control()->mode();  // Keep current mode.
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
    default:
      // Any other mode is invalid on the receive side.
      _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
                                         "SetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  // Apply the mode first, then the enabled state.
  if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
    return -1;
  }

  _rxAgcIsEnabled = enable;
  // Rx APM processing is required if either rx AGC or rx NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2349
kwiberg55b97fe2016-01-28 05:22:45 -08002350int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2351 bool enable = rx_audioproc_->gain_control()->is_enabled();
2352 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002353
kwiberg55b97fe2016-01-28 05:22:45 -08002354 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002355
kwiberg55b97fe2016-01-28 05:22:45 -08002356 switch (agcMode) {
2357 case GainControl::kFixedDigital:
2358 mode = kAgcFixedDigital;
2359 break;
2360 case GainControl::kAdaptiveDigital:
2361 mode = kAgcAdaptiveDigital;
2362 break;
2363 default:
2364 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2365 "GetRxAgcStatus() invalid Agc mode");
2366 return -1;
2367 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002368
kwiberg55b97fe2016-01-28 05:22:45 -08002369 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002370}
2371
// Applies |config| to the receive-side AGC: target level, compression gain
// and limiter, in that order. NOTE: on failure part-way through, settings
// applied by earlier calls remain in effect.
int Channel::SetRxAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcConfig()");

  if (rx_audioproc_->gain_control()->set_target_level_dbfs(
          config.targetLeveldBOv) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set target peak |level|"
        "(or envelope) of the Agc");
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_compression_gain_db(
          config.digitalCompressionGaindB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set the range in |gain| the"
        " digital compression stage may apply");
    return -1;
  }
  if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
}
2402
kwiberg55b97fe2016-01-28 05:22:45 -08002403int Channel::GetRxAgcConfig(AgcConfig& config) {
2404 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2405 config.digitalCompressionGaindB =
2406 rx_audioproc_->gain_control()->compression_gain_db();
2407 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002408
kwiberg55b97fe2016-01-28 05:22:45 -08002409 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002410}
2411
kwiberg55b97fe2016-01-28 05:22:45 -08002412#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002413
2414#ifdef WEBRTC_VOICE_ENGINE_NR
2415
// Enables/disables receive-side noise suppression on the rx APM instance
// and selects the suppression level.
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Map the VoE NsModes value onto the APM NoiseSuppression level.
  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      break;  // Keep the default level.
    case kNsUnchanged:
      nsLevel = rx_audioproc_->noise_suppression()->level();  // Keep current.
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  // Apply the level first, then the enabled state.
  if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
    return -1;
  }
  if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
    return -1;
  }

  _rxNsIsEnabled = enable;
  // Rx APM processing is required if either rx AGC or rx NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2461
kwiberg55b97fe2016-01-28 05:22:45 -08002462int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2463 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2464 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002465
kwiberg55b97fe2016-01-28 05:22:45 -08002466 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002467
kwiberg55b97fe2016-01-28 05:22:45 -08002468 switch (ncLevel) {
2469 case NoiseSuppression::kLow:
2470 mode = kNsLowSuppression;
2471 break;
2472 case NoiseSuppression::kModerate:
2473 mode = kNsModerateSuppression;
2474 break;
2475 case NoiseSuppression::kHigh:
2476 mode = kNsHighSuppression;
2477 break;
2478 case NoiseSuppression::kVeryHigh:
2479 mode = kNsVeryHighSuppression;
2480 break;
2481 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002482
kwiberg55b97fe2016-01-28 05:22:45 -08002483 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002484}
2485
kwiberg55b97fe2016-01-28 05:22:45 -08002486#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002487
kwiberg55b97fe2016-01-28 05:22:45 -08002488int Channel::SetLocalSSRC(unsigned int ssrc) {
2489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2490 "Channel::SetLocalSSRC()");
2491 if (channel_state_.Get().sending) {
2492 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2493 "SetLocalSSRC() already sending");
2494 return -1;
2495 }
2496 _rtpRtcpModule->SetSSRC(ssrc);
2497 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002498}
2499
kwiberg55b97fe2016-01-28 05:22:45 -08002500int Channel::GetLocalSSRC(unsigned int& ssrc) {
2501 ssrc = _rtpRtcpModule->SSRC();
2502 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002503}
2504
kwiberg55b97fe2016-01-28 05:22:45 -08002505int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2506 ssrc = rtp_receiver_->SSRC();
2507 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002508}
2509
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002510int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002511 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002512 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002513}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002514
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002515int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2516 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002517 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2518 if (enable &&
2519 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2520 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002521 return -1;
2522 }
2523 return 0;
2524}
2525
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002526int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2527 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2528}
2529
2530int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2531 rtp_header_parser_->DeregisterRtpHeaderExtension(
2532 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002533 if (enable &&
2534 !rtp_header_parser_->RegisterRtpHeaderExtension(
2535 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002536 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002537 }
2538 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002539}
2540
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002541void Channel::EnableSendTransportSequenceNumber(int id) {
2542 int ret =
2543 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2544 RTC_DCHECK_EQ(0, ret);
2545}
2546
stefan3313ec92016-01-21 06:32:43 -08002547void Channel::EnableReceiveTransportSequenceNumber(int id) {
2548 rtp_header_parser_->DeregisterRtpHeaderExtension(
2549 kRtpExtensionTransportSequenceNumber);
2550 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2551 kRtpExtensionTransportSequenceNumber, id);
2552 RTC_DCHECK(ret);
2553}
2554
// Wires up sender-side congestion control: the transport feedback observer,
// the sequence number allocator and the packet sender are installed via
// their proxies, packet storage is enabled on the RTP module, and the
// module is added to |packet_router|. Must not be called while a router is
// already attached.
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Keep a history of up to 600 sent packets in the RTP module.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2570
// Attaches this channel's RTP module to |packet_router| for receive-side
// congestion control. A router must not already be attached.
void Channel::RegisterReceiverCongestionControlObjects(
    PacketRouter* packet_router) {
  RTC_DCHECK(packet_router && !packet_router_);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2577
// Tears down whatever Register{Sender,Receiver}CongestionControlObjects set
// up: disables packet storage, clears all proxies, and detaches the RTP
// module from the packet router. A router must currently be attached.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2587
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002588void Channel::SetRTCPStatus(bool enable) {
2589 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2590 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002591 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002592}
2593
kwiberg55b97fe2016-01-28 05:22:45 -08002594int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002595 RtcpMode method = _rtpRtcpModule->RTCP();
2596 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002597 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002598}
2599
kwiberg55b97fe2016-01-28 05:22:45 -08002600int Channel::SetRTCP_CNAME(const char cName[256]) {
2601 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2602 "Channel::SetRTCP_CNAME()");
2603 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2604 _engineStatisticsPtr->SetLastError(
2605 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2606 "SetRTCP_CNAME() failed to set RTCP CNAME");
2607 return -1;
2608 }
2609 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002610}
2611
// Copies the remote side's RTCP CNAME into the caller-provided buffer.
// NOTE(review): the strcpy below assumes the caller's buffer can hold any
// NUL-terminated CNAME of up to RTCP_CNAME_SIZE bytes — confirm that
// RTCP_CNAME_SIZE <= 256 (the declared parameter size).
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
  if (cName == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
    return -1;
  }
  char cname[RTCP_CNAME_SIZE];
  const uint32_t remoteSSRC = rtp_receiver_->SSRC();
  // Look up the CNAME reported by the remote SSRC.
  if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_CNAME, kTraceError,
        "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
    return -1;
  }
  strcpy(cName, cname);
  return 0;
}
2630
// Collects remote RTCP data: NTP time and RTP timestamp from the last
// received Sender Report, the locally tracked RTCP playout timestamp, and
// (optionally, when |jitter|/|fractionLost| are non-NULL) jitter and loss
// from the matching received report block.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Prefer the report block whose SSRC matches the remote sender.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2699
// Schedules an application-defined (APP) RTCP packet for transmission.
// Preconditions: the channel must be sending, |data| must be non-NULL with
// a length that is a multiple of 4 bytes, and RTCP must be enabled.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // APP payloads must be 32-bit aligned.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2743
kwiberg55b97fe2016-01-28 05:22:45 -08002744int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2745 unsigned int& maxJitterMs,
2746 unsigned int& discardedPackets) {
2747 // The jitter statistics is updated for each received RTP packet and is
2748 // based on received packets.
2749 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2750 // If RTCP is off, there is no timed thread in the RTCP module regularly
2751 // generating new stats, trigger the update manually here instead.
2752 StreamStatistician* statistician =
2753 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2754 if (statistician) {
2755 // Don't use returned statistics, use data from proxy instead so that
2756 // max jitter can be fetched atomically.
2757 RtcpStatistics s;
2758 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002759 }
kwiberg55b97fe2016-01-28 05:22:45 -08002760 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002761
kwiberg55b97fe2016-01-28 05:22:45 -08002762 ChannelStatistics stats = statistics_proxy_->GetStats();
2763 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2764 if (playoutFrequency > 0) {
2765 // Scale RTP statistics given the current playout frequency
2766 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2767 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2768 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002769
kwiberg55b97fe2016-01-28 05:22:45 -08002770 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002771
kwiberg55b97fe2016-01-28 05:22:45 -08002772 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002773}
2774
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002775int Channel::GetRemoteRTCPReportBlocks(
2776 std::vector<ReportBlock>* report_blocks) {
2777 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002778 _engineStatisticsPtr->SetLastError(
2779 VE_INVALID_ARGUMENT, kTraceError,
2780 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002781 return -1;
2782 }
2783
2784 // Get the report blocks from the latest received RTCP Sender or Receiver
2785 // Report. Each element in the vector contains the sender's SSRC and a
2786 // report block according to RFC 3550.
2787 std::vector<RTCPReportBlock> rtcp_report_blocks;
2788 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002789 return -1;
2790 }
2791
2792 if (rtcp_report_blocks.empty())
2793 return 0;
2794
2795 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2796 for (; it != rtcp_report_blocks.end(); ++it) {
2797 ReportBlock report_block;
2798 report_block.sender_SSRC = it->remoteSSRC;
2799 report_block.source_SSRC = it->sourceSSRC;
2800 report_block.fraction_lost = it->fractionLost;
2801 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2802 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2803 report_block.interarrival_jitter = it->jitter;
2804 report_block.last_SR_timestamp = it->lastSR;
2805 report_block.delay_since_last_SR = it->delaySinceLastSR;
2806 report_blocks->push_back(report_block);
2807 }
2808 return 0;
2809}
2810
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // Fills |stats| with RTCP receive statistics, round-trip time, RTP data
  // counters and the capture-start NTP time for this channel. Returns 0.

  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  if (statistician) {
    // The second argument requests a manual stats update when RTCP is off,
    // since in that mode no timed thread refreshes them.
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }
  // If no statistician exists yet (no packets received), |statistics| keeps
  // its default-constructed values.
  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  // Send-side counters come from the RTP/RTCP module; on failure the
  // counters stay zero and only a warning is traced.
  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // |capture_start_ntp_time_ms_| is guarded by |ts_stats_lock_|.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2861
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002862int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002863 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002864 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002865
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002866 if (enable) {
2867 if (redPayloadtype < 0 || redPayloadtype > 127) {
2868 _engineStatisticsPtr->SetLastError(
2869 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002870 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002871 return -1;
2872 }
2873
2874 if (SetRedPayloadType(redPayloadtype) < 0) {
2875 _engineStatisticsPtr->SetLastError(
2876 VE_CODEC_ERROR, kTraceError,
2877 "SetSecondarySendCodec() Failed to register RED ACM");
2878 return -1;
2879 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002880 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002881
kwibergc8d071e2016-04-06 12:22:38 -07002882 if (!codec_manager_.SetCopyRed(enable) ||
2883 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002884 _engineStatisticsPtr->SetLastError(
2885 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002886 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002887 return -1;
2888 }
2889 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002890}
2891
kwiberg55b97fe2016-01-28 05:22:45 -08002892int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
kwibergc8d071e2016-04-06 12:22:38 -07002893 enabled = codec_manager_.GetStackParams()->use_red;
kwiberg55b97fe2016-01-28 05:22:45 -08002894 if (enabled) {
2895 int8_t payloadType = 0;
2896 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2897 _engineStatisticsPtr->SetLastError(
2898 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2899 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2900 "module");
2901 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002902 }
kwiberg55b97fe2016-01-28 05:22:45 -08002903 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002904 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002905 }
2906 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002907}
2908
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002909int Channel::SetCodecFECStatus(bool enable) {
2910 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2911 "Channel::SetCodecFECStatus()");
2912
kwibergc8d071e2016-04-06 12:22:38 -07002913 if (!codec_manager_.SetCodecFEC(enable) ||
2914 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002915 _engineStatisticsPtr->SetLastError(
2916 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2917 "SetCodecFECStatus() failed to set FEC state");
2918 return -1;
2919 }
2920 return 0;
2921}
2922
2923bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002924 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002925}
2926
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002927void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2928 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002929 // If pacing is enabled we always store packets.
2930 if (!pacing_enabled_)
2931 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002932 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002933 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002934 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002935 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002936 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002937}
2938
// Called when we are missing one or more packets.
// Forwards the missing sequence numbers to the RTP module as a NACK;
// returns the module's result (0 on success).
int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
  return _rtpRtcpModule->SendNACK(sequence_numbers, length);
}
2943
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
  // Takes a private copy of the shared capture frame and tags it with this
  // channel's id so the per-channel send pipeline can process it. Returns 0.
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Demultiplex()");
  _audioFrame.CopyFrom(audioFrame);
  _audioFrame.id_ = _channelId;
  return 0;
}
2951
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002952void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002953 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002954 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002955 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002956 CodecInst codec;
2957 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002958
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002959 // Never upsample or upmix the capture signal here. This should be done at the
2960 // end of the send chain.
2961 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2962 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2963 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2964 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002965}
2966
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  // Pre-processes |_audioFrame| before encoding: optional file mixing,
  // muting, external media processing and audio-level (RMS) measurement.
  // Returns 0 on success, 0xFFFFFFFF on an invalid (empty) frame.
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // The previous frame's mute state is passed along so Mute() can handle the
  // transition between muted and unmuted frames.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      // Hand the raw capture data to the registered external processor.
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    if (is_muted && previous_frame_muted_) {
      // Frame is fully muted: advance the RMS meter without reading samples.
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  // Remember this frame's mute state for the next call.
  previous_frame_muted_ = is_muted;

  return 0;
}
3008
kwiberg55b97fe2016-01-28 05:22:45 -08003009uint32_t Channel::EncodeAndSend() {
3010 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3011 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003012
kwiberg55b97fe2016-01-28 05:22:45 -08003013 assert(_audioFrame.num_channels_ <= 2);
3014 if (_audioFrame.samples_per_channel_ == 0) {
3015 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3016 "Channel::EncodeAndSend() invalid audio frame");
3017 return 0xFFFFFFFF;
3018 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003019
kwiberg55b97fe2016-01-28 05:22:45 -08003020 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003021
kwiberg55b97fe2016-01-28 05:22:45 -08003022 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00003023
kwiberg55b97fe2016-01-28 05:22:45 -08003024 // The ACM resamples internally.
3025 _audioFrame.timestamp_ = _timeStamp;
3026 // This call will trigger AudioPacketizationCallback::SendData if encoding
3027 // is done and payload is ready for packetization and transmission.
3028 // Otherwise, it will return without invoking the callback.
3029 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3030 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3031 "Channel::EncodeAndSend() ACM encoding failed");
3032 return 0xFFFFFFFF;
3033 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003034
kwiberg55b97fe2016-01-28 05:22:45 -08003035 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3036 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003037}
3038
Minyue2013aec2015-05-13 14:14:42 +02003039void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003040 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003041 Channel* channel = associate_send_channel_.channel();
3042 if (channel && channel->ChannelId() == channel_id) {
3043 // If this channel is associated with a send channel of the specified
3044 // Channel ID, disassociate with it.
3045 ChannelOwner ref(NULL);
3046 associate_send_channel_ = ref;
3047 }
3048}
3049
kwiberg55b97fe2016-01-28 05:22:45 -08003050int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3051 VoEMediaProcess& processObject) {
3052 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3053 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003054
kwiberg55b97fe2016-01-28 05:22:45 -08003055 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003056
kwiberg55b97fe2016-01-28 05:22:45 -08003057 if (kPlaybackPerChannel == type) {
3058 if (_outputExternalMediaCallbackPtr) {
3059 _engineStatisticsPtr->SetLastError(
3060 VE_INVALID_OPERATION, kTraceError,
3061 "Channel::RegisterExternalMediaProcessing() "
3062 "output external media already enabled");
3063 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003064 }
kwiberg55b97fe2016-01-28 05:22:45 -08003065 _outputExternalMediaCallbackPtr = &processObject;
3066 _outputExternalMedia = true;
3067 } else if (kRecordingPerChannel == type) {
3068 if (_inputExternalMediaCallbackPtr) {
3069 _engineStatisticsPtr->SetLastError(
3070 VE_INVALID_OPERATION, kTraceError,
3071 "Channel::RegisterExternalMediaProcessing() "
3072 "output external media already enabled");
3073 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003074 }
kwiberg55b97fe2016-01-28 05:22:45 -08003075 _inputExternalMediaCallbackPtr = &processObject;
3076 channel_state_.SetInputExternalMedia(true);
3077 }
3078 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003079}
3080
kwiberg55b97fe2016-01-28 05:22:45 -08003081int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3082 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3083 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003084
kwiberg55b97fe2016-01-28 05:22:45 -08003085 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003086
kwiberg55b97fe2016-01-28 05:22:45 -08003087 if (kPlaybackPerChannel == type) {
3088 if (!_outputExternalMediaCallbackPtr) {
3089 _engineStatisticsPtr->SetLastError(
3090 VE_INVALID_OPERATION, kTraceWarning,
3091 "Channel::DeRegisterExternalMediaProcessing() "
3092 "output external media already disabled");
3093 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003094 }
kwiberg55b97fe2016-01-28 05:22:45 -08003095 _outputExternalMedia = false;
3096 _outputExternalMediaCallbackPtr = NULL;
3097 } else if (kRecordingPerChannel == type) {
3098 if (!_inputExternalMediaCallbackPtr) {
3099 _engineStatisticsPtr->SetLastError(
3100 VE_INVALID_OPERATION, kTraceWarning,
3101 "Channel::DeRegisterExternalMediaProcessing() "
3102 "input external media already disabled");
3103 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003104 }
kwiberg55b97fe2016-01-28 05:22:45 -08003105 channel_state_.SetInputExternalMedia(false);
3106 _inputExternalMediaCallbackPtr = NULL;
3107 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003108
kwiberg55b97fe2016-01-28 05:22:45 -08003109 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003110}
3111
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003112int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003113 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3114 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003115
kwiberg55b97fe2016-01-28 05:22:45 -08003116 if (channel_state_.Get().playing) {
3117 _engineStatisticsPtr->SetLastError(
3118 VE_INVALID_OPERATION, kTraceError,
3119 "Channel::SetExternalMixing() "
3120 "external mixing cannot be changed while playing.");
3121 return -1;
3122 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003123
kwiberg55b97fe2016-01-28 05:22:45 -08003124 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003125
kwiberg55b97fe2016-01-28 05:22:45 -08003126 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003127}
3128
int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
  // Thin wrapper: delegates directly to the ACM's network statistics.
  return audio_coding_->GetNetworkStatistics(&stats);
}
3132
void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
  // Thin wrapper: delegates directly to the ACM's decoding-call statistics.
  audio_coding_->GetDecodingCallStatistics(stats);
}
3136
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003137bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3138 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003139 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003140 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003141 return false;
3142 }
kwiberg55b97fe2016-01-28 05:22:45 -08003143 *jitter_buffer_delay_ms =
3144 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003145 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003146 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003147}
3148
solenberg358057b2015-11-27 10:46:42 -08003149uint32_t Channel::GetDelayEstimate() const {
3150 int jitter_buffer_delay_ms = 0;
3151 int playout_buffer_delay_ms = 0;
3152 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3153 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3154}
3155
int Channel::LeastRequiredDelayMs() const {
  // Thin wrapper: delegates to the ACM/NetEq least-required-delay value.
  return audio_coding_->LeastRequiredDelayMs();
}
3159
kwiberg55b97fe2016-01-28 05:22:45 -08003160int Channel::SetMinimumPlayoutDelay(int delayMs) {
3161 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3162 "Channel::SetMinimumPlayoutDelay()");
3163 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3164 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3165 _engineStatisticsPtr->SetLastError(
3166 VE_INVALID_ARGUMENT, kTraceError,
3167 "SetMinimumPlayoutDelay() invalid min delay");
3168 return -1;
3169 }
3170 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3171 _engineStatisticsPtr->SetLastError(
3172 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3173 "SetMinimumPlayoutDelay() failed to set min playout delay");
3174 return -1;
3175 }
3176 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003177}
3178
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003179int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003180 uint32_t playout_timestamp_rtp = 0;
3181 {
tommi31fc21f2016-01-21 10:37:37 -08003182 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003183 playout_timestamp_rtp = playout_timestamp_rtp_;
3184 }
kwiberg55b97fe2016-01-28 05:22:45 -08003185 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003186 _engineStatisticsPtr->SetLastError(
3187 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3188 "GetPlayoutTimestamp() failed to retrieve timestamp");
3189 return -1;
3190 }
deadbeef74375882015-08-13 12:09:10 -07003191 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003193}
3194
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003195int Channel::SetInitTimestamp(unsigned int timestamp) {
3196 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003197 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003198 if (channel_state_.Get().sending) {
3199 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3200 "SetInitTimestamp() already sending");
3201 return -1;
3202 }
3203 _rtpRtcpModule->SetStartTimestamp(timestamp);
3204 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003205}
3206
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003207int Channel::SetInitSequenceNumber(short sequenceNumber) {
3208 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3209 "Channel::SetInitSequenceNumber()");
3210 if (channel_state_.Get().sending) {
3211 _engineStatisticsPtr->SetLastError(
3212 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3213 return -1;
3214 }
3215 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3216 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003217}
3218
int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
                        RtpReceiver** rtp_receiver) const {
  // Exposes raw (non-owning) pointers to the channel's RTP/RTCP module and
  // RTP receiver. Callers must not take ownership. Always returns 0.
  *rtpRtcpModule = _rtpRtcpModule.get();
  *rtp_receiver = rtp_receiver_.get();
  return 0;
}
3225
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // Reads 10 ms of file audio at |mixingFrequency| and either mixes it into
  // or replaces the capture data in |_audioFrame|, depending on
  // |_mixFileWithMicrophone|. Returns 0 on success (including end-of-file),
  // -1 on failure.
  // 640 samples buffer — presumably sized for 10 ms mono at the maximum
  // supported file rate; TODO confirm against FilePlayer limits.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // The file player pointer and read are guarded by |_fileCritSect|.
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    if (fileSamples == 0) {
      // End of file: leave the capture frame untouched and report success.
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  // The file player was asked for audio at |mixingFrequency|, so the sample
  // counts must line up with the current capture frame.
  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3273
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  // Mixes 10 ms of output-file audio at |mixingFrequency| into |audioFrame|.
  // Returns 0 on success, -1 on failure or sample-count mismatch.
  assert(mixingFrequency <= 48000);

  // 960 samples buffer — presumably sized for 10 ms mono above the asserted
  // 48 kHz maximum; TODO confirm against FilePlayer limits.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // The file player pointer and read are guarded by |_fileCritSect|.
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    // Sample-count mismatch between the frame and the file read: refuse to
    // mix rather than read/write out of bounds.
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3314
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  // Recomputes the playout timestamp (RTP time of the audio currently being
  // played out) from NetEq's jitter-buffer timestamp minus the playout
  // device delay. |rtcp| selects which cached value is updated
  // (|playout_timestamp_rtcp_| vs |playout_timestamp_rtp_|).
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay, converted from ms to RTP timestamp units at
  // the current playout frequency.
  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  {
    // Cached timestamps and the delay are guarded by |video_sync_lock_|.
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3355
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003356// Called for incoming RTP packets after successful RTP header parsing.
3357void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3358 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003359 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003360 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3361 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003362
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003363 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003364 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003365
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003366 // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
henrik.lundin96bd5022016-04-06 04:13:56 -07003367 // every incoming packet. May be empty if no valid playout timestamp is
3368 // available.
3369 // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
3370 // resulting difference is positive and will be used. When the inverse is
3371 // true (can happen when a network glitch causes a packet to arrive late,
3372 // and during long comfort noise periods with clock drift), or when
3373 // |jitter_buffer_playout_timestamp_| has no value, the difference is not
3374 // changed from the initial 0.
3375 uint32_t timestamp_diff_ms = 0;
3376 if (jitter_buffer_playout_timestamp_ &&
3377 IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
3378 timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
3379 (rtp_receive_frequency / 1000);
3380 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3381 // Diff is too large; set it to zero instead.
3382 timestamp_diff_ms = 0;
3383 }
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003384 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003385
kwiberg55b97fe2016-01-28 05:22:45 -08003386 uint16_t packet_delay_ms =
3387 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003388
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003389 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003390
kwiberg55b97fe2016-01-28 05:22:45 -08003391 if (timestamp_diff_ms == 0)
3392 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003393
deadbeef74375882015-08-13 12:09:10 -07003394 {
tommi31fc21f2016-01-21 10:37:37 -08003395 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003396
deadbeef74375882015-08-13 12:09:10 -07003397 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3398 _recPacketDelayMs = packet_delay_ms;
3399 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003400
deadbeef74375882015-08-13 12:09:10 -07003401 if (_average_jitter_buffer_delay_us == 0) {
3402 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3403 return;
3404 }
3405
3406 // Filter average delay value using exponential filter (alpha is
3407 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3408 // risk of rounding error) and compensate for it in GetDelayEstimate()
3409 // later.
kwiberg55b97fe2016-01-28 05:22:45 -08003410 _average_jitter_buffer_delay_us =
3411 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3412 8;
deadbeef74375882015-08-13 12:09:10 -07003413 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003414}
3415
kwiberg55b97fe2016-01-28 05:22:45 -08003416void Channel::RegisterReceiveCodecsToRTPModule() {
3417 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3418 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003419
kwiberg55b97fe2016-01-28 05:22:45 -08003420 CodecInst codec;
3421 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003422
kwiberg55b97fe2016-01-28 05:22:45 -08003423 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3424 // Open up the RTP/RTCP receiver for all supported codecs
3425 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3426 (rtp_receiver_->RegisterReceivePayload(
3427 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3428 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3429 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3430 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3431 " to register %s (%d/%d/%" PRIuS
3432 "/%d) to RTP/RTCP "
3433 "receiver",
3434 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3435 codec.rate);
3436 } else {
3437 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3438 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3439 "(%d/%d/%" PRIuS
3440 "/%d) has been added to the RTP/RTCP "
3441 "receiver",
3442 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3443 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003444 }
kwiberg55b97fe2016-01-28 05:22:45 -08003445 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003446}
3447
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003448// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003449int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003450 CodecInst codec;
3451 bool found_red = false;
3452
3453 // Get default RED settings from the ACM database
3454 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3455 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003456 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003457 if (!STR_CASE_CMP(codec.plname, "RED")) {
3458 found_red = true;
3459 break;
3460 }
3461 }
3462
3463 if (!found_red) {
3464 _engineStatisticsPtr->SetLastError(
3465 VE_CODEC_ERROR, kTraceError,
3466 "SetRedPayloadType() RED is not supported");
3467 return -1;
3468 }
3469
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003470 codec.pltype = red_payload_type;
kwibergc8d071e2016-04-06 12:22:38 -07003471 if (!codec_manager_.RegisterEncoder(codec) ||
3472 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003473 _engineStatisticsPtr->SetLastError(
3474 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3475 "SetRedPayloadType() RED registration in ACM module failed");
3476 return -1;
3477 }
3478
3479 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3480 _engineStatisticsPtr->SetLastError(
3481 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3482 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3483 return -1;
3484 }
3485 return 0;
3486}
3487
kwiberg55b97fe2016-01-28 05:22:45 -08003488int Channel::SetSendRtpHeaderExtension(bool enable,
3489 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003490 unsigned char id) {
3491 int error = 0;
3492 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3493 if (enable) {
3494 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3495 }
3496 return error;
3497}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003498
wu@webrtc.org94454b72014-06-05 20:34:08 +00003499int32_t Channel::GetPlayoutFrequency() {
3500 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3501 CodecInst current_recive_codec;
3502 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3503 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3504 // Even though the actual sampling rate for G.722 audio is
3505 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3506 // 8,000 Hz because that value was erroneously assigned in
3507 // RFC 1890 and must remain unchanged for backward compatibility.
3508 playout_frequency = 8000;
3509 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3510 // We are resampling Opus internally to 32,000 Hz until all our
3511 // DSP routines can operate at 48,000 Hz, but the RTP clock
3512 // rate for the Opus payload format is standardized to 48,000 Hz,
3513 // because that is the maximum supported decoding sampling rate.
3514 playout_frequency = 48000;
3515 }
3516 }
3517 return playout_frequency;
3518}
3519
Minyue2013aec2015-05-13 14:14:42 +02003520int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003521 RtcpMode method = _rtpRtcpModule->RTCP();
3522 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003523 return 0;
3524 }
3525 std::vector<RTCPReportBlock> report_blocks;
3526 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003527
3528 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003529 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003530 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003531 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003532 Channel* channel = associate_send_channel_.channel();
3533 // Tries to get RTT from an associated channel. This is important for
3534 // receive-only channels.
3535 if (channel) {
3536 // To prevent infinite recursion and deadlock, calling GetRTT of
3537 // associate channel should always use "false" for argument:
3538 // |allow_associate_channel|.
3539 rtt = channel->GetRTT(false);
3540 }
3541 }
3542 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003543 }
3544
3545 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3546 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3547 for (; it != report_blocks.end(); ++it) {
3548 if (it->remoteSSRC == remoteSSRC)
3549 break;
3550 }
3551 if (it == report_blocks.end()) {
3552 // We have not received packets with SSRC matching the report blocks.
3553 // To calculate RTT we try with the SSRC of the first report block.
3554 // This is very important for send-only channels where we don't know
3555 // the SSRC of the other end.
3556 remoteSSRC = report_blocks[0].remoteSSRC;
3557 }
Minyue2013aec2015-05-13 14:14:42 +02003558
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003559 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003560 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003561 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003562 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3563 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003564 return 0;
3565 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003566 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003567}
3568
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003569} // namespace voe
3570} // namespace webrtc