blob: 943a04572be088689cb655578001dd533703c0b7 [file] [log] [blame]
/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000022#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
ossue3525782016-05-25 07:37:43 -070024#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000025#include "webrtc/modules/audio_device/include/audio_device.h"
26#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010027#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010028#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010029#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
30#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
31#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000032#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010033#include "webrtc/modules/utility/include/audio_frame_operations.h"
34#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010035#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000036#include "webrtc/voice_engine/include/voe_base.h"
37#include "webrtc/voice_engine/include/voe_external_media.h"
38#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
39#include "webrtc/voice_engine/output_mixer.h"
40#include "webrtc/voice_engine/statistics.h"
41#include "webrtc/voice_engine/transmit_mixer.h"
42#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000043
andrew@webrtc.org50419b02012-11-14 19:07:54 +000044namespace webrtc {
45namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000046
kwibergc8d071e2016-04-06 12:22:38 -070047namespace {
48
49bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
50 acm2::RentACodec* rac,
51 const CodecInst& ci) {
kwibergabe95ba2016-06-02 02:58:59 -070052 const int result = (*acm)->RegisterReceiveCodec(
53 ci, [&] { return rac->RentIsacDecoder(ci.plfreq); });
kwibergc8d071e2016-04-06 12:22:38 -070054 return result == 0;
55}
56
57} // namespace
58
// Attenuation applied to generated telephone-event (DTMF) tones, in dB.
const int kTelephoneEventAttenuationdB = 10;
60
Stefan Holmerb86d4e42015-12-07 10:26:18 +010061class TransportFeedbackProxy : public TransportFeedbackObserver {
62 public:
63 TransportFeedbackProxy() : feedback_observer_(nullptr) {
64 pacer_thread_.DetachFromThread();
65 network_thread_.DetachFromThread();
66 }
67
68 void SetTransportFeedbackObserver(
69 TransportFeedbackObserver* feedback_observer) {
70 RTC_DCHECK(thread_checker_.CalledOnValidThread());
71 rtc::CritScope lock(&crit_);
72 feedback_observer_ = feedback_observer;
73 }
74
75 // Implements TransportFeedbackObserver.
76 void AddPacket(uint16_t sequence_number,
77 size_t length,
philipela1ed0b32016-06-01 06:31:17 -070078 int probe_cluster_id) override {
Stefan Holmerb86d4e42015-12-07 10:26:18 +010079 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
80 rtc::CritScope lock(&crit_);
81 if (feedback_observer_)
pbos2169d8b2016-06-20 11:53:02 -070082 feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
Stefan Holmerb86d4e42015-12-07 10:26:18 +010083 }
84 void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
85 RTC_DCHECK(network_thread_.CalledOnValidThread());
86 rtc::CritScope lock(&crit_);
87 if (feedback_observer_)
88 feedback_observer_->OnTransportFeedback(feedback);
89 }
90
91 private:
92 rtc::CriticalSection crit_;
93 rtc::ThreadChecker thread_checker_;
94 rtc::ThreadChecker pacer_thread_;
95 rtc::ThreadChecker network_thread_;
96 TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
97};
98
99class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
100 public:
101 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
102 pacer_thread_.DetachFromThread();
103 }
104
105 void SetSequenceNumberAllocator(
106 TransportSequenceNumberAllocator* seq_num_allocator) {
107 RTC_DCHECK(thread_checker_.CalledOnValidThread());
108 rtc::CritScope lock(&crit_);
109 seq_num_allocator_ = seq_num_allocator;
110 }
111
112 // Implements TransportSequenceNumberAllocator.
113 uint16_t AllocateSequenceNumber() override {
114 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
115 rtc::CritScope lock(&crit_);
116 if (!seq_num_allocator_)
117 return 0;
118 return seq_num_allocator_->AllocateSequenceNumber();
119 }
120
121 private:
122 rtc::CriticalSection crit_;
123 rtc::ThreadChecker thread_checker_;
124 rtc::ThreadChecker pacer_thread_;
125 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
126};
127
128class RtpPacketSenderProxy : public RtpPacketSender {
129 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800130 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100131
132 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
133 RTC_DCHECK(thread_checker_.CalledOnValidThread());
134 rtc::CritScope lock(&crit_);
135 rtp_packet_sender_ = rtp_packet_sender;
136 }
137
138 // Implements RtpPacketSender.
139 void InsertPacket(Priority priority,
140 uint32_t ssrc,
141 uint16_t sequence_number,
142 int64_t capture_time_ms,
143 size_t bytes,
144 bool retransmission) override {
145 rtc::CritScope lock(&crit_);
146 if (rtp_packet_sender_) {
147 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
148 capture_time_ms, bytes, retransmission);
149 }
150 }
151
152 private:
153 rtc::ThreadChecker thread_checker_;
154 rtc::CriticalSection crit_;
155 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
156};
157
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000158// Extend the default RTCP statistics struct with max_jitter, defined as the
159// maximum jitter value seen in an RTCP report block.
160struct ChannelStatistics : public RtcpStatistics {
161 ChannelStatistics() : rtcp(), max_jitter(0) {}
162
163 RtcpStatistics rtcp;
164 uint32_t max_jitter;
165};
166
167// Statistics callback, called at each generation of a new RTCP report block.
168class StatisticsProxy : public RtcpStatisticsCallback {
169 public:
tommi31fc21f2016-01-21 10:37:37 -0800170 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000171 virtual ~StatisticsProxy() {}
172
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000173 void StatisticsUpdated(const RtcpStatistics& statistics,
174 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000175 if (ssrc != ssrc_)
176 return;
177
tommi31fc21f2016-01-21 10:37:37 -0800178 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000179 stats_.rtcp = statistics;
180 if (statistics.jitter > stats_.max_jitter) {
181 stats_.max_jitter = statistics.jitter;
182 }
183 }
184
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000185 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000186
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000187 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800188 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000189 return stats_;
190 }
191
192 private:
193 // StatisticsUpdated calls are triggered from threads in the RTP module,
194 // while GetStats calls can be triggered from the public voice engine API,
195 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800196 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000197 const uint32_t ssrc_;
198 ChannelStatistics stats_;
199};
200
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000201class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000202 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000203 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
204 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000205
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000206 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
207 // Not used for Voice Engine.
208 }
209
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000210 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
211 int64_t rtt,
212 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000213 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
214 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
215 // report for VoiceEngine?
216 if (report_blocks.empty())
217 return;
218
219 int fraction_lost_aggregate = 0;
220 int total_number_of_packets = 0;
221
222 // If receiving multiple report blocks, calculate the weighted average based
223 // on the number of packets a report refers to.
224 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
225 block_it != report_blocks.end(); ++block_it) {
226 // Find the previous extended high sequence number for this remote SSRC,
227 // to calculate the number of RTP packets this report refers to. Ignore if
228 // we haven't seen this SSRC before.
229 std::map<uint32_t, uint32_t>::iterator seq_num_it =
230 extended_max_sequence_number_.find(block_it->sourceSSRC);
231 int number_of_packets = 0;
232 if (seq_num_it != extended_max_sequence_number_.end()) {
233 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
234 }
235 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
236 total_number_of_packets += number_of_packets;
237
238 extended_max_sequence_number_[block_it->sourceSSRC] =
239 block_it->extendedHighSeqNum;
240 }
241 int weighted_fraction_lost = 0;
242 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800243 weighted_fraction_lost =
244 (fraction_lost_aggregate + total_number_of_packets / 2) /
245 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000246 }
247 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000248 }
249
250 private:
251 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000252 // Maps remote side ssrc to extended highest sequence number received.
253 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000254};
255
kwiberg55b97fe2016-01-28 05:22:45 -0800256int32_t Channel::SendData(FrameType frameType,
257 uint8_t payloadType,
258 uint32_t timeStamp,
259 const uint8_t* payloadData,
260 size_t payloadSize,
261 const RTPFragmentationHeader* fragmentation) {
262 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
263 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
264 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
265 frameType, payloadType, timeStamp, payloadSize, fragmentation);
niklase@google.com470e71d2011-07-07 08:21:25 +0000266
kwiberg55b97fe2016-01-28 05:22:45 -0800267 if (_includeAudioLevelIndication) {
268 // Store current audio level in the RTP/RTCP module.
269 // The level will be used in combination with voice-activity state
270 // (frameType) to add an RTP header extension
271 _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
272 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000273
kwiberg55b97fe2016-01-28 05:22:45 -0800274 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
275 // packetization.
276 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
277 if (_rtpRtcpModule->SendOutgoingData(
278 (FrameType&)frameType, payloadType, timeStamp,
279 // Leaving the time when this frame was
280 // received from the capture device as
281 // undefined for voice for now.
282 -1, payloadData, payloadSize, fragmentation) == -1) {
283 _engineStatisticsPtr->SetLastError(
284 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
285 "Channel::SendData() failed to send data to RTP/RTCP module");
286 return -1;
287 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000288
kwiberg55b97fe2016-01-28 05:22:45 -0800289 _lastLocalTimeStamp = timeStamp;
290 _lastPayloadType = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000291
kwiberg55b97fe2016-01-28 05:22:45 -0800292 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000293}
294
kwiberg55b97fe2016-01-28 05:22:45 -0800295int32_t Channel::InFrameType(FrameType frame_type) {
296 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
297 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000298
kwiberg55b97fe2016-01-28 05:22:45 -0800299 rtc::CritScope cs(&_callbackCritSect);
300 _sendFrameType = (frame_type == kAudioFrameSpeech);
301 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000302}
303
kwiberg55b97fe2016-01-28 05:22:45 -0800304int32_t Channel::OnRxVadDetected(int vadDecision) {
305 rtc::CritScope cs(&_callbackCritSect);
306 if (_rxVadObserverPtr) {
307 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
308 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000309
kwiberg55b97fe2016-01-28 05:22:45 -0800310 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000311}
312
stefan1d8a5062015-10-02 03:39:33 -0700313bool Channel::SendRtp(const uint8_t* data,
314 size_t len,
315 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800316 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
317 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000318
kwiberg55b97fe2016-01-28 05:22:45 -0800319 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000320
kwiberg55b97fe2016-01-28 05:22:45 -0800321 if (_transportPtr == NULL) {
322 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
323 "Channel::SendPacket() failed to send RTP packet due to"
324 " invalid transport object");
325 return false;
326 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000327
kwiberg55b97fe2016-01-28 05:22:45 -0800328 uint8_t* bufferToSendPtr = (uint8_t*)data;
329 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000330
kwiberg55b97fe2016-01-28 05:22:45 -0800331 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
332 std::string transport_name =
333 _externalTransport ? "external transport" : "WebRtc sockets";
334 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
335 "Channel::SendPacket() RTP transmission using %s failed",
336 transport_name.c_str());
337 return false;
338 }
339 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000340}
341
kwiberg55b97fe2016-01-28 05:22:45 -0800342bool Channel::SendRtcp(const uint8_t* data, size_t len) {
343 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
344 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000345
kwiberg55b97fe2016-01-28 05:22:45 -0800346 rtc::CritScope cs(&_callbackCritSect);
347 if (_transportPtr == NULL) {
348 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
349 "Channel::SendRtcp() failed to send RTCP packet"
350 " due to invalid transport object");
351 return false;
352 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000353
kwiberg55b97fe2016-01-28 05:22:45 -0800354 uint8_t* bufferToSendPtr = (uint8_t*)data;
355 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000356
kwiberg55b97fe2016-01-28 05:22:45 -0800357 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
358 if (n < 0) {
359 std::string transport_name =
360 _externalTransport ? "external transport" : "WebRtc sockets";
361 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
362 "Channel::SendRtcp() transmission using %s failed",
363 transport_name.c_str());
364 return false;
365 }
366 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000367}
368
// Called when the remote SSRC of the incoming stream changes. Forwards the
// new SSRC to the RTP/RTCP module so NTP-based A/V sync can be updated.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
376
// Notification that a CSRC was added to / removed from the incoming stream.
// No action is taken beyond tracing.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
382
Peter Boströmac547a62015-09-17 23:03:57 +0200383int32_t Channel::OnInitializeDecoder(
pbos@webrtc.org92135212013-05-14 08:31:39 +0000384 int8_t payloadType,
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +0000385 const char payloadName[RTP_PAYLOAD_NAME_SIZE],
pbos@webrtc.org92135212013-05-14 08:31:39 +0000386 int frequency,
Peter Kasting69558702016-01-12 16:26:35 -0800387 size_t channels,
Peter Boströmac547a62015-09-17 23:03:57 +0200388 uint32_t rate) {
kwiberg55b97fe2016-01-28 05:22:45 -0800389 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
390 "Channel::OnInitializeDecoder(payloadType=%d, "
391 "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
392 payloadType, payloadName, frequency, channels, rate);
niklase@google.com470e71d2011-07-07 08:21:25 +0000393
kwiberg55b97fe2016-01-28 05:22:45 -0800394 CodecInst receiveCodec = {0};
395 CodecInst dummyCodec = {0};
niklase@google.com470e71d2011-07-07 08:21:25 +0000396
kwiberg55b97fe2016-01-28 05:22:45 -0800397 receiveCodec.pltype = payloadType;
398 receiveCodec.plfreq = frequency;
399 receiveCodec.channels = channels;
400 receiveCodec.rate = rate;
401 strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
andrew@webrtc.orgae1a58b2013-01-22 04:44:30 +0000402
kwiberg55b97fe2016-01-28 05:22:45 -0800403 audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
404 receiveCodec.pacsize = dummyCodec.pacsize;
niklase@google.com470e71d2011-07-07 08:21:25 +0000405
kwiberg55b97fe2016-01-28 05:22:45 -0800406 // Register the new codec to the ACM
kwibergc8d071e2016-04-06 12:22:38 -0700407 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
kwiberg55b97fe2016-01-28 05:22:45 -0800408 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
409 "Channel::OnInitializeDecoder() invalid codec ("
410 "pt=%d, name=%s) received - 1",
411 payloadType, payloadName);
412 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
413 return -1;
414 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000415
kwiberg55b97fe2016-01-28 05:22:45 -0800416 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000417}
418
kwiberg55b97fe2016-01-28 05:22:45 -0800419int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
420 size_t payloadSize,
421 const WebRtcRTPHeader* rtpHeader) {
422 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
423 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
424 ","
425 " payloadType=%u, audioChannel=%" PRIuS ")",
426 payloadSize, rtpHeader->header.payloadType,
427 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000428
kwiberg55b97fe2016-01-28 05:22:45 -0800429 if (!channel_state_.Get().playing) {
430 // Avoid inserting into NetEQ when we are not playing. Count the
431 // packet as discarded.
432 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
433 "received packet is discarded since playing is not"
434 " activated");
435 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000436 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800437 }
438
439 // Push the incoming payload (parsed and ready for decoding) into the ACM
440 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
441 0) {
442 _engineStatisticsPtr->SetLastError(
443 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
444 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
445 return -1;
446 }
447
448 // Update the packet delay.
449 UpdatePacketDelay(rtpHeader->header.timestamp,
450 rtpHeader->header.sequenceNumber);
451
452 int64_t round_trip_time = 0;
453 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
454 NULL);
455
456 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
457 if (!nack_list.empty()) {
458 // Can't use nack_list.data() since it's not supported by all
459 // compilers.
460 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
461 }
462 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000463}
464
// Handles an RTP packet recovered by FEC: parses its header, resolves the
// payload clock rate from the payload registry, and feeds it into the normal
// receive path. Returns false when the header is invalid or the payload type
// is unknown.
bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  // A negative frequency means the payload type is not registered; drop.
  if (header.payload_type_frequency < 0)
    return false;
  // |false| marks the packet as not arriving on the primary path
  // (recovered) — assumption based on the call site; confirm against
  // ReceivePacket's declaration.
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
479
// Pulls 10 ms of decoded audio from the ACM into |audioFrame| and runs the
// channel's playout pipeline on it: receive-side APM, optional sink
// callback, gain/pan scaling, file mixing, external-media processing, file
// recording, level measurement, and elapsed/NTP timestamp bookkeeping.
// |id| is unused here; it is part of the MixerParticipant interface.
// Returns kError when decoding fails, kMuted when the resulting frame is
// muted, kNormal otherwise.
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  // Optional receive-side VAD runs on the decoded frame.
  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; used below for APM and file mixing.
  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume settings under their lock, then apply them outside it.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling; skipped when the gain is within ~1% of unity.
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
638
kwiberg55b97fe2016-01-28 05:22:45 -0800639int32_t Channel::NeededFrequency(int32_t id) const {
640 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
641 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000642
kwiberg55b97fe2016-01-28 05:22:45 -0800643 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000644
kwiberg55b97fe2016-01-28 05:22:45 -0800645 // Determine highest needed receive frequency
646 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000647
kwiberg55b97fe2016-01-28 05:22:45 -0800648 // Return the bigger of playout and receive frequency in the ACM.
649 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
650 highestNeeded = audio_coding_->PlayoutFrequency();
651 } else {
652 highestNeeded = receiveFrequency;
653 }
654
655 // Special case, if we're playing a file on the playout side
656 // we take that frequency into consideration as well
657 // This is not needed on sending side, since the codec will
658 // limit the spectrum anyway.
659 if (channel_state_.Get().output_file_playing) {
660 rtc::CritScope cs(&_fileCritSect);
661 if (_outputFilePlayerPtr) {
662 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
663 highestNeeded = _outputFilePlayerPtr->Frequency();
664 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000665 }
kwiberg55b97fe2016-01-28 05:22:45 -0800666 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000667
kwiberg55b97fe2016-01-28 05:22:45 -0800668 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000669}
670
// Convenience overload: creates a channel with the built-in audio decoder
// factory by delegating to the six-argument overload.
int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  return CreateChannel(channel, channelId, instanceId, event_log, config,
                       CreateBuiltinAudioDecoderFactory());
}
679
680int32_t Channel::CreateChannel(
681 Channel*& channel,
682 int32_t channelId,
683 uint32_t instanceId,
ivoc9e03c3b2016-06-30 00:59:43 -0700684 RtcEventLog* const event_log,
ossu5f7cfa52016-05-30 08:11:28 -0700685 const Config& config,
686 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory) {
kwiberg55b97fe2016-01-28 05:22:45 -0800687 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
688 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
689 instanceId);
niklase@google.com470e71d2011-07-07 08:21:25 +0000690
ivoc9e03c3b2016-06-30 00:59:43 -0700691 channel =
692 new Channel(channelId, instanceId, event_log, config, decoder_factory);
kwiberg55b97fe2016-01-28 05:22:45 -0800693 if (channel == NULL) {
694 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
695 "Channel::CreateChannel() unable to allocate memory for"
696 " channel");
697 return -1;
698 }
699 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000700}
701
// FileCallback: periodic notification that file playout has been running for
// |durationMs| milliseconds. Currently only traced; no other action is taken.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
709
// FileCallback: periodic notification that file recording has been running for
// |durationMs| milliseconds. Currently only traced; no other action is taken.
void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
717
kwiberg55b97fe2016-01-28 05:22:45 -0800718void Channel::PlayFileEnded(int32_t id) {
719 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
720 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000721
kwiberg55b97fe2016-01-28 05:22:45 -0800722 if (id == _inputFilePlayerId) {
723 channel_state_.SetInputFilePlaying(false);
724 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
725 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000726 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800727 } else if (id == _outputFilePlayerId) {
728 channel_state_.SetOutputFilePlaying(false);
729 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
730 "Channel::PlayFileEnded() => output file player module is"
731 " shutdown");
732 }
733}
734
// FileCallback: the output file recorder identified by |id| stopped.
// Clears |_outputFileRecording| under the file lock. Only the output
// recorder id is expected here (asserted).
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  // Lock held for both the flag update and the state trace below.
  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
748
// Constructs a channel: initializes all members, creates the ACM (with the
// NetEq options taken from |config|), the RTP/RTCP module, receive-statistics
// proxy, and the receive-side audio processing module. |event_log| and
// |decoder_factory| are externally owned/shared and must outlive the channel.
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config,
                 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // The channel itself acts as both RTP data and feedback callback
      // (passed twice as |this|).
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add it's own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      // -1 marks "not yet captured" for the NTP/RTP capture-start pair.
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine collaborators are injected later via SetEngineInformation().
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
      decoder_factory_(decoder_factory) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  // --- ACM creation, with NetEq options from |config|.
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  acm_config.decoder_factory = decoder_factory;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  // --- RTP/RTCP module creation; pacing hooks are only wired up when
  // voice pacing is enabled in |config|.
  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Media sending stays off until StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // --- Receive-side audio processing (AGC/NS applied on playout path).
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
872
// Destructor: tears the channel down in a safe order — detach callbacks,
// stop send/playout, destroy file players/recorder under the file lock,
// de-register ACM callbacks, then remove the RTP/RTCP module from the
// process thread. Module destruction itself is handled by member dtors.
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    // Scope the file lock to the teardown of the three file modules.
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
928
// One-time channel initialization. Requires SetEngineInformation() to have
// been called first. Registers the RTP/RTCP module with the process thread,
// initializes the ACM receiver, wires permanent callbacks, and registers all
// ACM-supported codecs with the RTP receiver (with special handling for the
// default send codec PCMU, telephone-event, and CN). Returns 0 on success,
// -1 on any fatal error (per-codec registration failures are only warnings).
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // CN (comfort noise) is registered on both send and receive sides.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
  }

  // --- Default receive-side processing: NS and AGC modes.
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1043
kwiberg55b97fe2016-01-28 05:22:45 -08001044int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1045 OutputMixer& outputMixer,
1046 voe::TransmitMixer& transmitMixer,
1047 ProcessThread& moduleProcessThread,
1048 AudioDeviceModule& audioDeviceModule,
1049 VoiceEngineObserver* voiceEngineObserver,
1050 rtc::CriticalSection* callbackCritSect) {
1051 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1052 "Channel::SetEngineInformation()");
1053 _engineStatisticsPtr = &engineStatistics;
1054 _outputMixerPtr = &outputMixer;
1055 _transmitMixerPtr = &transmitMixer,
1056 _moduleProcessThreadPtr = &moduleProcessThread;
1057 _audioDeviceModulePtr = &audioDeviceModule;
1058 _voiceEngineObserverPtr = voiceEngineObserver;
1059 _callbackCritSectPtr = callbackCritSect;
1060 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001061}
1062
// Advances the local send timestamp by one frame of samples. This is only an
// offset; the RTP module adds its own random base offset. Always returns 0.
int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}
1067
// Installs (or clears, when |sink| is null) the receive-side audio sink.
// Takes ownership; access is serialized with |_callbackCritSect|.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1072
// Returns the decoder factory this channel was constructed with.
const rtc::scoped_refptr<AudioDecoderFactory>&
Channel::GetAudioDecoderFactory() const {
  return decoder_factory_;
}
1077
kwiberg55b97fe2016-01-28 05:22:45 -08001078int32_t Channel::StartPlayout() {
1079 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1080 "Channel::StartPlayout()");
1081 if (channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001082 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001083 }
1084
1085 if (!_externalMixing) {
1086 // Add participant as candidates for mixing.
1087 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1088 _engineStatisticsPtr->SetLastError(
1089 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1090 "StartPlayout() failed to add participant to mixer");
1091 return -1;
1092 }
1093 }
1094
1095 channel_state_.SetPlaying(true);
1096 if (RegisterFilePlayingToMixer() != 0)
1097 return -1;
1098
1099 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001100}
1101
kwiberg55b97fe2016-01-28 05:22:45 -08001102int32_t Channel::StopPlayout() {
1103 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1104 "Channel::StopPlayout()");
1105 if (!channel_state_.Get().playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001106 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001107 }
1108
1109 if (!_externalMixing) {
1110 // Remove participant as candidates for mixing
1111 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1112 _engineStatisticsPtr->SetLastError(
1113 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1114 "StopPlayout() failed to remove participant from mixer");
1115 return -1;
1116 }
1117 }
1118
1119 channel_state_.SetPlaying(false);
1120 _outputAudioLevel.Clear();
1121
1122 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001123}
1124
// Starts sending on this channel. Restores the sequence number saved by
// StopSend() (so device restarts do not look like SRTP replay), then enables
// media sending on the RTP/RTCP module. Idempotent. Returns 0 on success,
// -1 if the RTP module refuses to start sending.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    // NOTE(review): the callback lock is taken only around the state
    // rollback here — presumably to serialize with callbacks observing
    // |sending|; confirm before changing this scope.
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1151
kwiberg55b97fe2016-01-28 05:22:45 -08001152int32_t Channel::StopSend() {
1153 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1154 "Channel::StopSend()");
1155 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001156 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001157 }
1158 channel_state_.SetSending(false);
1159
1160 // Store the sequence number to be able to pick up the same sequence for
1161 // the next StartSend(). This is needed for restarting device, otherwise
1162 // it might cause libSRTP to complain about packets being replayed.
1163 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1164 // CL is landed. See issue
1165 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1166 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1167
1168 // Reset sending SSRC and sequence number and triggers direct transmission
1169 // of RTCP BYE
1170 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1171 _engineStatisticsPtr->SetLastError(
1172 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1173 "StartSend() RTP/RTCP failed to stop sending");
1174 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001175 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001176
1177 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001178}
1179
kwiberg55b97fe2016-01-28 05:22:45 -08001180int32_t Channel::StartReceiving() {
1181 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1182 "Channel::StartReceiving()");
1183 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001184 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001185 }
1186 channel_state_.SetReceiving(true);
1187 _numberOfDiscardedPackets = 0;
1188 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001189}
1190
kwiberg55b97fe2016-01-28 05:22:45 -08001191int32_t Channel::StopReceiving() {
1192 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1193 "Channel::StopReceiving()");
1194 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001195 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001196 }
1197
1198 channel_state_.SetReceiving(false);
1199 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001200}
1201
kwiberg55b97fe2016-01-28 05:22:45 -08001202int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1203 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1204 "Channel::RegisterVoiceEngineObserver()");
1205 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001206
kwiberg55b97fe2016-01-28 05:22:45 -08001207 if (_voiceEngineObserverPtr) {
1208 _engineStatisticsPtr->SetLastError(
1209 VE_INVALID_OPERATION, kTraceError,
1210 "RegisterVoiceEngineObserver() observer already enabled");
1211 return -1;
1212 }
1213 _voiceEngineObserverPtr = &observer;
1214 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001215}
1216
kwiberg55b97fe2016-01-28 05:22:45 -08001217int32_t Channel::DeRegisterVoiceEngineObserver() {
1218 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1219 "Channel::DeRegisterVoiceEngineObserver()");
1220 rtc::CritScope cs(&_callbackCritSect);
1221
1222 if (!_voiceEngineObserverPtr) {
1223 _engineStatisticsPtr->SetLastError(
1224 VE_INVALID_OPERATION, kTraceWarning,
1225 "DeRegisterVoiceEngineObserver() observer already disabled");
1226 return 0;
1227 }
1228 _voiceEngineObserverPtr = NULL;
1229 return 0;
1230}
1231
1232int32_t Channel::GetSendCodec(CodecInst& codec) {
kwibergc8d071e2016-04-06 12:22:38 -07001233 auto send_codec = codec_manager_.GetCodecInst();
kwiberg1fd4a4a2015-11-03 11:20:50 -08001234 if (send_codec) {
1235 codec = *send_codec;
1236 return 0;
1237 }
1238 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001239}
1240
// Copies the last received (decoded) codec into |codec|, delegating to the
// ACM; returns the ACM's result code.
int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}
1244
kwiberg55b97fe2016-01-28 05:22:45 -08001245int32_t Channel::SetSendCodec(const CodecInst& codec) {
1246 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1247 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001248
kwibergc8d071e2016-04-06 12:22:38 -07001249 if (!codec_manager_.RegisterEncoder(codec) ||
1250 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001251 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1252 "SetSendCodec() failed to register codec to ACM");
1253 return -1;
1254 }
1255
1256 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1257 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1258 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1259 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1260 "SetSendCodec() failed to register codec to"
1261 " RTP/RTCP module");
1262 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001263 }
kwiberg55b97fe2016-01-28 05:22:45 -08001264 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001265
kwiberg55b97fe2016-01-28 05:22:45 -08001266 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1267 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1268 "SetSendCodec() failed to set audio packet size");
1269 return -1;
1270 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001271
kwiberg55b97fe2016-01-28 05:22:45 -08001272 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001273}
1274
// Forwards the target send bitrate (in bits per second) to the ACM.
void Channel::SetBitRate(int bitrate_bps) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->SetBitRate(bitrate_bps);
}
1280
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001281void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001282 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001283 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1284
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001285 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001286 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1287 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001288 assert(false); // This should not happen.
1289 }
1290}
1291
kwiberg55b97fe2016-01-28 05:22:45 -08001292int32_t Channel::SetVADStatus(bool enableVAD,
1293 ACMVADMode mode,
1294 bool disableDTX) {
1295 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1296 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001297 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1298 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1299 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001300 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1301 kTraceError,
1302 "SetVADStatus() failed to set VAD");
1303 return -1;
1304 }
1305 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001306}
1307
kwiberg55b97fe2016-01-28 05:22:45 -08001308int32_t Channel::GetVADStatus(bool& enabledVAD,
1309 ACMVADMode& mode,
1310 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001311 const auto* params = codec_manager_.GetStackParams();
1312 enabledVAD = params->use_cng;
1313 mode = params->vad_mode;
1314 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001315 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001316}
1317
// Sets (or, when |codec.pltype| == -1, removes) the receive payload type for
// |codec|. Fails if the channel is currently playing or receiving. For
// registration, both the RTP receiver and the ACM get a single
// de-register-and-retry on first failure. Returns 0 on success, -1 on error.
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }
  if (channel_state_.Get().receiving) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_LISTENING, kTraceError,
        "SetRecPayloadType() unable to set PT while listening");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    rtp_payload_registry_->ReceivePayloadType(
        rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
        (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  if (rtp_receiver_->RegisterReceivePayload(
          codec.plname, codec.pltype, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(
            codec.plname, codec.pltype, codec.plfreq, codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  // Same register/unregister/retry pattern for the ACM side.
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1390
kwiberg55b97fe2016-01-28 05:22:45 -08001391int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1392 int8_t payloadType(-1);
1393 if (rtp_payload_registry_->ReceivePayloadType(
1394 codec.plname, codec.plfreq, codec.channels,
1395 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1396 _engineStatisticsPtr->SetLastError(
1397 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1398 "GetRecPayloadType() failed to retrieve RX payload type");
1399 return -1;
1400 }
1401 codec.pltype = payloadType;
1402 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001403}
1404
kwiberg55b97fe2016-01-28 05:22:45 -08001405int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1406 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1407 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001408
kwiberg55b97fe2016-01-28 05:22:45 -08001409 CodecInst codec;
1410 int32_t samplingFreqHz(-1);
1411 const size_t kMono = 1;
1412 if (frequency == kFreq32000Hz)
1413 samplingFreqHz = 32000;
1414 else if (frequency == kFreq16000Hz)
1415 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001416
kwiberg55b97fe2016-01-28 05:22:45 -08001417 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1418 _engineStatisticsPtr->SetLastError(
1419 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1420 "SetSendCNPayloadType() failed to retrieve default CN codec "
1421 "settings");
1422 return -1;
1423 }
1424
1425 // Modify the payload type (must be set to dynamic range)
1426 codec.pltype = type;
1427
kwibergc8d071e2016-04-06 12:22:38 -07001428 if (!codec_manager_.RegisterEncoder(codec) ||
1429 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001430 _engineStatisticsPtr->SetLastError(
1431 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1432 "SetSendCNPayloadType() failed to register CN to ACM");
1433 return -1;
1434 }
1435
1436 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1437 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1438 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1439 _engineStatisticsPtr->SetLastError(
1440 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1441 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1442 "module");
1443 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001444 }
kwiberg55b97fe2016-01-28 05:22:45 -08001445 }
1446 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001447}
1448
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001449int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001450 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001451 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001452
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001453 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001454 _engineStatisticsPtr->SetLastError(
1455 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001456 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001457 return -1;
1458 }
1459 return 0;
1460}
1461
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001462int Channel::SetOpusDtx(bool enable_dtx) {
1463 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1464 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001465 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001466 : audio_coding_->DisableOpusDtx();
1467 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001468 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1469 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001470 return -1;
1471 }
1472 return 0;
1473}
1474
mflodman3d7db262016-04-29 00:57:13 -07001475int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001476 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001477 "Channel::RegisterExternalTransport()");
1478
kwiberg55b97fe2016-01-28 05:22:45 -08001479 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001480 if (_externalTransport) {
1481 _engineStatisticsPtr->SetLastError(
1482 VE_INVALID_OPERATION, kTraceError,
1483 "RegisterExternalTransport() external transport already enabled");
1484 return -1;
1485 }
1486 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001487 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001488 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001489}
1490
kwiberg55b97fe2016-01-28 05:22:45 -08001491int32_t Channel::DeRegisterExternalTransport() {
1492 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1493 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001494
kwiberg55b97fe2016-01-28 05:22:45 -08001495 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001496 if (_transportPtr) {
1497 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1498 "DeRegisterExternalTransport() all transport is disabled");
1499 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001500 _engineStatisticsPtr->SetLastError(
1501 VE_INVALID_OPERATION, kTraceWarning,
1502 "DeRegisterExternalTransport() external transport already "
1503 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001504 }
1505 _externalTransport = false;
1506 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001507 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001508}
1509
mflodman3d7db262016-04-29 00:57:13 -07001510int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
kwiberg55b97fe2016-01-28 05:22:45 -08001511 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001512 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001513 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001514 "Channel::ReceivedRTPPacket()");
1515
1516 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001517 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001518
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001519 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001520 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1521 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1522 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001523 return -1;
1524 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001525 header.payload_type_frequency =
1526 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001527 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001528 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001529 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001530 rtp_receive_statistics_->IncomingPacket(
1531 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001532 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001533
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001534 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001535}
1536
1537bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001538 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001539 const RTPHeader& header,
1540 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001541 if (rtp_payload_registry_->IsRtx(header)) {
1542 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001543 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001544 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001545 assert(packet_length >= header.headerLength);
1546 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001547 PayloadUnion payload_specific;
1548 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001549 &payload_specific)) {
1550 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001551 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001552 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1553 payload_specific, in_order);
1554}
1555
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001556bool Channel::HandleRtxPacket(const uint8_t* packet,
1557 size_t packet_length,
1558 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001559 if (!rtp_payload_registry_->IsRtx(header))
1560 return false;
1561
1562 // Remove the RTX header and parse the original RTP header.
1563 if (packet_length < header.headerLength)
1564 return false;
1565 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1566 return false;
1567 if (restored_packet_in_use_) {
1568 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1569 "Multiple RTX headers detected, dropping packet");
1570 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001571 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001572 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001573 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1574 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001575 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1576 "Incoming RTX packet: invalid RTP header");
1577 return false;
1578 }
1579 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001580 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001581 restored_packet_in_use_ = false;
1582 return ret;
1583}
1584
1585bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1586 StreamStatistician* statistician =
1587 rtp_receive_statistics_->GetStatistician(header.ssrc);
1588 if (!statistician)
1589 return false;
1590 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001591}
1592
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001593bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1594 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001595 // Retransmissions are handled separately if RTX is enabled.
1596 if (rtp_payload_registry_->RtxEnabled())
1597 return false;
1598 StreamStatistician* statistician =
1599 rtp_receive_statistics_->GetStatistician(header.ssrc);
1600 if (!statistician)
1601 return false;
1602 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001603 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001604 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001605 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001606}
1607
mflodman3d7db262016-04-29 00:57:13 -07001608int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001609 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001610 "Channel::ReceivedRTCPPacket()");
1611 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001612 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001613
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001614 // Deliver RTCP packet to RTP/RTCP module for parsing
mflodman3d7db262016-04-29 00:57:13 -07001615 if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001616 _engineStatisticsPtr->SetLastError(
1617 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1618 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1619 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001620
Minyue2013aec2015-05-13 14:14:42 +02001621 int64_t rtt = GetRTT(true);
1622 if (rtt == 0) {
1623 // Waiting for valid RTT.
1624 return 0;
1625 }
1626 uint32_t ntp_secs = 0;
1627 uint32_t ntp_frac = 0;
1628 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001629 if (0 !=
1630 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1631 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001632 // Waiting for RTCP.
1633 return 0;
1634 }
1635
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001636 {
tommi31fc21f2016-01-21 10:37:37 -08001637 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001638 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001639 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001640 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001641}
1642
niklase@google.com470e71d2011-07-07 08:21:25 +00001643int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001644 bool loop,
1645 FileFormats format,
1646 int startPosition,
1647 float volumeScaling,
1648 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001649 const CodecInst* codecInst) {
1650 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1651 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1652 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1653 "stopPosition=%d)",
1654 fileName, loop, format, volumeScaling, startPosition,
1655 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001656
kwiberg55b97fe2016-01-28 05:22:45 -08001657 if (channel_state_.Get().output_file_playing) {
1658 _engineStatisticsPtr->SetLastError(
1659 VE_ALREADY_PLAYING, kTraceError,
1660 "StartPlayingFileLocally() is already playing");
1661 return -1;
1662 }
1663
1664 {
1665 rtc::CritScope cs(&_fileCritSect);
1666
1667 if (_outputFilePlayerPtr) {
1668 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1669 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1670 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001671 }
1672
kwiberg55b97fe2016-01-28 05:22:45 -08001673 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1674 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001675
kwiberg55b97fe2016-01-28 05:22:45 -08001676 if (_outputFilePlayerPtr == NULL) {
1677 _engineStatisticsPtr->SetLastError(
1678 VE_INVALID_ARGUMENT, kTraceError,
1679 "StartPlayingFileLocally() filePlayer format is not correct");
1680 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001681 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001682
kwiberg55b97fe2016-01-28 05:22:45 -08001683 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001684
kwiberg55b97fe2016-01-28 05:22:45 -08001685 if (_outputFilePlayerPtr->StartPlayingFile(
1686 fileName, loop, startPosition, volumeScaling, notificationTime,
1687 stopPosition, (const CodecInst*)codecInst) != 0) {
1688 _engineStatisticsPtr->SetLastError(
1689 VE_BAD_FILE, kTraceError,
1690 "StartPlayingFile() failed to start file playout");
1691 _outputFilePlayerPtr->StopPlayingFile();
1692 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1693 _outputFilePlayerPtr = NULL;
1694 return -1;
1695 }
1696 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1697 channel_state_.SetOutputFilePlaying(true);
1698 }
1699
1700 if (RegisterFilePlayingToMixer() != 0)
1701 return -1;
1702
1703 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001704}
1705
1706int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001707 FileFormats format,
1708 int startPosition,
1709 float volumeScaling,
1710 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001711 const CodecInst* codecInst) {
1712 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1713 "Channel::StartPlayingFileLocally(format=%d,"
1714 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1715 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001716
kwiberg55b97fe2016-01-28 05:22:45 -08001717 if (stream == NULL) {
1718 _engineStatisticsPtr->SetLastError(
1719 VE_BAD_FILE, kTraceError,
1720 "StartPlayingFileLocally() NULL as input stream");
1721 return -1;
1722 }
1723
1724 if (channel_state_.Get().output_file_playing) {
1725 _engineStatisticsPtr->SetLastError(
1726 VE_ALREADY_PLAYING, kTraceError,
1727 "StartPlayingFileLocally() is already playing");
1728 return -1;
1729 }
1730
1731 {
1732 rtc::CritScope cs(&_fileCritSect);
1733
1734 // Destroy the old instance
1735 if (_outputFilePlayerPtr) {
1736 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1737 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1738 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001739 }
1740
kwiberg55b97fe2016-01-28 05:22:45 -08001741 // Create the instance
1742 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1743 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001744
kwiberg55b97fe2016-01-28 05:22:45 -08001745 if (_outputFilePlayerPtr == NULL) {
1746 _engineStatisticsPtr->SetLastError(
1747 VE_INVALID_ARGUMENT, kTraceError,
1748 "StartPlayingFileLocally() filePlayer format isnot correct");
1749 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001750 }
1751
kwiberg55b97fe2016-01-28 05:22:45 -08001752 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001753
kwiberg55b97fe2016-01-28 05:22:45 -08001754 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1755 volumeScaling, notificationTime,
1756 stopPosition, codecInst) != 0) {
1757 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1758 "StartPlayingFile() failed to "
1759 "start file playout");
1760 _outputFilePlayerPtr->StopPlayingFile();
1761 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1762 _outputFilePlayerPtr = NULL;
1763 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001764 }
kwiberg55b97fe2016-01-28 05:22:45 -08001765 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1766 channel_state_.SetOutputFilePlaying(true);
1767 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001768
kwiberg55b97fe2016-01-28 05:22:45 -08001769 if (RegisterFilePlayingToMixer() != 0)
1770 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001771
kwiberg55b97fe2016-01-28 05:22:45 -08001772 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001773}
1774
kwiberg55b97fe2016-01-28 05:22:45 -08001775int Channel::StopPlayingFileLocally() {
1776 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1777 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001778
kwiberg55b97fe2016-01-28 05:22:45 -08001779 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001780 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001781 }
1782
1783 {
1784 rtc::CritScope cs(&_fileCritSect);
1785
1786 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1787 _engineStatisticsPtr->SetLastError(
1788 VE_STOP_RECORDING_FAILED, kTraceError,
1789 "StopPlayingFile() could not stop playing");
1790 return -1;
1791 }
1792 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1793 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1794 _outputFilePlayerPtr = NULL;
1795 channel_state_.SetOutputFilePlaying(false);
1796 }
1797 // _fileCritSect cannot be taken while calling
1798 // SetAnonymousMixibilityStatus. Refer to comments in
1799 // StartPlayingFileLocally(const char* ...) for more details.
1800 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1801 _engineStatisticsPtr->SetLastError(
1802 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1803 "StopPlayingFile() failed to stop participant from playing as"
1804 "file in the mixer");
1805 return -1;
1806 }
1807
1808 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001809}
1810
kwiberg55b97fe2016-01-28 05:22:45 -08001811int Channel::IsPlayingFileLocally() const {
1812 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001813}
1814
kwiberg55b97fe2016-01-28 05:22:45 -08001815int Channel::RegisterFilePlayingToMixer() {
1816 // Return success for not registering for file playing to mixer if:
1817 // 1. playing file before playout is started on that channel.
1818 // 2. starting playout without file playing on that channel.
1819 if (!channel_state_.Get().playing ||
1820 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001821 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001822 }
1823
1824 // |_fileCritSect| cannot be taken while calling
1825 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1826 // frames can be pulled by the mixer. Since the frames are generated from
1827 // the file, _fileCritSect will be taken. This would result in a deadlock.
1828 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1829 channel_state_.SetOutputFilePlaying(false);
1830 rtc::CritScope cs(&_fileCritSect);
1831 _engineStatisticsPtr->SetLastError(
1832 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1833 "StartPlayingFile() failed to add participant as file to mixer");
1834 _outputFilePlayerPtr->StopPlayingFile();
1835 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1836 _outputFilePlayerPtr = NULL;
1837 return -1;
1838 }
1839
1840 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001841}
1842
niklase@google.com470e71d2011-07-07 08:21:25 +00001843int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001844 bool loop,
1845 FileFormats format,
1846 int startPosition,
1847 float volumeScaling,
1848 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001849 const CodecInst* codecInst) {
1850 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1851 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1852 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1853 "stopPosition=%d)",
1854 fileName, loop, format, volumeScaling, startPosition,
1855 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001856
kwiberg55b97fe2016-01-28 05:22:45 -08001857 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001858
kwiberg55b97fe2016-01-28 05:22:45 -08001859 if (channel_state_.Get().input_file_playing) {
1860 _engineStatisticsPtr->SetLastError(
1861 VE_ALREADY_PLAYING, kTraceWarning,
1862 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001863 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001864 }
1865
1866 // Destroy the old instance
1867 if (_inputFilePlayerPtr) {
1868 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1869 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1870 _inputFilePlayerPtr = NULL;
1871 }
1872
1873 // Create the instance
1874 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1875 (const FileFormats)format);
1876
1877 if (_inputFilePlayerPtr == NULL) {
1878 _engineStatisticsPtr->SetLastError(
1879 VE_INVALID_ARGUMENT, kTraceError,
1880 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1881 return -1;
1882 }
1883
1884 const uint32_t notificationTime(0);
1885
1886 if (_inputFilePlayerPtr->StartPlayingFile(
1887 fileName, loop, startPosition, volumeScaling, notificationTime,
1888 stopPosition, (const CodecInst*)codecInst) != 0) {
1889 _engineStatisticsPtr->SetLastError(
1890 VE_BAD_FILE, kTraceError,
1891 "StartPlayingFile() failed to start file playout");
1892 _inputFilePlayerPtr->StopPlayingFile();
1893 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1894 _inputFilePlayerPtr = NULL;
1895 return -1;
1896 }
1897 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1898 channel_state_.SetInputFilePlaying(true);
1899
1900 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001901}
1902
1903int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001904 FileFormats format,
1905 int startPosition,
1906 float volumeScaling,
1907 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001908 const CodecInst* codecInst) {
1909 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1910 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1911 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1912 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001913
kwiberg55b97fe2016-01-28 05:22:45 -08001914 if (stream == NULL) {
1915 _engineStatisticsPtr->SetLastError(
1916 VE_BAD_FILE, kTraceError,
1917 "StartPlayingFileAsMicrophone NULL as input stream");
1918 return -1;
1919 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001920
kwiberg55b97fe2016-01-28 05:22:45 -08001921 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001922
kwiberg55b97fe2016-01-28 05:22:45 -08001923 if (channel_state_.Get().input_file_playing) {
1924 _engineStatisticsPtr->SetLastError(
1925 VE_ALREADY_PLAYING, kTraceWarning,
1926 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001927 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001928 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001929
kwiberg55b97fe2016-01-28 05:22:45 -08001930 // Destroy the old instance
1931 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001932 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1933 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1934 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001935 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001936
kwiberg55b97fe2016-01-28 05:22:45 -08001937 // Create the instance
1938 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1939 (const FileFormats)format);
1940
1941 if (_inputFilePlayerPtr == NULL) {
1942 _engineStatisticsPtr->SetLastError(
1943 VE_INVALID_ARGUMENT, kTraceError,
1944 "StartPlayingInputFile() filePlayer format isnot correct");
1945 return -1;
1946 }
1947
1948 const uint32_t notificationTime(0);
1949
1950 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1951 volumeScaling, notificationTime,
1952 stopPosition, codecInst) != 0) {
1953 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1954 "StartPlayingFile() failed to start "
1955 "file playout");
1956 _inputFilePlayerPtr->StopPlayingFile();
1957 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1958 _inputFilePlayerPtr = NULL;
1959 return -1;
1960 }
1961
1962 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1963 channel_state_.SetInputFilePlaying(true);
1964
1965 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001966}
1967
kwiberg55b97fe2016-01-28 05:22:45 -08001968int Channel::StopPlayingFileAsMicrophone() {
1969 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1970 "Channel::StopPlayingFileAsMicrophone()");
1971
1972 rtc::CritScope cs(&_fileCritSect);
1973
1974 if (!channel_state_.Get().input_file_playing) {
1975 return 0;
1976 }
1977
1978 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1979 _engineStatisticsPtr->SetLastError(
1980 VE_STOP_RECORDING_FAILED, kTraceError,
1981 "StopPlayingFile() could not stop playing");
1982 return -1;
1983 }
1984 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1985 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1986 _inputFilePlayerPtr = NULL;
1987 channel_state_.SetInputFilePlaying(false);
1988
1989 return 0;
1990}
1991
1992int Channel::IsPlayingFileAsMicrophone() const {
1993 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001994}
1995
// Starts recording the channel's playout audio to a file on disk.
// |fileName| is the destination path; |codecInst| selects the recording
// codec, or NULL to record as 16 kHz linear PCM. Returns 0 on success,
// -1 on failure (last error is set on |_engineStatisticsPtr|). If a
// recording is already in progress this is a no-op returning 0.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  // Already recording: treat as success, do not restart.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes NULL: 16-bit/16 kHz mono PCM.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // Only mono or stereo recordings are supported here.
  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Map the requested codec to a file format: raw PCM for the default,
  // WAV for L16/PCMU/PCMA, otherwise a codec-specific compressed file.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // Guards _outputFileRecorderPtr for the rest of the function.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    // Tear down the half-initialized recorder so we stay in a clean state.
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  // Receive file-related notifications (e.g. end-of-recording) on |this|.
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2062
// Starts recording the channel's playout audio to a caller-provided
// OutStream. Same semantics as the file-name overload above, except the
// destination is a stream and only mono (|channels| == 1) codecs are
// accepted. Returns 0 on success, -1 on failure.
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  // Already recording: treat as success, do not restart.
  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  // Fallback codec used when the caller passes NULL: 16-bit/16 kHz mono PCM.
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  // NOTE: unlike the file overload, this path only accepts mono codecs.
  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Map the requested codec to a file format: raw PCM for the default,
  // WAV for L16/PCMU/PCMA, otherwise a codec-specific compressed file.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  // Guards _outputFileRecorderPtr for the rest of the function.
  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
                                                      notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    // Tear down the half-initialized recorder so we stay in a clean state.
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }

  // Receive file-related notifications (e.g. end-of-recording) on |this|.
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2129
kwiberg55b97fe2016-01-28 05:22:45 -08002130int Channel::StopRecordingPlayout() {
2131 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2132 "Channel::StopRecordingPlayout()");
2133
2134 if (!_outputFileRecording) {
2135 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2136 "StopRecordingPlayout() isnot recording");
2137 return -1;
2138 }
2139
2140 rtc::CritScope cs(&_fileCritSect);
2141
2142 if (_outputFileRecorderPtr->StopRecording() != 0) {
2143 _engineStatisticsPtr->SetLastError(
2144 VE_STOP_RECORDING_FAILED, kTraceError,
2145 "StopRecording() could not stop recording");
2146 return (-1);
2147 }
2148 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2149 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2150 _outputFileRecorderPtr = NULL;
2151 _outputFileRecording = false;
2152
2153 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002154}
2155
kwiberg55b97fe2016-01-28 05:22:45 -08002156void Channel::SetMixWithMicStatus(bool mix) {
2157 rtc::CritScope cs(&_fileCritSect);
2158 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002159}
2160
kwiberg55b97fe2016-01-28 05:22:45 -08002161int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2162 int8_t currentLevel = _outputAudioLevel.Level();
2163 level = static_cast<int32_t>(currentLevel);
2164 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002165}
2166
kwiberg55b97fe2016-01-28 05:22:45 -08002167int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2168 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2169 level = static_cast<int32_t>(currentLevel);
2170 return 0;
2171}
2172
solenberg1c2af8e2016-03-24 10:36:00 -07002173int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002174 rtc::CritScope cs(&volume_settings_critsect_);
2175 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002176 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002177 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002178 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002179}
2180
solenberg1c2af8e2016-03-24 10:36:00 -07002181bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002182 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002183 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002184}
2185
kwiberg55b97fe2016-01-28 05:22:45 -08002186int Channel::SetOutputVolumePan(float left, float right) {
2187 rtc::CritScope cs(&volume_settings_critsect_);
2188 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002189 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002190 _panLeft = left;
2191 _panRight = right;
2192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002193}
2194
kwiberg55b97fe2016-01-28 05:22:45 -08002195int Channel::GetOutputVolumePan(float& left, float& right) const {
2196 rtc::CritScope cs(&volume_settings_critsect_);
2197 left = _panLeft;
2198 right = _panRight;
2199 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002200}
2201
kwiberg55b97fe2016-01-28 05:22:45 -08002202int Channel::SetChannelOutputVolumeScaling(float scaling) {
2203 rtc::CritScope cs(&volume_settings_critsect_);
2204 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002205 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002206 _outputGain = scaling;
2207 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002208}
2209
kwiberg55b97fe2016-01-28 05:22:45 -08002210int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2211 rtc::CritScope cs(&volume_settings_critsect_);
2212 scaling = _outputGain;
2213 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002214}
2215
// Sends an RFC 4733 out-of-band DTMF/telephone event via the RTP/RTCP
// module. |event| must be in [0, 255] and |duration_ms| in [0, 65535]
// (DCHECKed). Returns 0 on success, -1 if the channel is not sending or
// the RTP module rejects the event.
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendTelephoneEventOutband(...)");
  RTC_DCHECK_LE(0, event);
  RTC_DCHECK_GE(255, event);
  RTC_DCHECK_LE(0, duration_ms);
  RTC_DCHECK_GE(65535, duration_ms);
  // Events can only be transmitted while the channel is sending RTP.
  if (!Sending()) {
    return -1;
  }
  // kTelephoneEventAttenuationdB is the fixed event volume/attenuation.
  if (_rtpRtcpModule->SendTelephoneEventOutband(
          event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_DTMF_FAILED, kTraceWarning,
        "SendTelephoneEventOutband() failed to send event");
    return -1;
  }
  return 0;
}
2235
// Registers |payload_type| (0..127, DCHECKed) as the send payload type
// for "telephone-event" (8 kHz clock rate, per RFC 4733). Returns 0 on
// success, -1 if registration fails even after clearing a previously
// registered payload with the same type.
int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  CodecInst codec = {0};
  codec.plfreq = 8000;
  codec.pltype = payload_type;
  // 16 bytes = strlen("telephone-event") + terminating NUL.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // The payload type may already be registered with different settings;
    // deregister it and retry once.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2257
kwiberg55b97fe2016-01-28 05:22:45 -08002258int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2259 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2260 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002261
kwiberg55b97fe2016-01-28 05:22:45 -08002262 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002263
kwiberg55b97fe2016-01-28 05:22:45 -08002264 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002265
kwiberg55b97fe2016-01-28 05:22:45 -08002266 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2267 OnRxVadDetected(vadDecision);
2268 _oldVadDecision = vadDecision;
2269 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002270
kwiberg55b97fe2016-01-28 05:22:45 -08002271 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2272 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2273 vadDecision);
2274 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002275}
2276
kwiberg55b97fe2016-01-28 05:22:45 -08002277int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2278 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2279 "Channel::RegisterRxVadObserver()");
2280 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002281
kwiberg55b97fe2016-01-28 05:22:45 -08002282 if (_rxVadObserverPtr) {
2283 _engineStatisticsPtr->SetLastError(
2284 VE_INVALID_OPERATION, kTraceError,
2285 "RegisterRxVadObserver() observer already enabled");
2286 return -1;
2287 }
2288 _rxVadObserverPtr = &observer;
2289 _RxVadDetection = true;
2290 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002291}
2292
kwiberg55b97fe2016-01-28 05:22:45 -08002293int Channel::DeRegisterRxVadObserver() {
2294 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2295 "Channel::DeRegisterRxVadObserver()");
2296 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002297
kwiberg55b97fe2016-01-28 05:22:45 -08002298 if (!_rxVadObserverPtr) {
2299 _engineStatisticsPtr->SetLastError(
2300 VE_INVALID_OPERATION, kTraceWarning,
2301 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002302 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002303 }
2304 _rxVadObserverPtr = NULL;
2305 _RxVadDetection = false;
2306 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002307}
2308
kwiberg55b97fe2016-01-28 05:22:45 -08002309int Channel::VoiceActivityIndicator(int& activity) {
2310 activity = _sendFrameType;
2311 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002312}
2313
2314#ifdef WEBRTC_VOICE_ENGINE_AGC
2315
// Enables/disables receive-side AGC on this channel's dedicated APM
// (|rx_audioproc_|) and selects its mode. Returns 0 on success, -1 on an
// invalid mode or APM failure.
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE-level mode to the APM GainControl mode.
  GainControl::Mode agcMode = kDefaultRxAgcMode;
  switch (mode) {
    case kAgcDefault:
      break;  // Keep kDefaultRxAgcMode.
    case kAgcUnchanged:
      agcMode = rx_audioproc_->gain_control()->mode();  // Keep current mode.
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
    default:
      _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
                                         "SetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
    return -1;
  }

  _rxAgcIsEnabled = enable;
  // RX APM processing is needed if either RX AGC or RX NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2356
// Reads the receive-side AGC enabled state and mode from the RX APM.
// Returns 0 on success, -1 if the APM reports a mode with no VoE
// equivalent.
int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
  bool enable = rx_audioproc_->gain_control()->is_enabled();
  GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();

  enabled = enable;

  // Translate the APM GainControl mode back to the VoE-level mode.
  switch (agcMode) {
    case GainControl::kFixedDigital:
      mode = kAgcFixedDigital;
      break;
    case GainControl::kAdaptiveDigital:
      mode = kAgcAdaptiveDigital;
      break;
    default:
      // E.g. kAdaptiveAnalog, which has no RX-side counterpart.
      _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
                                         "GetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  return 0;
}
2378
// Applies target level, compression gain and limiter settings from
// |config| to the receive-side AGC. Returns 0 on success, -1 if any APM
// setter fails (applied in order; an early failure leaves earlier
// settings in place).
int Channel::SetRxAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcConfig()");

  if (rx_audioproc_->gain_control()->set_target_level_dbfs(
          config.targetLeveldBOv) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set target peak |level|"
        "(or envelope) of the Agc");
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_compression_gain_db(
          config.digitalCompressionGaindB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set the range in |gain| the"
        " digital compression stage may apply");
    return -1;
  }
  if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
}
2409
kwiberg55b97fe2016-01-28 05:22:45 -08002410int Channel::GetRxAgcConfig(AgcConfig& config) {
2411 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2412 config.digitalCompressionGaindB =
2413 rx_audioproc_->gain_control()->compression_gain_db();
2414 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002415
kwiberg55b97fe2016-01-28 05:22:45 -08002416 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002417}
2418
kwiberg55b97fe2016-01-28 05:22:45 -08002419#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002420
2421#ifdef WEBRTC_VOICE_ENGINE_NR
2422
// Enables/disables receive-side noise suppression on this channel's RX
// APM and selects its aggressiveness. Returns 0 on success, -1 on APM
// failure.
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE-level mode to the APM NoiseSuppression level.
  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      break;  // Keep kDefaultNsMode.
    case kNsUnchanged:
      nsLevel = rx_audioproc_->noise_suppression()->level();  // Keep current.
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
    return -1;
  }
  if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
    return -1;
  }

  _rxNsIsEnabled = enable;
  // RX APM processing is needed if either RX AGC or RX NS is active.
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2468
// Reads the receive-side noise-suppression enabled state and level from
// the RX APM. Always returns 0. Note: if the APM reports a level outside
// the handled cases, |mode| is left unmodified.
int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
  bool enable = rx_audioproc_->noise_suppression()->is_enabled();
  NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();

  enabled = enable;

  // Translate the APM NoiseSuppression level back to the VoE-level mode.
  switch (ncLevel) {
    case NoiseSuppression::kLow:
      mode = kNsLowSuppression;
      break;
    case NoiseSuppression::kModerate:
      mode = kNsModerateSuppression;
      break;
    case NoiseSuppression::kHigh:
      mode = kNsHighSuppression;
      break;
    case NoiseSuppression::kVeryHigh:
      mode = kNsVeryHighSuppression;
      break;
  }

  return 0;
}
2492
kwiberg55b97fe2016-01-28 05:22:45 -08002493#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002494
kwiberg55b97fe2016-01-28 05:22:45 -08002495int Channel::SetLocalSSRC(unsigned int ssrc) {
2496 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2497 "Channel::SetLocalSSRC()");
2498 if (channel_state_.Get().sending) {
2499 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2500 "SetLocalSSRC() already sending");
2501 return -1;
2502 }
2503 _rtpRtcpModule->SetSSRC(ssrc);
2504 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002505}
2506
kwiberg55b97fe2016-01-28 05:22:45 -08002507int Channel::GetLocalSSRC(unsigned int& ssrc) {
2508 ssrc = _rtpRtcpModule->SSRC();
2509 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002510}
2511
kwiberg55b97fe2016-01-28 05:22:45 -08002512int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2513 ssrc = rtp_receiver_->SSRC();
2514 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002515}
2516
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002517int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002518 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002519 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002520}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002521
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002522int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2523 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002524 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2525 if (enable &&
2526 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2527 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002528 return -1;
2529 }
2530 return 0;
2531}
2532
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002533int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2534 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2535}
2536
2537int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2538 rtp_header_parser_->DeregisterRtpHeaderExtension(
2539 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002540 if (enable &&
2541 !rtp_header_parser_->RegisterRtpHeaderExtension(
2542 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002543 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002544 }
2545 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002546}
2547
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002548void Channel::EnableSendTransportSequenceNumber(int id) {
2549 int ret =
2550 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2551 RTC_DCHECK_EQ(0, ret);
2552}
2553
stefan3313ec92016-01-21 06:32:43 -08002554void Channel::EnableReceiveTransportSequenceNumber(int id) {
2555 rtp_header_parser_->DeregisterRtpHeaderExtension(
2556 kRtpExtensionTransportSequenceNumber);
2557 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2558 kRtpExtensionTransportSequenceNumber, id);
2559 RTC_DCHECK(ret);
2560}
2561
// Wires this channel's send path into the congestion-control stack:
// routes transport feedback, sequence-number allocation and paced packet
// sending through the given objects, enables packet storage (for
// retransmission), and registers the RTP module with |packet_router|.
// Must not be called while a packet router is already registered
// (DCHECKed). Undone by ResetCongestionControlObjects().
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Keep up to 600 sent packets for potential retransmission.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2577
stefanbba9dec2016-02-01 04:39:55 -08002578void Channel::RegisterReceiverCongestionControlObjects(
2579 PacketRouter* packet_router) {
2580 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002581 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002582 packet_router_ = packet_router;
2583}
2584
// Unwires the channel from the congestion-control stack: disables packet
// storage, clears the feedback/sequence-number/packet-sender proxies and
// removes the RTP module from the packet router. Requires a router to be
// registered (DCHECKed). Reverses either Register*CongestionControlObjects
// call.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2594
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002595void Channel::SetRTCPStatus(bool enable) {
2596 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2597 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002598 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002599}
2600
kwiberg55b97fe2016-01-28 05:22:45 -08002601int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002602 RtcpMode method = _rtpRtcpModule->RTCP();
2603 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002604 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002605}
2606
kwiberg55b97fe2016-01-28 05:22:45 -08002607int Channel::SetRTCP_CNAME(const char cName[256]) {
2608 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2609 "Channel::SetRTCP_CNAME()");
2610 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2611 _engineStatisticsPtr->SetLastError(
2612 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2613 "SetRTCP_CNAME() failed to set RTCP CNAME");
2614 return -1;
2615 }
2616 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002617}
2618
// Copies the remote end's RTCP CNAME (for the SSRC currently seen by the
// RTP receiver) into |cName|. Returns 0 on success, -1 on a NULL buffer
// or if no CNAME has been received.
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
  if (cName == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
    return -1;
  }
  char cname[RTCP_CNAME_SIZE];
  const uint32_t remoteSSRC = rtp_receiver_->SSRC();
  if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_CNAME, kTraceError,
        "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
    return -1;
  }
  // The unchecked strcpy relies on RemoteCNAME() NUL-terminating within
  // RTCP_CNAME_SIZE and on RTCP_CNAME_SIZE <= 256 — presumably both hold;
  // verify against the RTP/RTCP module if either constant changes.
  strcpy(cName, cname);
  return 0;
}
2637
// Collects remote RTCP statistics: NTP timestamp and RTP timestamp from
// the last received Sender Report, the locally derived RTCP playout
// timestamp, and optionally jitter/fraction-lost from the matching
// receiver report block. |jitter| and |fractionLost| may be NULL when not
// wanted. Returns 0 on success, -1 if sender info or (when requested)
// report blocks are unavailable.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Prefer the report block whose SSRC matches the currently received
    // remote stream.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2706
// Schedules an application-defined (APP) RTCP packet for transmission.
// |dataLengthInBytes| must be a multiple of 4 (RTCP APP payload is
// 32-bit aligned). Requires the channel to be sending and RTCP to be
// enabled. Returns 0 on success, -1 on any validation or send failure.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // APP packet payload must be a whole number of 32-bit words.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2750
kwiberg55b97fe2016-01-28 05:22:45 -08002751int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2752 unsigned int& maxJitterMs,
2753 unsigned int& discardedPackets) {
2754 // The jitter statistics is updated for each received RTP packet and is
2755 // based on received packets.
2756 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2757 // If RTCP is off, there is no timed thread in the RTCP module regularly
2758 // generating new stats, trigger the update manually here instead.
2759 StreamStatistician* statistician =
2760 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2761 if (statistician) {
2762 // Don't use returned statistics, use data from proxy instead so that
2763 // max jitter can be fetched atomically.
2764 RtcpStatistics s;
2765 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002766 }
kwiberg55b97fe2016-01-28 05:22:45 -08002767 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002768
kwiberg55b97fe2016-01-28 05:22:45 -08002769 ChannelStatistics stats = statistics_proxy_->GetStats();
2770 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2771 if (playoutFrequency > 0) {
2772 // Scale RTP statistics given the current playout frequency
2773 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2774 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2775 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002776
kwiberg55b97fe2016-01-28 05:22:45 -08002777 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002778
kwiberg55b97fe2016-01-28 05:22:45 -08002779 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002780}
2781
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002782int Channel::GetRemoteRTCPReportBlocks(
2783 std::vector<ReportBlock>* report_blocks) {
2784 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002785 _engineStatisticsPtr->SetLastError(
2786 VE_INVALID_ARGUMENT, kTraceError,
2787 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002788 return -1;
2789 }
2790
2791 // Get the report blocks from the latest received RTCP Sender or Receiver
2792 // Report. Each element in the vector contains the sender's SSRC and a
2793 // report block according to RFC 3550.
2794 std::vector<RTCPReportBlock> rtcp_report_blocks;
2795 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002796 return -1;
2797 }
2798
2799 if (rtcp_report_blocks.empty())
2800 return 0;
2801
2802 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2803 for (; it != rtcp_report_blocks.end(); ++it) {
2804 ReportBlock report_block;
2805 report_block.sender_SSRC = it->remoteSSRC;
2806 report_block.source_SSRC = it->sourceSSRC;
2807 report_block.fraction_lost = it->fractionLost;
2808 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2809 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2810 report_block.interarrival_jitter = it->jitter;
2811 report_block.last_SR_timestamp = it->lastSR;
2812 report_block.delay_since_last_SR = it->delaySinceLastSR;
2813 report_blocks->push_back(report_block);
2814 }
2815 return 0;
2816}
2817
kwiberg55b97fe2016-01-28 05:22:45 -08002818int Channel::GetRTPStatistics(CallStatistics& stats) {
2819 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002820
kwiberg55b97fe2016-01-28 05:22:45 -08002821 // The jitter statistics is updated for each received RTP packet and is
2822 // based on received packets.
2823 RtcpStatistics statistics;
2824 StreamStatistician* statistician =
2825 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002826 if (statistician) {
2827 statistician->GetStatistics(&statistics,
2828 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002829 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002830
kwiberg55b97fe2016-01-28 05:22:45 -08002831 stats.fractionLost = statistics.fraction_lost;
2832 stats.cumulativeLost = statistics.cumulative_lost;
2833 stats.extendedMax = statistics.extended_max_sequence_number;
2834 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002835
kwiberg55b97fe2016-01-28 05:22:45 -08002836 // --- RTT
2837 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002838
kwiberg55b97fe2016-01-28 05:22:45 -08002839 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002840
kwiberg55b97fe2016-01-28 05:22:45 -08002841 size_t bytesSent(0);
2842 uint32_t packetsSent(0);
2843 size_t bytesReceived(0);
2844 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002845
kwiberg55b97fe2016-01-28 05:22:45 -08002846 if (statistician) {
2847 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2848 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002849
kwiberg55b97fe2016-01-28 05:22:45 -08002850 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2851 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2852 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
2853 " output will not be complete");
2854 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002855
kwiberg55b97fe2016-01-28 05:22:45 -08002856 stats.bytesSent = bytesSent;
2857 stats.packetsSent = packetsSent;
2858 stats.bytesReceived = bytesReceived;
2859 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002860
kwiberg55b97fe2016-01-28 05:22:45 -08002861 // --- Timestamps
2862 {
2863 rtc::CritScope lock(&ts_stats_lock_);
2864 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2865 }
2866 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002867}
2868
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002869int Channel::SetCodecFECStatus(bool enable) {
2870 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2871 "Channel::SetCodecFECStatus()");
2872
kwibergc8d071e2016-04-06 12:22:38 -07002873 if (!codec_manager_.SetCodecFEC(enable) ||
2874 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002875 _engineStatisticsPtr->SetLastError(
2876 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2877 "SetCodecFECStatus() failed to set FEC state");
2878 return -1;
2879 }
2880 return 0;
2881}
2882
2883bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002884 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002885}
2886
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002887void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2888 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002889 // If pacing is enabled we always store packets.
2890 if (!pacing_enabled_)
2891 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002892 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002893 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002894 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002895 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002896 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002897}
2898
// Called when we are missing one or more packets.
// Forwards the NACK request for |length| sequence numbers to the RTP/RTCP
// module, which builds and sends the NACK packet; returns its result code.
int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
  return _rtpRtcpModule->SendNACK(sequence_numbers, length);
}
2903
// Copies the shared capture frame into this channel's local frame and tags
// it with the channel id. Always returns 0.
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Demultiplex()");
  _audioFrame.CopyFrom(audioFrame);
  _audioFrame.id_ = _channelId;
  return 0;
}
2911
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002912void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002913 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002914 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002915 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002916 CodecInst codec;
2917 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002918
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002919 // Never upsample or upmix the capture signal here. This should be done at the
2920 // end of the send chain.
2921 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2922 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2923 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2924 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002925}
2926
// Prepares the channel's local audio frame for encoding: optionally mixes or
// replaces it with file audio, applies input muting (with a smooth
// transition across mute-state changes), runs the external media callback,
// and feeds the RMS level tracker used for the audio-level RTP extension.
// Returns 0 on success, 0xFFFFFFFF on an invalid (empty) frame.
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Mute() receives both the previous and current mute state so it can ramp
  // in/out instead of muting abruptly.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    // The callback pointer is read and invoked under the callback lock.
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
    // Only count the frame as muted once the ramp-out has completed, i.e.
    // both the previous and the current frame are muted.
    if (is_muted && previous_frame_muted_) {
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
2969
kwiberg55b97fe2016-01-28 05:22:45 -08002970uint32_t Channel::EncodeAndSend() {
2971 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2972 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002973
kwiberg55b97fe2016-01-28 05:22:45 -08002974 assert(_audioFrame.num_channels_ <= 2);
2975 if (_audioFrame.samples_per_channel_ == 0) {
2976 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2977 "Channel::EncodeAndSend() invalid audio frame");
2978 return 0xFFFFFFFF;
2979 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002980
kwiberg55b97fe2016-01-28 05:22:45 -08002981 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00002982
kwiberg55b97fe2016-01-28 05:22:45 -08002983 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00002984
kwiberg55b97fe2016-01-28 05:22:45 -08002985 // The ACM resamples internally.
2986 _audioFrame.timestamp_ = _timeStamp;
2987 // This call will trigger AudioPacketizationCallback::SendData if encoding
2988 // is done and payload is ready for packetization and transmission.
2989 // Otherwise, it will return without invoking the callback.
2990 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
2991 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
2992 "Channel::EncodeAndSend() ACM encoding failed");
2993 return 0xFFFFFFFF;
2994 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002995
kwiberg55b97fe2016-01-28 05:22:45 -08002996 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
2997 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002998}
2999
Minyue2013aec2015-05-13 14:14:42 +02003000void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003001 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003002 Channel* channel = associate_send_channel_.channel();
3003 if (channel && channel->ChannelId() == channel_id) {
3004 // If this channel is associated with a send channel of the specified
3005 // Channel ID, disassociate with it.
3006 ChannelOwner ref(NULL);
3007 associate_send_channel_ = ref;
3008 }
3009}
3010
kwiberg55b97fe2016-01-28 05:22:45 -08003011int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3012 VoEMediaProcess& processObject) {
3013 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3014 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003015
kwiberg55b97fe2016-01-28 05:22:45 -08003016 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003017
kwiberg55b97fe2016-01-28 05:22:45 -08003018 if (kPlaybackPerChannel == type) {
3019 if (_outputExternalMediaCallbackPtr) {
3020 _engineStatisticsPtr->SetLastError(
3021 VE_INVALID_OPERATION, kTraceError,
3022 "Channel::RegisterExternalMediaProcessing() "
3023 "output external media already enabled");
3024 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003025 }
kwiberg55b97fe2016-01-28 05:22:45 -08003026 _outputExternalMediaCallbackPtr = &processObject;
3027 _outputExternalMedia = true;
3028 } else if (kRecordingPerChannel == type) {
3029 if (_inputExternalMediaCallbackPtr) {
3030 _engineStatisticsPtr->SetLastError(
3031 VE_INVALID_OPERATION, kTraceError,
3032 "Channel::RegisterExternalMediaProcessing() "
3033 "output external media already enabled");
3034 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003035 }
kwiberg55b97fe2016-01-28 05:22:45 -08003036 _inputExternalMediaCallbackPtr = &processObject;
3037 channel_state_.SetInputExternalMedia(true);
3038 }
3039 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003040}
3041
kwiberg55b97fe2016-01-28 05:22:45 -08003042int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3043 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3044 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003045
kwiberg55b97fe2016-01-28 05:22:45 -08003046 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003047
kwiberg55b97fe2016-01-28 05:22:45 -08003048 if (kPlaybackPerChannel == type) {
3049 if (!_outputExternalMediaCallbackPtr) {
3050 _engineStatisticsPtr->SetLastError(
3051 VE_INVALID_OPERATION, kTraceWarning,
3052 "Channel::DeRegisterExternalMediaProcessing() "
3053 "output external media already disabled");
3054 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003055 }
kwiberg55b97fe2016-01-28 05:22:45 -08003056 _outputExternalMedia = false;
3057 _outputExternalMediaCallbackPtr = NULL;
3058 } else if (kRecordingPerChannel == type) {
3059 if (!_inputExternalMediaCallbackPtr) {
3060 _engineStatisticsPtr->SetLastError(
3061 VE_INVALID_OPERATION, kTraceWarning,
3062 "Channel::DeRegisterExternalMediaProcessing() "
3063 "input external media already disabled");
3064 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003065 }
kwiberg55b97fe2016-01-28 05:22:45 -08003066 channel_state_.SetInputExternalMedia(false);
3067 _inputExternalMediaCallbackPtr = NULL;
3068 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003069
kwiberg55b97fe2016-01-28 05:22:45 -08003070 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003071}
3072
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003073int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003074 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3075 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003076
kwiberg55b97fe2016-01-28 05:22:45 -08003077 if (channel_state_.Get().playing) {
3078 _engineStatisticsPtr->SetLastError(
3079 VE_INVALID_OPERATION, kTraceError,
3080 "Channel::SetExternalMixing() "
3081 "external mixing cannot be changed while playing.");
3082 return -1;
3083 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003084
kwiberg55b97fe2016-01-28 05:22:45 -08003085 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003086
kwiberg55b97fe2016-01-28 05:22:45 -08003087 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003088}
3089
// Fetches NetEq/jitter-buffer network statistics from the ACM into |stats|
// and returns the ACM's result code (0 on success).
int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
  return audio_coding_->GetNetworkStatistics(&stats);
}
3093
// Fetches decoder call statistics (counts of decode invocations by type)
// from the ACM into |stats|.
void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
  audio_coding_->GetDecodingCallStatistics(stats);
}
3097
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003098bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3099 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003100 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003101 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003102 return false;
3103 }
kwiberg55b97fe2016-01-28 05:22:45 -08003104 *jitter_buffer_delay_ms =
3105 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003106 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003107 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003108}
3109
solenberg358057b2015-11-27 10:46:42 -08003110uint32_t Channel::GetDelayEstimate() const {
3111 int jitter_buffer_delay_ms = 0;
3112 int playout_buffer_delay_ms = 0;
3113 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3114 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3115}
3116
// Returns the ACM's least required (minimum achievable) playout delay in
// milliseconds.
int Channel::LeastRequiredDelayMs() const {
  return audio_coding_->LeastRequiredDelayMs();
}
3120
kwiberg55b97fe2016-01-28 05:22:45 -08003121int Channel::SetMinimumPlayoutDelay(int delayMs) {
3122 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3123 "Channel::SetMinimumPlayoutDelay()");
3124 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3125 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3126 _engineStatisticsPtr->SetLastError(
3127 VE_INVALID_ARGUMENT, kTraceError,
3128 "SetMinimumPlayoutDelay() invalid min delay");
3129 return -1;
3130 }
3131 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3132 _engineStatisticsPtr->SetLastError(
3133 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3134 "SetMinimumPlayoutDelay() failed to set min playout delay");
3135 return -1;
3136 }
3137 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003138}
3139
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003140int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003141 uint32_t playout_timestamp_rtp = 0;
3142 {
tommi31fc21f2016-01-21 10:37:37 -08003143 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003144 playout_timestamp_rtp = playout_timestamp_rtp_;
3145 }
kwiberg55b97fe2016-01-28 05:22:45 -08003146 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003147 _engineStatisticsPtr->SetLastError(
3148 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3149 "GetPlayoutTimestamp() failed to retrieve timestamp");
3150 return -1;
3151 }
deadbeef74375882015-08-13 12:09:10 -07003152 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003153 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003154}
3155
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003156int Channel::SetInitTimestamp(unsigned int timestamp) {
3157 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003158 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003159 if (channel_state_.Get().sending) {
3160 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3161 "SetInitTimestamp() already sending");
3162 return -1;
3163 }
3164 _rtpRtcpModule->SetStartTimestamp(timestamp);
3165 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003166}
3167
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003168int Channel::SetInitSequenceNumber(short sequenceNumber) {
3169 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3170 "Channel::SetInitSequenceNumber()");
3171 if (channel_state_.Get().sending) {
3172 _engineStatisticsPtr->SetLastError(
3173 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3174 return -1;
3175 }
3176 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3177 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003178}
3179
// Exposes non-owning pointers to the channel's RTP/RTCP module and RTP
// receiver. The pointers are valid only for the lifetime of this Channel.
// Always returns 0.
int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
                        RtpReceiver** rtp_receiver) const {
  *rtpRtcpModule = _rtpRtcpModule.get();
  *rtp_receiver = rtp_receiver_.get();
  return 0;
}
3186
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003187// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3188// a shared helper.
// Mixes 10 ms of input-file audio into the channel's local frame, or
// replaces the frame entirely, depending on _mixFileWithMicrophone.
// Returns 0 on success (including end-of-file), -1 when the file player is
// missing or file reading fails.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // 640 samples covers 10 ms of mono audio at up to 64 kHz — presumably
  // sized for the maximum rate the file player delivers; TODO confirm.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // The file player pointer is accessed under the file lock; the lock is
    // released before touching _audioFrame below.
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    if (fileSamples == 0) {
      // End of file: leave the frame untouched and report success.
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3234
// Mixes 10 ms of output-file audio (saturating add) into |audioFrame|.
// Returns 0 on success, -1 when the file player is missing, reading fails,
// or the file frame length does not match |audioFrame|.
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  assert(mixingFrequency <= 48000);

  // 960 samples covers 10 ms of mono audio at the asserted maximum of 48 kHz.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // The file player pointer is accessed under the file lock.
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3275
// Recomputes the current playout timestamp from NetEq's jitter-buffer
// timestamp minus the audio device's playout delay, and stores it (together
// with the delay) under the video-sync lock. |rtcp| selects whether the
// RTCP- or RTP-derived slot is updated.
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
  jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();

  if (!jitter_buffer_playout_timestamp_) {
    // This can happen if this channel has not received any RTP packets. In
    // this case, NetEq is not capable of computing a playout timestamp.
    return;
  }

  uint16_t delay_ms = 0;
  if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::UpdatePlayoutTimestamp() failed to read playout"
                 " delay from the ADM");
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_RETRIEVE_VALUE, kTraceError,
        "UpdatePlayoutTimestamp() failed to retrieve playout delay");
    return;
  }

  RTC_DCHECK(jitter_buffer_playout_timestamp_);
  uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;

  // Remove the playout delay.
  // Converts delay_ms to RTP ticks using the playout frequency in kHz.
  playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));

  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
               playout_timestamp);

  {
    // Both timestamp slots and the delay are read by the video-sync path.
    rtc::CritScope lock(&video_sync_lock_);
    if (rtcp) {
      playout_timestamp_rtcp_ = playout_timestamp;
    } else {
      playout_timestamp_rtp_ = playout_timestamp;
    }
    playout_delay_ms_ = delay_ms;
  }
}
3316
// Called for incoming RTP packets after successful RTP header parsing.
// Updates the per-packet delay and the exponentially filtered average
// jitter-buffer delay used by GetDelayEstimate().
void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
                                uint16_t sequence_number) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
               rtp_timestamp, sequence_number);

  // Get frequency of last received payload
  int rtp_receive_frequency = GetPlayoutFrequency();

  // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
  // every incoming packet. May be empty if no valid playout timestamp is
  // available.
  // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
  // resulting difference is positive and will be used. When the inverse is
  // true (can happen when a network glitch causes a packet to arrive late,
  // and during long comfort noise periods with clock drift), or when
  // |jitter_buffer_playout_timestamp_| has no value, the difference is not
  // changed from the initial 0.
  uint32_t timestamp_diff_ms = 0;
  if (jitter_buffer_playout_timestamp_ &&
      IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
    // Unsigned subtraction is wraparound-safe given the IsNewerTimestamp
    // check above.
    timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
                        (rtp_receive_frequency / 1000);
    if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
      // Diff is too large; set it to zero instead.
      timestamp_diff_ms = 0;
    }
  }

  // Inter-packet spacing in ms, derived from consecutive RTP timestamps.
  uint16_t packet_delay_ms =
      (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);

  _previousTimestamp = rtp_timestamp;

  if (timestamp_diff_ms == 0)
    return;

  {
    rtc::CritScope lock(&video_sync_lock_);

    // Only accept spacings in the plausible 10-60 ms frame-duration range.
    if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
      _recPacketDelayMs = packet_delay_ms;
    }

    if (_average_jitter_buffer_delay_us == 0) {
      // First sample seeds the filter directly.
      _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
      return;
    }

    // Filter average delay value using exponential filter (alpha is
    // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
    // risk of rounding error) and compensate for it in GetDelayEstimate()
    // later.
    _average_jitter_buffer_delay_us =
        (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
        8;
  }
}
3376
kwiberg55b97fe2016-01-28 05:22:45 -08003377void Channel::RegisterReceiveCodecsToRTPModule() {
3378 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3379 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003380
kwiberg55b97fe2016-01-28 05:22:45 -08003381 CodecInst codec;
3382 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003383
kwiberg55b97fe2016-01-28 05:22:45 -08003384 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3385 // Open up the RTP/RTCP receiver for all supported codecs
3386 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3387 (rtp_receiver_->RegisterReceivePayload(
3388 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3389 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3390 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3391 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3392 " to register %s (%d/%d/%" PRIuS
3393 "/%d) to RTP/RTCP "
3394 "receiver",
3395 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3396 codec.rate);
3397 } else {
3398 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3399 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3400 "(%d/%d/%" PRIuS
3401 "/%d) has been added to the RTP/RTCP "
3402 "receiver",
3403 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3404 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003405 }
kwiberg55b97fe2016-01-28 05:22:45 -08003406 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003407}
3408
kwiberg55b97fe2016-01-28 05:22:45 -08003409int Channel::SetSendRtpHeaderExtension(bool enable,
3410 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003411 unsigned char id) {
3412 int error = 0;
3413 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3414 if (enable) {
3415 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3416 }
3417 return error;
3418}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003419
wu@webrtc.org94454b72014-06-05 20:34:08 +00003420int32_t Channel::GetPlayoutFrequency() {
3421 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3422 CodecInst current_recive_codec;
3423 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3424 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3425 // Even though the actual sampling rate for G.722 audio is
3426 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3427 // 8,000 Hz because that value was erroneously assigned in
3428 // RFC 1890 and must remain unchanged for backward compatibility.
3429 playout_frequency = 8000;
3430 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3431 // We are resampling Opus internally to 32,000 Hz until all our
3432 // DSP routines can operate at 48,000 Hz, but the RTP clock
3433 // rate for the Opus payload format is standardized to 48,000 Hz,
3434 // because that is the maximum supported decoding sampling rate.
3435 playout_frequency = 48000;
3436 }
3437 }
3438 return playout_frequency;
3439}
3440
Minyue2013aec2015-05-13 14:14:42 +02003441int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003442 RtcpMode method = _rtpRtcpModule->RTCP();
3443 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003444 return 0;
3445 }
3446 std::vector<RTCPReportBlock> report_blocks;
3447 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003448
3449 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003450 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003451 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003452 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003453 Channel* channel = associate_send_channel_.channel();
3454 // Tries to get RTT from an associated channel. This is important for
3455 // receive-only channels.
3456 if (channel) {
3457 // To prevent infinite recursion and deadlock, calling GetRTT of
3458 // associate channel should always use "false" for argument:
3459 // |allow_associate_channel|.
3460 rtt = channel->GetRTT(false);
3461 }
3462 }
3463 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003464 }
3465
3466 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3467 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3468 for (; it != report_blocks.end(); ++it) {
3469 if (it->remoteSSRC == remoteSSRC)
3470 break;
3471 }
3472 if (it == report_blocks.end()) {
3473 // We have not received packets with SSRC matching the report blocks.
3474 // To calculate RTT we try with the SSRC of the first report block.
3475 // This is very important for send-only channels where we don't know
3476 // the SSRC of the other end.
3477 remoteSSRC = report_blocks[0].remoteSSRC;
3478 }
Minyue2013aec2015-05-13 14:14:42 +02003479
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003480 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003481 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003482 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003483 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3484 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003485 return 0;
3486 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003487 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003488}
3489
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003490} // namespace voe
3491} // namespace webrtc