/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

namespace webrtc {
namespace voe {

namespace {

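// Registers |ci| as a receive codec in |acm|. The lambda lets the ACM rent an
// iSAC decoder from |rac| only if and when one is actually needed. Returns
// true on success.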
bool RegisterReceiveCodec(std::unique_ptr<AudioCodingModule>* acm,
                          acm2::RentACodec* rac,
                          const CodecInst& ci) {
  const int result =
      (*acm)->RegisterReceiveCodec(ci, [&] { return rac->RentIsacDecoder(); });
  return result == 0;
}

}  // namespace

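// Attenuation, in dB, for telephone events (DTMF) handled by this channel.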
const int kTelephoneEventAttenuationdB = 10;

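// Proxy that forwards transport-feedback callbacks to an observer that can be
// attached (or cleared) at runtime. The RTP/RTCP module gets a stable pointer
// to this proxy at construction time, while the real observer is registered
// later, possibly from a different thread.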
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced,
                 int probe_cluster_id) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced,
                                    probe_cluster_id);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};

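// Same proxy pattern, but for the allocator that hands out transport-wide
// sequence numbers to the pacer. Returns 0 if no allocator has been attached.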
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};

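// Proxy in front of the pacer (RtpPacketSender). Packets inserted before a
// sender has been attached are silently dropped.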
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};

// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};

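// RTCP bandwidth observer that condenses incoming receiver reports into a
// single packet-count-weighted fraction-lost value and forwards it to the
// owning Channel via OnIncomingFractionLoss().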
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just send
    // what we get? I.e. do we ever get multiple reports bundled into one RTCP
    // report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};

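// Called by the ACM through the transport callback registered in Init() when
// an encoded audio frame is ready; hands the payload to the RTP/RTCP module
// for packetization and sending.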
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

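// Called by the ACM through the VAD callback registered in Init(); records
// whether the frame currently being sent contains speech.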
int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}

int32_t Channel::OnRxVadDetected(int vadDecision) {
  rtc::CritScope cs(&_callbackCritSect);
  if (_rxVadObserverPtr) {
    _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
  }

  return 0;
}

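// webrtc::Transport implementation: invoked by the RTP/RTCP module when an
// RTP packet is ready to be handed to the external transport.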
bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, receiveCodec)) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}

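// Called by the RTP receiver when the payload of an incoming packet has been
// parsed: pushes it into the ACM (NetEQ), updates the packet-delay estimate,
// and re-requests any packets the ACM's NACK module reports as missing.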
int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  // Update the packet delay.
  UpdatePacketDelay(rtpHeader->header.timestamp,
                    rtpHeader->header.sequenceNumber);

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

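// Pulls 10 ms of decoded audio from the ACM for the output mixer and runs the
// receive-side chain on it: optional RX APM processing, sink callback, output
// gain and panning, file mixing, external media processing, playout recording,
// level measurement, and elapsed/NTP timestamp bookkeeping. Returns whether
// the resulting frame is normal, muted, or in error.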
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    audioFrame->Mute();
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}

int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine highest needed receive frequency
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case, if we're playing a file on the playout side
  // we take that frequency into consideration as well
  // This is not needed on sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (_outputFilePlayerPtr) {
      if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
        highestNeeded = _outputFilePlayerPtr->Frequency();
      }
    }
  }

  return (highestNeeded);
}

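// Factory helpers. The first overload forwards to the second, using the
// built-in audio decoder factory.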
int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  return CreateChannel(channel, channelId, instanceId, event_log, config,
                       CreateBuiltinAudioDecoderFactory());
}

int32_t Channel::CreateChannel(
    Channel*& channel,
    int32_t channelId,
    uint32_t instanceId,
    RtcEventLog* const event_log,
    const Config& config,
    const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel =
      new Channel(channelId, instanceId, event_log, config, decoder_factory);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet
}

void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet
}

void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}

Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config,
                 const rtc::scoped_refptr<AudioDecoderFactory>& decoder_factory)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026;
      // we won't use as many as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add its own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  acm_config.neteq_config.enable_muted_state = true;
  acm_config.decoder_factory = decoder_factory;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}

Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}

int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}

int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
                                      OutputMixer& outputMixer,
                                      voe::TransmitMixer& transmitMixer,
                                      ProcessThread& moduleProcessThread,
                                      AudioDeviceModule& audioDeviceModule,
                                      VoiceEngineObserver* voiceEngineObserver,
                                      rtc::CriticalSection* callbackCritSect) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetEngineInformation()");
  _engineStatisticsPtr = &engineStatistics;
  _outputMixerPtr = &outputMixer;
  _transmitMixerPtr = &transmitMixer;
  _moduleProcessThreadPtr = &moduleProcessThread;
  _audioDeviceModulePtr = &audioDeviceModule;
  _voiceEngineObserverPtr = voiceEngineObserver;
  _callbackCritSectPtr = callbackCritSect;
  return 0;
}

int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}

void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}

int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}

int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}

int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}

int32_t Channel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopSend()");
  if (!channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(false);

  // Store the sequence number to be able to pick up the same sequence for
  // the next StartSend(). This is needed for restarting device, otherwise
  // it might cause libSRTP to complain about packets being replayed.
  // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
  // CL is landed. See issue
  // https://code.google.com/p/webrtc/issues/detail?id=2111 .
  send_sequence_number_ = _rtpRtcpModule->SequenceNumber();

  // Reset sending SSRC and sequence number and trigger direct transmission
  // of RTCP BYE
  if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "StopSend() RTP/RTCP failed to stop sending");
  }
  _rtpRtcpModule->SetSendingMediaStatus(false);

  return 0;
}

int32_t Channel::StartReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartReceiving()");
  if (channel_state_.Get().receiving) {
    return 0;
  }
  channel_state_.SetReceiving(true);
  _numberOfDiscardedPackets = 0;
  return 0;
}

int32_t Channel::StopReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopReceiving()");
  if (!channel_state_.Get().receiving) {
    return 0;
  }

  channel_state_.SetReceiving(false);
  return 0;
}

int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}

int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}

int32_t Channel::GetSendCodec(CodecInst& codec) {
  auto send_codec = codec_manager_.GetCodecInst();
  if (send_codec) {
    codec = *send_codec;
    return 0;
  }
  return -1;
}

int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}

kwiberg55b97fe2016-01-28 05:22:45 -08001253int32_t Channel::SetSendCodec(const CodecInst& codec) {
1254 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1255 "Channel::SetSendCodec()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001256
kwibergc8d071e2016-04-06 12:22:38 -07001257 if (!codec_manager_.RegisterEncoder(codec) ||
1258 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001259 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1260 "SetSendCodec() failed to register codec to ACM");
1261 return -1;
1262 }
1263
1264 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1265 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1266 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1267 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1268 "SetSendCodec() failed to register codec to"
1269 " RTP/RTCP module");
1270 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001271 }
kwiberg55b97fe2016-01-28 05:22:45 -08001272 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001273
kwiberg55b97fe2016-01-28 05:22:45 -08001274 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1275 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1276 "SetSendCodec() failed to set audio packet size");
1277 return -1;
1278 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001279
kwiberg55b97fe2016-01-28 05:22:45 -08001280 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001281}
1282
Ivo Creusenadf89b72015-04-29 16:03:33 +02001283void Channel::SetBitRate(int bitrate_bps) {
1284 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1285 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1286 audio_coding_->SetBitRate(bitrate_bps);
1287}
1288
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001289void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001290 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001291 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1292
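  // fraction_lost is an 8-bit fraction (0-255), so 100 * loss / 255 maps the
  // smoothed value onto the 0-100 percentage expected by the ACM.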
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001293 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001294 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1295 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001296 assert(false); // This should not happen.
1297 }
1298}
1299
kwiberg55b97fe2016-01-28 05:22:45 -08001300int32_t Channel::SetVADStatus(bool enableVAD,
1301 ACMVADMode mode,
1302 bool disableDTX) {
1303 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1304 "Channel::SetVADStatus(mode=%d)", mode);
kwibergc8d071e2016-04-06 12:22:38 -07001305 RTC_DCHECK(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1306 if (!codec_manager_.SetVAD(enableVAD, mode) ||
1307 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001308 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1309 kTraceError,
1310 "SetVADStatus() failed to set VAD");
1311 return -1;
1312 }
1313 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001314}
1315
kwiberg55b97fe2016-01-28 05:22:45 -08001316int32_t Channel::GetVADStatus(bool& enabledVAD,
1317 ACMVADMode& mode,
1318 bool& disabledDTX) {
kwibergc8d071e2016-04-06 12:22:38 -07001319 const auto* params = codec_manager_.GetStackParams();
1320 enabledVAD = params->use_cng;
1321 mode = params->vad_mode;
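  // DTX on/off is tied to the CNG setting in the stack parameters, so DTX is
  // reported as disabled exactly when CNG is not in use.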
1322 disabledDTX = !params->use_cng;
kwiberg55b97fe2016-01-28 05:22:45 -08001323 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001324}
1325
kwiberg55b97fe2016-01-28 05:22:45 -08001326int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1327 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1328 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001329
kwiberg55b97fe2016-01-28 05:22:45 -08001330 if (channel_state_.Get().playing) {
1331 _engineStatisticsPtr->SetLastError(
1332 VE_ALREADY_PLAYING, kTraceError,
1333 "SetRecPayloadType() unable to set PT while playing");
1334 return -1;
1335 }
1336 if (channel_state_.Get().receiving) {
1337 _engineStatisticsPtr->SetLastError(
1338 VE_ALREADY_LISTENING, kTraceError,
1339 "SetRecPayloadType() unable to set PT while listening");
1340 return -1;
1341 }
1342
1343 if (codec.pltype == -1) {
1344 // De-register the selected codec (RTP/RTCP module and ACM)
1345
1346 int8_t pltype(-1);
1347 CodecInst rxCodec = codec;
1348
1349 // Get payload type for the given codec
1350 rtp_payload_registry_->ReceivePayloadType(
1351 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1352 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1353 rxCodec.pltype = pltype;
1354
1355 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1356 _engineStatisticsPtr->SetLastError(
1357 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1358 "SetRecPayloadType() RTP/RTCP-module deregistration "
1359 "failed");
1360 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001361 }
kwiberg55b97fe2016-01-28 05:22:45 -08001362 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1363 _engineStatisticsPtr->SetLastError(
1364 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1365 "SetRecPayloadType() ACM deregistration failed - 1");
1366 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001367 }
kwiberg55b97fe2016-01-28 05:22:45 -08001368 return 0;
1369 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001370
kwiberg55b97fe2016-01-28 05:22:45 -08001371 if (rtp_receiver_->RegisterReceivePayload(
1372 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1373 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1374 // First attempt to register failed => de-register and try again
kwibergc8d071e2016-04-06 12:22:38 -07001375 // TODO(kwiberg): Retrying is probably not necessary, since
1376 // AcmReceiver::AddCodec also retries.
kwiberg55b97fe2016-01-28 05:22:45 -08001377 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001378 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001379 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1380 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1381 _engineStatisticsPtr->SetLastError(
1382 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1383 "SetRecPayloadType() RTP/RTCP-module registration failed");
1384 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001385 }
kwiberg55b97fe2016-01-28 05:22:45 -08001386 }
kwibergc8d071e2016-04-06 12:22:38 -07001387 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001388 audio_coding_->UnregisterReceiveCodec(codec.pltype);
kwibergc8d071e2016-04-06 12:22:38 -07001389 if (!RegisterReceiveCodec(&audio_coding_, &rent_a_codec_, codec)) {
kwiberg55b97fe2016-01-28 05:22:45 -08001390 _engineStatisticsPtr->SetLastError(
1391 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1392 "SetRecPayloadType() ACM registration failed - 1");
1393 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001394 }
kwiberg55b97fe2016-01-28 05:22:45 -08001395 }
1396 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001397}
1398
kwiberg55b97fe2016-01-28 05:22:45 -08001399int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1400 int8_t payloadType(-1);
1401 if (rtp_payload_registry_->ReceivePayloadType(
1402 codec.plname, codec.plfreq, codec.channels,
1403 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1404 _engineStatisticsPtr->SetLastError(
1405 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1406 "GetRecPayloadType() failed to retrieve RX payload type");
1407 return -1;
1408 }
1409 codec.pltype = payloadType;
1410 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001411}
1412
kwiberg55b97fe2016-01-28 05:22:45 -08001413int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1414 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1415 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001416
kwiberg55b97fe2016-01-28 05:22:45 -08001417 CodecInst codec;
1418 int32_t samplingFreqHz(-1);
1419 const size_t kMono = 1;
1420 if (frequency == kFreq32000Hz)
1421 samplingFreqHz = 32000;
1422 else if (frequency == kFreq16000Hz)
1423 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001424
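  // Only 16 kHz and 32 kHz CN are handled here; any other frequency leaves
  // samplingFreqHz at -1, so the lookup below fails and an error is reported.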
kwiberg55b97fe2016-01-28 05:22:45 -08001425 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1426 _engineStatisticsPtr->SetLastError(
1427 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1428 "SetSendCNPayloadType() failed to retrieve default CN codec "
1429 "settings");
1430 return -1;
1431 }
1432
 1433  // Modify the payload type (must be within the dynamic payload type range)
1434 codec.pltype = type;
1435
kwibergc8d071e2016-04-06 12:22:38 -07001436 if (!codec_manager_.RegisterEncoder(codec) ||
1437 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
kwiberg55b97fe2016-01-28 05:22:45 -08001438 _engineStatisticsPtr->SetLastError(
1439 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1440 "SetSendCNPayloadType() failed to register CN to ACM");
1441 return -1;
1442 }
1443
1444 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1445 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1446 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1447 _engineStatisticsPtr->SetLastError(
1448 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1449 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1450 "module");
1451 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001452 }
kwiberg55b97fe2016-01-28 05:22:45 -08001453 }
1454 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001455}
1456
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001457int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001458 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001459 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001460
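  // Passes the receiver's maximum playback rate on to the ACM so the Opus
  // encoder can avoid coding audio bandwidth the far end will not render.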
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001461 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001462 _engineStatisticsPtr->SetLastError(
1463 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001464 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001465 return -1;
1466 }
1467 return 0;
1468}
1469
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001470int Channel::SetOpusDtx(bool enable_dtx) {
1471 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1472 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001473 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001474 : audio_coding_->DisableOpusDtx();
1475 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001476 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1477 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001478 return -1;
1479 }
1480 return 0;
1481}
1482
mflodman3d7db262016-04-29 00:57:13 -07001483int32_t Channel::RegisterExternalTransport(Transport* transport) {
kwiberg55b97fe2016-01-28 05:22:45 -08001484 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001485 "Channel::RegisterExternalTransport()");
1486
kwiberg55b97fe2016-01-28 05:22:45 -08001487 rtc::CritScope cs(&_callbackCritSect);
kwiberg55b97fe2016-01-28 05:22:45 -08001488 if (_externalTransport) {
1489 _engineStatisticsPtr->SetLastError(
1490 VE_INVALID_OPERATION, kTraceError,
1491 "RegisterExternalTransport() external transport already enabled");
1492 return -1;
1493 }
1494 _externalTransport = true;
mflodman3d7db262016-04-29 00:57:13 -07001495 _transportPtr = transport;
kwiberg55b97fe2016-01-28 05:22:45 -08001496 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001497}
1498
kwiberg55b97fe2016-01-28 05:22:45 -08001499int32_t Channel::DeRegisterExternalTransport() {
1500 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1501 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001502
kwiberg55b97fe2016-01-28 05:22:45 -08001503 rtc::CritScope cs(&_callbackCritSect);
mflodman3d7db262016-04-29 00:57:13 -07001504 if (_transportPtr) {
1505 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1506 "DeRegisterExternalTransport() all transport is disabled");
1507 } else {
kwiberg55b97fe2016-01-28 05:22:45 -08001508 _engineStatisticsPtr->SetLastError(
1509 VE_INVALID_OPERATION, kTraceWarning,
1510 "DeRegisterExternalTransport() external transport already "
1511 "disabled");
kwiberg55b97fe2016-01-28 05:22:45 -08001512 }
1513 _externalTransport = false;
1514 _transportPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001515 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001516}
1517
mflodman3d7db262016-04-29 00:57:13 -07001518int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
kwiberg55b97fe2016-01-28 05:22:45 -08001519 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001520 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001521 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001522 "Channel::ReceivedRTPPacket()");
1523
1524 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001525 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001526
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001527 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001528 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1529 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1530 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001531 return -1;
1532 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001533 header.payload_type_frequency =
1534 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
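  // A negative frequency means the payload type has not been registered for
  // receiving on this channel, so the packet is dropped.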
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001535 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001536 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001537 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001538 rtp_receive_statistics_->IncomingPacket(
1539 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001540 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001541
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001542 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001543}
1544
1545bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001546 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001547 const RTPHeader& header,
1548 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001549 if (rtp_payload_registry_->IsRtx(header)) {
1550 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001551 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001552 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001553 assert(packet_length >= header.headerLength);
1554 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001555 PayloadUnion payload_specific;
1556 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001557 &payload_specific)) {
1558 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001559 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001560 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1561 payload_specific, in_order);
1562}
1563
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001564bool Channel::HandleRtxPacket(const uint8_t* packet,
1565 size_t packet_length,
1566 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001567 if (!rtp_payload_registry_->IsRtx(header))
1568 return false;
1569
1570 // Remove the RTX header and parse the original RTP header.
1571 if (packet_length < header.headerLength)
1572 return false;
1573 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1574 return false;
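  // restored_packet_ is a single shared buffer. If it is already in use we are
  // being asked to unwrap an RTX packet recursively (RTX inside RTX), which is
  // not supported, so the packet is dropped.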
1575 if (restored_packet_in_use_) {
1576 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1577 "Multiple RTX headers detected, dropping packet");
1578 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001579 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001580 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001581 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1582 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001583 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1584 "Incoming RTX packet: invalid RTP header");
1585 return false;
1586 }
1587 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001588 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001589 restored_packet_in_use_ = false;
1590 return ret;
1591}
1592
1593bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1594 StreamStatistician* statistician =
1595 rtp_receive_statistics_->GetStatistician(header.ssrc);
1596 if (!statistician)
1597 return false;
1598 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001599}
1600
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001601bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1602 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001603 // Retransmissions are handled separately if RTX is enabled.
1604 if (rtp_payload_registry_->RtxEnabled())
1605 return false;
1606 StreamStatistician* statistician =
1607 rtp_receive_statistics_->GetStatistician(header.ssrc);
1608 if (!statistician)
1609 return false;
1610 // Check if this is a retransmission.
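  // The minimum observed RTT is used as the reordering window: an out-of-order
  // packet older than that window is treated as a retransmission by the
  // statistician.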
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001611 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001612 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001613 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001614}
1615
mflodman3d7db262016-04-29 00:57:13 -07001616int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001617 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001618 "Channel::ReceivedRTCPPacket()");
1619 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001620 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001621
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001622 // Deliver RTCP packet to RTP/RTCP module for parsing
mflodman3d7db262016-04-29 00:57:13 -07001623 if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001624 _engineStatisticsPtr->SetLastError(
1625 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1626 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1627 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001628
Minyue2013aec2015-05-13 14:14:42 +02001629 int64_t rtt = GetRTT(true);
1630 if (rtt == 0) {
1631 // Waiting for valid RTT.
1632 return 0;
1633 }
1634 uint32_t ntp_secs = 0;
1635 uint32_t ntp_frac = 0;
1636 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001637 if (0 !=
1638 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1639 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001640 // Waiting for RTCP.
1641 return 0;
1642 }
1643
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001644 {
tommi31fc21f2016-01-21 10:37:37 -08001645 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001646 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001647 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001648 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001649}
1650
niklase@google.com470e71d2011-07-07 08:21:25 +00001651int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001652 bool loop,
1653 FileFormats format,
1654 int startPosition,
1655 float volumeScaling,
1656 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001657 const CodecInst* codecInst) {
1658 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1659 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1660 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1661 "stopPosition=%d)",
1662 fileName, loop, format, volumeScaling, startPosition,
1663 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001664
kwiberg55b97fe2016-01-28 05:22:45 -08001665 if (channel_state_.Get().output_file_playing) {
1666 _engineStatisticsPtr->SetLastError(
1667 VE_ALREADY_PLAYING, kTraceError,
1668 "StartPlayingFileLocally() is already playing");
1669 return -1;
1670 }
1671
1672 {
1673 rtc::CritScope cs(&_fileCritSect);
1674
1675 if (_outputFilePlayerPtr) {
1676 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1677 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1678 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001679 }
1680
kwiberg55b97fe2016-01-28 05:22:45 -08001681 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1682 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001683
kwiberg55b97fe2016-01-28 05:22:45 -08001684 if (_outputFilePlayerPtr == NULL) {
1685 _engineStatisticsPtr->SetLastError(
1686 VE_INVALID_ARGUMENT, kTraceError,
1687 "StartPlayingFileLocally() filePlayer format is not correct");
1688 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001689 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001690
kwiberg55b97fe2016-01-28 05:22:45 -08001691 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001692
kwiberg55b97fe2016-01-28 05:22:45 -08001693 if (_outputFilePlayerPtr->StartPlayingFile(
1694 fileName, loop, startPosition, volumeScaling, notificationTime,
1695 stopPosition, (const CodecInst*)codecInst) != 0) {
1696 _engineStatisticsPtr->SetLastError(
1697 VE_BAD_FILE, kTraceError,
1698 "StartPlayingFile() failed to start file playout");
1699 _outputFilePlayerPtr->StopPlayingFile();
1700 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1701 _outputFilePlayerPtr = NULL;
1702 return -1;
1703 }
1704 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1705 channel_state_.SetOutputFilePlaying(true);
1706 }
1707
1708 if (RegisterFilePlayingToMixer() != 0)
1709 return -1;
1710
1711 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001712}
1713
1714int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001715 FileFormats format,
1716 int startPosition,
1717 float volumeScaling,
1718 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001719 const CodecInst* codecInst) {
1720 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1721 "Channel::StartPlayingFileLocally(format=%d,"
1722 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1723 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001724
kwiberg55b97fe2016-01-28 05:22:45 -08001725 if (stream == NULL) {
1726 _engineStatisticsPtr->SetLastError(
1727 VE_BAD_FILE, kTraceError,
1728 "StartPlayingFileLocally() NULL as input stream");
1729 return -1;
1730 }
1731
1732 if (channel_state_.Get().output_file_playing) {
1733 _engineStatisticsPtr->SetLastError(
1734 VE_ALREADY_PLAYING, kTraceError,
1735 "StartPlayingFileLocally() is already playing");
1736 return -1;
1737 }
1738
1739 {
1740 rtc::CritScope cs(&_fileCritSect);
1741
1742 // Destroy the old instance
1743 if (_outputFilePlayerPtr) {
1744 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1745 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1746 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001747 }
1748
kwiberg55b97fe2016-01-28 05:22:45 -08001749 // Create the instance
1750 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1751 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001752
kwiberg55b97fe2016-01-28 05:22:45 -08001753 if (_outputFilePlayerPtr == NULL) {
1754 _engineStatisticsPtr->SetLastError(
1755 VE_INVALID_ARGUMENT, kTraceError,
1756 "StartPlayingFileLocally() filePlayer format isnot correct");
1757 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001758 }
1759
kwiberg55b97fe2016-01-28 05:22:45 -08001760 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001761
kwiberg55b97fe2016-01-28 05:22:45 -08001762 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1763 volumeScaling, notificationTime,
1764 stopPosition, codecInst) != 0) {
1765 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1766 "StartPlayingFile() failed to "
1767 "start file playout");
1768 _outputFilePlayerPtr->StopPlayingFile();
1769 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1770 _outputFilePlayerPtr = NULL;
1771 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001772 }
kwiberg55b97fe2016-01-28 05:22:45 -08001773 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1774 channel_state_.SetOutputFilePlaying(true);
1775 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001776
kwiberg55b97fe2016-01-28 05:22:45 -08001777 if (RegisterFilePlayingToMixer() != 0)
1778 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001779
kwiberg55b97fe2016-01-28 05:22:45 -08001780 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001781}
1782
kwiberg55b97fe2016-01-28 05:22:45 -08001783int Channel::StopPlayingFileLocally() {
1784 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1785 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001786
kwiberg55b97fe2016-01-28 05:22:45 -08001787 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001788 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001789 }
1790
1791 {
1792 rtc::CritScope cs(&_fileCritSect);
1793
1794 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1795 _engineStatisticsPtr->SetLastError(
1796 VE_STOP_RECORDING_FAILED, kTraceError,
1797 "StopPlayingFile() could not stop playing");
1798 return -1;
1799 }
1800 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1801 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1802 _outputFilePlayerPtr = NULL;
1803 channel_state_.SetOutputFilePlaying(false);
1804 }
1805 // _fileCritSect cannot be taken while calling
 1806  // SetAnonymousMixabilityStatus. Refer to comments in
1807 // StartPlayingFileLocally(const char* ...) for more details.
1808 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1809 _engineStatisticsPtr->SetLastError(
1810 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1811 "StopPlayingFile() failed to stop participant from playing as"
1812 "file in the mixer");
1813 return -1;
1814 }
1815
1816 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001817}
1818
kwiberg55b97fe2016-01-28 05:22:45 -08001819int Channel::IsPlayingFileLocally() const {
1820 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001821}
1822
kwiberg55b97fe2016-01-28 05:22:45 -08001823int Channel::RegisterFilePlayingToMixer() {
 1824  // Return success without registering file playing with the mixer if:
 1825  // 1. the file is played before playout has started on this channel, or
 1826  // 2. playout is started without file playing on this channel.
1827 if (!channel_state_.Get().playing ||
1828 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001829 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001830 }
1831
1832 // |_fileCritSect| cannot be taken while calling
1833 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1834 // frames can be pulled by the mixer. Since the frames are generated from
1835 // the file, _fileCritSect will be taken. This would result in a deadlock.
1836 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1837 channel_state_.SetOutputFilePlaying(false);
1838 rtc::CritScope cs(&_fileCritSect);
1839 _engineStatisticsPtr->SetLastError(
1840 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1841 "StartPlayingFile() failed to add participant as file to mixer");
1842 _outputFilePlayerPtr->StopPlayingFile();
1843 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1844 _outputFilePlayerPtr = NULL;
1845 return -1;
1846 }
1847
1848 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001849}
1850
niklase@google.com470e71d2011-07-07 08:21:25 +00001851int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001852 bool loop,
1853 FileFormats format,
1854 int startPosition,
1855 float volumeScaling,
1856 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001857 const CodecInst* codecInst) {
1858 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1859 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1860 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1861 "stopPosition=%d)",
1862 fileName, loop, format, volumeScaling, startPosition,
1863 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001864
kwiberg55b97fe2016-01-28 05:22:45 -08001865 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001866
kwiberg55b97fe2016-01-28 05:22:45 -08001867 if (channel_state_.Get().input_file_playing) {
1868 _engineStatisticsPtr->SetLastError(
1869 VE_ALREADY_PLAYING, kTraceWarning,
1870 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001871 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001872 }
1873
1874 // Destroy the old instance
1875 if (_inputFilePlayerPtr) {
1876 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1877 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1878 _inputFilePlayerPtr = NULL;
1879 }
1880
1881 // Create the instance
1882 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1883 (const FileFormats)format);
1884
1885 if (_inputFilePlayerPtr == NULL) {
1886 _engineStatisticsPtr->SetLastError(
1887 VE_INVALID_ARGUMENT, kTraceError,
1888 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1889 return -1;
1890 }
1891
1892 const uint32_t notificationTime(0);
1893
1894 if (_inputFilePlayerPtr->StartPlayingFile(
1895 fileName, loop, startPosition, volumeScaling, notificationTime,
1896 stopPosition, (const CodecInst*)codecInst) != 0) {
1897 _engineStatisticsPtr->SetLastError(
1898 VE_BAD_FILE, kTraceError,
1899 "StartPlayingFile() failed to start file playout");
1900 _inputFilePlayerPtr->StopPlayingFile();
1901 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1902 _inputFilePlayerPtr = NULL;
1903 return -1;
1904 }
1905 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1906 channel_state_.SetInputFilePlaying(true);
1907
1908 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001909}
1910
1911int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001912 FileFormats format,
1913 int startPosition,
1914 float volumeScaling,
1915 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001916 const CodecInst* codecInst) {
1917 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1918 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1919 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1920 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001921
kwiberg55b97fe2016-01-28 05:22:45 -08001922 if (stream == NULL) {
1923 _engineStatisticsPtr->SetLastError(
1924 VE_BAD_FILE, kTraceError,
1925 "StartPlayingFileAsMicrophone NULL as input stream");
1926 return -1;
1927 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001928
kwiberg55b97fe2016-01-28 05:22:45 -08001929 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001930
kwiberg55b97fe2016-01-28 05:22:45 -08001931 if (channel_state_.Get().input_file_playing) {
1932 _engineStatisticsPtr->SetLastError(
1933 VE_ALREADY_PLAYING, kTraceWarning,
1934 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001935 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001936 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001937
kwiberg55b97fe2016-01-28 05:22:45 -08001938 // Destroy the old instance
1939 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001940 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1941 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1942 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001943 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001944
kwiberg55b97fe2016-01-28 05:22:45 -08001945 // Create the instance
1946 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1947 (const FileFormats)format);
1948
1949 if (_inputFilePlayerPtr == NULL) {
1950 _engineStatisticsPtr->SetLastError(
1951 VE_INVALID_ARGUMENT, kTraceError,
1952 "StartPlayingInputFile() filePlayer format isnot correct");
1953 return -1;
1954 }
1955
1956 const uint32_t notificationTime(0);
1957
1958 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1959 volumeScaling, notificationTime,
1960 stopPosition, codecInst) != 0) {
1961 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1962 "StartPlayingFile() failed to start "
1963 "file playout");
1964 _inputFilePlayerPtr->StopPlayingFile();
1965 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1966 _inputFilePlayerPtr = NULL;
1967 return -1;
1968 }
1969
1970 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1971 channel_state_.SetInputFilePlaying(true);
1972
1973 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001974}
1975
kwiberg55b97fe2016-01-28 05:22:45 -08001976int Channel::StopPlayingFileAsMicrophone() {
1977 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1978 "Channel::StopPlayingFileAsMicrophone()");
1979
1980 rtc::CritScope cs(&_fileCritSect);
1981
1982 if (!channel_state_.Get().input_file_playing) {
1983 return 0;
1984 }
1985
1986 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1987 _engineStatisticsPtr->SetLastError(
1988 VE_STOP_RECORDING_FAILED, kTraceError,
1989 "StopPlayingFile() could not stop playing");
1990 return -1;
1991 }
1992 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1993 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1994 _inputFilePlayerPtr = NULL;
1995 channel_state_.SetInputFilePlaying(false);
1996
1997 return 0;
1998}
1999
2000int Channel::IsPlayingFileAsMicrophone() const {
2001 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00002002}
2003
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00002004int Channel::StartRecordingPlayout(const char* fileName,
kwiberg55b97fe2016-01-28 05:22:45 -08002005 const CodecInst* codecInst) {
2006 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2007 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
niklase@google.com470e71d2011-07-07 08:21:25 +00002008
kwiberg55b97fe2016-01-28 05:22:45 -08002009 if (_outputFileRecording) {
2010 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2011 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002012 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002013 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002014
kwiberg55b97fe2016-01-28 05:22:45 -08002015 FileFormats format;
2016 const uint32_t notificationTime(0); // Not supported in VoE
2017 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
niklase@google.com470e71d2011-07-07 08:21:25 +00002018
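  // Validate the optional codec and pick a file container below: L16, PCMU and
  // PCMA are written to a WAV file, any other codec uses the compressed file
  // format, and no codec at all means 16 kHz linear PCM.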
kwiberg55b97fe2016-01-28 05:22:45 -08002019 if ((codecInst != NULL) &&
2020 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2021 _engineStatisticsPtr->SetLastError(
2022 VE_BAD_ARGUMENT, kTraceError,
2023 "StartRecordingPlayout() invalid compression");
2024 return (-1);
2025 }
2026 if (codecInst == NULL) {
2027 format = kFileFormatPcm16kHzFile;
2028 codecInst = &dummyCodec;
2029 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2030 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2031 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2032 format = kFileFormatWavFile;
2033 } else {
2034 format = kFileFormatCompressedFile;
2035 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002036
kwiberg55b97fe2016-01-28 05:22:45 -08002037 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002038
kwiberg55b97fe2016-01-28 05:22:45 -08002039 // Destroy the old instance
2040 if (_outputFileRecorderPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002041 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2042 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2043 _outputFileRecorderPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002044 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002045
kwiberg55b97fe2016-01-28 05:22:45 -08002046 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2047 _outputFileRecorderId, (const FileFormats)format);
2048 if (_outputFileRecorderPtr == NULL) {
2049 _engineStatisticsPtr->SetLastError(
2050 VE_INVALID_ARGUMENT, kTraceError,
2051 "StartRecordingPlayout() fileRecorder format isnot correct");
2052 return -1;
2053 }
2054
2055 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2056 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2057 _engineStatisticsPtr->SetLastError(
2058 VE_BAD_FILE, kTraceError,
2059 "StartRecordingAudioFile() failed to start file recording");
2060 _outputFileRecorderPtr->StopRecording();
2061 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2062 _outputFileRecorderPtr = NULL;
2063 return -1;
2064 }
2065 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2066 _outputFileRecording = true;
2067
2068 return 0;
2069}
2070
2071int Channel::StartRecordingPlayout(OutStream* stream,
2072 const CodecInst* codecInst) {
2073 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2074 "Channel::StartRecordingPlayout()");
2075
2076 if (_outputFileRecording) {
2077 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2078 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002079 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002080 }
2081
2082 FileFormats format;
2083 const uint32_t notificationTime(0); // Not supported in VoE
2084 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2085
2086 if (codecInst != NULL && codecInst->channels != 1) {
2087 _engineStatisticsPtr->SetLastError(
2088 VE_BAD_ARGUMENT, kTraceError,
2089 "StartRecordingPlayout() invalid compression");
2090 return (-1);
2091 }
2092 if (codecInst == NULL) {
2093 format = kFileFormatPcm16kHzFile;
2094 codecInst = &dummyCodec;
2095 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2096 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2097 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2098 format = kFileFormatWavFile;
2099 } else {
2100 format = kFileFormatCompressedFile;
2101 }
2102
2103 rtc::CritScope cs(&_fileCritSect);
2104
2105 // Destroy the old instance
2106 if (_outputFileRecorderPtr) {
2107 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2108 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2109 _outputFileRecorderPtr = NULL;
2110 }
2111
2112 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2113 _outputFileRecorderId, (const FileFormats)format);
2114 if (_outputFileRecorderPtr == NULL) {
2115 _engineStatisticsPtr->SetLastError(
2116 VE_INVALID_ARGUMENT, kTraceError,
2117 "StartRecordingPlayout() fileRecorder format isnot correct");
2118 return -1;
2119 }
2120
2121 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2122 notificationTime) != 0) {
2123 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2124 "StartRecordingPlayout() failed to "
2125 "start file recording");
2126 _outputFileRecorderPtr->StopRecording();
2127 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2128 _outputFileRecorderPtr = NULL;
2129 return -1;
2130 }
2131
2132 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2133 _outputFileRecording = true;
2134
2135 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002136}
2137
kwiberg55b97fe2016-01-28 05:22:45 -08002138int Channel::StopRecordingPlayout() {
2139 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2140 "Channel::StopRecordingPlayout()");
2141
2142 if (!_outputFileRecording) {
2143 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2144 "StopRecordingPlayout() isnot recording");
2145 return -1;
2146 }
2147
2148 rtc::CritScope cs(&_fileCritSect);
2149
2150 if (_outputFileRecorderPtr->StopRecording() != 0) {
2151 _engineStatisticsPtr->SetLastError(
2152 VE_STOP_RECORDING_FAILED, kTraceError,
2153 "StopRecording() could not stop recording");
2154 return (-1);
2155 }
2156 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2157 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2158 _outputFileRecorderPtr = NULL;
2159 _outputFileRecording = false;
2160
2161 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002162}
2163
kwiberg55b97fe2016-01-28 05:22:45 -08002164void Channel::SetMixWithMicStatus(bool mix) {
2165 rtc::CritScope cs(&_fileCritSect);
2166 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002167}
2168
kwiberg55b97fe2016-01-28 05:22:45 -08002169int Channel::GetSpeechOutputLevel(uint32_t& level) const {
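  // Level() reports a coarse speech level (VoE volume API scale), while
  // GetSpeechOutputLevelFullRange() below exposes the level over the full
  // 16-bit range.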
2170 int8_t currentLevel = _outputAudioLevel.Level();
2171 level = static_cast<int32_t>(currentLevel);
2172 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002173}
2174
kwiberg55b97fe2016-01-28 05:22:45 -08002175int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2176 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2177 level = static_cast<int32_t>(currentLevel);
2178 return 0;
2179}
2180
solenberg1c2af8e2016-03-24 10:36:00 -07002181int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002182 rtc::CritScope cs(&volume_settings_critsect_);
2183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002184               "Channel::SetInputMute(enable=%d)", enable);
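  // Only the flag is stored here; the capture/send path is expected to check
  // InputMute() and silence the audio it processes while the flag is set.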
solenberg1c2af8e2016-03-24 10:36:00 -07002185 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002186 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002187}
2188
solenberg1c2af8e2016-03-24 10:36:00 -07002189bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002190 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002191 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002192}
2193
kwiberg55b97fe2016-01-28 05:22:45 -08002194int Channel::SetOutputVolumePan(float left, float right) {
2195 rtc::CritScope cs(&volume_settings_critsect_);
2196 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002197 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002198 _panLeft = left;
2199 _panRight = right;
2200 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002201}
2202
kwiberg55b97fe2016-01-28 05:22:45 -08002203int Channel::GetOutputVolumePan(float& left, float& right) const {
2204 rtc::CritScope cs(&volume_settings_critsect_);
2205 left = _panLeft;
2206 right = _panRight;
2207 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002208}
2209
kwiberg55b97fe2016-01-28 05:22:45 -08002210int Channel::SetChannelOutputVolumeScaling(float scaling) {
2211 rtc::CritScope cs(&volume_settings_critsect_);
2212 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002213 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002214 _outputGain = scaling;
2215 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002216}
2217
kwiberg55b97fe2016-01-28 05:22:45 -08002218int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2219 rtc::CritScope cs(&volume_settings_critsect_);
2220 scaling = _outputGain;
2221 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002222}
2223
solenberg8842c3e2016-03-11 03:06:41 -08002224int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002225 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002226 "Channel::SendTelephoneEventOutband(...)");
2227 RTC_DCHECK_LE(0, event);
2228 RTC_DCHECK_GE(255, event);
2229 RTC_DCHECK_LE(0, duration_ms);
2230 RTC_DCHECK_GE(65535, duration_ms);
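  // Events follow RFC 4733 (0-9 are the DTMF digits, 10 is '*', 11 is '#'),
  // with the duration given in milliseconds. For example,
  // SendTelephoneEventOutband(5, 160) sends digit '5' for 160 ms.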
kwiberg55b97fe2016-01-28 05:22:45 -08002231 if (!Sending()) {
2232 return -1;
2233 }
solenberg8842c3e2016-03-11 03:06:41 -08002234 if (_rtpRtcpModule->SendTelephoneEventOutband(
2235 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002236 _engineStatisticsPtr->SetLastError(
2237 VE_SEND_DTMF_FAILED, kTraceWarning,
2238 "SendTelephoneEventOutband() failed to send event");
2239 return -1;
2240 }
2241 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002242}
2243
solenberg31642aa2016-03-14 08:00:37 -07002244int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
kwiberg55b97fe2016-01-28 05:22:45 -08002245 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002246 "Channel::SetSendTelephoneEventPayloadType()");
solenberg31642aa2016-03-14 08:00:37 -07002247 RTC_DCHECK_LE(0, payload_type);
2248 RTC_DCHECK_GE(127, payload_type);
2249 CodecInst codec = {0};
kwiberg55b97fe2016-01-28 05:22:45 -08002250 codec.plfreq = 8000;
solenberg31642aa2016-03-14 08:00:37 -07002251 codec.pltype = payload_type;
kwiberg55b97fe2016-01-28 05:22:45 -08002252 memcpy(codec.plname, "telephone-event", 16);
2253 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2254 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2255 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2256 _engineStatisticsPtr->SetLastError(
2257 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2258 "SetSendTelephoneEventPayloadType() failed to register send"
2259 "payload type");
2260 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002261 }
kwiberg55b97fe2016-01-28 05:22:45 -08002262 }
kwiberg55b97fe2016-01-28 05:22:45 -08002263 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002264}
2265
kwiberg55b97fe2016-01-28 05:22:45 -08002266int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2267 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2268 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002269
kwiberg55b97fe2016-01-28 05:22:45 -08002270 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002271
kwiberg55b97fe2016-01-28 05:22:45 -08002272 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002273
kwiberg55b97fe2016-01-28 05:22:45 -08002274 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2275 OnRxVadDetected(vadDecision);
2276 _oldVadDecision = vadDecision;
2277 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002278
kwiberg55b97fe2016-01-28 05:22:45 -08002279 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2280 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2281 vadDecision);
2282 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002283}
2284
kwiberg55b97fe2016-01-28 05:22:45 -08002285int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2286 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2287 "Channel::RegisterRxVadObserver()");
2288 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002289
kwiberg55b97fe2016-01-28 05:22:45 -08002290 if (_rxVadObserverPtr) {
2291 _engineStatisticsPtr->SetLastError(
2292 VE_INVALID_OPERATION, kTraceError,
2293 "RegisterRxVadObserver() observer already enabled");
2294 return -1;
2295 }
2296 _rxVadObserverPtr = &observer;
2297 _RxVadDetection = true;
2298 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002299}
2300
kwiberg55b97fe2016-01-28 05:22:45 -08002301int Channel::DeRegisterRxVadObserver() {
2302 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2303 "Channel::DeRegisterRxVadObserver()");
2304 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002305
kwiberg55b97fe2016-01-28 05:22:45 -08002306 if (!_rxVadObserverPtr) {
2307 _engineStatisticsPtr->SetLastError(
2308 VE_INVALID_OPERATION, kTraceWarning,
2309 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002310 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002311 }
2312 _rxVadObserverPtr = NULL;
2313 _RxVadDetection = false;
2314 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002315}
2316
kwiberg55b97fe2016-01-28 05:22:45 -08002317int Channel::VoiceActivityIndicator(int& activity) {
2318 activity = _sendFrameType;
2319 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002320}
2321
2322#ifdef WEBRTC_VOICE_ENGINE_AGC
2323
kwiberg55b97fe2016-01-28 05:22:45 -08002324int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2325 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2326 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2327 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002328
kwiberg55b97fe2016-01-28 05:22:45 -08002329 GainControl::Mode agcMode = kDefaultRxAgcMode;
2330 switch (mode) {
2331 case kAgcDefault:
2332 break;
2333 case kAgcUnchanged:
2334 agcMode = rx_audioproc_->gain_control()->mode();
2335 break;
2336 case kAgcFixedDigital:
2337 agcMode = GainControl::kFixedDigital;
2338 break;
2339 case kAgcAdaptiveDigital:
2340 agcMode = GainControl::kAdaptiveDigital;
2341 break;
2342 default:
2343 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2344 "SetRxAgcStatus() invalid Agc mode");
2345 return -1;
2346 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002347
kwiberg55b97fe2016-01-28 05:22:45 -08002348 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2349 _engineStatisticsPtr->SetLastError(
2350 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2351 return -1;
2352 }
2353 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2354 _engineStatisticsPtr->SetLastError(
2355 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2356 return -1;
2357 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002358
kwiberg55b97fe2016-01-28 05:22:45 -08002359 _rxAgcIsEnabled = enable;
2360 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002361
kwiberg55b97fe2016-01-28 05:22:45 -08002362 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002363}
2364
kwiberg55b97fe2016-01-28 05:22:45 -08002365int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2366 bool enable = rx_audioproc_->gain_control()->is_enabled();
2367 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002368
kwiberg55b97fe2016-01-28 05:22:45 -08002369 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002370
kwiberg55b97fe2016-01-28 05:22:45 -08002371 switch (agcMode) {
2372 case GainControl::kFixedDigital:
2373 mode = kAgcFixedDigital;
2374 break;
2375 case GainControl::kAdaptiveDigital:
2376 mode = kAgcAdaptiveDigital;
2377 break;
2378 default:
2379 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2380 "GetRxAgcStatus() invalid Agc mode");
2381 return -1;
2382 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002383
kwiberg55b97fe2016-01-28 05:22:45 -08002384 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002385}
2386
kwiberg55b97fe2016-01-28 05:22:45 -08002387int Channel::SetRxAgcConfig(AgcConfig config) {
2388 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2389 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002390
kwiberg55b97fe2016-01-28 05:22:45 -08002391 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2392 config.targetLeveldBOv) != 0) {
2393 _engineStatisticsPtr->SetLastError(
2394 VE_APM_ERROR, kTraceError,
2395 "SetRxAgcConfig() failed to set target peak |level|"
2396 "(or envelope) of the Agc");
2397 return -1;
2398 }
2399 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2400 config.digitalCompressionGaindB) != 0) {
2401 _engineStatisticsPtr->SetLastError(
2402 VE_APM_ERROR, kTraceError,
2403 "SetRxAgcConfig() failed to set the range in |gain| the"
2404 " digital compression stage may apply");
2405 return -1;
2406 }
2407 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2408 0) {
2409 _engineStatisticsPtr->SetLastError(
2410 VE_APM_ERROR, kTraceError,
2411 "SetRxAgcConfig() failed to set hard limiter to the signal");
2412 return -1;
2413 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002414
kwiberg55b97fe2016-01-28 05:22:45 -08002415 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002416}
2417
kwiberg55b97fe2016-01-28 05:22:45 -08002418int Channel::GetRxAgcConfig(AgcConfig& config) {
2419 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2420 config.digitalCompressionGaindB =
2421 rx_audioproc_->gain_control()->compression_gain_db();
2422 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002423
kwiberg55b97fe2016-01-28 05:22:45 -08002424 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002425}
2426
kwiberg55b97fe2016-01-28 05:22:45 -08002427#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002428
2429#ifdef WEBRTC_VOICE_ENGINE_NR
2430
kwiberg55b97fe2016-01-28 05:22:45 -08002431int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2432 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2433 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2434 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002435
kwiberg55b97fe2016-01-28 05:22:45 -08002436 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2437 switch (mode) {
2438 case kNsDefault:
2439 break;
2440 case kNsUnchanged:
2441 nsLevel = rx_audioproc_->noise_suppression()->level();
2442 break;
2443 case kNsConference:
2444 nsLevel = NoiseSuppression::kHigh;
2445 break;
2446 case kNsLowSuppression:
2447 nsLevel = NoiseSuppression::kLow;
2448 break;
2449 case kNsModerateSuppression:
2450 nsLevel = NoiseSuppression::kModerate;
2451 break;
2452 case kNsHighSuppression:
2453 nsLevel = NoiseSuppression::kHigh;
2454 break;
2455 case kNsVeryHighSuppression:
2456 nsLevel = NoiseSuppression::kVeryHigh;
2457 break;
2458 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002459
kwiberg55b97fe2016-01-28 05:22:45 -08002460 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2461 _engineStatisticsPtr->SetLastError(
2462 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2463 return -1;
2464 }
2465 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2466 _engineStatisticsPtr->SetLastError(
2467 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2468 return -1;
2469 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002470
kwiberg55b97fe2016-01-28 05:22:45 -08002471 _rxNsIsEnabled = enable;
2472 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002473
kwiberg55b97fe2016-01-28 05:22:45 -08002474 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002475}
2476
kwiberg55b97fe2016-01-28 05:22:45 -08002477int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2478 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2479 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002480
kwiberg55b97fe2016-01-28 05:22:45 -08002481 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002482
kwiberg55b97fe2016-01-28 05:22:45 -08002483 switch (ncLevel) {
2484 case NoiseSuppression::kLow:
2485 mode = kNsLowSuppression;
2486 break;
2487 case NoiseSuppression::kModerate:
2488 mode = kNsModerateSuppression;
2489 break;
2490 case NoiseSuppression::kHigh:
2491 mode = kNsHighSuppression;
2492 break;
2493 case NoiseSuppression::kVeryHigh:
2494 mode = kNsVeryHighSuppression;
2495 break;
2496 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002497
kwiberg55b97fe2016-01-28 05:22:45 -08002498 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002499}
2500
kwiberg55b97fe2016-01-28 05:22:45 -08002501#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002502
kwiberg55b97fe2016-01-28 05:22:45 -08002503int Channel::SetLocalSSRC(unsigned int ssrc) {
2504 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2505 "Channel::SetLocalSSRC()");
2506 if (channel_state_.Get().sending) {
2507 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2508 "SetLocalSSRC() already sending");
2509 return -1;
2510 }
2511 _rtpRtcpModule->SetSSRC(ssrc);
2512 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002513}
2514
kwiberg55b97fe2016-01-28 05:22:45 -08002515int Channel::GetLocalSSRC(unsigned int& ssrc) {
2516 ssrc = _rtpRtcpModule->SSRC();
2517 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002518}
2519
kwiberg55b97fe2016-01-28 05:22:45 -08002520int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2521 ssrc = rtp_receiver_->SSRC();
2522 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002523}
2524
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002525int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002526 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002527 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002528}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002529
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002530int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2531 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002532 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2533 if (enable &&
2534 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2535 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002536 return -1;
2537 }
2538 return 0;
2539}
2540
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002541int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2542 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2543}
2544
2545int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2546 rtp_header_parser_->DeregisterRtpHeaderExtension(
2547 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002548 if (enable &&
2549 !rtp_header_parser_->RegisterRtpHeaderExtension(
2550 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002551 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002552 }
2553 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002554}
2555
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002556void Channel::EnableSendTransportSequenceNumber(int id) {
2557 int ret =
2558 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2559 RTC_DCHECK_EQ(0, ret);
2560}
2561
stefan3313ec92016-01-21 06:32:43 -08002562void Channel::EnableReceiveTransportSequenceNumber(int id) {
2563 rtp_header_parser_->DeregisterRtpHeaderExtension(
2564 kRtpExtensionTransportSequenceNumber);
2565 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2566 kRtpExtensionTransportSequenceNumber, id);
2567 RTC_DCHECK(ret);
2568}
2569
stefanbba9dec2016-02-01 04:39:55 -08002570void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002571 RtpPacketSender* rtp_packet_sender,
2572 TransportFeedbackObserver* transport_feedback_observer,
2573 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002574 RTC_DCHECK(rtp_packet_sender);
2575 RTC_DCHECK(transport_feedback_observer);
2576 RTC_DCHECK(packet_router && !packet_router_);
2577 feedback_observer_proxy_->SetTransportFeedbackObserver(
2578 transport_feedback_observer);
2579 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2580 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
2581 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002582 packet_router->AddRtpModule(_rtpRtcpModule.get());
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002583 packet_router_ = packet_router;
2584}
2585
stefanbba9dec2016-02-01 04:39:55 -08002586void Channel::RegisterReceiverCongestionControlObjects(
2587 PacketRouter* packet_router) {
2588 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002589 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002590 packet_router_ = packet_router;
2591}
2592
2593void Channel::ResetCongestionControlObjects() {
2594 RTC_DCHECK(packet_router_);
2595 _rtpRtcpModule->SetStorePacketsStatus(false, 600);
2596 feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
2597 seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002598 packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002599 packet_router_ = nullptr;
2600 rtp_packet_sender_proxy_->SetPacketSender(nullptr);
2601}
2602
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002603void Channel::SetRTCPStatus(bool enable) {
2604 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2605 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002606 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002607}
2608
kwiberg55b97fe2016-01-28 05:22:45 -08002609int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002610 RtcpMode method = _rtpRtcpModule->RTCP();
2611 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002612 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002613}
2614
kwiberg55b97fe2016-01-28 05:22:45 -08002615int Channel::SetRTCP_CNAME(const char cName[256]) {
2616 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2617 "Channel::SetRTCP_CNAME()");
2618 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2619 _engineStatisticsPtr->SetLastError(
2620 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2621 "SetRTCP_CNAME() failed to set RTCP CNAME");
2622 return -1;
2623 }
2624 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002625}
2626
kwiberg55b97fe2016-01-28 05:22:45 -08002627int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2628 if (cName == NULL) {
2629 _engineStatisticsPtr->SetLastError(
2630 VE_INVALID_ARGUMENT, kTraceError,
2631 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2632 return -1;
2633 }
2634 char cname[RTCP_CNAME_SIZE];
2635 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2636 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2637 _engineStatisticsPtr->SetLastError(
2638 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2639 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2640 return -1;
2641 }
2642 strcpy(cName, cname);
2643 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002644}
2645
kwiberg55b97fe2016-01-28 05:22:45 -08002646int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2647 unsigned int& NTPLow,
2648 unsigned int& timestamp,
2649 unsigned int& playoutTimestamp,
2650 unsigned int* jitter,
2651 unsigned short* fractionLost) {
2652 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002653
kwiberg55b97fe2016-01-28 05:22:45 -08002654 RTCPSenderInfo senderInfo;
2655 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2656 _engineStatisticsPtr->SetLastError(
2657 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2658 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2659 "side");
2660 return -1;
2661 }
2662
2663 // We only utilize 12 out of 20 bytes in the sender info (the packet and
2664 // octet counts are ignored).
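// NTPseconds/NTPfraction together form the 64-bit NTP timestamp carried in
// the most recent sender report (RFC 3550, section 6.4.1).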
2665 NTPHigh = senderInfo.NTPseconds;
2666 NTPLow = senderInfo.NTPfraction;
2667 timestamp = senderInfo.RTPtimeStamp;
2668
2669 // --- Locally derived information
2670
2671 // This value is updated on each incoming RTCP packet (0 when no packet
2672 // has been received)
2673 playoutTimestamp = playout_timestamp_rtcp_;
2674
2675 if (NULL != jitter || NULL != fractionLost) {
2676 // Get all RTCP receiver report blocks that have been received on this
2677 // channel. If we receive RTP packets from a remote source, we know the
2678 // remote SSRC and use its report block.
2679 // Otherwise, use the first report block.
2680 std::vector<RTCPReportBlock> remote_stats;
2681 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2682 remote_stats.empty()) {
2683 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2684 "GetRemoteRTCPData() failed to measure statistics due"
2685 " to lack of received RTP and/or RTCP packets");
2686 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002687 }
2688
kwiberg55b97fe2016-01-28 05:22:45 -08002689 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2690 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2691 for (; it != remote_stats.end(); ++it) {
2692 if (it->remoteSSRC == remoteSSRC)
2693 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002694 }
kwiberg55b97fe2016-01-28 05:22:45 -08002695
2696 if (it == remote_stats.end()) {
2697 // If we have not received any RTCP packets from this SSRC it probably
2698 // means that we have not received any RTP packets.
2699 // Use the first received report block instead.
2700 it = remote_stats.begin();
2701 remoteSSRC = it->remoteSSRC;
2702 }
2703
2704 if (jitter) {
2705 *jitter = it->jitter;
2706 }
2707
2708 if (fractionLost) {
2709 *fractionLost = it->fractionLost;
2710 }
2711 }
2712 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002713}
2714
kwiberg55b97fe2016-01-28 05:22:45 -08002715int Channel::SendApplicationDefinedRTCPPacket(
2716 unsigned char subType,
2717 unsigned int name,
2718 const char* data,
2719 unsigned short dataLengthInBytes) {
2720 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2721 "Channel::SendApplicationDefinedRTCPPacket()");
2722 if (!channel_state_.Get().sending) {
2723 _engineStatisticsPtr->SetLastError(
2724 VE_NOT_SENDING, kTraceError,
2725 "SendApplicationDefinedRTCPPacket() not sending");
2726 return -1;
2727 }
2728 if (NULL == data) {
2729 _engineStatisticsPtr->SetLastError(
2730 VE_INVALID_ARGUMENT, kTraceError,
2731 "SendApplicationDefinedRTCPPacket() invalid data value");
2732 return -1;
2733 }
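// RFC 3550, section 6.7: the application-dependent data in an RTCP APP
// packet must be a multiple of 32 bits long, hence the length check below.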
2734 if (dataLengthInBytes % 4 != 0) {
2735 _engineStatisticsPtr->SetLastError(
2736 VE_INVALID_ARGUMENT, kTraceError,
2737 "SendApplicationDefinedRTCPPacket() invalid length value");
2738 return -1;
2739 }
2740 RtcpMode status = _rtpRtcpModule->RTCP();
2741 if (status == RtcpMode::kOff) {
2742 _engineStatisticsPtr->SetLastError(
2743 VE_RTCP_ERROR, kTraceError,
2744 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2745 return -1;
2746 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002747
kwiberg55b97fe2016-01-28 05:22:45 -08002748 // Create and schedule the RTCP APP packet for transmission
2749 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2750 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2751 _engineStatisticsPtr->SetLastError(
2752 VE_SEND_ERROR, kTraceError,
2753 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2754 return -1;
2755 }
2756 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002757}
2758
kwiberg55b97fe2016-01-28 05:22:45 -08002759int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2760 unsigned int& maxJitterMs,
2761 unsigned int& discardedPackets) {
2762 // The jitter statistics are updated for each received RTP packet.
2764 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2765 // If RTCP is off, there is no timed thread in the RTCP module regularly
2766 // generating new stats, trigger the update manually here instead.
2767 StreamStatistician* statistician =
2768 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2769 if (statistician) {
2770 // Don't use returned statistics, use data from proxy instead so that
2771 // max jitter can be fetched atomically.
2772 RtcpStatistics s;
2773 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002774 }
kwiberg55b97fe2016-01-28 05:22:45 -08002775 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002776
kwiberg55b97fe2016-01-28 05:22:45 -08002777 ChannelStatistics stats = statistics_proxy_->GetStats();
2778 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2779 if (playoutFrequency > 0) {
2780 // Scale RTP statistics given the current playout frequency
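// Interarrival jitter is reported in RTP timestamp units (RFC 3550,
// section 6.4.1); dividing by ticks-per-ms converts it to milliseconds.
// Illustrative example: 960 ticks at a 48 kHz clock is 960 / 48 = 20 ms.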
2781 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2782 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2783 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002784
kwiberg55b97fe2016-01-28 05:22:45 -08002785 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002786
kwiberg55b97fe2016-01-28 05:22:45 -08002787 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002788}
2789
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002790int Channel::GetRemoteRTCPReportBlocks(
2791 std::vector<ReportBlock>* report_blocks) {
2792 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002793 _engineStatisticsPtr->SetLastError(
2794 VE_INVALID_ARGUMENT, kTraceError,
2795 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002796 return -1;
2797 }
2798
2799 // Get the report blocks from the latest received RTCP Sender or Receiver
2800 // Report. Each element in the vector contains the sender's SSRC and a
2801 // report block according to RFC 3550.
2802 std::vector<RTCPReportBlock> rtcp_report_blocks;
2803 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002804 return -1;
2805 }
2806
2807 if (rtcp_report_blocks.empty())
2808 return 0;
2809
2810 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2811 for (; it != rtcp_report_blocks.end(); ++it) {
2812 ReportBlock report_block;
2813 report_block.sender_SSRC = it->remoteSSRC;
2814 report_block.source_SSRC = it->sourceSSRC;
2815 report_block.fraction_lost = it->fractionLost;
2816 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2817 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2818 report_block.interarrival_jitter = it->jitter;
2819 report_block.last_SR_timestamp = it->lastSR;
2820 report_block.delay_since_last_SR = it->delaySinceLastSR;
2821 report_blocks->push_back(report_block);
2822 }
2823 return 0;
2824}
2825
kwiberg55b97fe2016-01-28 05:22:45 -08002826int Channel::GetRTPStatistics(CallStatistics& stats) {
2827 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002828
kwiberg55b97fe2016-01-28 05:22:45 -08002829 // The jitter statistics are updated for each received RTP packet.
2831 RtcpStatistics statistics;
2832 StreamStatistician* statistician =
2833 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002834 if (statistician) {
2835 statistician->GetStatistics(&statistics,
2836 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002837 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002838
kwiberg55b97fe2016-01-28 05:22:45 -08002839 stats.fractionLost = statistics.fraction_lost;
2840 stats.cumulativeLost = statistics.cumulative_lost;
2841 stats.extendedMax = statistics.extended_max_sequence_number;
2842 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002843
kwiberg55b97fe2016-01-28 05:22:45 -08002844 // --- RTT
2845 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002846
kwiberg55b97fe2016-01-28 05:22:45 -08002847 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002848
kwiberg55b97fe2016-01-28 05:22:45 -08002849 size_t bytesSent(0);
2850 uint32_t packetsSent(0);
2851 size_t bytesReceived(0);
2852 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002853
kwiberg55b97fe2016-01-28 05:22:45 -08002854 if (statistician) {
2855 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2856 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002857
kwiberg55b97fe2016-01-28 05:22:45 -08002858 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2859 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2860 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
2861 " output will not be complete");
2862 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002863
kwiberg55b97fe2016-01-28 05:22:45 -08002864 stats.bytesSent = bytesSent;
2865 stats.packetsSent = packetsSent;
2866 stats.bytesReceived = bytesReceived;
2867 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002868
kwiberg55b97fe2016-01-28 05:22:45 -08002869 // --- Timestamps
2870 {
2871 rtc::CritScope lock(&ts_stats_lock_);
2872 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2873 }
2874 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002875}
2876
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002877int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002878 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002879 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002880
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002881 if (enable) {
2882 if (redPayloadtype < 0 || redPayloadtype > 127) {
2883 _engineStatisticsPtr->SetLastError(
2884 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002885 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002886 return -1;
2887 }
2888
2889 if (SetRedPayloadType(redPayloadtype) < 0) {
2890 _engineStatisticsPtr->SetLastError(
2891 VE_CODEC_ERROR, kTraceError,
2892 "SetSecondarySendCodec() Failed to register RED ACM");
2893 return -1;
2894 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002895 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002896
kwibergc8d071e2016-04-06 12:22:38 -07002897 if (!codec_manager_.SetCopyRed(enable) ||
2898 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002899 _engineStatisticsPtr->SetLastError(
2900 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002901 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002902 return -1;
2903 }
2904 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002905}
2906
kwiberg55b97fe2016-01-28 05:22:45 -08002907int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
kwibergc8d071e2016-04-06 12:22:38 -07002908 enabled = codec_manager_.GetStackParams()->use_red;
kwiberg55b97fe2016-01-28 05:22:45 -08002909 if (enabled) {
2910 int8_t payloadType = 0;
2911 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2912 _engineStatisticsPtr->SetLastError(
2913 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2914 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2915 "module");
2916 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002917 }
kwiberg55b97fe2016-01-28 05:22:45 -08002918 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002919 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002920 }
2921 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002922}
2923
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002924int Channel::SetCodecFECStatus(bool enable) {
2925 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2926 "Channel::SetCodecFECStatus()");
2927
kwibergc8d071e2016-04-06 12:22:38 -07002928 if (!codec_manager_.SetCodecFEC(enable) ||
2929 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002930 _engineStatisticsPtr->SetLastError(
2931 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2932 "SetCodecFECStatus() failed to set FEC state");
2933 return -1;
2934 }
2935 return 0;
2936}
2937
2938bool Channel::GetCodecFECStatus() {
kwibergc8d071e2016-04-06 12:22:38 -07002939 return codec_manager_.GetStackParams()->use_codec_fec;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002940}
2941
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002942void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2943 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002944 // If pacing is enabled we always store packets.
2945 if (!pacing_enabled_)
2946 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002947 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002948 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002949 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002950 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002951 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002952}
2953
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002954// Called when we are missing one or more packets.
2955int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002956 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2957}
2958
kwiberg55b97fe2016-01-28 05:22:45 -08002959uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2960 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2961 "Channel::Demultiplex()");
2962 _audioFrame.CopyFrom(audioFrame);
2963 _audioFrame.id_ = _channelId;
2964 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002965}
2966
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002967void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002968 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002969 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002970 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002971 CodecInst codec;
2972 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002973
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002974 // Never upsample or upmix the capture signal here. This should be done at the
2975 // end of the send chain.
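// Illustrative example: a 48 kHz stereo capture paired with a 32 kHz mono
// send codec is reduced to 32 kHz mono by the min() clamps below.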
2976 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2977 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2978 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2979 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002980}
2981
kwiberg55b97fe2016-01-28 05:22:45 -08002982uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
2983 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2984 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002985
kwiberg55b97fe2016-01-28 05:22:45 -08002986 if (_audioFrame.samples_per_channel_ == 0) {
2987 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2988 "Channel::PrepareEncodeAndSend() invalid audio frame");
2989 return 0xFFFFFFFF;
2990 }
2991
2992 if (channel_state_.Get().input_file_playing) {
2993 MixOrReplaceAudioWithFile(mixingFrequency);
2994 }
2995
solenberg1c2af8e2016-03-24 10:36:00 -07002996 bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock.
2997 AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);
kwiberg55b97fe2016-01-28 05:22:45 -08002998
2999 if (channel_state_.Get().input_external_media) {
3000 rtc::CritScope cs(&_callbackCritSect);
3001 const bool isStereo = (_audioFrame.num_channels_ == 2);
3002 if (_inputExternalMediaCallbackPtr) {
3003 _inputExternalMediaCallbackPtr->Process(
3004 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
3005 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
3006 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00003007 }
kwiberg55b97fe2016-01-28 05:22:45 -08003008 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003009
kwiberg55b97fe2016-01-28 05:22:45 -08003010 if (_includeAudioLevelIndication) {
3011 size_t length =
3012 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
Tommi60c4e0a2016-05-26 21:35:27 +02003013 RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
solenberg1c2af8e2016-03-24 10:36:00 -07003014 if (is_muted && previous_frame_muted_) {
kwiberg55b97fe2016-01-28 05:22:45 -08003015 rms_level_.ProcessMuted(length);
3016 } else {
3017 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00003018 }
kwiberg55b97fe2016-01-28 05:22:45 -08003019 }
solenberg1c2af8e2016-03-24 10:36:00 -07003020 previous_frame_muted_ = is_muted;
niklase@google.com470e71d2011-07-07 08:21:25 +00003021
kwiberg55b97fe2016-01-28 05:22:45 -08003022 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003023}
3024
kwiberg55b97fe2016-01-28 05:22:45 -08003025uint32_t Channel::EncodeAndSend() {
3026 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3027 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003028
kwiberg55b97fe2016-01-28 05:22:45 -08003029 assert(_audioFrame.num_channels_ <= 2);
3030 if (_audioFrame.samples_per_channel_ == 0) {
3031 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3032 "Channel::EncodeAndSend() invalid audio frame");
3033 return 0xFFFFFFFF;
3034 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003035
kwiberg55b97fe2016-01-28 05:22:45 -08003036 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003037
kwiberg55b97fe2016-01-28 05:22:45 -08003038 // --- Add 10 ms of raw (PCM) audio data to the encoder.
niklase@google.com470e71d2011-07-07 08:21:25 +00003039
kwiberg55b97fe2016-01-28 05:22:45 -08003040 // The ACM resamples internally.
3041 _audioFrame.timestamp_ = _timeStamp;
3042 // This call will trigger AudioPacketizationCallback::SendData if encoding
3043 // is done and payload is ready for packetization and transmission.
3044 // Otherwise, it will return without invoking the callback.
3045 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3046 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3047 "Channel::EncodeAndSend() ACM encoding failed");
3048 return 0xFFFFFFFF;
3049 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003050
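// Advance the RTP timestamp by one 10 ms frame; e.g. 480 samples per
// channel at 48 kHz, or 160 at 16 kHz.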
kwiberg55b97fe2016-01-28 05:22:45 -08003051 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3052 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003053}
3054
Minyue2013aec2015-05-13 14:14:42 +02003055void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003056 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003057 Channel* channel = associate_send_channel_.channel();
3058 if (channel && channel->ChannelId() == channel_id) {
3059 // If this channel is associated with a send channel of the specified
3060 // Channel ID, disassociate with it.
3061 ChannelOwner ref(NULL);
3062 associate_send_channel_ = ref;
3063 }
3064}
3065
kwiberg55b97fe2016-01-28 05:22:45 -08003066int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3067 VoEMediaProcess& processObject) {
3068 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3069 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003070
kwiberg55b97fe2016-01-28 05:22:45 -08003071 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003072
kwiberg55b97fe2016-01-28 05:22:45 -08003073 if (kPlaybackPerChannel == type) {
3074 if (_outputExternalMediaCallbackPtr) {
3075 _engineStatisticsPtr->SetLastError(
3076 VE_INVALID_OPERATION, kTraceError,
3077 "Channel::RegisterExternalMediaProcessing() "
3078 "output external media already enabled");
3079 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003080 }
kwiberg55b97fe2016-01-28 05:22:45 -08003081 _outputExternalMediaCallbackPtr = &processObject;
3082 _outputExternalMedia = true;
3083 } else if (kRecordingPerChannel == type) {
3084 if (_inputExternalMediaCallbackPtr) {
3085 _engineStatisticsPtr->SetLastError(
3086 VE_INVALID_OPERATION, kTraceError,
3087 "Channel::RegisterExternalMediaProcessing() "
3088 "output external media already enabled");
3089 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003090 }
kwiberg55b97fe2016-01-28 05:22:45 -08003091 _inputExternalMediaCallbackPtr = &processObject;
3092 channel_state_.SetInputExternalMedia(true);
3093 }
3094 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003095}
3096
kwiberg55b97fe2016-01-28 05:22:45 -08003097int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3098 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3099 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003100
kwiberg55b97fe2016-01-28 05:22:45 -08003101 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003102
kwiberg55b97fe2016-01-28 05:22:45 -08003103 if (kPlaybackPerChannel == type) {
3104 if (!_outputExternalMediaCallbackPtr) {
3105 _engineStatisticsPtr->SetLastError(
3106 VE_INVALID_OPERATION, kTraceWarning,
3107 "Channel::DeRegisterExternalMediaProcessing() "
3108 "output external media already disabled");
3109 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003110 }
kwiberg55b97fe2016-01-28 05:22:45 -08003111 _outputExternalMedia = false;
3112 _outputExternalMediaCallbackPtr = NULL;
3113 } else if (kRecordingPerChannel == type) {
3114 if (!_inputExternalMediaCallbackPtr) {
3115 _engineStatisticsPtr->SetLastError(
3116 VE_INVALID_OPERATION, kTraceWarning,
3117 "Channel::DeRegisterExternalMediaProcessing() "
3118 "input external media already disabled");
3119 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003120 }
kwiberg55b97fe2016-01-28 05:22:45 -08003121 channel_state_.SetInputExternalMedia(false);
3122 _inputExternalMediaCallbackPtr = NULL;
3123 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003124
kwiberg55b97fe2016-01-28 05:22:45 -08003125 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003126}
3127
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003128int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003129 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3130 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003131
kwiberg55b97fe2016-01-28 05:22:45 -08003132 if (channel_state_.Get().playing) {
3133 _engineStatisticsPtr->SetLastError(
3134 VE_INVALID_OPERATION, kTraceError,
3135 "Channel::SetExternalMixing() "
3136 "external mixing cannot be changed while playing.");
3137 return -1;
3138 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003139
kwiberg55b97fe2016-01-28 05:22:45 -08003140 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003141
kwiberg55b97fe2016-01-28 05:22:45 -08003142 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003143}
3144
kwiberg55b97fe2016-01-28 05:22:45 -08003145int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3146 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003147}
3148
wu@webrtc.org24301a62013-12-13 19:17:43 +00003149void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3150 audio_coding_->GetDecodingCallStatistics(stats);
3151}
3152
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003153bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3154 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003155 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003156 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003157 return false;
3158 }
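// |_average_jitter_buffer_delay_us| is kept scaled by 1000 (see
// UpdatePacketDelay()); adding 500 rounds to the nearest millisecond.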
kwiberg55b97fe2016-01-28 05:22:45 -08003159 *jitter_buffer_delay_ms =
3160 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003161 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003162 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003163}
3164
solenberg358057b2015-11-27 10:46:42 -08003165uint32_t Channel::GetDelayEstimate() const {
3166 int jitter_buffer_delay_ms = 0;
3167 int playout_buffer_delay_ms = 0;
3168 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3169 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3170}
3171
deadbeef74375882015-08-13 12:09:10 -07003172int Channel::LeastRequiredDelayMs() const {
3173 return audio_coding_->LeastRequiredDelayMs();
3174}
3175
kwiberg55b97fe2016-01-28 05:22:45 -08003176int Channel::SetMinimumPlayoutDelay(int delayMs) {
3177 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3178 "Channel::SetMinimumPlayoutDelay()");
3179 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3180 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3181 _engineStatisticsPtr->SetLastError(
3182 VE_INVALID_ARGUMENT, kTraceError,
3183 "SetMinimumPlayoutDelay() invalid min delay");
3184 return -1;
3185 }
3186 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3187 _engineStatisticsPtr->SetLastError(
3188 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3189 "SetMinimumPlayoutDelay() failed to set min playout delay");
3190 return -1;
3191 }
3192 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003193}
3194
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003195int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003196 uint32_t playout_timestamp_rtp = 0;
3197 {
tommi31fc21f2016-01-21 10:37:37 -08003198 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003199 playout_timestamp_rtp = playout_timestamp_rtp_;
3200 }
kwiberg55b97fe2016-01-28 05:22:45 -08003201 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003202 _engineStatisticsPtr->SetLastError(
3203 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3204 "GetPlayoutTimestamp() failed to retrieve timestamp");
3205 return -1;
3206 }
deadbeef74375882015-08-13 12:09:10 -07003207 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003208 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003209}
3210
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003211int Channel::SetInitTimestamp(unsigned int timestamp) {
3212 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003213 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003214 if (channel_state_.Get().sending) {
3215 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3216 "SetInitTimestamp() already sending");
3217 return -1;
3218 }
3219 _rtpRtcpModule->SetStartTimestamp(timestamp);
3220 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003221}
3222
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003223int Channel::SetInitSequenceNumber(short sequenceNumber) {
3224 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3225 "Channel::SetInitSequenceNumber()");
3226 if (channel_state_.Get().sending) {
3227 _engineStatisticsPtr->SetLastError(
3228 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3229 return -1;
3230 }
3231 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3232 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003233}
3234
kwiberg55b97fe2016-01-28 05:22:45 -08003235int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3236 RtpReceiver** rtp_receiver) const {
3237 *rtpRtcpModule = _rtpRtcpModule.get();
3238 *rtp_receiver = rtp_receiver_.get();
3239 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003240}
3241
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003242// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3243// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003244int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwibergb7f89d62016-02-17 10:04:18 -08003245 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003246 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003247
kwiberg55b97fe2016-01-28 05:22:45 -08003248 {
3249 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003250
kwiberg55b97fe2016-01-28 05:22:45 -08003251 if (_inputFilePlayerPtr == NULL) {
3252 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3253 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3254 " doesnt exist");
3255 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003256 }
3257
kwiberg55b97fe2016-01-28 05:22:45 -08003258 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3259 mixingFrequency) == -1) {
3260 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3261 "Channel::MixOrReplaceAudioWithFile() file mixing "
3262 "failed");
3263 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003264 }
kwiberg55b97fe2016-01-28 05:22:45 -08003265 if (fileSamples == 0) {
3266 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3267 "Channel::MixOrReplaceAudioWithFile() file is ended");
3268 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003269 }
kwiberg55b97fe2016-01-28 05:22:45 -08003270 }
3271
3272 assert(_audioFrame.samples_per_channel_ == fileSamples);
3273
3274 if (_mixFileWithMicrophone) {
3275 // Currently file stream is always mono.
3276 // TODO(xians): Change the code when FilePlayer supports real stereo.
3277 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3278 1, fileSamples);
3279 } else {
3280 // Replace ACM audio with file.
3281 // Currently file stream is always mono.
3282 // TODO(xians): Change the code when FilePlayer supports real stereo.
3283 _audioFrame.UpdateFrame(
3284 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3285 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3286 }
3287 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003288}
3289
kwiberg55b97fe2016-01-28 05:22:45 -08003290int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3291 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003292
kwibergb7f89d62016-02-17 10:04:18 -08003293 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
kwiberg55b97fe2016-01-28 05:22:45 -08003294 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003295
kwiberg55b97fe2016-01-28 05:22:45 -08003296 {
3297 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003298
kwiberg55b97fe2016-01-28 05:22:45 -08003299 if (_outputFilePlayerPtr == NULL) {
3300 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3301 "Channel::MixAudioWithFile() file mixing failed");
3302 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003303 }
3304
kwiberg55b97fe2016-01-28 05:22:45 -08003305 // We should get the frequency we ask for.
3306 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3307 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3308 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3309 "Channel::MixAudioWithFile() file mixing failed");
3310 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003311 }
kwiberg55b97fe2016-01-28 05:22:45 -08003312 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003313
kwiberg55b97fe2016-01-28 05:22:45 -08003314 if (audioFrame.samples_per_channel_ == fileSamples) {
3315 // Currently file stream is always mono.
3316 // TODO(xians): Change the code when FilePlayer supports real stereo.
3317 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3318 fileSamples);
3319 } else {
3320 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3321 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3322 ") != "
3323 "fileSamples(%" PRIuS ")",
3324 audioFrame.samples_per_channel_, fileSamples);
3325 return -1;
3326 }
3327
3328 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003329}
3330
deadbeef74375882015-08-13 12:09:10 -07003331void Channel::UpdatePlayoutTimestamp(bool rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003332 jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();
deadbeef74375882015-08-13 12:09:10 -07003333
henrik.lundin96bd5022016-04-06 04:13:56 -07003334 if (!jitter_buffer_playout_timestamp_) {
3335 // This can happen if this channel has not received any RTP packets. In
3336 // this case, NetEq is not capable of computing a playout timestamp.
deadbeef74375882015-08-13 12:09:10 -07003337 return;
3338 }
3339
3340 uint16_t delay_ms = 0;
3341 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003342 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003343 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3344 " delay from the ADM");
3345 _engineStatisticsPtr->SetLastError(
3346 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3347 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3348 return;
3349 }
3350
henrik.lundin96bd5022016-04-06 04:13:56 -07003351 RTC_DCHECK(jitter_buffer_playout_timestamp_);
3352 uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;
deadbeef74375882015-08-13 12:09:10 -07003353
3354 // Remove the playout delay.
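// Illustrative example: a 60 ms device delay at a 48 kHz playout clock
// corresponds to 60 * 48 = 2880 RTP ticks.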
henrik.lundin96bd5022016-04-06 04:13:56 -07003355 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
deadbeef74375882015-08-13 12:09:10 -07003356
kwiberg55b97fe2016-01-28 05:22:45 -08003357 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003358 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
henrik.lundin96bd5022016-04-06 04:13:56 -07003359 playout_timestamp);
deadbeef74375882015-08-13 12:09:10 -07003360
3361 {
tommi31fc21f2016-01-21 10:37:37 -08003362 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003363 if (rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003364 playout_timestamp_rtcp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003365 } else {
henrik.lundin96bd5022016-04-06 04:13:56 -07003366 playout_timestamp_rtp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003367 }
3368 playout_delay_ms_ = delay_ms;
3369 }
3370}
3371
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003372// Called for incoming RTP packets after successful RTP header parsing.
3373void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3374 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003375 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003376 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3377 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003378
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003379 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003380 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003381
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003382 // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
henrik.lundin96bd5022016-04-06 04:13:56 -07003383 // for every incoming packet. It may be empty if no valid playout timestamp
3384 // is available.
3385 // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
3386 // resulting difference is positive and will be used. When the inverse is
3387 // true (can happen when a network glitch causes a packet to arrive late,
3388 // and during long comfort noise periods with clock drift), or when
3389 // |jitter_buffer_playout_timestamp_| has no value, the difference is not
3390 // changed from the initial 0.
3391 uint32_t timestamp_diff_ms = 0;
3392 if (jitter_buffer_playout_timestamp_ &&
3393 IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
3394 timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
3395 (rtp_receive_frequency / 1000);
3396 if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3397 // Diff is too large; set it to zero instead.
3398 timestamp_diff_ms = 0;
3399 }
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003400 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003401
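// RTP timestamp spacing between the two most recent packets, converted to
// ms. Illustrative example: packets 960 ticks apart at a 48 kHz clock are
// 960 / 48 = 20 ms apart.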
kwiberg55b97fe2016-01-28 05:22:45 -08003402 uint16_t packet_delay_ms =
3403 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003404
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003405 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003406
kwiberg55b97fe2016-01-28 05:22:45 -08003407 if (timestamp_diff_ms == 0)
3408 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003409
deadbeef74375882015-08-13 12:09:10 -07003410 {
tommi31fc21f2016-01-21 10:37:37 -08003411 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003412
deadbeef74375882015-08-13 12:09:10 -07003413 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3414 _recPacketDelayMs = packet_delay_ms;
3415 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003416
deadbeef74375882015-08-13 12:09:10 -07003417 if (_average_jitter_buffer_delay_us == 0) {
3418 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3419 return;
3420 }
3421
3422 // Filter average delay value using exponential filter (alpha is
3423 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3424 // risk of rounding error) and compensate for it in GetDelayEstimate()
3425 // later.
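// Illustrative example: with a previous average of 40000 (i.e. 40 ms) and
// a new diff of 50 ms, the update gives
// (40000 * 7 + 1000 * 50 + 500) / 8 = 41312, i.e. roughly 41.3 ms.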
kwiberg55b97fe2016-01-28 05:22:45 -08003426 _average_jitter_buffer_delay_us =
3427 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3428 8;
deadbeef74375882015-08-13 12:09:10 -07003429 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003430}
3431
kwiberg55b97fe2016-01-28 05:22:45 -08003432void Channel::RegisterReceiveCodecsToRTPModule() {
3433 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3434 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003435
kwiberg55b97fe2016-01-28 05:22:45 -08003436 CodecInst codec;
3437 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003438
kwiberg55b97fe2016-01-28 05:22:45 -08003439 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3440 // Open up the RTP/RTCP receiver for all supported codecs
3441 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3442 (rtp_receiver_->RegisterReceivePayload(
3443 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3444 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3445 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3446 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3447 " to register %s (%d/%d/%" PRIuS
3448 "/%d) to RTP/RTCP "
3449 "receiver",
3450 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3451 codec.rate);
3452 } else {
3453 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3454 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3455 "(%d/%d/%" PRIuS
3456 "/%d) has been added to the RTP/RTCP "
3457 "receiver",
3458 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3459 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003460 }
kwiberg55b97fe2016-01-28 05:22:45 -08003461 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003462}
3463
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003464// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003465int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003466 CodecInst codec;
3467 bool found_red = false;
3468
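// RED is the redundant audio data payload format defined in RFC 2198.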
3469 // Get default RED settings from the ACM database
3470 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3471 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003472 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003473 if (!STR_CASE_CMP(codec.plname, "RED")) {
3474 found_red = true;
3475 break;
3476 }
3477 }
3478
3479 if (!found_red) {
3480 _engineStatisticsPtr->SetLastError(
3481 VE_CODEC_ERROR, kTraceError,
3482 "SetRedPayloadType() RED is not supported");
3483 return -1;
3484 }
3485
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003486 codec.pltype = red_payload_type;
kwibergc8d071e2016-04-06 12:22:38 -07003487 if (!codec_manager_.RegisterEncoder(codec) ||
3488 !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003489 _engineStatisticsPtr->SetLastError(
3490 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3491 "SetRedPayloadType() RED registration in ACM module failed");
3492 return -1;
3493 }
3494
3495 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3496 _engineStatisticsPtr->SetLastError(
3497 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3498 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3499 return -1;
3500 }
3501 return 0;
3502}
3503
kwiberg55b97fe2016-01-28 05:22:45 -08003504int Channel::SetSendRtpHeaderExtension(bool enable,
3505 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003506 unsigned char id) {
3507 int error = 0;
3508 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3509 if (enable) {
3510 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3511 }
3512 return error;
3513}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003514
wu@webrtc.org94454b72014-06-05 20:34:08 +00003515int32_t Channel::GetPlayoutFrequency() {
3516 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3517 CodecInst current_receive_codec;
3518 if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
3519 if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
3520 // Even though the actual sampling rate for G.722 audio is
3521 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3522 // 8,000 Hz because that value was erroneously assigned in
3523 // RFC 1890 and must remain unchanged for backward compatibility.
3524 playout_frequency = 8000;
3525 } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
3526 // We are resampling Opus internally to 32,000 Hz until all our
3527 // DSP routines can operate at 48,000 Hz, but the RTP clock
3528 // rate for the Opus payload format is standardized to 48,000 Hz,
3529 // because that is the maximum supported decoding sampling rate.
3530 playout_frequency = 48000;
3531 }
3532 }
3533 return playout_frequency;
3534}
3535
Minyue2013aec2015-05-13 14:14:42 +02003536int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003537 RtcpMode method = _rtpRtcpModule->RTCP();
3538 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003539 return 0;
3540 }
3541 std::vector<RTCPReportBlock> report_blocks;
3542 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003543
3544 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003545 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003546 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003547 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003548 Channel* channel = associate_send_channel_.channel();
3549 // Tries to get RTT from an associated channel. This is important for
3550 // receive-only channels.
3551 if (channel) {
3552 // To prevent infinite recursion and deadlock, calling GetRTT of
3553 // associate channel should always use "false" for argument:
3554 // |allow_associate_channel|.
3555 rtt = channel->GetRTT(false);
3556 }
3557 }
3558 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003559 }
3560
3561 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3562 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3563 for (; it != report_blocks.end(); ++it) {
3564 if (it->remoteSSRC == remoteSSRC)
3565 break;
3566 }
3567 if (it == report_blocks.end()) {
3568 // We have not received packets with SSRC matching the report blocks.
3569 // To calculate RTT we try with the SSRC of the first report block.
3570 // This is very important for send-only channels where we don't know
3571 // the SSRC of the other end.
3572 remoteSSRC = report_blocks[0].remoteSSRC;
3573 }
Minyue2013aec2015-05-13 14:14:42 +02003574
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003575 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003576 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003577 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003578 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3579 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003580 return 0;
3581 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003582 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003583}
3584
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003585} // namespace voe
3586} // namespace webrtc