/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

namespace webrtc {
namespace voe {

const int kTelephoneEventAttenuationdB = 10;

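// The proxy classes below sit between this channel's RTP/RTCP module and the
// pacing/congestion-control objects owned elsewhere. The channel registers
// each proxy with the RTP/RTCP module once; the actual target (feedback
// observer, sequence-number allocator, packet sender) can then be attached or
// detached at runtime under the proxy's lock, with thread checkers documenting
// which thread is expected to make which call.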
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};

class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};

class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};

// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};

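// Receives bandwidth-related RTCP callbacks from the RTP/RTCP module and
// forwards an aggregated packet-loss figure to the owning Channel.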
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just send
    // what we get? I.e. do we ever get multiple reports bundled into one RTCP
    // report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};

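// Called by the ACM (registered via RegisterTransportCallback()) whenever an
// encoded audio frame is ready; stamps the current audio level and hands the
// payload to the RTP/RTCP module for packetization and sending.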
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension.
    _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}

int32_t Channel::OnRxVadDetected(int vadDecision) {
  rtc::CritScope cs(&_callbackCritSect);
  if (_rxVadObserverPtr) {
    _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
  }

  return 0;
}

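// Transport implementation: the RTP/RTCP module calls SendRtp()/SendRtcp() for
// outgoing packets, which are forwarded to the registered transport object.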
bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnPlayTelephoneEvent(uint8_t event,
                                   uint16_t lengthMs,
                                   uint8_t volume) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnPlayTelephoneEvent(event=%u, lengthMs=%u,"
               " volume=%u)",
               event, lengthMs, volume);

  if (!_playOutbandDtmfEvent || event > 15) {
    // Ignore callback since feedback is disabled or event is not a
    // Dtmf tone event.
    return;
  }

  assert(_outputMixerPtr != NULL);

  // Start playing out the Dtmf tone (if playout is enabled).
  // Reduce the length of the tone by 80 ms to reduce the risk of echo.
  _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

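// Called by the RTP receiver when a payload type that has no decoder yet is
// seen; mirrors the payload format into the ACM so a matching decoder can be
// created.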
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM.
  if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}

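// Called with a parsed RTP payload. The payload is pushed into the ACM
// (NetEQ), and the current round-trip time is used to ask the ACM for a NACK
// list so missing packets can be re-requested.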
int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM.
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  // Update the packet delay.
  UpdatePacketDelay(rtpHeader->header.timestamp,
                    rtpHeader->header.sequenceNumber);

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}

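// Playout path: pulls 10 ms of decoded PCM from the ACM, optionally runs
// receive-side processing (RX APM, audio sink callback, gain/panning, file
// mixing, external media processing, level measurement) and derives the
// elapsed and NTP timestamps used for A/V sync.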
int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
      -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return -1;
  }

  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return 0;
}

int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine highest needed receive frequency
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case, if we're playing a file on the playout side
  // we take that frequency into consideration as well
  // This is not needed on sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (_outputFilePlayerPtr) {
      if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
        highestNeeded = _outputFilePlayerPtr->Frequency();
      }
    }
  }

  return (highestNeeded);
}

int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

686 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
687 "Channel::PlayNotification(id=%d, durationMs=%d)", id,
688 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000689
kwiberg55b97fe2016-01-28 05:22:45 -0800690 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000691}
692
kwiberg55b97fe2016-01-28 05:22:45 -0800693void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
694 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
695 "Channel::RecordNotification(id=%d, durationMs=%d)", id,
696 durationMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000697
kwiberg55b97fe2016-01-28 05:22:45 -0800698 // Not implement yet
niklase@google.com470e71d2011-07-07 08:21:25 +0000699}
700
void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}

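// The constructor only wires up helpers and configuration; the channel is not
// usable until SetEngineInformation() and Init() have been called.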
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflicts with other channels by adding 1024 - 1026;
      // fewer than 1024 channels will be used.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset; the RTP module will add its
                      // own random offset.
      ntp_estimator_(Clock::GetRealTimeClock()),
      jitter_buffer_playout_timestamp_(0),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      _mute(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _playOutbandDtmfEvent(false),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.audio_messages = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}

Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}

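// Init() registers the RTP/RTCP module with the process thread, initializes
// the ACM receiver, and registers every codec supported by the ACM with the
// RTP receiver; PCMU (mono) is installed as the default send codec.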
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  //     RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
          (_rtpRtcpModule->RegisterSendPayload(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (audio_coding_->RegisterReceiveCodec(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}

int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
                                      OutputMixer& outputMixer,
                                      voe::TransmitMixer& transmitMixer,
                                      ProcessThread& moduleProcessThread,
                                      AudioDeviceModule& audioDeviceModule,
                                      VoiceEngineObserver* voiceEngineObserver,
                                      rtc::CriticalSection* callbackCritSect) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetEngineInformation()");
  _engineStatisticsPtr = &engineStatistics;
  _outputMixerPtr = &outputMixer;
  _transmitMixerPtr = &transmitMixer;
  _moduleProcessThreadPtr = &moduleProcessThread;
  _audioDeviceModulePtr = &audioDeviceModule;
  _voiceEngineObserverPtr = voiceEngineObserver;
  _callbackCritSectPtr = callbackCritSect;
  return 0;
}

int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}

void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}

int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add the participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}

int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove the participant as a candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}

int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}

int32_t Channel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopSend()");
  if (!channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(false);

  // Store the sequence number to be able to pick up the same sequence for
  // the next StartSend(). This is needed for restarting device, otherwise
  // it might cause libSRTP to complain about packets being replayed.
  // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
  // CL is landed. See issue
  // https://code.google.com/p/webrtc/issues/detail?id=2111 .
  send_sequence_number_ = _rtpRtcpModule->SequenceNumber();

  // Reset the sending SSRC and sequence number and trigger direct transmission
  // of an RTCP BYE.
  if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "StopSend() RTP/RTCP failed to stop sending");
  }
  _rtpRtcpModule->SetSendingMediaStatus(false);

  return 0;
}

int32_t Channel::StartReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartReceiving()");
  if (channel_state_.Get().receiving) {
    return 0;
  }
  channel_state_.SetReceiving(true);
  _numberOfDiscardedPackets = 0;
  return 0;
}

int32_t Channel::StopReceiving() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopReceiving()");
  if (!channel_state_.Get().receiving) {
    return 0;
  }

  channel_state_.SetReceiving(false);
  return 0;
}

int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}

int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}

int32_t Channel::GetSendCodec(CodecInst& codec) {
  auto send_codec = audio_coding_->SendCodec();
  if (send_codec) {
    codec = *send_codec;
    return 0;
  }
  return -1;
}

int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}

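// A send codec must be known both to the ACM (which encodes) and to the
// RTP/RTCP module (which maps it to a payload type); RTP registration is
// retried once after de-registering a previous mapping for the same payload
// type.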
int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (audio_coding_->RegisterSendCodec(codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to set audio packet size");
    return -1;
  }

  return 0;
}

Ivo Creusenadf89b72015-04-29 16:03:33 +02001262void Channel::SetBitRate(int bitrate_bps) {
1263 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1264 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1265 audio_coding_->SetBitRate(bitrate_bps);
1266}
1267
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001268void Channel::OnIncomingFractionLoss(int fraction_lost) {
minyue@webrtc.org74aaf292014-07-16 21:28:26 +00001269 network_predictor_->UpdatePacketLossRate(fraction_lost);
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +00001270 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1271
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001272 // Normalizes rate to 0 - 100.
kwiberg55b97fe2016-01-28 05:22:45 -08001273 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1274 0) {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00001275 assert(false); // This should not happen.
1276 }
1277}
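// Illustrative arithmetic for the scaling above: a smoothed loss value of
// 64 out of 255 maps to 100 * 64 / 255 = 25, i.e. SetPacketLossRate() is
// given roughly 25% packet loss.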
1278
kwiberg55b97fe2016-01-28 05:22:45 -08001279int32_t Channel::SetVADStatus(bool enableVAD,
1280 ACMVADMode mode,
1281 bool disableDTX) {
1282 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1283 "Channel::SetVADStatus(mode=%d)", mode);
1284 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1285 // To disable VAD, DTX must be disabled too
1286 disableDTX = ((enableVAD == false) ? true : disableDTX);
1287 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) {
1288 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1289 kTraceError,
1290 "SetVADStatus() failed to set VAD");
1291 return -1;
1292 }
1293 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001294}
1295
kwiberg55b97fe2016-01-28 05:22:45 -08001296int32_t Channel::GetVADStatus(bool& enabledVAD,
1297 ACMVADMode& mode,
1298 bool& disabledDTX) {
1299 if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) {
1300 _engineStatisticsPtr->SetLastError(
1301 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1302 "GetVADStatus() failed to get VAD status");
1303 return -1;
1304 }
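  // The ACM call above reports whether DTX is enabled; the VoE API exposes
  // the inverted "disabled" flag, hence the negation below.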
1305 disabledDTX = !disabledDTX;
1306 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001307}
1308
kwiberg55b97fe2016-01-28 05:22:45 -08001309int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1310 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1311 "Channel::SetRecPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001312
kwiberg55b97fe2016-01-28 05:22:45 -08001313 if (channel_state_.Get().playing) {
1314 _engineStatisticsPtr->SetLastError(
1315 VE_ALREADY_PLAYING, kTraceError,
1316 "SetRecPayloadType() unable to set PT while playing");
1317 return -1;
1318 }
1319 if (channel_state_.Get().receiving) {
1320 _engineStatisticsPtr->SetLastError(
1321 VE_ALREADY_LISTENING, kTraceError,
1322 "SetRecPayloadType() unable to set PT while listening");
1323 return -1;
1324 }
1325
1326 if (codec.pltype == -1) {
1327 // De-register the selected codec (RTP/RTCP module and ACM)
1328
1329 int8_t pltype(-1);
1330 CodecInst rxCodec = codec;
1331
1332 // Get payload type for the given codec
1333 rtp_payload_registry_->ReceivePayloadType(
1334 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1335 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1336 rxCodec.pltype = pltype;
1337
1338 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1339 _engineStatisticsPtr->SetLastError(
1340 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1341 "SetRecPayloadType() RTP/RTCP-module deregistration "
1342 "failed");
1343 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001344 }
kwiberg55b97fe2016-01-28 05:22:45 -08001345 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1346 _engineStatisticsPtr->SetLastError(
1347 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1348 "SetRecPayloadType() ACM deregistration failed - 1");
1349 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001350 }
kwiberg55b97fe2016-01-28 05:22:45 -08001351 return 0;
1352 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001353
kwiberg55b97fe2016-01-28 05:22:45 -08001354 if (rtp_receiver_->RegisterReceivePayload(
1355 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1356 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1357 // First attempt to register failed => de-register and try again
1358 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001359 if (rtp_receiver_->RegisterReceivePayload(
kwiberg55b97fe2016-01-28 05:22:45 -08001360 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1361 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1362 _engineStatisticsPtr->SetLastError(
1363 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1364 "SetRecPayloadType() RTP/RTCP-module registration failed");
1365 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001366 }
kwiberg55b97fe2016-01-28 05:22:45 -08001367 }
1368 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1369 audio_coding_->UnregisterReceiveCodec(codec.pltype);
1370 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1371 _engineStatisticsPtr->SetLastError(
1372 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1373 "SetRecPayloadType() ACM registration failed - 1");
1374 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001375 }
kwiberg55b97fe2016-01-28 05:22:45 -08001376 }
1377 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001378}
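// Usage sketch (illustrative values; "channel" is a hypothetical Channel*):
// a CodecInst with pltype == -1 de-registers the codec from both the RTP
// receiver and the ACM, while any other payload type (re)registers it:
//   CodecInst opus = {111, "opus", 48000, 960, 2, 64000};
//   channel->SetRecPayloadType(opus);   // register
//   opus.pltype = -1;
//   channel->SetRecPayloadType(opus);   // de-register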
1379
kwiberg55b97fe2016-01-28 05:22:45 -08001380int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1381 int8_t payloadType(-1);
1382 if (rtp_payload_registry_->ReceivePayloadType(
1383 codec.plname, codec.plfreq, codec.channels,
1384 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1385 _engineStatisticsPtr->SetLastError(
1386 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1387 "GetRecPayloadType() failed to retrieve RX payload type");
1388 return -1;
1389 }
1390 codec.pltype = payloadType;
1391 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001392}
1393
kwiberg55b97fe2016-01-28 05:22:45 -08001394int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1395 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1396 "Channel::SetSendCNPayloadType()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001397
kwiberg55b97fe2016-01-28 05:22:45 -08001398 CodecInst codec;
1399 int32_t samplingFreqHz(-1);
1400 const size_t kMono = 1;
1401 if (frequency == kFreq32000Hz)
1402 samplingFreqHz = 32000;
1403 else if (frequency == kFreq16000Hz)
1404 samplingFreqHz = 16000;
niklase@google.com470e71d2011-07-07 08:21:25 +00001405
kwiberg55b97fe2016-01-28 05:22:45 -08001406 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1407 _engineStatisticsPtr->SetLastError(
1408 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1409 "SetSendCNPayloadType() failed to retrieve default CN codec "
1410 "settings");
1411 return -1;
1412 }
1413
  // Modify the payload type (must be in the dynamic range).
1415 codec.pltype = type;
1416
1417 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1418 _engineStatisticsPtr->SetLastError(
1419 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1420 "SetSendCNPayloadType() failed to register CN to ACM");
1421 return -1;
1422 }
1423
1424 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1425 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1426 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1427 _engineStatisticsPtr->SetLastError(
1428 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1429 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1430 "module");
1431 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001432 }
kwiberg55b97fe2016-01-28 05:22:45 -08001433 }
1434 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001435}
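// Usage sketch (hypothetical payload type value; "channel" is a Channel*):
// map wideband comfort noise to a dynamic payload type before sending:
//   channel->SetSendCNPayloadType(105, kFreq16000Hz);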
1436
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001437int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001438 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001439 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001440
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001441 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001442 _engineStatisticsPtr->SetLastError(
1443 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001444 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001445 return -1;
1446 }
1447 return 0;
1448}
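// Usage sketch (illustrative value): telling the Opus encoder that the
// receiver will not play back anything above wideband could look like
//   channel->SetOpusMaxPlaybackRate(16000);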
1449
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001450int Channel::SetOpusDtx(bool enable_dtx) {
1451 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1452 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001453 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001454 : audio_coding_->DisableOpusDtx();
1455 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001456 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1457 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001458 return -1;
1459 }
1460 return 0;
1461}
1462
kwiberg55b97fe2016-01-28 05:22:45 -08001463int32_t Channel::RegisterExternalTransport(Transport& transport) {
1464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001465 "Channel::RegisterExternalTransport()");
1466
kwiberg55b97fe2016-01-28 05:22:45 -08001467 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001468
kwiberg55b97fe2016-01-28 05:22:45 -08001469 if (_externalTransport) {
1470 _engineStatisticsPtr->SetLastError(
1471 VE_INVALID_OPERATION, kTraceError,
1472 "RegisterExternalTransport() external transport already enabled");
1473 return -1;
1474 }
1475 _externalTransport = true;
1476 _transportPtr = &transport;
1477 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001478}
1479
kwiberg55b97fe2016-01-28 05:22:45 -08001480int32_t Channel::DeRegisterExternalTransport() {
1481 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1482 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001483
kwiberg55b97fe2016-01-28 05:22:45 -08001484 rtc::CritScope cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00001485
kwiberg55b97fe2016-01-28 05:22:45 -08001486 if (!_transportPtr) {
1487 _engineStatisticsPtr->SetLastError(
1488 VE_INVALID_OPERATION, kTraceWarning,
1489 "DeRegisterExternalTransport() external transport already "
1490 "disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00001491 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001492 }
1493 _externalTransport = false;
1494 _transportPtr = NULL;
1495 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1496 "DeRegisterExternalTransport() all transport is disabled");
1497 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001498}
1499
kwiberg55b97fe2016-01-28 05:22:45 -08001500int32_t Channel::ReceivedRTPPacket(const int8_t* data,
1501 size_t length,
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001502 const PacketTime& packet_time) {
kwiberg55b97fe2016-01-28 05:22:45 -08001503 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001504 "Channel::ReceivedRTPPacket()");
1505
1506 // Store playout timestamp for the received RTP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001507 UpdatePlayoutTimestamp(false);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001508
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001509 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001510 RTPHeader header;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001511 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1512 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1513 "Incoming packet: invalid RTP header");
stefan@webrtc.orga5cb98c2013-05-29 12:12:51 +00001514 return -1;
1515 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001516 header.payload_type_frequency =
1517 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001518 if (header.payload_type_frequency < 0)
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001519 return -1;
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001520 bool in_order = IsPacketInOrder(header);
kwiberg55b97fe2016-01-28 05:22:45 -08001521 rtp_receive_statistics_->IncomingPacket(
1522 header, length, IsPacketRetransmitted(header, in_order));
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001523 rtp_payload_registry_->SetIncomingPayloadType(header);
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00001524
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001525 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001526}
1527
1528bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001529 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001530 const RTPHeader& header,
1531 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001532 if (rtp_payload_registry_->IsRtx(header)) {
1533 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001534 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001535 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001536 assert(packet_length >= header.headerLength);
1537 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001538 PayloadUnion payload_specific;
1539 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001540 &payload_specific)) {
1541 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001542 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001543 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1544 payload_specific, in_order);
1545}
1546
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001547bool Channel::HandleRtxPacket(const uint8_t* packet,
1548 size_t packet_length,
1549 const RTPHeader& header) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001550 if (!rtp_payload_registry_->IsRtx(header))
1551 return false;
1552
1553 // Remove the RTX header and parse the original RTP header.
1554 if (packet_length < header.headerLength)
1555 return false;
1556 if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1557 return false;
1558 if (restored_packet_in_use_) {
1559 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1560 "Multiple RTX headers detected, dropping packet");
1561 return false;
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001562 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001563 if (!rtp_payload_registry_->RestoreOriginalPacket(
noahric65220a72015-10-14 11:29:49 -07001564 restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
1565 header)) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001566 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1567 "Incoming RTX packet: invalid RTP header");
1568 return false;
1569 }
1570 restored_packet_in_use_ = true;
noahric65220a72015-10-14 11:29:49 -07001571 bool ret = OnRecoveredPacket(restored_packet_, packet_length);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001572 restored_packet_in_use_ = false;
1573 return ret;
1574}
1575
1576bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1577 StreamStatistician* statistician =
1578 rtp_receive_statistics_->GetStatistician(header.ssrc);
1579 if (!statistician)
1580 return false;
1581 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001582}
1583
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001584bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1585 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001586 // Retransmissions are handled separately if RTX is enabled.
1587 if (rtp_payload_registry_->RtxEnabled())
1588 return false;
1589 StreamStatistician* statistician =
1590 rtp_receive_statistics_->GetStatistician(header.ssrc);
1591 if (!statistician)
1592 return false;
1593 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001594 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001595 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001596 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001597}
1598
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001599int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
kwiberg55b97fe2016-01-28 05:22:45 -08001600 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001601 "Channel::ReceivedRTCPPacket()");
1602 // Store playout timestamp for the received RTCP packet
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00001603 UpdatePlayoutTimestamp(true);
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001604
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001605 // Deliver RTCP packet to RTP/RTCP module for parsing
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001606 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001607 _engineStatisticsPtr->SetLastError(
1608 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1609 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1610 }
wu@webrtc.org82c4b852014-05-20 22:55:01 +00001611
Minyue2013aec2015-05-13 14:14:42 +02001612 int64_t rtt = GetRTT(true);
1613 if (rtt == 0) {
1614 // Waiting for valid RTT.
1615 return 0;
1616 }
1617 uint32_t ntp_secs = 0;
1618 uint32_t ntp_frac = 0;
1619 uint32_t rtp_timestamp = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001620 if (0 !=
1621 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1622 &rtp_timestamp)) {
Minyue2013aec2015-05-13 14:14:42 +02001623 // Waiting for RTCP.
1624 return 0;
1625 }
1626
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001627 {
tommi31fc21f2016-01-21 10:37:37 -08001628 rtc::CritScope lock(&ts_stats_lock_);
minyue@webrtc.org2c0cdbc2014-10-09 10:52:43 +00001629 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
stefan@webrtc.org8e24d872014-09-02 18:58:24 +00001630 }
pwestin@webrtc.org0c459572013-04-03 15:43:57 +00001631 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001632}
1633
niklase@google.com470e71d2011-07-07 08:21:25 +00001634int Channel::StartPlayingFileLocally(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001635 bool loop,
1636 FileFormats format,
1637 int startPosition,
1638 float volumeScaling,
1639 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001640 const CodecInst* codecInst) {
1641 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1642 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1643 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1644 "stopPosition=%d)",
1645 fileName, loop, format, volumeScaling, startPosition,
1646 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001647
kwiberg55b97fe2016-01-28 05:22:45 -08001648 if (channel_state_.Get().output_file_playing) {
1649 _engineStatisticsPtr->SetLastError(
1650 VE_ALREADY_PLAYING, kTraceError,
1651 "StartPlayingFileLocally() is already playing");
1652 return -1;
1653 }
1654
1655 {
1656 rtc::CritScope cs(&_fileCritSect);
1657
1658 if (_outputFilePlayerPtr) {
1659 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1660 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1661 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001662 }
1663
kwiberg55b97fe2016-01-28 05:22:45 -08001664 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1665 _outputFilePlayerId, (const FileFormats)format);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001666
kwiberg55b97fe2016-01-28 05:22:45 -08001667 if (_outputFilePlayerPtr == NULL) {
1668 _engineStatisticsPtr->SetLastError(
1669 VE_INVALID_ARGUMENT, kTraceError,
1670 "StartPlayingFileLocally() filePlayer format is not correct");
1671 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001672 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001673
kwiberg55b97fe2016-01-28 05:22:45 -08001674 const uint32_t notificationTime(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00001675
kwiberg55b97fe2016-01-28 05:22:45 -08001676 if (_outputFilePlayerPtr->StartPlayingFile(
1677 fileName, loop, startPosition, volumeScaling, notificationTime,
1678 stopPosition, (const CodecInst*)codecInst) != 0) {
1679 _engineStatisticsPtr->SetLastError(
1680 VE_BAD_FILE, kTraceError,
1681 "StartPlayingFile() failed to start file playout");
1682 _outputFilePlayerPtr->StopPlayingFile();
1683 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1684 _outputFilePlayerPtr = NULL;
1685 return -1;
1686 }
1687 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1688 channel_state_.SetOutputFilePlaying(true);
1689 }
1690
1691 if (RegisterFilePlayingToMixer() != 0)
1692 return -1;
1693
1694 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001695}
1696
1697int Channel::StartPlayingFileLocally(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001698 FileFormats format,
1699 int startPosition,
1700 float volumeScaling,
1701 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001702 const CodecInst* codecInst) {
1703 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1704 "Channel::StartPlayingFileLocally(format=%d,"
1705 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1706 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001707
kwiberg55b97fe2016-01-28 05:22:45 -08001708 if (stream == NULL) {
1709 _engineStatisticsPtr->SetLastError(
1710 VE_BAD_FILE, kTraceError,
1711 "StartPlayingFileLocally() NULL as input stream");
1712 return -1;
1713 }
1714
1715 if (channel_state_.Get().output_file_playing) {
1716 _engineStatisticsPtr->SetLastError(
1717 VE_ALREADY_PLAYING, kTraceError,
1718 "StartPlayingFileLocally() is already playing");
1719 return -1;
1720 }
1721
1722 {
1723 rtc::CritScope cs(&_fileCritSect);
1724
1725 // Destroy the old instance
1726 if (_outputFilePlayerPtr) {
1727 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1728 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1729 _outputFilePlayerPtr = NULL;
niklase@google.com470e71d2011-07-07 08:21:25 +00001730 }
1731
kwiberg55b97fe2016-01-28 05:22:45 -08001732 // Create the instance
1733 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1734 _outputFilePlayerId, (const FileFormats)format);
niklase@google.com470e71d2011-07-07 08:21:25 +00001735
kwiberg55b97fe2016-01-28 05:22:45 -08001736 if (_outputFilePlayerPtr == NULL) {
1737 _engineStatisticsPtr->SetLastError(
1738 VE_INVALID_ARGUMENT, kTraceError,
1739 "StartPlayingFileLocally() filePlayer format isnot correct");
1740 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001741 }
1742
kwiberg55b97fe2016-01-28 05:22:45 -08001743 const uint32_t notificationTime(0);
henrike@webrtc.orgb37c6282011-10-31 23:53:04 +00001744
kwiberg55b97fe2016-01-28 05:22:45 -08001745 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1746 volumeScaling, notificationTime,
1747 stopPosition, codecInst) != 0) {
1748 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1749 "StartPlayingFile() failed to "
1750 "start file playout");
1751 _outputFilePlayerPtr->StopPlayingFile();
1752 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1753 _outputFilePlayerPtr = NULL;
1754 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001755 }
kwiberg55b97fe2016-01-28 05:22:45 -08001756 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1757 channel_state_.SetOutputFilePlaying(true);
1758 }
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001759
kwiberg55b97fe2016-01-28 05:22:45 -08001760 if (RegisterFilePlayingToMixer() != 0)
1761 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001762
kwiberg55b97fe2016-01-28 05:22:45 -08001763 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001764}
1765
kwiberg55b97fe2016-01-28 05:22:45 -08001766int Channel::StopPlayingFileLocally() {
1767 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1768 "Channel::StopPlayingFileLocally()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001769
kwiberg55b97fe2016-01-28 05:22:45 -08001770 if (!channel_state_.Get().output_file_playing) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001771 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001772 }
1773
1774 {
1775 rtc::CritScope cs(&_fileCritSect);
1776
1777 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1778 _engineStatisticsPtr->SetLastError(
1779 VE_STOP_RECORDING_FAILED, kTraceError,
1780 "StopPlayingFile() could not stop playing");
1781 return -1;
1782 }
1783 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1784 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1785 _outputFilePlayerPtr = NULL;
1786 channel_state_.SetOutputFilePlaying(false);
1787 }
1788 // _fileCritSect cannot be taken while calling
  // SetAnonymousMixabilityStatus. Refer to comments in
1790 // StartPlayingFileLocally(const char* ...) for more details.
1791 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1792 _engineStatisticsPtr->SetLastError(
1793 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1794 "StopPlayingFile() failed to stop participant from playing as"
1795 "file in the mixer");
1796 return -1;
1797 }
1798
1799 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001800}
1801
kwiberg55b97fe2016-01-28 05:22:45 -08001802int Channel::IsPlayingFileLocally() const {
1803 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001804}
1805
kwiberg55b97fe2016-01-28 05:22:45 -08001806int Channel::RegisterFilePlayingToMixer() {
  // Return success without registering file playing with the mixer if:
  // 1. a file is played before playout has started on this channel, or
  // 2. playout is started without file playing on this channel.
1810 if (!channel_state_.Get().playing ||
1811 !channel_state_.Get().output_file_playing) {
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001812 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001813 }
1814
1815 // |_fileCritSect| cannot be taken while calling
1816 // SetAnonymousMixabilityStatus() since as soon as the participant is added
1817 // frames can be pulled by the mixer. Since the frames are generated from
1818 // the file, _fileCritSect will be taken. This would result in a deadlock.
1819 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
1820 channel_state_.SetOutputFilePlaying(false);
1821 rtc::CritScope cs(&_fileCritSect);
1822 _engineStatisticsPtr->SetLastError(
1823 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1824 "StartPlayingFile() failed to add participant as file to mixer");
1825 _outputFilePlayerPtr->StopPlayingFile();
1826 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1827 _outputFilePlayerPtr = NULL;
1828 return -1;
1829 }
1830
1831 return 0;
braveyao@webrtc.orgab129902012-06-04 03:26:39 +00001832}
1833
niklase@google.com470e71d2011-07-07 08:21:25 +00001834int Channel::StartPlayingFileAsMicrophone(const char* fileName,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001835 bool loop,
1836 FileFormats format,
1837 int startPosition,
1838 float volumeScaling,
1839 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001840 const CodecInst* codecInst) {
1841 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1842 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
1843 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
1844 "stopPosition=%d)",
1845 fileName, loop, format, volumeScaling, startPosition,
1846 stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001847
kwiberg55b97fe2016-01-28 05:22:45 -08001848 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001849
kwiberg55b97fe2016-01-28 05:22:45 -08001850 if (channel_state_.Get().input_file_playing) {
1851 _engineStatisticsPtr->SetLastError(
1852 VE_ALREADY_PLAYING, kTraceWarning,
1853 "StartPlayingFileAsMicrophone() filePlayer is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001854 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001855 }
1856
1857 // Destroy the old instance
1858 if (_inputFilePlayerPtr) {
1859 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1860 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1861 _inputFilePlayerPtr = NULL;
1862 }
1863
1864 // Create the instance
1865 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1866 (const FileFormats)format);
1867
1868 if (_inputFilePlayerPtr == NULL) {
1869 _engineStatisticsPtr->SetLastError(
1870 VE_INVALID_ARGUMENT, kTraceError,
1871 "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
1872 return -1;
1873 }
1874
1875 const uint32_t notificationTime(0);
1876
1877 if (_inputFilePlayerPtr->StartPlayingFile(
1878 fileName, loop, startPosition, volumeScaling, notificationTime,
1879 stopPosition, (const CodecInst*)codecInst) != 0) {
1880 _engineStatisticsPtr->SetLastError(
1881 VE_BAD_FILE, kTraceError,
1882 "StartPlayingFile() failed to start file playout");
1883 _inputFilePlayerPtr->StopPlayingFile();
1884 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1885 _inputFilePlayerPtr = NULL;
1886 return -1;
1887 }
1888 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1889 channel_state_.SetInputFilePlaying(true);
1890
1891 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001892}
1893
1894int Channel::StartPlayingFileAsMicrophone(InStream* stream,
pbos@webrtc.org92135212013-05-14 08:31:39 +00001895 FileFormats format,
1896 int startPosition,
1897 float volumeScaling,
1898 int stopPosition,
kwiberg55b97fe2016-01-28 05:22:45 -08001899 const CodecInst* codecInst) {
1900 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1901 "Channel::StartPlayingFileAsMicrophone(format=%d, "
1902 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1903 format, volumeScaling, startPosition, stopPosition);
niklase@google.com470e71d2011-07-07 08:21:25 +00001904
kwiberg55b97fe2016-01-28 05:22:45 -08001905 if (stream == NULL) {
1906 _engineStatisticsPtr->SetLastError(
1907 VE_BAD_FILE, kTraceError,
1908 "StartPlayingFileAsMicrophone NULL as input stream");
1909 return -1;
1910 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001911
kwiberg55b97fe2016-01-28 05:22:45 -08001912 rtc::CritScope cs(&_fileCritSect);
henrika@webrtc.org944cbeb2014-03-18 10:32:33 +00001913
kwiberg55b97fe2016-01-28 05:22:45 -08001914 if (channel_state_.Get().input_file_playing) {
1915 _engineStatisticsPtr->SetLastError(
1916 VE_ALREADY_PLAYING, kTraceWarning,
1917 "StartPlayingFileAsMicrophone() is playing");
niklase@google.com470e71d2011-07-07 08:21:25 +00001918 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001919 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001920
kwiberg55b97fe2016-01-28 05:22:45 -08001921 // Destroy the old instance
1922 if (_inputFilePlayerPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001923 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1924 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1925 _inputFilePlayerPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08001926 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001927
kwiberg55b97fe2016-01-28 05:22:45 -08001928 // Create the instance
1929 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1930 (const FileFormats)format);
1931
1932 if (_inputFilePlayerPtr == NULL) {
1933 _engineStatisticsPtr->SetLastError(
1934 VE_INVALID_ARGUMENT, kTraceError,
1935 "StartPlayingInputFile() filePlayer format isnot correct");
1936 return -1;
1937 }
1938
1939 const uint32_t notificationTime(0);
1940
1941 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1942 volumeScaling, notificationTime,
1943 stopPosition, codecInst) != 0) {
1944 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1945 "StartPlayingFile() failed to start "
1946 "file playout");
1947 _inputFilePlayerPtr->StopPlayingFile();
1948 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1949 _inputFilePlayerPtr = NULL;
1950 return -1;
1951 }
1952
1953 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1954 channel_state_.SetInputFilePlaying(true);
1955
1956 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001957}
1958
kwiberg55b97fe2016-01-28 05:22:45 -08001959int Channel::StopPlayingFileAsMicrophone() {
1960 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1961 "Channel::StopPlayingFileAsMicrophone()");
1962
1963 rtc::CritScope cs(&_fileCritSect);
1964
1965 if (!channel_state_.Get().input_file_playing) {
1966 return 0;
1967 }
1968
1969 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
1970 _engineStatisticsPtr->SetLastError(
1971 VE_STOP_RECORDING_FAILED, kTraceError,
1972 "StopPlayingFile() could not stop playing");
1973 return -1;
1974 }
1975 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1976 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1977 _inputFilePlayerPtr = NULL;
1978 channel_state_.SetInputFilePlaying(false);
1979
1980 return 0;
1981}
1982
1983int Channel::IsPlayingFileAsMicrophone() const {
1984 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001985}
1986
leozwang@webrtc.org813e4b02012-03-01 18:34:25 +00001987int Channel::StartRecordingPlayout(const char* fileName,
kwiberg55b97fe2016-01-28 05:22:45 -08001988 const CodecInst* codecInst) {
1989 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1990 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
niklase@google.com470e71d2011-07-07 08:21:25 +00001991
kwiberg55b97fe2016-01-28 05:22:45 -08001992 if (_outputFileRecording) {
1993 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
1994 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00001995 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001996 }
niklase@google.com470e71d2011-07-07 08:21:25 +00001997
kwiberg55b97fe2016-01-28 05:22:45 -08001998 FileFormats format;
1999 const uint32_t notificationTime(0); // Not supported in VoE
2000 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
niklase@google.com470e71d2011-07-07 08:21:25 +00002001
kwiberg55b97fe2016-01-28 05:22:45 -08002002 if ((codecInst != NULL) &&
2003 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2004 _engineStatisticsPtr->SetLastError(
2005 VE_BAD_ARGUMENT, kTraceError,
2006 "StartRecordingPlayout() invalid compression");
2007 return (-1);
2008 }
2009 if (codecInst == NULL) {
2010 format = kFileFormatPcm16kHzFile;
2011 codecInst = &dummyCodec;
2012 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2013 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2014 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2015 format = kFileFormatWavFile;
2016 } else {
2017 format = kFileFormatCompressedFile;
2018 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002019
kwiberg55b97fe2016-01-28 05:22:45 -08002020 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002021
kwiberg55b97fe2016-01-28 05:22:45 -08002022 // Destroy the old instance
2023 if (_outputFileRecorderPtr) {
niklase@google.com470e71d2011-07-07 08:21:25 +00002024 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2025 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2026 _outputFileRecorderPtr = NULL;
kwiberg55b97fe2016-01-28 05:22:45 -08002027 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002028
kwiberg55b97fe2016-01-28 05:22:45 -08002029 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2030 _outputFileRecorderId, (const FileFormats)format);
2031 if (_outputFileRecorderPtr == NULL) {
2032 _engineStatisticsPtr->SetLastError(
2033 VE_INVALID_ARGUMENT, kTraceError,
2034 "StartRecordingPlayout() fileRecorder format isnot correct");
2035 return -1;
2036 }
2037
2038 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2039 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2040 _engineStatisticsPtr->SetLastError(
2041 VE_BAD_FILE, kTraceError,
2042 "StartRecordingAudioFile() failed to start file recording");
2043 _outputFileRecorderPtr->StopRecording();
2044 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2045 _outputFileRecorderPtr = NULL;
2046 return -1;
2047 }
2048 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2049 _outputFileRecording = true;
2050
2051 return 0;
2052}
2053
2054int Channel::StartRecordingPlayout(OutStream* stream,
2055 const CodecInst* codecInst) {
2056 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2057 "Channel::StartRecordingPlayout()");
2058
2059 if (_outputFileRecording) {
2060 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2061 "StartRecordingPlayout() is already recording");
niklase@google.com470e71d2011-07-07 08:21:25 +00002062 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002063 }
2064
2065 FileFormats format;
2066 const uint32_t notificationTime(0); // Not supported in VoE
2067 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2068
2069 if (codecInst != NULL && codecInst->channels != 1) {
2070 _engineStatisticsPtr->SetLastError(
2071 VE_BAD_ARGUMENT, kTraceError,
2072 "StartRecordingPlayout() invalid compression");
2073 return (-1);
2074 }
2075 if (codecInst == NULL) {
2076 format = kFileFormatPcm16kHzFile;
2077 codecInst = &dummyCodec;
2078 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2079 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2080 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2081 format = kFileFormatWavFile;
2082 } else {
2083 format = kFileFormatCompressedFile;
2084 }
2085
2086 rtc::CritScope cs(&_fileCritSect);
2087
2088 // Destroy the old instance
2089 if (_outputFileRecorderPtr) {
2090 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2091 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2092 _outputFileRecorderPtr = NULL;
2093 }
2094
2095 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2096 _outputFileRecorderId, (const FileFormats)format);
2097 if (_outputFileRecorderPtr == NULL) {
2098 _engineStatisticsPtr->SetLastError(
2099 VE_INVALID_ARGUMENT, kTraceError,
2100 "StartRecordingPlayout() fileRecorder format isnot correct");
2101 return -1;
2102 }
2103
2104 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2105 notificationTime) != 0) {
2106 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2107 "StartRecordingPlayout() failed to "
2108 "start file recording");
2109 _outputFileRecorderPtr->StopRecording();
2110 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2111 _outputFileRecorderPtr = NULL;
2112 return -1;
2113 }
2114
2115 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2116 _outputFileRecording = true;
2117
2118 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002119}
2120
kwiberg55b97fe2016-01-28 05:22:45 -08002121int Channel::StopRecordingPlayout() {
2122 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2123 "Channel::StopRecordingPlayout()");
2124
2125 if (!_outputFileRecording) {
2126 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2127 "StopRecordingPlayout() isnot recording");
2128 return -1;
2129 }
2130
2131 rtc::CritScope cs(&_fileCritSect);
2132
2133 if (_outputFileRecorderPtr->StopRecording() != 0) {
2134 _engineStatisticsPtr->SetLastError(
2135 VE_STOP_RECORDING_FAILED, kTraceError,
2136 "StopRecording() could not stop recording");
2137 return (-1);
2138 }
2139 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2140 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2141 _outputFileRecorderPtr = NULL;
2142 _outputFileRecording = false;
2143
2144 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002145}
2146
kwiberg55b97fe2016-01-28 05:22:45 -08002147void Channel::SetMixWithMicStatus(bool mix) {
2148 rtc::CritScope cs(&_fileCritSect);
2149 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002150}
2151
kwiberg55b97fe2016-01-28 05:22:45 -08002152int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2153 int8_t currentLevel = _outputAudioLevel.Level();
2154 level = static_cast<int32_t>(currentLevel);
2155 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002156}
2157
kwiberg55b97fe2016-01-28 05:22:45 -08002158int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2159 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2160 level = static_cast<int32_t>(currentLevel);
2161 return 0;
2162}
2163
2164int Channel::SetMute(bool enable) {
2165 rtc::CritScope cs(&volume_settings_critsect_);
2166 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002167 "Channel::SetMute(enable=%d)", enable);
kwiberg55b97fe2016-01-28 05:22:45 -08002168 _mute = enable;
2169 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002170}
2171
kwiberg55b97fe2016-01-28 05:22:45 -08002172bool Channel::Mute() const {
2173 rtc::CritScope cs(&volume_settings_critsect_);
2174 return _mute;
niklase@google.com470e71d2011-07-07 08:21:25 +00002175}
2176
kwiberg55b97fe2016-01-28 05:22:45 -08002177int Channel::SetOutputVolumePan(float left, float right) {
2178 rtc::CritScope cs(&volume_settings_critsect_);
2179 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002180 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002181 _panLeft = left;
2182 _panRight = right;
2183 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002184}
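// Usage sketch (assuming left/right are linear scale factors in [0.0, 1.0];
// "channel" is a hypothetical Channel*):
//   channel->SetOutputVolumePan(1.0f, 0.0f);  // play only in the left channel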
2185
kwiberg55b97fe2016-01-28 05:22:45 -08002186int Channel::GetOutputVolumePan(float& left, float& right) const {
2187 rtc::CritScope cs(&volume_settings_critsect_);
2188 left = _panLeft;
2189 right = _panRight;
2190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002191}
2192
kwiberg55b97fe2016-01-28 05:22:45 -08002193int Channel::SetChannelOutputVolumeScaling(float scaling) {
2194 rtc::CritScope cs(&volume_settings_critsect_);
2195 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002196 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002197 _outputGain = scaling;
2198 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002199}
2200
kwiberg55b97fe2016-01-28 05:22:45 -08002201int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2202 rtc::CritScope cs(&volume_settings_critsect_);
2203 scaling = _outputGain;
2204 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002205}
2206
solenberg8842c3e2016-03-11 03:06:41 -08002207int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
kwiberg55b97fe2016-01-28 05:22:45 -08002208 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
solenberg8842c3e2016-03-11 03:06:41 -08002209 "Channel::SendTelephoneEventOutband(...)");
2210 RTC_DCHECK_LE(0, event);
2211 RTC_DCHECK_GE(255, event);
2212 RTC_DCHECK_LE(0, duration_ms);
2213 RTC_DCHECK_GE(65535, duration_ms);
kwiberg55b97fe2016-01-28 05:22:45 -08002214 if (!Sending()) {
2215 return -1;
2216 }
solenberg3ecb5c82016-03-09 07:31:58 -08002217
solenberg8842c3e2016-03-11 03:06:41 -08002218 _playOutbandDtmfEvent = false;
solenberg3ecb5c82016-03-09 07:31:58 -08002219
solenberg8842c3e2016-03-11 03:06:41 -08002220 if (_rtpRtcpModule->SendTelephoneEventOutband(
2221 event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08002222 _engineStatisticsPtr->SetLastError(
2223 VE_SEND_DTMF_FAILED, kTraceWarning,
2224 "SendTelephoneEventOutband() failed to send event");
2225 return -1;
2226 }
2227 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002228}
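// Usage sketch (hypothetical caller code; the channel must already be
// sending): the RFC 4733 event code for DTMF digit '5' is 5, so a 160 ms
// out-of-band tone could be sent with
//   channel->SendTelephoneEventOutband(5, 160);
// The tone is attenuated by the fixed kTelephoneEventAttenuationdB above.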
2229
solenberg31642aa2016-03-14 08:00:37 -07002230int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
kwiberg55b97fe2016-01-28 05:22:45 -08002231 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002232 "Channel::SetSendTelephoneEventPayloadType()");
solenberg31642aa2016-03-14 08:00:37 -07002233 RTC_DCHECK_LE(0, payload_type);
2234 RTC_DCHECK_GE(127, payload_type);
2235 CodecInst codec = {0};
kwiberg55b97fe2016-01-28 05:22:45 -08002236 codec.plfreq = 8000;
solenberg31642aa2016-03-14 08:00:37 -07002237 codec.pltype = payload_type;
kwiberg55b97fe2016-01-28 05:22:45 -08002238 memcpy(codec.plname, "telephone-event", 16);
2239 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2240 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2241 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2242 _engineStatisticsPtr->SetLastError(
2243 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2244 "SetSendTelephoneEventPayloadType() failed to register send"
2245 "payload type");
2246 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002247 }
kwiberg55b97fe2016-01-28 05:22:45 -08002248 }
kwiberg55b97fe2016-01-28 05:22:45 -08002249 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002250}
2251
kwiberg55b97fe2016-01-28 05:22:45 -08002252int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2253 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2254 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002255
kwiberg55b97fe2016-01-28 05:22:45 -08002256 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002257
kwiberg55b97fe2016-01-28 05:22:45 -08002258 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002259
kwiberg55b97fe2016-01-28 05:22:45 -08002260 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2261 OnRxVadDetected(vadDecision);
2262 _oldVadDecision = vadDecision;
2263 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002264
kwiberg55b97fe2016-01-28 05:22:45 -08002265 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2266 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2267 vadDecision);
2268 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002269}
2270
kwiberg55b97fe2016-01-28 05:22:45 -08002271int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2272 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2273 "Channel::RegisterRxVadObserver()");
2274 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002275
kwiberg55b97fe2016-01-28 05:22:45 -08002276 if (_rxVadObserverPtr) {
2277 _engineStatisticsPtr->SetLastError(
2278 VE_INVALID_OPERATION, kTraceError,
2279 "RegisterRxVadObserver() observer already enabled");
2280 return -1;
2281 }
2282 _rxVadObserverPtr = &observer;
2283 _RxVadDetection = true;
2284 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002285}
2286
kwiberg55b97fe2016-01-28 05:22:45 -08002287int Channel::DeRegisterRxVadObserver() {
2288 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2289 "Channel::DeRegisterRxVadObserver()");
2290 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002291
kwiberg55b97fe2016-01-28 05:22:45 -08002292 if (!_rxVadObserverPtr) {
2293 _engineStatisticsPtr->SetLastError(
2294 VE_INVALID_OPERATION, kTraceWarning,
2295 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002296 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002297 }
2298 _rxVadObserverPtr = NULL;
2299 _RxVadDetection = false;
2300 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002301}
2302
kwiberg55b97fe2016-01-28 05:22:45 -08002303int Channel::VoiceActivityIndicator(int& activity) {
2304 activity = _sendFrameType;
2305 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002306}
2307
2308#ifdef WEBRTC_VOICE_ENGINE_AGC
2309
kwiberg55b97fe2016-01-28 05:22:45 -08002310int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2311 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2312 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2313 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002314
kwiberg55b97fe2016-01-28 05:22:45 -08002315 GainControl::Mode agcMode = kDefaultRxAgcMode;
2316 switch (mode) {
2317 case kAgcDefault:
2318 break;
2319 case kAgcUnchanged:
2320 agcMode = rx_audioproc_->gain_control()->mode();
2321 break;
2322 case kAgcFixedDigital:
2323 agcMode = GainControl::kFixedDigital;
2324 break;
2325 case kAgcAdaptiveDigital:
2326 agcMode = GainControl::kAdaptiveDigital;
2327 break;
2328 default:
2329 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2330 "SetRxAgcStatus() invalid Agc mode");
2331 return -1;
2332 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002333
kwiberg55b97fe2016-01-28 05:22:45 -08002334 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2335 _engineStatisticsPtr->SetLastError(
2336 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2337 return -1;
2338 }
2339 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2340 _engineStatisticsPtr->SetLastError(
2341 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2342 return -1;
2343 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002344
kwiberg55b97fe2016-01-28 05:22:45 -08002345 _rxAgcIsEnabled = enable;
2346 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002347
kwiberg55b97fe2016-01-28 05:22:45 -08002348 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002349}
2350
kwiberg55b97fe2016-01-28 05:22:45 -08002351int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2352 bool enable = rx_audioproc_->gain_control()->is_enabled();
2353 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002354
kwiberg55b97fe2016-01-28 05:22:45 -08002355 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002356
kwiberg55b97fe2016-01-28 05:22:45 -08002357 switch (agcMode) {
2358 case GainControl::kFixedDigital:
2359 mode = kAgcFixedDigital;
2360 break;
2361 case GainControl::kAdaptiveDigital:
2362 mode = kAgcAdaptiveDigital;
2363 break;
2364 default:
2365 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2366 "GetRxAgcStatus() invalid Agc mode");
2367 return -1;
2368 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002369
kwiberg55b97fe2016-01-28 05:22:45 -08002370 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002371}
2372
kwiberg55b97fe2016-01-28 05:22:45 -08002373int Channel::SetRxAgcConfig(AgcConfig config) {
2374 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2375 "Channel::SetRxAgcConfig()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002376
kwiberg55b97fe2016-01-28 05:22:45 -08002377 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2378 config.targetLeveldBOv) != 0) {
2379 _engineStatisticsPtr->SetLastError(
2380 VE_APM_ERROR, kTraceError,
2381 "SetRxAgcConfig() failed to set target peak |level|"
2382 "(or envelope) of the Agc");
2383 return -1;
2384 }
2385 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2386 config.digitalCompressionGaindB) != 0) {
2387 _engineStatisticsPtr->SetLastError(
2388 VE_APM_ERROR, kTraceError,
2389 "SetRxAgcConfig() failed to set the range in |gain| the"
2390 " digital compression stage may apply");
2391 return -1;
2392 }
2393 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2394 0) {
2395 _engineStatisticsPtr->SetLastError(
2396 VE_APM_ERROR, kTraceError,
2397 "SetRxAgcConfig() failed to set hard limiter to the signal");
2398 return -1;
2399 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002400
kwiberg55b97fe2016-01-28 05:22:45 -08002401 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002402}
2403
kwiberg55b97fe2016-01-28 05:22:45 -08002404int Channel::GetRxAgcConfig(AgcConfig& config) {
2405 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2406 config.digitalCompressionGaindB =
2407 rx_audioproc_->gain_control()->compression_gain_db();
2408 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002409
kwiberg55b97fe2016-01-28 05:22:45 -08002410 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002411}
2412
kwiberg55b97fe2016-01-28 05:22:45 -08002413#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002414
2415#ifdef WEBRTC_VOICE_ENGINE_NR
2416
kwiberg55b97fe2016-01-28 05:22:45 -08002417int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2418 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2419 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2420 (int)mode);
niklase@google.com470e71d2011-07-07 08:21:25 +00002421
kwiberg55b97fe2016-01-28 05:22:45 -08002422 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2423 switch (mode) {
2424 case kNsDefault:
2425 break;
2426 case kNsUnchanged:
2427 nsLevel = rx_audioproc_->noise_suppression()->level();
2428 break;
2429 case kNsConference:
2430 nsLevel = NoiseSuppression::kHigh;
2431 break;
2432 case kNsLowSuppression:
2433 nsLevel = NoiseSuppression::kLow;
2434 break;
2435 case kNsModerateSuppression:
2436 nsLevel = NoiseSuppression::kModerate;
2437 break;
2438 case kNsHighSuppression:
2439 nsLevel = NoiseSuppression::kHigh;
2440 break;
2441 case kNsVeryHighSuppression:
2442 nsLevel = NoiseSuppression::kVeryHigh;
2443 break;
2444 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002445
kwiberg55b97fe2016-01-28 05:22:45 -08002446 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2447 _engineStatisticsPtr->SetLastError(
2448 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2449 return -1;
2450 }
2451 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2452 _engineStatisticsPtr->SetLastError(
2453 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2454 return -1;
2455 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002456
kwiberg55b97fe2016-01-28 05:22:45 -08002457 _rxNsIsEnabled = enable;
2458 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
niklase@google.com470e71d2011-07-07 08:21:25 +00002459
kwiberg55b97fe2016-01-28 05:22:45 -08002460 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002461}
2462
kwiberg55b97fe2016-01-28 05:22:45 -08002463int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2464 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2465 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002466
kwiberg55b97fe2016-01-28 05:22:45 -08002467 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002468
kwiberg55b97fe2016-01-28 05:22:45 -08002469 switch (ncLevel) {
2470 case NoiseSuppression::kLow:
2471 mode = kNsLowSuppression;
2472 break;
2473 case NoiseSuppression::kModerate:
2474 mode = kNsModerateSuppression;
2475 break;
2476 case NoiseSuppression::kHigh:
2477 mode = kNsHighSuppression;
2478 break;
2479 case NoiseSuppression::kVeryHigh:
2480 mode = kNsVeryHighSuppression;
2481 break;
2482 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002483
kwiberg55b97fe2016-01-28 05:22:45 -08002484 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002485}
2486
kwiberg55b97fe2016-01-28 05:22:45 -08002487#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002488
kwiberg55b97fe2016-01-28 05:22:45 -08002489int Channel::SetLocalSSRC(unsigned int ssrc) {
2490 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2491 "Channel::SetLocalSSRC()");
2492 if (channel_state_.Get().sending) {
2493 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2494 "SetLocalSSRC() already sending");
2495 return -1;
2496 }
2497 _rtpRtcpModule->SetSSRC(ssrc);
2498 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002499}
2500
kwiberg55b97fe2016-01-28 05:22:45 -08002501int Channel::GetLocalSSRC(unsigned int& ssrc) {
2502 ssrc = _rtpRtcpModule->SSRC();
2503 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002504}
2505
kwiberg55b97fe2016-01-28 05:22:45 -08002506int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2507 ssrc = rtp_receiver_->SSRC();
2508 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002509}
2510
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002511int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002512 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002513 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002514}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002515
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002516int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2517 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002518 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2519 if (enable &&
2520 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2521 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002522 return -1;
2523 }
2524 return 0;
2525}
2526
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002527int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2528 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2529}
2530
2531int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2532 rtp_header_parser_->DeregisterRtpHeaderExtension(
2533 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002534 if (enable &&
2535 !rtp_header_parser_->RegisterRtpHeaderExtension(
2536 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002537 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002538 }
2539 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002540}
2541
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002542void Channel::EnableSendTransportSequenceNumber(int id) {
2543 int ret =
2544 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2545 RTC_DCHECK_EQ(0, ret);
2546}
2547
stefan3313ec92016-01-21 06:32:43 -08002548void Channel::EnableReceiveTransportSequenceNumber(int id) {
2549 rtp_header_parser_->DeregisterRtpHeaderExtension(
2550 kRtpExtensionTransportSequenceNumber);
2551 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2552 kRtpExtensionTransportSequenceNumber, id);
2553 RTC_DCHECK(ret);
2554}
2555
stefanbba9dec2016-02-01 04:39:55 -08002556void Channel::RegisterSenderCongestionControlObjects(
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002557 RtpPacketSender* rtp_packet_sender,
2558 TransportFeedbackObserver* transport_feedback_observer,
2559 PacketRouter* packet_router) {
stefanbba9dec2016-02-01 04:39:55 -08002560 RTC_DCHECK(rtp_packet_sender);
2561 RTC_DCHECK(transport_feedback_observer);
2562 RTC_DCHECK(packet_router && !packet_router_);
2563 feedback_observer_proxy_->SetTransportFeedbackObserver(
2564 transport_feedback_observer);
2565 seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
2566 rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
2567 _rtpRtcpModule->SetStorePacketsStatus(true, 600);
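  // Keep a history of sent packets (600 here) so that the pacer can fetch
  // them for transmission and NACKed packets can be resent; at typical
  // 10-20 ms audio packet intervals this is roughly 6-12 seconds of audio
  // (rough estimate, for illustration only).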
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002568 packet_router->AddRtpModule(_rtpRtcpModule.get());
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002569 packet_router_ = packet_router;
2570}
2571
stefanbba9dec2016-02-01 04:39:55 -08002572void Channel::RegisterReceiverCongestionControlObjects(
2573 PacketRouter* packet_router) {
2574 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002575 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002576 packet_router_ = packet_router;
2577}
2578
2579void Channel::ResetCongestionControlObjects() {
2580 RTC_DCHECK(packet_router_);
2581 _rtpRtcpModule->SetStorePacketsStatus(false, 600);
2582 feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
2583 seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002584 packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002585 packet_router_ = nullptr;
2586 rtp_packet_sender_proxy_->SetPacketSender(nullptr);
2587}
2588
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002589void Channel::SetRTCPStatus(bool enable) {
2590 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2591 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002592 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002593}
2594
kwiberg55b97fe2016-01-28 05:22:45 -08002595int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002596 RtcpMode method = _rtpRtcpModule->RTCP();
2597 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002598 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002599}
2600
kwiberg55b97fe2016-01-28 05:22:45 -08002601int Channel::SetRTCP_CNAME(const char cName[256]) {
2602 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2603 "Channel::SetRTCP_CNAME()");
2604 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2605 _engineStatisticsPtr->SetLastError(
2606 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2607 "SetRTCP_CNAME() failed to set RTCP CNAME");
2608 return -1;
2609 }
2610 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002611}
2612
kwiberg55b97fe2016-01-28 05:22:45 -08002613int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2614 if (cName == NULL) {
2615 _engineStatisticsPtr->SetLastError(
2616 VE_INVALID_ARGUMENT, kTraceError,
2617 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2618 return -1;
2619 }
2620 char cname[RTCP_CNAME_SIZE];
2621 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2622 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2623 _engineStatisticsPtr->SetLastError(
2624 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2625 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2626 return -1;
2627 }
2628 strcpy(cName, cname);
2629 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002630}
2631
kwiberg55b97fe2016-01-28 05:22:45 -08002632int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
2633 unsigned int& NTPLow,
2634 unsigned int& timestamp,
2635 unsigned int& playoutTimestamp,
2636 unsigned int* jitter,
2637 unsigned short* fractionLost) {
2638 // --- Information from sender info in received Sender Reports
niklase@google.com470e71d2011-07-07 08:21:25 +00002639
kwiberg55b97fe2016-01-28 05:22:45 -08002640 RTCPSenderInfo senderInfo;
2641 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
2642 _engineStatisticsPtr->SetLastError(
2643 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2644 "GetRemoteRTCPData() failed to retrieve sender info for remote "
2645 "side");
2646 return -1;
2647 }
2648
 2649  // We only utilize 12 out of 20 bytes in the sender info (the packet
 2650  // and octet counts are ignored).
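  // NTPseconds/NTPfraction are the most- and least-significant 32 bits of
  // the 64-bit NTP timestamp carried in the sender report, and RTPtimeStamp
  // is the RTP timestamp corresponding to the same instant (RFC 3550).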
2651 NTPHigh = senderInfo.NTPseconds;
2652 NTPLow = senderInfo.NTPfraction;
2653 timestamp = senderInfo.RTPtimeStamp;
2654
2655 // --- Locally derived information
2656
2657 // This value is updated on each incoming RTCP packet (0 when no packet
2658 // has been received)
2659 playoutTimestamp = playout_timestamp_rtcp_;
2660
2661 if (NULL != jitter || NULL != fractionLost) {
2662 // Get all RTCP receiver report blocks that have been received on this
 2663    // channel. If we receive RTP packets from a remote source we know its
 2664    // SSRC and use the report block from that source.
2665 // Otherwise use the first report block.
2666 std::vector<RTCPReportBlock> remote_stats;
2667 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2668 remote_stats.empty()) {
2669 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2670 "GetRemoteRTCPData() failed to measure statistics due"
2671 " to lack of received RTP and/or RTCP packets");
2672 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002673 }
2674
kwiberg55b97fe2016-01-28 05:22:45 -08002675 uint32_t remoteSSRC = rtp_receiver_->SSRC();
2676 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
2677 for (; it != remote_stats.end(); ++it) {
2678 if (it->remoteSSRC == remoteSSRC)
2679 break;
niklase@google.com470e71d2011-07-07 08:21:25 +00002680 }
kwiberg55b97fe2016-01-28 05:22:45 -08002681
2682 if (it == remote_stats.end()) {
2683 // If we have not received any RTCP packets from this SSRC it probably
2684 // means that we have not received any RTP packets.
2685 // Use the first received report block instead.
2686 it = remote_stats.begin();
2687 remoteSSRC = it->remoteSSRC;
2688 }
2689
2690 if (jitter) {
2691 *jitter = it->jitter;
2692 }
2693
2694 if (fractionLost) {
2695 *fractionLost = it->fractionLost;
2696 }
2697 }
2698 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002699}
2700
kwiberg55b97fe2016-01-28 05:22:45 -08002701int Channel::SendApplicationDefinedRTCPPacket(
2702 unsigned char subType,
2703 unsigned int name,
2704 const char* data,
2705 unsigned short dataLengthInBytes) {
2706 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2707 "Channel::SendApplicationDefinedRTCPPacket()");
2708 if (!channel_state_.Get().sending) {
2709 _engineStatisticsPtr->SetLastError(
2710 VE_NOT_SENDING, kTraceError,
2711 "SendApplicationDefinedRTCPPacket() not sending");
2712 return -1;
2713 }
2714 if (NULL == data) {
2715 _engineStatisticsPtr->SetLastError(
2716 VE_INVALID_ARGUMENT, kTraceError,
2717 "SendApplicationDefinedRTCPPacket() invalid data value");
2718 return -1;
2719 }
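  // RFC 3550 requires the application-dependent data of an RTCP APP packet
  // to be a multiple of 32 bits long, hence the 4-byte check below.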
2720 if (dataLengthInBytes % 4 != 0) {
2721 _engineStatisticsPtr->SetLastError(
2722 VE_INVALID_ARGUMENT, kTraceError,
2723 "SendApplicationDefinedRTCPPacket() invalid length value");
2724 return -1;
2725 }
2726 RtcpMode status = _rtpRtcpModule->RTCP();
2727 if (status == RtcpMode::kOff) {
2728 _engineStatisticsPtr->SetLastError(
2729 VE_RTCP_ERROR, kTraceError,
2730 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
2731 return -1;
2732 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002733
kwiberg55b97fe2016-01-28 05:22:45 -08002734 // Create and schedule the RTCP APP packet for transmission
2735 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
2736 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
2737 _engineStatisticsPtr->SetLastError(
2738 VE_SEND_ERROR, kTraceError,
2739 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
2740 return -1;
2741 }
2742 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002743}
2744
kwiberg55b97fe2016-01-28 05:22:45 -08002745int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2746 unsigned int& maxJitterMs,
2747 unsigned int& discardedPackets) {
2748 // The jitter statistics is updated for each received RTP packet and is
2749 // based on received packets.
2750 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
 2751  // If RTCP is off, there is no timed thread in the RTCP module regularly
 2752  // generating new stats, so trigger the update manually here instead.
2753 StreamStatistician* statistician =
2754 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2755 if (statistician) {
2756 // Don't use returned statistics, use data from proxy instead so that
2757 // max jitter can be fetched atomically.
2758 RtcpStatistics s;
2759 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002760 }
kwiberg55b97fe2016-01-28 05:22:45 -08002761 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002762
kwiberg55b97fe2016-01-28 05:22:45 -08002763 ChannelStatistics stats = statistics_proxy_->GetStats();
2764 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2765 if (playoutFrequency > 0) {
2766 // Scale RTP statistics given the current playout frequency
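    // The proxy reports jitter in RTP timestamp units; dividing by
    // (playoutFrequency / 1000) converts ticks to milliseconds. Example
    // (illustrative numbers): at 48000 Hz, 4800 ticks of jitter equals
    // 4800 / 48 = 100 ms.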
2767 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2768 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2769 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002770
kwiberg55b97fe2016-01-28 05:22:45 -08002771 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002772
kwiberg55b97fe2016-01-28 05:22:45 -08002773 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002774}
2775
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002776int Channel::GetRemoteRTCPReportBlocks(
2777 std::vector<ReportBlock>* report_blocks) {
2778 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002779 _engineStatisticsPtr->SetLastError(
2780 VE_INVALID_ARGUMENT, kTraceError,
2781 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002782 return -1;
2783 }
2784
2785 // Get the report blocks from the latest received RTCP Sender or Receiver
2786 // Report. Each element in the vector contains the sender's SSRC and a
2787 // report block according to RFC 3550.
2788 std::vector<RTCPReportBlock> rtcp_report_blocks;
2789 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002790 return -1;
2791 }
2792
2793 if (rtcp_report_blocks.empty())
2794 return 0;
2795
2796 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2797 for (; it != rtcp_report_blocks.end(); ++it) {
2798 ReportBlock report_block;
2799 report_block.sender_SSRC = it->remoteSSRC;
2800 report_block.source_SSRC = it->sourceSSRC;
2801 report_block.fraction_lost = it->fractionLost;
2802 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2803 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2804 report_block.interarrival_jitter = it->jitter;
2805 report_block.last_SR_timestamp = it->lastSR;
2806 report_block.delay_since_last_SR = it->delaySinceLastSR;
2807 report_blocks->push_back(report_block);
2808 }
2809 return 0;
2810}
2811
kwiberg55b97fe2016-01-28 05:22:45 -08002812int Channel::GetRTPStatistics(CallStatistics& stats) {
2813 // --- RtcpStatistics
niklase@google.com470e71d2011-07-07 08:21:25 +00002814
kwiberg55b97fe2016-01-28 05:22:45 -08002815  // The jitter statistics are updated for each received RTP packet and are
 2816  // based on received packets.
2817 RtcpStatistics statistics;
2818 StreamStatistician* statistician =
2819 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
Peter Boström59013bc2016-02-12 11:35:08 +01002820 if (statistician) {
2821 statistician->GetStatistics(&statistics,
2822 _rtpRtcpModule->RTCP() == RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002823 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002824
kwiberg55b97fe2016-01-28 05:22:45 -08002825 stats.fractionLost = statistics.fraction_lost;
2826 stats.cumulativeLost = statistics.cumulative_lost;
2827 stats.extendedMax = statistics.extended_max_sequence_number;
2828 stats.jitterSamples = statistics.jitter;
niklase@google.com470e71d2011-07-07 08:21:25 +00002829
kwiberg55b97fe2016-01-28 05:22:45 -08002830 // --- RTT
2831 stats.rttMs = GetRTT(true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002832
kwiberg55b97fe2016-01-28 05:22:45 -08002833 // --- Data counters
niklase@google.com470e71d2011-07-07 08:21:25 +00002834
kwiberg55b97fe2016-01-28 05:22:45 -08002835 size_t bytesSent(0);
2836 uint32_t packetsSent(0);
2837 size_t bytesReceived(0);
2838 uint32_t packetsReceived(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00002839
kwiberg55b97fe2016-01-28 05:22:45 -08002840 if (statistician) {
2841 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
2842 }
wu@webrtc.org822fbd82013-08-15 23:38:54 +00002843
kwiberg55b97fe2016-01-28 05:22:45 -08002844 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
2845 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
 2846                 "GetRTPStatistics() failed to retrieve RTP data counters =>"
2847 " output will not be complete");
2848 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002849
kwiberg55b97fe2016-01-28 05:22:45 -08002850 stats.bytesSent = bytesSent;
2851 stats.packetsSent = packetsSent;
2852 stats.bytesReceived = bytesReceived;
2853 stats.packetsReceived = packetsReceived;
niklase@google.com470e71d2011-07-07 08:21:25 +00002854
kwiberg55b97fe2016-01-28 05:22:45 -08002855 // --- Timestamps
2856 {
2857 rtc::CritScope lock(&ts_stats_lock_);
2858 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
2859 }
2860 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002861}
2862
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002863int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002864 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002865 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002866
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002867 if (enable) {
2868 if (redPayloadtype < 0 || redPayloadtype > 127) {
2869 _engineStatisticsPtr->SetLastError(
2870 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002871 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002872 return -1;
2873 }
2874
2875 if (SetRedPayloadType(redPayloadtype) < 0) {
2876 _engineStatisticsPtr->SetLastError(
2877 VE_CODEC_ERROR, kTraceError,
 2878          "SetREDStatus() failed to register RED in the ACM");
2879 return -1;
2880 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002881 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002882
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002883 if (audio_coding_->SetREDStatus(enable) != 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002884 _engineStatisticsPtr->SetLastError(
2885 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002886 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002887 return -1;
2888 }
2889 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002890}
2891
kwiberg55b97fe2016-01-28 05:22:45 -08002892int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
2893 enabled = audio_coding_->REDStatus();
2894 if (enabled) {
2895 int8_t payloadType = 0;
2896 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2897 _engineStatisticsPtr->SetLastError(
2898 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2899 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2900 "module");
2901 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002902 }
kwiberg55b97fe2016-01-28 05:22:45 -08002903 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002904 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002905 }
2906 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002907}
2908
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002909int Channel::SetCodecFECStatus(bool enable) {
2910 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2911 "Channel::SetCodecFECStatus()");
2912
2913 if (audio_coding_->SetCodecFEC(enable) != 0) {
2914 _engineStatisticsPtr->SetLastError(
2915 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2916 "SetCodecFECStatus() failed to set FEC state");
2917 return -1;
2918 }
2919 return 0;
2920}
2921
2922bool Channel::GetCodecFECStatus() {
2923 bool enabled = audio_coding_->CodecFEC();
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002924 return enabled;
2925}
2926
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002927void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2928 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002929 // If pacing is enabled we always store packets.
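  // When pacing is enabled the send-packet history is kept on independently
  // of NACK (see RegisterSenderCongestionControlObjects()), so only the
  // receive-side reordering threshold and the ACM NACK list are updated here.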
2930 if (!pacing_enabled_)
2931 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002932 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
2933 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002934 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002935 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002936 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002937 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002938}
2939
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002940// Called when we are missing one or more packets.
2941int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002942 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2943}
2944
kwiberg55b97fe2016-01-28 05:22:45 -08002945uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
2946 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2947 "Channel::Demultiplex()");
2948 _audioFrame.CopyFrom(audioFrame);
2949 _audioFrame.id_ = _channelId;
2950 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002951}
2952
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002953void Channel::Demultiplex(const int16_t* audio_data,
xians@webrtc.org8fff1f02013-07-31 16:27:42 +00002954 int sample_rate,
Peter Kastingdce40cf2015-08-24 14:52:23 -07002955 size_t number_of_frames,
Peter Kasting69558702016-01-12 16:26:35 -08002956 size_t number_of_channels) {
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002957 CodecInst codec;
2958 GetSendCodec(codec);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002959
Alejandro Luebscdfe20b2015-09-23 12:49:12 -07002960 // Never upsample or upmix the capture signal here. This should be done at the
2961 // end of the send chain.
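  // Example (illustrative numbers): capturing at 48000 Hz stereo for a
  // 16000 Hz mono codec gives min(16000, 48000) = 16000 Hz and
  // min(2, 1) = 1 channel, so the frame is downmixed and downsampled here,
  // while the opposite case is left as-is and converted later in the chain.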
2962 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
2963 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
2964 RemixAndResample(audio_data, number_of_frames, number_of_channels,
2965 sample_rate, &input_resampler_, &_audioFrame);
xians@webrtc.org2f84afa2013-07-31 16:23:37 +00002966}
2967
kwiberg55b97fe2016-01-28 05:22:45 -08002968uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
2969 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2970 "Channel::PrepareEncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002971
kwiberg55b97fe2016-01-28 05:22:45 -08002972 if (_audioFrame.samples_per_channel_ == 0) {
2973 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2974 "Channel::PrepareEncodeAndSend() invalid audio frame");
2975 return 0xFFFFFFFF;
2976 }
2977
2978 if (channel_state_.Get().input_file_playing) {
2979 MixOrReplaceAudioWithFile(mixingFrequency);
2980 }
2981
2982 bool is_muted = Mute(); // Cache locally as Mute() takes a lock.
2983 if (is_muted) {
2984 AudioFrameOperations::Mute(_audioFrame);
2985 }
2986
2987 if (channel_state_.Get().input_external_media) {
2988 rtc::CritScope cs(&_callbackCritSect);
2989 const bool isStereo = (_audioFrame.num_channels_ == 2);
2990 if (_inputExternalMediaCallbackPtr) {
2991 _inputExternalMediaCallbackPtr->Process(
2992 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
2993 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
2994 isStereo);
niklase@google.com470e71d2011-07-07 08:21:25 +00002995 }
kwiberg55b97fe2016-01-28 05:22:45 -08002996 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002997
kwiberg55b97fe2016-01-28 05:22:45 -08002998 if (_includeAudioLevelIndication) {
2999 size_t length =
3000 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
andrew@webrtc.org21299d42014-05-14 19:00:59 +00003001 if (is_muted) {
kwiberg55b97fe2016-01-28 05:22:45 -08003002 rms_level_.ProcessMuted(length);
3003 } else {
3004 rms_level_.Process(_audioFrame.data_, length);
niklase@google.com470e71d2011-07-07 08:21:25 +00003005 }
kwiberg55b97fe2016-01-28 05:22:45 -08003006 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003007
kwiberg55b97fe2016-01-28 05:22:45 -08003008 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003009}
3010
kwiberg55b97fe2016-01-28 05:22:45 -08003011uint32_t Channel::EncodeAndSend() {
3012 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3013 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003014
kwiberg55b97fe2016-01-28 05:22:45 -08003015 assert(_audioFrame.num_channels_ <= 2);
3016 if (_audioFrame.samples_per_channel_ == 0) {
3017 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3018 "Channel::EncodeAndSend() invalid audio frame");
3019 return 0xFFFFFFFF;
3020 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003021
kwiberg55b97fe2016-01-28 05:22:45 -08003022 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00003023
kwiberg55b97fe2016-01-28 05:22:45 -08003024  // --- Add 10 ms of raw (PCM) audio data to the encoder.
niklase@google.com470e71d2011-07-07 08:21:25 +00003025
kwiberg55b97fe2016-01-28 05:22:45 -08003026 // The ACM resamples internally.
3027 _audioFrame.timestamp_ = _timeStamp;
3028 // This call will trigger AudioPacketizationCallback::SendData if encoding
3029 // is done and payload is ready for packetization and transmission.
3030 // Otherwise, it will return without invoking the callback.
3031 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3032 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3033 "Channel::EncodeAndSend() ACM encoding failed");
3034 return 0xFFFFFFFF;
3035 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003036
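  // Advance the RTP timestamp by one frame's worth of samples at the frame's
  // sample rate; e.g. a 10 ms frame at 16000 Hz advances it by 160
  // (illustrative numbers).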
kwiberg55b97fe2016-01-28 05:22:45 -08003037 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3038 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003039}
3040
Minyue2013aec2015-05-13 14:14:42 +02003041void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003042 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003043 Channel* channel = associate_send_channel_.channel();
3044 if (channel && channel->ChannelId() == channel_id) {
3045 // If this channel is associated with a send channel of the specified
3046 // Channel ID, disassociate with it.
3047 ChannelOwner ref(NULL);
3048 associate_send_channel_ = ref;
3049 }
3050}
3051
kwiberg55b97fe2016-01-28 05:22:45 -08003052int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3053 VoEMediaProcess& processObject) {
3054 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3055 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003056
kwiberg55b97fe2016-01-28 05:22:45 -08003057 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003058
kwiberg55b97fe2016-01-28 05:22:45 -08003059 if (kPlaybackPerChannel == type) {
3060 if (_outputExternalMediaCallbackPtr) {
3061 _engineStatisticsPtr->SetLastError(
3062 VE_INVALID_OPERATION, kTraceError,
3063 "Channel::RegisterExternalMediaProcessing() "
3064 "output external media already enabled");
3065 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003066 }
kwiberg55b97fe2016-01-28 05:22:45 -08003067 _outputExternalMediaCallbackPtr = &processObject;
3068 _outputExternalMedia = true;
3069 } else if (kRecordingPerChannel == type) {
3070 if (_inputExternalMediaCallbackPtr) {
3071 _engineStatisticsPtr->SetLastError(
3072 VE_INVALID_OPERATION, kTraceError,
3073 "Channel::RegisterExternalMediaProcessing() "
 3074          "input external media already enabled");
3075 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003076 }
kwiberg55b97fe2016-01-28 05:22:45 -08003077 _inputExternalMediaCallbackPtr = &processObject;
3078 channel_state_.SetInputExternalMedia(true);
3079 }
3080 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003081}
3082
kwiberg55b97fe2016-01-28 05:22:45 -08003083int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3084 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3085 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003086
kwiberg55b97fe2016-01-28 05:22:45 -08003087 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003088
kwiberg55b97fe2016-01-28 05:22:45 -08003089 if (kPlaybackPerChannel == type) {
3090 if (!_outputExternalMediaCallbackPtr) {
3091 _engineStatisticsPtr->SetLastError(
3092 VE_INVALID_OPERATION, kTraceWarning,
3093 "Channel::DeRegisterExternalMediaProcessing() "
3094 "output external media already disabled");
3095 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003096 }
kwiberg55b97fe2016-01-28 05:22:45 -08003097 _outputExternalMedia = false;
3098 _outputExternalMediaCallbackPtr = NULL;
3099 } else if (kRecordingPerChannel == type) {
3100 if (!_inputExternalMediaCallbackPtr) {
3101 _engineStatisticsPtr->SetLastError(
3102 VE_INVALID_OPERATION, kTraceWarning,
3103 "Channel::DeRegisterExternalMediaProcessing() "
3104 "input external media already disabled");
3105 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003106 }
kwiberg55b97fe2016-01-28 05:22:45 -08003107 channel_state_.SetInputExternalMedia(false);
3108 _inputExternalMediaCallbackPtr = NULL;
3109 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003110
kwiberg55b97fe2016-01-28 05:22:45 -08003111 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003112}
3113
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003114int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003115 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3116 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003117
kwiberg55b97fe2016-01-28 05:22:45 -08003118 if (channel_state_.Get().playing) {
3119 _engineStatisticsPtr->SetLastError(
3120 VE_INVALID_OPERATION, kTraceError,
3121 "Channel::SetExternalMixing() "
3122 "external mixing cannot be changed while playing.");
3123 return -1;
3124 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003125
kwiberg55b97fe2016-01-28 05:22:45 -08003126 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003127
kwiberg55b97fe2016-01-28 05:22:45 -08003128 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003129}
3130
kwiberg55b97fe2016-01-28 05:22:45 -08003131int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3132 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003133}
3134
wu@webrtc.org24301a62013-12-13 19:17:43 +00003135void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3136 audio_coding_->GetDecodingCallStatistics(stats);
3137}
3138
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003139bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3140 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003141 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003142 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003143 return false;
3144 }
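  // Convert the microsecond average maintained by UpdatePacketDelay() back to
  // milliseconds with rounding (+500), then add the most recent inter-packet
  // delay estimate to form the reported jitter buffer delay.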
kwiberg55b97fe2016-01-28 05:22:45 -08003145 *jitter_buffer_delay_ms =
3146 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003147 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003148 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003149}
3150
solenberg358057b2015-11-27 10:46:42 -08003151uint32_t Channel::GetDelayEstimate() const {
3152 int jitter_buffer_delay_ms = 0;
3153 int playout_buffer_delay_ms = 0;
3154 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3155 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3156}
3157
deadbeef74375882015-08-13 12:09:10 -07003158int Channel::LeastRequiredDelayMs() const {
3159 return audio_coding_->LeastRequiredDelayMs();
3160}
3161
kwiberg55b97fe2016-01-28 05:22:45 -08003162int Channel::SetMinimumPlayoutDelay(int delayMs) {
3163 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3164 "Channel::SetMinimumPlayoutDelay()");
3165 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3166 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3167 _engineStatisticsPtr->SetLastError(
3168 VE_INVALID_ARGUMENT, kTraceError,
3169 "SetMinimumPlayoutDelay() invalid min delay");
3170 return -1;
3171 }
3172 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3173 _engineStatisticsPtr->SetLastError(
3174 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3175 "SetMinimumPlayoutDelay() failed to set min playout delay");
3176 return -1;
3177 }
3178 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003179}
3180
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003181int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003182 uint32_t playout_timestamp_rtp = 0;
3183 {
tommi31fc21f2016-01-21 10:37:37 -08003184 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003185 playout_timestamp_rtp = playout_timestamp_rtp_;
3186 }
kwiberg55b97fe2016-01-28 05:22:45 -08003187 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003188 _engineStatisticsPtr->SetLastError(
3189 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3190 "GetPlayoutTimestamp() failed to retrieve timestamp");
3191 return -1;
3192 }
deadbeef74375882015-08-13 12:09:10 -07003193 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003194 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003195}
3196
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003197int Channel::SetInitTimestamp(unsigned int timestamp) {
3198 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003199 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003200 if (channel_state_.Get().sending) {
3201 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3202 "SetInitTimestamp() already sending");
3203 return -1;
3204 }
3205 _rtpRtcpModule->SetStartTimestamp(timestamp);
3206 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003207}
3208
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003209int Channel::SetInitSequenceNumber(short sequenceNumber) {
3210 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3211 "Channel::SetInitSequenceNumber()");
3212 if (channel_state_.Get().sending) {
3213 _engineStatisticsPtr->SetLastError(
3214 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3215 return -1;
3216 }
3217 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3218 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003219}
3220
kwiberg55b97fe2016-01-28 05:22:45 -08003221int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3222 RtpReceiver** rtp_receiver) const {
3223 *rtpRtcpModule = _rtpRtcpModule.get();
3224 *rtp_receiver = rtp_receiver_.get();
3225 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003226}
3227
andrew@webrtc.orge59a0ac2012-05-08 17:12:40 +00003228// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3229// a shared helper.
kwiberg55b97fe2016-01-28 05:22:45 -08003230int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
kwibergb7f89d62016-02-17 10:04:18 -08003231 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
kwiberg55b97fe2016-01-28 05:22:45 -08003232 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003233
kwiberg55b97fe2016-01-28 05:22:45 -08003234 {
3235 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003236
kwiberg55b97fe2016-01-28 05:22:45 -08003237 if (_inputFilePlayerPtr == NULL) {
3238 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
 3239                   "Channel::MixOrReplaceAudioWithFile() file player"
 3240                   " doesn't exist");
3241 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003242 }
3243
kwiberg55b97fe2016-01-28 05:22:45 -08003244 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3245 mixingFrequency) == -1) {
3246 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3247 "Channel::MixOrReplaceAudioWithFile() file mixing "
3248 "failed");
3249 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003250 }
kwiberg55b97fe2016-01-28 05:22:45 -08003251 if (fileSamples == 0) {
3252 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3253 "Channel::MixOrReplaceAudioWithFile() file is ended");
3254 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003255 }
kwiberg55b97fe2016-01-28 05:22:45 -08003256 }
3257
3258 assert(_audioFrame.samples_per_channel_ == fileSamples);
3259
3260 if (_mixFileWithMicrophone) {
3261 // Currently file stream is always mono.
3262 // TODO(xians): Change the code when FilePlayer supports real stereo.
3263 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3264 1, fileSamples);
3265 } else {
3266 // Replace ACM audio with file.
3267 // Currently file stream is always mono.
3268 // TODO(xians): Change the code when FilePlayer supports real stereo.
3269 _audioFrame.UpdateFrame(
3270 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3271 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3272 }
3273 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003274}
3275
kwiberg55b97fe2016-01-28 05:22:45 -08003276int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3277 assert(mixingFrequency <= 48000);
niklase@google.com470e71d2011-07-07 08:21:25 +00003278
kwibergb7f89d62016-02-17 10:04:18 -08003279 std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
kwiberg55b97fe2016-01-28 05:22:45 -08003280 size_t fileSamples(0);
niklase@google.com470e71d2011-07-07 08:21:25 +00003281
kwiberg55b97fe2016-01-28 05:22:45 -08003282 {
3283 rtc::CritScope cs(&_fileCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003284
kwiberg55b97fe2016-01-28 05:22:45 -08003285 if (_outputFilePlayerPtr == NULL) {
3286 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3287 "Channel::MixAudioWithFile() file mixing failed");
3288 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003289 }
3290
kwiberg55b97fe2016-01-28 05:22:45 -08003291 // We should get the frequency we ask for.
3292 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3293 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3294 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3295 "Channel::MixAudioWithFile() file mixing failed");
3296 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003297 }
kwiberg55b97fe2016-01-28 05:22:45 -08003298 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003299
kwiberg55b97fe2016-01-28 05:22:45 -08003300 if (audioFrame.samples_per_channel_ == fileSamples) {
3301 // Currently file stream is always mono.
3302 // TODO(xians): Change the code when FilePlayer supports real stereo.
3303 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3304 fileSamples);
3305 } else {
3306 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3307 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3308 ") != "
3309 "fileSamples(%" PRIuS ")",
3310 audioFrame.samples_per_channel_, fileSamples);
3311 return -1;
3312 }
3313
3314 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003315}
3316
deadbeef74375882015-08-13 12:09:10 -07003317void Channel::UpdatePlayoutTimestamp(bool rtcp) {
3318 uint32_t playout_timestamp = 0;
3319
kwiberg55b97fe2016-01-28 05:22:45 -08003320 if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) {
deadbeef74375882015-08-13 12:09:10 -07003321    // This can happen if this channel has not received any RTP packets. In
 3322    // this case, NetEq is not capable of computing a playout timestamp.
3323 return;
3324 }
3325
3326 uint16_t delay_ms = 0;
3327 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003328 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003329 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3330 " delay from the ADM");
3331 _engineStatisticsPtr->SetLastError(
3332 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3333 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3334 return;
3335 }
3336
3337 jitter_buffer_playout_timestamp_ = playout_timestamp;
3338
3339 // Remove the playout delay.
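  // |delay_ms| is in milliseconds while |playout_timestamp| is in RTP ticks,
  // so scale by ticks per millisecond. Example (illustrative numbers): at a
  // 48000 Hz playout frequency, a 60 ms device delay corresponds to
  // 60 * 48 = 2880 ticks.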
3340 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
3341
kwiberg55b97fe2016-01-28 05:22:45 -08003342 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003343 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
3344 playout_timestamp);
3345
3346 {
tommi31fc21f2016-01-21 10:37:37 -08003347 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003348 if (rtcp) {
3349 playout_timestamp_rtcp_ = playout_timestamp;
3350 } else {
3351 playout_timestamp_rtp_ = playout_timestamp;
3352 }
3353 playout_delay_ms_ = delay_ms;
3354 }
3355}
3356
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003357// Called for incoming RTP packets after successful RTP header parsing.
3358void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3359 uint16_t sequence_number) {
kwiberg55b97fe2016-01-28 05:22:45 -08003360 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003361 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3362 rtp_timestamp, sequence_number);
niklase@google.com470e71d2011-07-07 08:21:25 +00003363
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003364 // Get frequency of last received payload
wu@webrtc.org94454b72014-06-05 20:34:08 +00003365 int rtp_receive_frequency = GetPlayoutFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +00003366
turaj@webrtc.org167b6df2013-12-13 21:05:07 +00003367 // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
3368 // every incoming packet.
kwiberg55b97fe2016-01-28 05:22:45 -08003369 uint32_t timestamp_diff_ms =
3370 (rtp_timestamp - jitter_buffer_playout_timestamp_) /
3371 (rtp_receive_frequency / 1000);
henrik.lundin@webrtc.orgd6692992014-03-20 12:04:09 +00003372 if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
3373 timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3374 // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
3375 // timestamp, the resulting difference is negative, but is set to zero.
3376 // This can happen when a network glitch causes a packet to arrive late,
3377 // and during long comfort noise periods with clock drift.
3378 timestamp_diff_ms = 0;
3379 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003380
kwiberg55b97fe2016-01-28 05:22:45 -08003381 uint16_t packet_delay_ms =
3382 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
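  // Example (illustrative numbers): consecutive 20 ms packets on a 16000 Hz
  // RTP clock are 320 ticks apart, giving packet_delay_ms = 320 / 16 = 20.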
niklase@google.com470e71d2011-07-07 08:21:25 +00003383
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003384 _previousTimestamp = rtp_timestamp;
niklase@google.com470e71d2011-07-07 08:21:25 +00003385
kwiberg55b97fe2016-01-28 05:22:45 -08003386 if (timestamp_diff_ms == 0)
3387 return;
niklase@google.com470e71d2011-07-07 08:21:25 +00003388
deadbeef74375882015-08-13 12:09:10 -07003389 {
tommi31fc21f2016-01-21 10:37:37 -08003390 rtc::CritScope lock(&video_sync_lock_);
niklase@google.com470e71d2011-07-07 08:21:25 +00003391
deadbeef74375882015-08-13 12:09:10 -07003392 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3393 _recPacketDelayMs = packet_delay_ms;
3394 }
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003395
deadbeef74375882015-08-13 12:09:10 -07003396 if (_average_jitter_buffer_delay_us == 0) {
3397 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3398 return;
3399 }
3400
3401 // Filter average delay value using exponential filter (alpha is
3402 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3403 // risk of rounding error) and compensate for it in GetDelayEstimate()
3404 // later.
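    // Sketch of the recurrence (same computation as the statement below):
    //   avg_us <- (7 * avg_us + 1000 * timestamp_diff_ms + 500) / 8
    // i.e. an exponential moving average with weight 1/8 on the new sample,
    // kept in microseconds; GetDelayEstimate() later divides by 1000 (with
    // +500 for rounding) to convert back to milliseconds.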
kwiberg55b97fe2016-01-28 05:22:45 -08003405 _average_jitter_buffer_delay_us =
3406 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3407 8;
deadbeef74375882015-08-13 12:09:10 -07003408 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003409}
3410
kwiberg55b97fe2016-01-28 05:22:45 -08003411void Channel::RegisterReceiveCodecsToRTPModule() {
3412 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3413 "Channel::RegisterReceiveCodecsToRTPModule()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003414
kwiberg55b97fe2016-01-28 05:22:45 -08003415 CodecInst codec;
3416 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
niklase@google.com470e71d2011-07-07 08:21:25 +00003417
kwiberg55b97fe2016-01-28 05:22:45 -08003418 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3419 // Open up the RTP/RTCP receiver for all supported codecs
3420 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3421 (rtp_receiver_->RegisterReceivePayload(
3422 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3423 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3424 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3425 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3426 " to register %s (%d/%d/%" PRIuS
3427 "/%d) to RTP/RTCP "
3428 "receiver",
3429 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3430 codec.rate);
3431 } else {
3432 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3433 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3434 "(%d/%d/%" PRIuS
3435 "/%d) has been added to the RTP/RTCP "
3436 "receiver",
3437 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3438 codec.rate);
niklase@google.com470e71d2011-07-07 08:21:25 +00003439 }
kwiberg55b97fe2016-01-28 05:22:45 -08003440 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003441}
3442
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003443// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003444int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003445 CodecInst codec;
3446 bool found_red = false;
3447
3448 // Get default RED settings from the ACM database
3449 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3450 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003451 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003452 if (!STR_CASE_CMP(codec.plname, "RED")) {
3453 found_red = true;
3454 break;
3455 }
3456 }
3457
3458 if (!found_red) {
3459 _engineStatisticsPtr->SetLastError(
3460 VE_CODEC_ERROR, kTraceError,
3461 "SetRedPayloadType() RED is not supported");
3462 return -1;
3463 }
3464
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003465 codec.pltype = red_payload_type;
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003466 if (audio_coding_->RegisterSendCodec(codec) < 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003467 _engineStatisticsPtr->SetLastError(
3468 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3469 "SetRedPayloadType() RED registration in ACM module failed");
3470 return -1;
3471 }
3472
3473 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3474 _engineStatisticsPtr->SetLastError(
3475 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3476 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3477 return -1;
3478 }
3479 return 0;
3480}
3481
kwiberg55b97fe2016-01-28 05:22:45 -08003482int Channel::SetSendRtpHeaderExtension(bool enable,
3483 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003484 unsigned char id) {
3485 int error = 0;
3486 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3487 if (enable) {
3488 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3489 }
3490 return error;
3491}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003492
wu@webrtc.org94454b72014-06-05 20:34:08 +00003493int32_t Channel::GetPlayoutFrequency() {
3494 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
 3495  CodecInst current_receive_codec;
 3496  if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
 3497    if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
3498 // Even though the actual sampling rate for G.722 audio is
3499 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3500 // 8,000 Hz because that value was erroneously assigned in
3501 // RFC 1890 and must remain unchanged for backward compatibility.
3502 playout_frequency = 8000;
 3503    } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
3504 // We are resampling Opus internally to 32,000 Hz until all our
3505 // DSP routines can operate at 48,000 Hz, but the RTP clock
3506 // rate for the Opus payload format is standardized to 48,000 Hz,
3507 // because that is the maximum supported decoding sampling rate.
3508 playout_frequency = 48000;
3509 }
3510 }
3511 return playout_frequency;
3512}
3513
Minyue2013aec2015-05-13 14:14:42 +02003514int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003515 RtcpMode method = _rtpRtcpModule->RTCP();
3516 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003517 return 0;
3518 }
3519 std::vector<RTCPReportBlock> report_blocks;
3520 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003521
3522 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003523 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003524 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003525 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003526 Channel* channel = associate_send_channel_.channel();
3527 // Tries to get RTT from an associated channel. This is important for
3528 // receive-only channels.
3529 if (channel) {
3530 // To prevent infinite recursion and deadlock, calling GetRTT of
3531 // associate channel should always use "false" for argument:
3532 // |allow_associate_channel|.
3533 rtt = channel->GetRTT(false);
3534 }
3535 }
3536 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003537 }
3538
3539 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3540 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3541 for (; it != report_blocks.end(); ++it) {
3542 if (it->remoteSSRC == remoteSSRC)
3543 break;
3544 }
3545 if (it == report_blocks.end()) {
3546 // We have not received packets with SSRC matching the report blocks.
3547 // To calculate RTT we try with the SSRC of the first report block.
3548 // This is very important for send-only channels where we don't know
3549 // the SSRC of the other end.
3550 remoteSSRC = report_blocks[0].remoteSSRC;
3551 }
Minyue2013aec2015-05-13 14:14:42 +02003552
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003553 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003554 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003555 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003556 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3557 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003558 return 0;
3559 }
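  // Only |rtt| (the first out-parameter) is used; the average, minimum and
  // maximum round-trip times are fetched but ignored here.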
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003560 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003561}
3562
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003563} // namespace voe
3564} // namespace webrtc