blob: ccf2455d2424559b8f9ec75fede1c02403137acc [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
henrika@webrtc.org2919e952012-01-31 08:45:03 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000011#include "webrtc/voice_engine/channel.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000012
Henrik Lundin64dad832015-05-11 12:44:23 +020013#include <algorithm>
Tommif888bb52015-12-12 01:37:01 +010014#include <utility>
Henrik Lundin64dad832015-05-11 12:44:23 +020015
Ivo Creusenae856f22015-09-17 16:30:16 +020016#include "webrtc/base/checks.h"
tommi31fc21f2016-01-21 10:37:37 -080017#include "webrtc/base/criticalsection.h"
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +000018#include "webrtc/base/format_macros.h"
pbosad856222015-11-27 09:48:36 -080019#include "webrtc/base/logging.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010020#include "webrtc/base/thread_checker.h"
wu@webrtc.org94454b72014-06-05 20:34:08 +000021#include "webrtc/base/timeutils.h"
minyue@webrtc.orge509f942013-09-12 17:03:00 +000022#include "webrtc/common.h"
Henrik Lundin64dad832015-05-11 12:44:23 +020023#include "webrtc/config.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000024#include "webrtc/modules/audio_device/include/audio_device.h"
25#include "webrtc/modules/audio_processing/include/audio_processing.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010026#include "webrtc/modules/include/module_common_types.h"
Stefan Holmerb86d4e42015-12-07 10:26:18 +010027#include "webrtc/modules/pacing/packet_router.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010028#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
29#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
30#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
wu@webrtc.org822fbd82013-08-15 23:38:54 +000031#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
Henrik Kjellanderff761fb2015-11-04 08:31:52 +010032#include "webrtc/modules/utility/include/audio_frame_operations.h"
33#include "webrtc/modules/utility/include/process_thread.h"
Henrik Kjellander98f53512015-10-28 18:17:40 +010034#include "webrtc/system_wrappers/include/trace.h"
turaj@webrtc.org6388c3e2013-02-12 21:42:18 +000035#include "webrtc/voice_engine/include/voe_base.h"
36#include "webrtc/voice_engine/include/voe_external_media.h"
37#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
38#include "webrtc/voice_engine/output_mixer.h"
39#include "webrtc/voice_engine/statistics.h"
40#include "webrtc/voice_engine/transmit_mixer.h"
41#include "webrtc/voice_engine/utility.h"
niklase@google.com470e71d2011-07-07 08:21:25 +000042
andrew@webrtc.org50419b02012-11-14 19:07:54 +000043namespace webrtc {
44namespace voe {
niklase@google.com470e71d2011-07-07 08:21:25 +000045
// Attenuation, in dB, applied when playing out telephone-event (DTMF) tones.
const int kTelephoneEventAttenuationdB = 10;
47
// Thread-safe indirection for transport-feedback callbacks: the RTP module
// can hold a stable TransportFeedbackObserver* while the real observer is
// attached/detached at runtime via SetTransportFeedbackObserver().
//
// Threading contract (enforced by RTC_DCHECKs): the setter runs on the
// construction thread, AddPacket() on the pacer thread, and
// OnTransportFeedback() on the network thread. |crit_| guards the observer
// pointer across all three.
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    // Bind the pacer/network checkers to the first thread that calls in,
    // not to the thread that constructs this object.
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  // Installs (or clears, when passed nullptr) the downstream observer.
  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver. Notifications arriving while no
  // observer is installed are silently dropped.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 bool was_paced) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, was_paced);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
85
86class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
87 public:
88 TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
89 pacer_thread_.DetachFromThread();
90 }
91
92 void SetSequenceNumberAllocator(
93 TransportSequenceNumberAllocator* seq_num_allocator) {
94 RTC_DCHECK(thread_checker_.CalledOnValidThread());
95 rtc::CritScope lock(&crit_);
96 seq_num_allocator_ = seq_num_allocator;
97 }
98
99 // Implements TransportSequenceNumberAllocator.
100 uint16_t AllocateSequenceNumber() override {
101 RTC_DCHECK(pacer_thread_.CalledOnValidThread());
102 rtc::CritScope lock(&crit_);
103 if (!seq_num_allocator_)
104 return 0;
105 return seq_num_allocator_->AllocateSequenceNumber();
106 }
107
108 private:
109 rtc::CriticalSection crit_;
110 rtc::ThreadChecker thread_checker_;
111 rtc::ThreadChecker pacer_thread_;
112 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
113};
114
115class RtpPacketSenderProxy : public RtpPacketSender {
116 public:
kwiberg55b97fe2016-01-28 05:22:45 -0800117 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
Stefan Holmerb86d4e42015-12-07 10:26:18 +0100118
119 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
120 RTC_DCHECK(thread_checker_.CalledOnValidThread());
121 rtc::CritScope lock(&crit_);
122 rtp_packet_sender_ = rtp_packet_sender;
123 }
124
125 // Implements RtpPacketSender.
126 void InsertPacket(Priority priority,
127 uint32_t ssrc,
128 uint16_t sequence_number,
129 int64_t capture_time_ms,
130 size_t bytes,
131 bool retransmission) override {
132 rtc::CritScope lock(&crit_);
133 if (rtp_packet_sender_) {
134 rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
135 capture_time_ms, bytes, retransmission);
136 }
137 }
138
139 private:
140 rtc::ThreadChecker thread_checker_;
141 rtc::CriticalSection crit_;
142 RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
143};
144
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
// Note: the latest statistics are stored in the |rtcp| member rather than in
// the inherited RtcpStatistics base; readers should use |rtcp|.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  // Most recently reported RTCP receive statistics.
  RtcpStatistics rtcp;
  // Largest jitter value observed across all reports so far.
  uint32_t max_jitter;
};
153
154// Statistics callback, called at each generation of a new RTCP report block.
155class StatisticsProxy : public RtcpStatisticsCallback {
156 public:
tommi31fc21f2016-01-21 10:37:37 -0800157 StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000158 virtual ~StatisticsProxy() {}
159
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000160 void StatisticsUpdated(const RtcpStatistics& statistics,
161 uint32_t ssrc) override {
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000162 if (ssrc != ssrc_)
163 return;
164
tommi31fc21f2016-01-21 10:37:37 -0800165 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000166 stats_.rtcp = statistics;
167 if (statistics.jitter > stats_.max_jitter) {
168 stats_.max_jitter = statistics.jitter;
169 }
170 }
171
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000172 void CNameChanged(const char* cname, uint32_t ssrc) override {}
pbos@webrtc.orgce4e9a32014-12-18 13:50:16 +0000173
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000174 ChannelStatistics GetStats() {
tommi31fc21f2016-01-21 10:37:37 -0800175 rtc::CritScope cs(&stats_lock_);
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000176 return stats_;
177 }
178
179 private:
180 // StatisticsUpdated calls are triggered from threads in the RTP module,
181 // while GetStats calls can be triggered from the public voice engine API,
182 // hence synchronization is needed.
tommi31fc21f2016-01-21 10:37:37 -0800183 rtc::CriticalSection stats_lock_;
sprang@webrtc.org54ae4ff2013-12-19 13:26:02 +0000184 const uint32_t ssrc_;
185 ChannelStatistics stats_;
186};
187
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000188class VoERtcpObserver : public RtcpBandwidthObserver {
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000189 public:
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000190 explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
191 virtual ~VoERtcpObserver() {}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000192
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000193 void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
194 // Not used for Voice Engine.
195 }
196
kjellander@webrtc.org14665ff2015-03-04 12:58:35 +0000197 void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
198 int64_t rtt,
199 int64_t now_ms) override {
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000200 // TODO(mflodman): Do we need to aggregate reports here or can we jut send
201 // what we get? I.e. do we ever get multiple reports bundled into one RTCP
202 // report for VoiceEngine?
203 if (report_blocks.empty())
204 return;
205
206 int fraction_lost_aggregate = 0;
207 int total_number_of_packets = 0;
208
209 // If receiving multiple report blocks, calculate the weighted average based
210 // on the number of packets a report refers to.
211 for (ReportBlockList::const_iterator block_it = report_blocks.begin();
212 block_it != report_blocks.end(); ++block_it) {
213 // Find the previous extended high sequence number for this remote SSRC,
214 // to calculate the number of RTP packets this report refers to. Ignore if
215 // we haven't seen this SSRC before.
216 std::map<uint32_t, uint32_t>::iterator seq_num_it =
217 extended_max_sequence_number_.find(block_it->sourceSSRC);
218 int number_of_packets = 0;
219 if (seq_num_it != extended_max_sequence_number_.end()) {
220 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
221 }
222 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
223 total_number_of_packets += number_of_packets;
224
225 extended_max_sequence_number_[block_it->sourceSSRC] =
226 block_it->extendedHighSeqNum;
227 }
228 int weighted_fraction_lost = 0;
229 if (total_number_of_packets > 0) {
kwiberg55b97fe2016-01-28 05:22:45 -0800230 weighted_fraction_lost =
231 (fraction_lost_aggregate + total_number_of_packets / 2) /
232 total_number_of_packets;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000233 }
234 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000235 }
236
237 private:
238 Channel* owner_;
mflodman@webrtc.org0a7d4ee2015-02-17 12:57:14 +0000239 // Maps remote side ssrc to extended highest sequence number received.
240 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +0000241};
242
kwiberg55b97fe2016-01-28 05:22:45 -0800243int32_t Channel::SendData(FrameType frameType,
244 uint8_t payloadType,
245 uint32_t timeStamp,
246 const uint8_t* payloadData,
247 size_t payloadSize,
248 const RTPFragmentationHeader* fragmentation) {
249 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
250 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
251 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
252 frameType, payloadType, timeStamp, payloadSize, fragmentation);
niklase@google.com470e71d2011-07-07 08:21:25 +0000253
kwiberg55b97fe2016-01-28 05:22:45 -0800254 if (_includeAudioLevelIndication) {
255 // Store current audio level in the RTP/RTCP module.
256 // The level will be used in combination with voice-activity state
257 // (frameType) to add an RTP header extension
258 _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
259 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000260
kwiberg55b97fe2016-01-28 05:22:45 -0800261 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
262 // packetization.
263 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
264 if (_rtpRtcpModule->SendOutgoingData(
265 (FrameType&)frameType, payloadType, timeStamp,
266 // Leaving the time when this frame was
267 // received from the capture device as
268 // undefined for voice for now.
269 -1, payloadData, payloadSize, fragmentation) == -1) {
270 _engineStatisticsPtr->SetLastError(
271 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
272 "Channel::SendData() failed to send data to RTP/RTCP module");
273 return -1;
274 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000275
kwiberg55b97fe2016-01-28 05:22:45 -0800276 _lastLocalTimeStamp = timeStamp;
277 _lastPayloadType = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000278
kwiberg55b97fe2016-01-28 05:22:45 -0800279 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000280}
281
kwiberg55b97fe2016-01-28 05:22:45 -0800282int32_t Channel::InFrameType(FrameType frame_type) {
283 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
284 "Channel::InFrameType(frame_type=%d)", frame_type);
niklase@google.com470e71d2011-07-07 08:21:25 +0000285
kwiberg55b97fe2016-01-28 05:22:45 -0800286 rtc::CritScope cs(&_callbackCritSect);
287 _sendFrameType = (frame_type == kAudioFrameSpeech);
288 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000289}
290
kwiberg55b97fe2016-01-28 05:22:45 -0800291int32_t Channel::OnRxVadDetected(int vadDecision) {
292 rtc::CritScope cs(&_callbackCritSect);
293 if (_rxVadObserverPtr) {
294 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
295 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000296
kwiberg55b97fe2016-01-28 05:22:45 -0800297 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000298}
299
stefan1d8a5062015-10-02 03:39:33 -0700300bool Channel::SendRtp(const uint8_t* data,
301 size_t len,
302 const PacketOptions& options) {
kwiberg55b97fe2016-01-28 05:22:45 -0800303 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
304 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000305
kwiberg55b97fe2016-01-28 05:22:45 -0800306 rtc::CritScope cs(&_callbackCritSect);
wu@webrtc.orgfb648da2013-10-18 21:10:51 +0000307
kwiberg55b97fe2016-01-28 05:22:45 -0800308 if (_transportPtr == NULL) {
309 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
310 "Channel::SendPacket() failed to send RTP packet due to"
311 " invalid transport object");
312 return false;
313 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000314
kwiberg55b97fe2016-01-28 05:22:45 -0800315 uint8_t* bufferToSendPtr = (uint8_t*)data;
316 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000317
kwiberg55b97fe2016-01-28 05:22:45 -0800318 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
319 std::string transport_name =
320 _externalTransport ? "external transport" : "WebRtc sockets";
321 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
322 "Channel::SendPacket() RTP transmission using %s failed",
323 transport_name.c_str());
324 return false;
325 }
326 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000327}
328
kwiberg55b97fe2016-01-28 05:22:45 -0800329bool Channel::SendRtcp(const uint8_t* data, size_t len) {
330 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
331 "Channel::SendRtcp(len=%" PRIuS ")", len);
niklase@google.com470e71d2011-07-07 08:21:25 +0000332
kwiberg55b97fe2016-01-28 05:22:45 -0800333 rtc::CritScope cs(&_callbackCritSect);
334 if (_transportPtr == NULL) {
335 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
336 "Channel::SendRtcp() failed to send RTCP packet"
337 " due to invalid transport object");
338 return false;
339 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000340
kwiberg55b97fe2016-01-28 05:22:45 -0800341 uint8_t* bufferToSendPtr = (uint8_t*)data;
342 size_t bufferLength = len;
niklase@google.com470e71d2011-07-07 08:21:25 +0000343
kwiberg55b97fe2016-01-28 05:22:45 -0800344 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
345 if (n < 0) {
346 std::string transport_name =
347 _externalTransport ? "external transport" : "WebRtc sockets";
348 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
349 "Channel::SendRtcp() transmission using %s failed",
350 transport_name.c_str());
351 return false;
352 }
353 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +0000354}
355
// Called by the RTP receiver when the remote SSRC changes; propagates the
// new SSRC to the RTP/RTCP module.
void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}
363
// Called by the RTP receiver when a contributing source (CSRC) is added or
// removed. Only traced; no further action is taken.
void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}
369
// Called by the RTP receiver when an unknown payload type is seen. Builds a
// CodecInst from the RTP parameters and registers it as a receive codec with
// the ACM. Returns 0 on success, -1 if the ACM rejects the codec.
int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  // Zero-initialized; this also guarantees plname stays NUL-terminated after
  // the strncpy below, which copies at most RTP_PAYLOAD_NAME_SIZE - 1 chars.
  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  // Look up the codec in the ACM to borrow its default packet size; the RTP
  // header does not carry this information.
  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
405
kwiberg55b97fe2016-01-28 05:22:45 -0800406int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
407 size_t payloadSize,
408 const WebRtcRTPHeader* rtpHeader) {
409 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
410 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
411 ","
412 " payloadType=%u, audioChannel=%" PRIuS ")",
413 payloadSize, rtpHeader->header.payloadType,
414 rtpHeader->type.Audio.channel);
niklase@google.com470e71d2011-07-07 08:21:25 +0000415
kwiberg55b97fe2016-01-28 05:22:45 -0800416 if (!channel_state_.Get().playing) {
417 // Avoid inserting into NetEQ when we are not playing. Count the
418 // packet as discarded.
419 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
420 "received packet is discarded since playing is not"
421 " activated");
422 _numberOfDiscardedPackets++;
niklase@google.com470e71d2011-07-07 08:21:25 +0000423 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -0800424 }
425
426 // Push the incoming payload (parsed and ready for decoding) into the ACM
427 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
428 0) {
429 _engineStatisticsPtr->SetLastError(
430 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
431 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
432 return -1;
433 }
434
435 // Update the packet delay.
436 UpdatePacketDelay(rtpHeader->header.timestamp,
437 rtpHeader->header.sequenceNumber);
438
439 int64_t round_trip_time = 0;
440 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
441 NULL);
442
443 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
444 if (!nack_list.empty()) {
445 // Can't use nack_list.data() since it's not supported by all
446 // compilers.
447 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
448 }
449 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000450}
451
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000452bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +0000453 size_t rtp_packet_length) {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +0000454 RTPHeader header;
455 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
456 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
457 "IncomingPacket invalid RTP header");
458 return false;
459 }
460 header.payload_type_frequency =
461 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
462 if (header.payload_type_frequency < 0)
463 return false;
464 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
465}
466
// Produces one 10 ms frame of playout audio for the mixer. Pulls decoded PCM
// from the ACM, then runs the receive-side pipeline in order: RX VAD, RX APM,
// sink callback, volume scaling, panning, file mixing, external media
// processing, file recording, level measurement, and finally elapsed/NTP
// timestamp bookkeeping. Returns 0 on success, -1 if decoding failed (in
// which case the frame must not be mixed). The ordering of these stages is
// significant; do not reorder.
int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
  // Log playout to the event log, keyed by our local SSRC.
  if (event_log_) {
    unsigned int ssrc;
    RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
    event_log_->LogAudioPlayout(ssrc);
  }
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
      -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return -1;
  }

  // Receive-side voice activity detection, if enabled.
  if (_RxVadDetection) {
    UpdateRxVadDetection(*audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  // Snapshot the channel state once; used for both APM and file mixing below.
  ChannelState::State state = channel_state_.Get();

  // Receive-side audio processing (APM), if enabled. A failure here is a
  // programming error, hence the assert.
  if (state.rx_apm_is_enabled) {
    int err = rx_audioproc_->ProcessStream(audioFrame);
    if (err) {
      LOG(LS_ERROR) << "ProcessStream() error: " << err;
      assert(false);
    }
  }

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  // Copy the volume settings under lock, then apply them outside the lock.
  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  // (skipped when the gain is within ~1% of unity to avoid needless work).
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && _outputFileRecorderPtr) {
      _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  _outputAudioLevel.ComputeLevel(*audioFrame);

  // Latch the RTP timestamp of the first frame that carries one; used as the
  // reference point for elapsed-time computation below.
  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetPlayoutFrequency() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return 0;
}
612
kwiberg55b97fe2016-01-28 05:22:45 -0800613int32_t Channel::NeededFrequency(int32_t id) const {
614 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
615 "Channel::NeededFrequency(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000616
kwiberg55b97fe2016-01-28 05:22:45 -0800617 int highestNeeded = 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000618
kwiberg55b97fe2016-01-28 05:22:45 -0800619 // Determine highest needed receive frequency
620 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
niklase@google.com470e71d2011-07-07 08:21:25 +0000621
kwiberg55b97fe2016-01-28 05:22:45 -0800622 // Return the bigger of playout and receive frequency in the ACM.
623 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
624 highestNeeded = audio_coding_->PlayoutFrequency();
625 } else {
626 highestNeeded = receiveFrequency;
627 }
628
629 // Special case, if we're playing a file on the playout side
630 // we take that frequency into consideration as well
631 // This is not needed on sending side, since the codec will
632 // limit the spectrum anyway.
633 if (channel_state_.Get().output_file_playing) {
634 rtc::CritScope cs(&_fileCritSect);
635 if (_outputFilePlayerPtr) {
636 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
637 highestNeeded = _outputFilePlayerPtr->Frequency();
638 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000639 }
kwiberg55b97fe2016-01-28 05:22:45 -0800640 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000641
kwiberg55b97fe2016-01-28 05:22:45 -0800642 return (highestNeeded);
niklase@google.com470e71d2011-07-07 08:21:25 +0000643}
644
// Factory: allocates a new Channel and returns it through |channel|.
// Returns 0 on success, -1 on allocation failure.
int32_t Channel::CreateChannel(Channel*& channel,
                               int32_t channelId,
                               uint32_t instanceId,
                               RtcEventLog* const event_log,
                               const Config& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, event_log, config);
  // NOTE(review): with a throwing operator new this branch is unreachable;
  // it only matters in builds where new can return NULL.
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}
663
// FileCallback notification for file playout progress. Currently a stub.
void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
671
// FileCallback notification for file recording progress. Currently a stub.
void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}
679
kwiberg55b97fe2016-01-28 05:22:45 -0800680void Channel::PlayFileEnded(int32_t id) {
681 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
682 "Channel::PlayFileEnded(id=%d)", id);
niklase@google.com470e71d2011-07-07 08:21:25 +0000683
kwiberg55b97fe2016-01-28 05:22:45 -0800684 if (id == _inputFilePlayerId) {
685 channel_state_.SetInputFilePlaying(false);
686 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
687 "Channel::PlayFileEnded() => input file player module is"
niklase@google.com470e71d2011-07-07 08:21:25 +0000688 " shutdown");
kwiberg55b97fe2016-01-28 05:22:45 -0800689 } else if (id == _outputFilePlayerId) {
690 channel_state_.SetOutputFilePlaying(false);
691 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
692 "Channel::PlayFileEnded() => output file player module is"
693 " shutdown");
694 }
695}
696
// FileCallback notification that the output file recorder finished; clears
// the recording flag. Only the output recorder id is expected here.
void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
710
// Constructs a voice channel: wires up the RTP receiver/payload registry,
// creates the ACM (NetEq config taken from |config|), the RTP/RTCP module
// (with optional pacing proxies) and the receive-side audio processing.
// Note: |event_log| is stored unowned; it must outlive this channel.
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 RtcEventLog* const event_log,
                 const Config& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_(event_log),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(
          new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      // The channel itself acts as both RTP data and feedback callback.
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      _inputFilePlayerPtr(NULL),
      _outputFilePlayerPtr(NULL),
      _outputFileRecorderPtr(NULL),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add it's own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      // Engine-owned collaborators; filled in later by SetEngineInformation().
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _rxVadObserverPtr(NULL),
      _oldVadDecision(-1),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      _average_jitter_buffer_delay_us(0),
      _previousTimestamp(0),
      _recPacketDelayMs(20),
      _RxVadDetection(false),
      _rxAgcIsEnabled(false),
      _rxNsIsEnabled(false),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.Get<VoicePacing>().enabled),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config;
  acm_config.id = VoEModuleId(instanceId, channelId);
  if (config.Get<NetEqCapacityConfig>().enabled) {
    // Clamping the buffer capacity at 20 packets. While going lower will
    // probably work, it makes little sense.
    acm_config.neteq_config.max_packets_in_buffer =
        std::max(20, config.Get<NetEqCapacityConfig>().capacity);
  }
  acm_config.neteq_config.enable_fast_accelerate =
      config.Get<NetEqFastAccelerate>().enabled;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  // Pacing proxies are only hooked up when pacing is enabled in |config|.
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = event_log;

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  // Sending is enabled explicitly later via StartSend().
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());

  // Receive-side audio processing (rx AGC/NS); experimental AGC disabled.
  Config audioproc_config;
  audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
  rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
830
// Tears the channel down in a strict order: unhook statistics/media
// callbacks, stop send/playout, destroy file players/recorders under the
// file lock, then de-register ACM callbacks and finally remove the RTP/RTCP
// module from the process thread. Modules are destroyed afterwards by the
// members' destructors.
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (_inputFilePlayerPtr) {
      _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _inputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
      _inputFilePlayerPtr = NULL;
    }
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }
    if (_outputFileRecorderPtr) {
      _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
      _outputFileRecorderPtr->StopRecording();
      FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
      _outputFileRecorderPtr = NULL;
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
886
// Second-stage initialization. Requires SetEngineInformation() to have been
// called first. Registers the RTP/RTCP module with the process thread,
// initializes the ACM receiver, enables RTCP, hooks up permanent callbacks,
// registers every ACM-supported codec with the RTP receiver (picking PCMU
// mono as the default send codec and wiring telephone-event/CN/RED), and
// finally configures receive-side NS/AGC. Returns 0 on success, -1 on any
// fatal failure (per-codec registration failures are only warnings).
int32_t Channel::Init() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Init()");

  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic schedulation)

  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization

  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that, the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exists), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    // Comfort noise must be registered on both send and receive sides.
    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
          (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
          (_rtpRtcpModule->RegisterSendPayload(codec) == -1)) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
#ifdef WEBRTC_CODEC_RED
    // Register RED to the receiving side of the ACM.
    // We will not receive an OnInitializeDecoder() callback for RED.
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      if (audio_coding_->RegisterReceiveCodec(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register RED (%d/%d) "
                     "correctly",
                     codec.pltype, codec.plfreq);
      }
    }
#endif
  }

  // --- Receive-side audio processing defaults (NS level, AGC mode).
  if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
    LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
    LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
    return -1;
  }

  return 0;
}
1012
kwiberg55b97fe2016-01-28 05:22:45 -08001013int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1014 OutputMixer& outputMixer,
1015 voe::TransmitMixer& transmitMixer,
1016 ProcessThread& moduleProcessThread,
1017 AudioDeviceModule& audioDeviceModule,
1018 VoiceEngineObserver* voiceEngineObserver,
1019 rtc::CriticalSection* callbackCritSect) {
1020 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1021 "Channel::SetEngineInformation()");
1022 _engineStatisticsPtr = &engineStatistics;
1023 _outputMixerPtr = &outputMixer;
1024 _transmitMixerPtr = &transmitMixer,
1025 _moduleProcessThreadPtr = &moduleProcessThread;
1026 _audioDeviceModulePtr = &audioDeviceModule;
1027 _voiceEngineObserverPtr = voiceEngineObserver;
1028 _callbackCritSectPtr = callbackCritSect;
1029 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001030}
1031
kwiberg55b97fe2016-01-28 05:22:45 -08001032int32_t Channel::UpdateLocalTimeStamp() {
1033 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1034 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001035}
1036
// Installs (or clears, when |sink| is null) the audio sink that receives
// this channel's playout audio. Takes ownership; guarded by the callback
// lock since the sink is used from another thread.
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}
1041
// Starts playout for this channel: registers it with the output mixer
// (unless external mixing is used), marks the channel as playing and
// re-attaches any active file player to the mixer. Idempotent; returns 0
// on success or if already playing, -1 on mixer failure.
int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as candidates for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1065
// Stops playout: removes the channel from the output mixer (unless external
// mixing), clears the playing flag and resets the output level meter.
// Idempotent; returns 0 on success or if not playing, -1 on mixer failure.
int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as candidates for mixing
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}
1088
// Starts sending on this channel. Restores the sequence number saved by
// StopSend() (must happen before |sending| flips to true), then enables
// media sending in the RTP/RTCP module. On failure the sending state is
// rolled back. Idempotent; returns 0 if already sending.
int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    // NOTE(review): the lock is taken only for the rollback of the sending
    // flag here — presumably to synchronize with callback threads; confirm.
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
1115
kwiberg55b97fe2016-01-28 05:22:45 -08001116int32_t Channel::StopSend() {
1117 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1118 "Channel::StopSend()");
1119 if (!channel_state_.Get().sending) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001120 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001121 }
1122 channel_state_.SetSending(false);
1123
1124 // Store the sequence number to be able to pick up the same sequence for
1125 // the next StartSend(). This is needed for restarting device, otherwise
1126 // it might cause libSRTP to complain about packets being replayed.
1127 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1128 // CL is landed. See issue
1129 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1130 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1131
1132 // Reset sending SSRC and sequence number and triggers direct transmission
1133 // of RTCP BYE
1134 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1135 _engineStatisticsPtr->SetLastError(
1136 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1137 "StartSend() RTP/RTCP failed to stop sending");
1138 }
Peter Boström3dd5d1d2016-02-25 16:56:48 +01001139 _rtpRtcpModule->SetSendingMediaStatus(false);
kwiberg55b97fe2016-01-28 05:22:45 -08001140
1141 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001142}
1143
kwiberg55b97fe2016-01-28 05:22:45 -08001144int32_t Channel::StartReceiving() {
1145 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1146 "Channel::StartReceiving()");
1147 if (channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001148 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001149 }
1150 channel_state_.SetReceiving(true);
1151 _numberOfDiscardedPackets = 0;
1152 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001153}
1154
kwiberg55b97fe2016-01-28 05:22:45 -08001155int32_t Channel::StopReceiving() {
1156 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1157 "Channel::StopReceiving()");
1158 if (!channel_state_.Get().receiving) {
niklase@google.com470e71d2011-07-07 08:21:25 +00001159 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001160 }
1161
1162 channel_state_.SetReceiving(false);
1163 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001164}
1165
// Registers an engine observer for this channel (unowned pointer; caller
// keeps ownership). Fails with VE_INVALID_OPERATION if one is already set.
// Guarded by the callback lock.
int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}
1180
// Removes the engine observer. Deregistering when none is set is only a
// warning (still returns 0). Guarded by the callback lock.
int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}
1195
1196int32_t Channel::GetSendCodec(CodecInst& codec) {
kwiberg1fd4a4a2015-11-03 11:20:50 -08001197 auto send_codec = audio_coding_->SendCodec();
1198 if (send_codec) {
1199 codec = *send_codec;
1200 return 0;
1201 }
1202 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00001203}
1204
// Copies the codec of the last received audio into |codec|; forwards the
// ACM's return value (0 on success, negative on failure).
int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}
1208
// Registers |codec| as the send codec with both the ACM and the RTP/RTCP
// module, and updates the audio packet size. If the RTP module rejects the
// payload (e.g. the payload type is taken), it is de-registered and
// registration is retried once. Returns 0 on success, -1 on failure.
int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (audio_coding_->RegisterSendCodec(codec) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // Retry once after clearing the previous mapping for this payload type.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to set audio packet size");
    return -1;
  }

  return 0;
}
1237
// Forwards the target send bitrate (in bits per second) to the ACM.
// The ACM's return value is ignored here.
void Channel::SetBitRate(int bitrate_bps) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->SetBitRate(bitrate_bps);
}
1243
// Feeds a new loss report into the network predictor and pushes the
// smoothed loss rate to the ACM. |fraction_lost| and the predictor output
// are in the RTCP 0-255 fraction scale; the ACM expects a 0-100 percentage.
void Channel::OnIncomingFractionLoss(int fraction_lost) {
  network_predictor_->UpdatePacketLossRate(fraction_lost);
  uint8_t average_fraction_loss = network_predictor_->GetLossRate();

  // Normalizes rate to 0 - 100.
  if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
      0) {
    assert(false);  // This should not happen.
  }
}
1254
kwiberg55b97fe2016-01-28 05:22:45 -08001255int32_t Channel::SetVADStatus(bool enableVAD,
1256 ACMVADMode mode,
1257 bool disableDTX) {
1258 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1259 "Channel::SetVADStatus(mode=%d)", mode);
1260 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1261 // To disable VAD, DTX must be disabled too
1262 disableDTX = ((enableVAD == false) ? true : disableDTX);
1263 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) {
1264 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1265 kTraceError,
1266 "SetVADStatus() failed to set VAD");
1267 return -1;
1268 }
1269 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001270}
1271
// Reads the current VAD/DTX configuration from the ACM. Note the ACM
// reports DTX *enabled*; the result is inverted to match this API's
// "disabled" out-parameter. Returns 0 on success, -1 on ACM failure.
int32_t Channel::GetVADStatus(bool& enabledVAD,
                              ACMVADMode& mode,
                              bool& disabledDTX) {
  if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "GetVADStatus() failed to get VAD status");
    return -1;
  }
  // ACM returns "DTX enabled"; callers expect "DTX disabled".
  disabledDTX = !disabledDTX;
  return 0;
}
1284
// Sets (or, when codec.pltype == -1, removes) the receive payload-type
// mapping for |codec| in both the RTP receiver and the ACM. Not allowed
// while the channel is playing or receiving. Registration failures are
// retried once after de-registering the conflicting entry. Returns 0 on
// success, -1 on failure.
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }
  if (channel_state_.Get().receiving) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_LISTENING, kTraceError,
        "SetRecPayloadType() unable to set PT while listening");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    rtp_payload_registry_->ReceivePayloadType(
        rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
        (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  if (rtp_receiver_->RegisterReceivePayload(
          codec.plname, codec.pltype, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate) != 0) {
    // First attempt to register failed => de-register and try again
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(
            codec.plname, codec.pltype, codec.plfreq, codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
    // Retry once after removing any previous mapping for this payload type.
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}
1355
// Looks up the receive payload type registered for the codec described by
// |codec| (name/frequency/channels/rate) and writes it to codec.pltype.
// Returns 0 on success, -1 if no mapping exists.
int32_t Channel::GetRecPayloadType(CodecInst& codec) {
  int8_t payloadType(-1);
  if (rtp_payload_registry_->ReceivePayloadType(
          codec.plname, codec.plfreq, codec.channels,
          (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "GetRecPayloadType() failed to retrieve RX payload type");
    return -1;
  }
  codec.pltype = payloadType;
  return 0;
}
1369
// Sets the payload type used for sending comfort noise (CN) at the given
// frequency. Only kFreq16000Hz and kFreq32000Hz are mapped here; any other
// value leaves samplingFreqHz at -1 and the ACM codec lookup is expected to
// fail with an error. Registers the modified CN codec with both the ACM and
// the RTP/RTCP module (with one de-register/retry on conflict).
int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCNPayloadType()");

  CodecInst codec;
  int32_t samplingFreqHz(-1);
  const size_t kMono = 1;
  if (frequency == kFreq32000Hz)
    samplingFreqHz = 32000;
  else if (frequency == kFreq16000Hz)
    samplingFreqHz = 16000;

  if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to retrieve default CN codec "
        "settings");
    return -1;
  }

  // Modify the payload type (must be set to dynamic range)
  codec.pltype = type;

  if (audio_coding_->RegisterSendCodec(codec) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to register CN to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // Retry once after clearing the previous mapping for this payload type.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
          "module");
      return -1;
    }
  }
  return 0;
}
1412
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001413int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001414 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001415 "Channel::SetOpusMaxPlaybackRate()");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001416
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001417 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001418 _engineStatisticsPtr->SetLastError(
1419 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgadee8f92014-09-03 12:28:06 +00001420 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
minyue@webrtc.org6aac93b2014-08-12 08:13:33 +00001421 return -1;
1422 }
1423 return 0;
1424}
1425
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001426int Channel::SetOpusDtx(bool enable_dtx) {
1427 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1428 "Channel::SetOpusDtx(%d)", enable_dtx);
Minyue Li092041c2015-05-11 12:19:35 +02001429 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001430 : audio_coding_->DisableOpusDtx();
1431 if (ret != 0) {
kwiberg55b97fe2016-01-28 05:22:45 -08001432 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1433 kTraceError, "SetOpusDtx() failed");
minyue@webrtc.org9b2e1142015-03-13 09:38:07 +00001434 return -1;
1435 }
1436 return 0;
1437}
1438
kwiberg55b97fe2016-01-28 05:22:45 -08001439int32_t Channel::RegisterExternalTransport(Transport& transport) {
1440 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00001441 "Channel::RegisterExternalTransport()");
1442
kwiberg55b97fe2016-01-28 05:22:45 -08001443 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00001444
kwiberg55b97fe2016-01-28 05:22:45 -08001445 if (_externalTransport) {
1446 _engineStatisticsPtr->SetLastError(
1447 VE_INVALID_OPERATION, kTraceError,
1448 "RegisterExternalTransport() external transport already enabled");
1449 return -1;
1450 }
1451 _externalTransport = true;
1452 _transportPtr = &transport;
1453 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001454}
1455
kwiberg55b97fe2016-01-28 05:22:45 -08001456int32_t Channel::DeRegisterExternalTransport() {
1457 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1458 "Channel::DeRegisterExternalTransport()");
niklase@google.com470e71d2011-07-07 08:21:25 +00001459
kwiberg55b97fe2016-01-28 05:22:45 -08001460 rtc::CritScope cs(&_callbackCritSect);
xians@webrtc.org83661f52011-11-25 10:58:15 +00001461
kwiberg55b97fe2016-01-28 05:22:45 -08001462 if (!_transportPtr) {
1463 _engineStatisticsPtr->SetLastError(
1464 VE_INVALID_OPERATION, kTraceWarning,
1465 "DeRegisterExternalTransport() external transport already "
1466 "disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00001467 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08001468 }
1469 _externalTransport = false;
1470 _transportPtr = NULL;
1471 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1472 "DeRegisterExternalTransport() all transport is disabled");
1473 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00001474}
1475
// Entry point for an incoming RTP packet: parses the header, updates receive
// statistics and the incoming payload type, then dispatches the packet via
// ReceivePacket(). Returns 0 if the packet was accepted, -1 on any
// parse/dispatch failure.
// NOTE(review): |packet_time| is accepted but unused in this path.
int32_t Channel::ReceivedRTPPacket(const int8_t* data,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  // Resolve the clock rate for this payload type; a negative frequency means
  // the payload type is unknown and the packet is dropped.
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  // Order matters: in-order status is computed first because the statistics
  // update below needs the retransmission classification derived from it.
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
1503
1504bool Channel::ReceivePacket(const uint8_t* packet,
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001505 size_t packet_length,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001506 const RTPHeader& header,
1507 bool in_order) {
minyue@webrtc.org456f0142015-01-23 11:58:42 +00001508 if (rtp_payload_registry_->IsRtx(header)) {
1509 return HandleRtxPacket(packet, packet_length, header);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001510 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001511 const uint8_t* payload = packet + header.headerLength;
pkasting@chromium.org4591fbd2014-11-20 22:28:14 +00001512 assert(packet_length >= header.headerLength);
1513 size_t payload_length = packet_length - header.headerLength;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001514 PayloadUnion payload_specific;
1515 if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001516 &payload_specific)) {
1517 return false;
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001518 }
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001519 return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1520 payload_specific, in_order);
1521}
1522
// Unwraps an RTX (retransmission) packet: strips the RTX header, restores the
// original RTP packet into the |restored_packet_| buffer and feeds it back
// through OnRecoveredPacket(). Returns true if the recovered packet was
// handled successfully, false if the packet is malformed or dropped.
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  // |restored_packet_| is a single reusable buffer; a nested RTX packet
  // arriving while it is occupied would corrupt it, so drop such packets.
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  // Mark the buffer busy for the duration of the reentrant handling below,
  // then release it regardless of the outcome.
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}
1551
1552bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1553 StreamStatistician* statistician =
1554 rtp_receive_statistics_->GetStatistician(header.ssrc);
1555 if (!statistician)
1556 return false;
1557 return statistician->IsPacketInOrder(header.sequenceNumber);
niklase@google.com470e71d2011-07-07 08:21:25 +00001558}
1559
stefan@webrtc.org48df3812013-11-08 15:18:52 +00001560bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1561 bool in_order) const {
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001562 // Retransmissions are handled separately if RTX is enabled.
1563 if (rtp_payload_registry_->RtxEnabled())
1564 return false;
1565 StreamStatistician* statistician =
1566 rtp_receive_statistics_->GetStatistician(header.ssrc);
1567 if (!statistician)
1568 return false;
1569 // Check if this is a retransmission.
pkasting@chromium.org16825b12015-01-12 21:51:21 +00001570 int64_t min_rtt = 0;
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00001571 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
kwiberg55b97fe2016-01-28 05:22:45 -08001572 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
wu@webrtc.org822fbd82013-08-15 23:38:54 +00001573}
1574
// Entry point for an incoming RTCP packet. Forwards the packet to the
// RTP/RTCP module for parsing and, once both a valid RTT and remote NTP
// information are available, updates |ntp_estimator_| (used for
// capture-time/NTP alignment). Always returns 0; an invalid packet only
// records a warning via the engine statistics.
int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::IncomingRTPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }
  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    // |ntp_estimator_| is shared with other threads; guard the update.
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
1609
// Starts local playout of |fileName| on this channel's output path.
// |loop|            - restart the file when it reaches the end.
// |format|          - file format of |fileName| (see FileFormats).
// |startPosition| / |stopPosition| - playback window within the file.
// |volumeScaling|   - scale factor applied to the file's audio.
// |codecInst|       - optional codec describing the file content.
// Returns 0 on success, -1 on failure (engine last-error set).
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy any previous player before creating a new one.
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    // Progress notifications are not used (interval 0 == disabled).
    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }
  // Must be called after _fileCritSect is released; see the deadlock
  // rationale documented in RegisterFilePlayingToMixer().
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1672
// Starts local playout from a caller-provided in-memory |stream| instead of
// a named file. See StartPlayingFileLocally(const char*, ...) for the
// meaning of the remaining parameters. Returns 0 on success, -1 on failure
// (engine last-error set).
int Channel::StartPlayingFileLocally(InStream* stream,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(format=%d,"
               " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileLocally() NULL as input stream");
    return -1;
  }

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy the old instance
    if (_outputFilePlayerPtr) {
      _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
    }

    // Create the instance
    _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (_outputFilePlayerPtr == NULL) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format isnot correct");
      return -1;
    }

    // Progress notifications are not used (interval 0 == disabled).
    const uint32_t notificationTime(0);

    if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                               volumeScaling, notificationTime,
                                               stopPosition, codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                         "StartPlayingFile() failed to "
                                         "start file playout");
      _outputFilePlayerPtr->StopPlayingFile();
      FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
      _outputFilePlayerPtr = NULL;
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  // Must be called after _fileCritSect is released; see the deadlock
  // rationale documented in RegisterFilePlayingToMixer().
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
1741
// Stops local file playout, destroys the output file player, and removes
// this channel from anonymous mixing. Returns 0 if nothing was playing or on
// success, -1 on failure (engine last-error set).
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
      // NOTE(review): the error code says "recording" but this is playback.
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixibilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as"
        "file in the mixer");
    return -1;
  }

  return 0;
}
1777
kwiberg55b97fe2016-01-28 05:22:45 -08001778int Channel::IsPlayingFileLocally() const {
1779 return channel_state_.Get().output_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001780}
1781
// Adds this channel as an anonymous participant in the output mixer so the
// currently playing output file is heard. A no-op (success) unless both
// playout and output-file playing are active. On mixer failure the file
// player is torn down and -1 is returned; otherwise returns 0.
int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    // Roll back: clear the playing state and destroy the player.
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    _outputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    _outputFilePlayerPtr = NULL;
    return -1;
  }

  return 0;
}
1809
// Starts playing |fileName| as this channel's microphone (input) signal.
// Parameters mirror StartPlayingFileLocally(). If a file is already playing
// as microphone this is a warning-level no-op that returns 0.
// Returns 0 on success, -1 on failure (engine last-error set).
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          bool loop,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
               "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() filePlayer is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
    return -1;
  }

  // Progress notifications are not used (interval 0 == disabled).
  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(
          fileName, loop, startPosition, volumeScaling, notificationTime,
          stopPosition, (const CodecInst*)codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFile() failed to start file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }
  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1869
// Starts playing a caller-provided in-memory |stream| as this channel's
// microphone signal. See StartPlayingFileAsMicrophone(const char*, ...) for
// common behavior. Returns 0 on success (or if already playing), -1 on
// failure (engine last-error set).
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone NULL as input stream");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (_inputFilePlayerPtr) {
    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
  }

  // Create the instance
  _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                     (const FileFormats)format);

  if (_inputFilePlayerPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format isnot correct");
    return -1;
  }

  // Progress notifications are not used (interval 0 == disabled).
  const uint32_t notificationTime(0);

  if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
                                            volumeScaling, notificationTime,
                                            stopPosition, codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    _inputFilePlayerPtr->StopPlayingFile();
    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    _inputFilePlayerPtr = NULL;
    return -1;
  }

  _inputFilePlayerPtr->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
1934
// Stops playing a file as microphone input and tears down the input file
// player. Returns 0 if nothing was playing or on success, -1 on failure.
int Channel::StopPlayingFileAsMicrophone() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileAsMicrophone()");

  rtc::CritScope cs(&_fileCritSect);

  if (!channel_state_.Get().input_file_playing) {
    return 0;
  }

  if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
    // NOTE(review): the error code says "recording" but this is playback.
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopPlayingFile() could not stop playing");
    return -1;
  }
  _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
  FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
  _inputFilePlayerPtr = NULL;
  channel_state_.SetInputFilePlaying(false);

  return 0;
}
1958
1959int Channel::IsPlayingFileAsMicrophone() const {
1960 return channel_state_.Get().input_file_playing;
niklase@google.com470e71d2011-07-07 08:21:25 +00001961}
1962
// Starts recording the playout (far-end) signal to |fileName|.
// |codecInst| selects the recording codec; NULL means 16 kHz L16 PCM. The
// output file format is derived from the codec name (see inline comment).
// If a recording is already active this is a warning-level no-op returning 0.
// Returns 0 on success, -1 on failure (engine last-error set).
// NOTE(review): this overload accepts 1 or 2 channels while the OutStream
// overload requires mono — confirm the asymmetry is intended.
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Select the file format from the codec: no codec -> raw 16 kHz PCM;
  // L16/PCMU/PCMA -> WAV; anything else -> compressed container.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }
  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2029
// Starts recording the playout signal to a caller-provided OutStream.
// Unlike the file-name overload, |codecInst| must describe a mono codec
// (channels == 1). Returns 0 on success (or if already recording), -1 on
// failure (engine last-error set).
int Channel::StartRecordingPlayout(OutStream* stream,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout()");

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  if (codecInst != NULL && codecInst->channels != 1) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  // Select the file format from the codec: no codec -> raw 16 kHz PCM;
  // L16/PCMU/PCMA -> WAV; anything else -> compressed container.
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (_outputFileRecorderPtr) {
    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
  }

  _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (_outputFileRecorderPtr == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format isnot correct");
    return -1;
  }

  if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
                                                      notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartRecordingPlayout() failed to "
                                       "start file recording");
    _outputFileRecorderPtr->StopRecording();
    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
    _outputFileRecorderPtr = NULL;
    return -1;
  }

  _outputFileRecorderPtr->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
2096
kwiberg55b97fe2016-01-28 05:22:45 -08002097int Channel::StopRecordingPlayout() {
2098 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2099 "Channel::StopRecordingPlayout()");
2100
2101 if (!_outputFileRecording) {
2102 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2103 "StopRecordingPlayout() isnot recording");
2104 return -1;
2105 }
2106
2107 rtc::CritScope cs(&_fileCritSect);
2108
2109 if (_outputFileRecorderPtr->StopRecording() != 0) {
2110 _engineStatisticsPtr->SetLastError(
2111 VE_STOP_RECORDING_FAILED, kTraceError,
2112 "StopRecording() could not stop recording");
2113 return (-1);
2114 }
2115 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2116 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2117 _outputFileRecorderPtr = NULL;
2118 _outputFileRecording = false;
2119
2120 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002121}
2122
kwiberg55b97fe2016-01-28 05:22:45 -08002123void Channel::SetMixWithMicStatus(bool mix) {
2124 rtc::CritScope cs(&_fileCritSect);
2125 _mixFileWithMicrophone = mix;
niklase@google.com470e71d2011-07-07 08:21:25 +00002126}
2127
kwiberg55b97fe2016-01-28 05:22:45 -08002128int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2129 int8_t currentLevel = _outputAudioLevel.Level();
2130 level = static_cast<int32_t>(currentLevel);
2131 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002132}
2133
kwiberg55b97fe2016-01-28 05:22:45 -08002134int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2135 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2136 level = static_cast<int32_t>(currentLevel);
2137 return 0;
2138}
2139
solenberg1c2af8e2016-03-24 10:36:00 -07002140int Channel::SetInputMute(bool enable) {
kwiberg55b97fe2016-01-28 05:22:45 -08002141 rtc::CritScope cs(&volume_settings_critsect_);
2142 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002143 "Channel::SetMute(enable=%d)", enable);
solenberg1c2af8e2016-03-24 10:36:00 -07002144 input_mute_ = enable;
kwiberg55b97fe2016-01-28 05:22:45 -08002145 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002146}
2147
solenberg1c2af8e2016-03-24 10:36:00 -07002148bool Channel::InputMute() const {
kwiberg55b97fe2016-01-28 05:22:45 -08002149 rtc::CritScope cs(&volume_settings_critsect_);
solenberg1c2af8e2016-03-24 10:36:00 -07002150 return input_mute_;
niklase@google.com470e71d2011-07-07 08:21:25 +00002151}
2152
kwiberg55b97fe2016-01-28 05:22:45 -08002153int Channel::SetOutputVolumePan(float left, float right) {
2154 rtc::CritScope cs(&volume_settings_critsect_);
2155 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002156 "Channel::SetOutputVolumePan()");
kwiberg55b97fe2016-01-28 05:22:45 -08002157 _panLeft = left;
2158 _panRight = right;
2159 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002160}
2161
kwiberg55b97fe2016-01-28 05:22:45 -08002162int Channel::GetOutputVolumePan(float& left, float& right) const {
2163 rtc::CritScope cs(&volume_settings_critsect_);
2164 left = _panLeft;
2165 right = _panRight;
2166 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002167}
2168
kwiberg55b97fe2016-01-28 05:22:45 -08002169int Channel::SetChannelOutputVolumeScaling(float scaling) {
2170 rtc::CritScope cs(&volume_settings_critsect_);
2171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00002172 "Channel::SetChannelOutputVolumeScaling()");
kwiberg55b97fe2016-01-28 05:22:45 -08002173 _outputGain = scaling;
2174 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002175}
2176
kwiberg55b97fe2016-01-28 05:22:45 -08002177int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2178 rtc::CritScope cs(&volume_settings_critsect_);
2179 scaling = _outputGain;
2180 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002181}
2182
// Sends an out-of-band DTMF event (RTP telephone-event payload) on this
// channel. Returns 0 on success, -1 if the channel is not sending or the
// RTP module rejects the event.
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendTelephoneEventOutband(...)");
  // Debug-only contract checks: event codes fit in 8 bits, durations in
  // 16 bits of milliseconds.
  RTC_DCHECK_LE(0, event);
  RTC_DCHECK_GE(255, event);
  RTC_DCHECK_LE(0, duration_ms);
  RTC_DCHECK_GE(65535, duration_ms);
  // Events can only be injected into an active send stream.
  if (!Sending()) {
    return -1;
  }
  if (_rtpRtcpModule->SendTelephoneEventOutband(
          event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_DTMF_FAILED, kTraceWarning,
        "SendTelephoneEventOutband() failed to send event");
    return -1;
  }
  return 0;
}
2202
// Registers |payload_type| as the send payload type for the
// "telephone-event" (DTMF) codec at an 8 kHz clock rate.
// Returns 0 on success, -1 if registration fails even after a retry.
int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendTelephoneEventPayloadType()");
  // RTP payload types are 7-bit values.
  RTC_DCHECK_LE(0, payload_type);
  RTC_DCHECK_GE(127, payload_type);
  CodecInst codec = {0};
  codec.plfreq = 8000;
  codec.pltype = payload_type;
  // "telephone-event" is 15 characters + NUL terminator = 16 bytes.
  memcpy(codec.plname, "telephone-event", 16);
  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    // A first failure may mean the payload type is already registered;
    // deregister it and retry once before reporting an error.
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendTelephoneEventPayloadType() failed to register send"
          "payload type");
      return -1;
    }
  }
  return 0;
}
2224
kwiberg55b97fe2016-01-28 05:22:45 -08002225int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2226 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2227 "Channel::UpdateRxVadDetection()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002228
kwiberg55b97fe2016-01-28 05:22:45 -08002229 int vadDecision = 1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002230
kwiberg55b97fe2016-01-28 05:22:45 -08002231 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002232
kwiberg55b97fe2016-01-28 05:22:45 -08002233 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2234 OnRxVadDetected(vadDecision);
2235 _oldVadDecision = vadDecision;
2236 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002237
kwiberg55b97fe2016-01-28 05:22:45 -08002238 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2239 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2240 vadDecision);
2241 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002242}
2243
kwiberg55b97fe2016-01-28 05:22:45 -08002244int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2245 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2246 "Channel::RegisterRxVadObserver()");
2247 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002248
kwiberg55b97fe2016-01-28 05:22:45 -08002249 if (_rxVadObserverPtr) {
2250 _engineStatisticsPtr->SetLastError(
2251 VE_INVALID_OPERATION, kTraceError,
2252 "RegisterRxVadObserver() observer already enabled");
2253 return -1;
2254 }
2255 _rxVadObserverPtr = &observer;
2256 _RxVadDetection = true;
2257 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002258}
2259
kwiberg55b97fe2016-01-28 05:22:45 -08002260int Channel::DeRegisterRxVadObserver() {
2261 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2262 "Channel::DeRegisterRxVadObserver()");
2263 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00002264
kwiberg55b97fe2016-01-28 05:22:45 -08002265 if (!_rxVadObserverPtr) {
2266 _engineStatisticsPtr->SetLastError(
2267 VE_INVALID_OPERATION, kTraceWarning,
2268 "DeRegisterRxVadObserver() observer already disabled");
niklase@google.com470e71d2011-07-07 08:21:25 +00002269 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002270 }
2271 _rxVadObserverPtr = NULL;
2272 _RxVadDetection = false;
2273 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002274}
2275
kwiberg55b97fe2016-01-28 05:22:45 -08002276int Channel::VoiceActivityIndicator(int& activity) {
2277 activity = _sendFrameType;
2278 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002279}
2280
2281#ifdef WEBRTC_VOICE_ENGINE_AGC
2282
// Enables/disables receive-side AGC on this channel's APM instance and
// selects its mode. Note: the mode is applied before the enable flag, and
// there is no rollback — if Enable() fails the new mode has already been
// set. Returns 0 on success, -1 on an invalid mode or an APM error.
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE mode enum to the APM GainControl mode.
  GainControl::Mode agcMode = kDefaultRxAgcMode;
  switch (mode) {
    case kAgcDefault:
      break;
    case kAgcUnchanged:
      // Keep whatever mode the APM currently uses.
      agcMode = rx_audioproc_->gain_control()->mode();
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
    default:
      _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
                                         "SetRxAgcStatus() invalid Agc mode");
      return -1;
  }

  if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
    return -1;
  }

  // The channel runs its receive-side APM when either AGC or NS is on.
  _rxAgcIsEnabled = enable;
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2323
kwiberg55b97fe2016-01-28 05:22:45 -08002324int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2325 bool enable = rx_audioproc_->gain_control()->is_enabled();
2326 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
niklase@google.com470e71d2011-07-07 08:21:25 +00002327
kwiberg55b97fe2016-01-28 05:22:45 -08002328 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002329
kwiberg55b97fe2016-01-28 05:22:45 -08002330 switch (agcMode) {
2331 case GainControl::kFixedDigital:
2332 mode = kAgcFixedDigital;
2333 break;
2334 case GainControl::kAdaptiveDigital:
2335 mode = kAgcAdaptiveDigital;
2336 break;
2337 default:
2338 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2339 "GetRxAgcStatus() invalid Agc mode");
2340 return -1;
2341 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002342
kwiberg55b97fe2016-01-28 05:22:45 -08002343 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002344}
2345
// Applies the receive-side AGC configuration (target level, compression
// gain, limiter) to the APM. The three settings are applied in sequence
// with no rollback: if a later call fails, the earlier settings remain in
// effect. Returns 0 on success, -1 on the first APM error.
int Channel::SetRxAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxAgcConfig()");

  if (rx_audioproc_->gain_control()->set_target_level_dbfs(
          config.targetLeveldBOv) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set target peak |level|"
        "(or envelope) of the Agc");
    return -1;
  }
  if (rx_audioproc_->gain_control()->set_compression_gain_db(
          config.digitalCompressionGaindB) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set the range in |gain| the"
        " digital compression stage may apply");
    return -1;
  }
  if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError,
        "SetRxAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
}
2376
kwiberg55b97fe2016-01-28 05:22:45 -08002377int Channel::GetRxAgcConfig(AgcConfig& config) {
2378 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2379 config.digitalCompressionGaindB =
2380 rx_audioproc_->gain_control()->compression_gain_db();
2381 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
niklase@google.com470e71d2011-07-07 08:21:25 +00002382
kwiberg55b97fe2016-01-28 05:22:45 -08002383 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002384}
2385
kwiberg55b97fe2016-01-28 05:22:45 -08002386#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
niklase@google.com470e71d2011-07-07 08:21:25 +00002387
2388#ifdef WEBRTC_VOICE_ENGINE_NR
2389
// Enables/disables receive-side noise suppression and selects its level.
// The level is applied before the enable flag, with no rollback if the
// second APM call fails. Returns 0 on success, -1 on an APM error.
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
               (int)mode);

  // Translate the VoE NS mode to an APM suppression level. Note that
  // kNsConference maps to the same level as kNsHighSuppression (kHigh).
  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      break;
    case kNsUnchanged:
      // Keep whatever level the APM currently uses.
      nsLevel = rx_audioproc_->noise_suppression()->level();
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
    return -1;
  }
  if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
    return -1;
  }

  // The channel runs its receive-side APM when either AGC or NS is on.
  _rxNsIsEnabled = enable;
  channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);

  return 0;
}
2435
kwiberg55b97fe2016-01-28 05:22:45 -08002436int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2437 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2438 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
niklase@google.com470e71d2011-07-07 08:21:25 +00002439
kwiberg55b97fe2016-01-28 05:22:45 -08002440 enabled = enable;
niklase@google.com470e71d2011-07-07 08:21:25 +00002441
kwiberg55b97fe2016-01-28 05:22:45 -08002442 switch (ncLevel) {
2443 case NoiseSuppression::kLow:
2444 mode = kNsLowSuppression;
2445 break;
2446 case NoiseSuppression::kModerate:
2447 mode = kNsModerateSuppression;
2448 break;
2449 case NoiseSuppression::kHigh:
2450 mode = kNsHighSuppression;
2451 break;
2452 case NoiseSuppression::kVeryHigh:
2453 mode = kNsVeryHighSuppression;
2454 break;
2455 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002456
kwiberg55b97fe2016-01-28 05:22:45 -08002457 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002458}
2459
kwiberg55b97fe2016-01-28 05:22:45 -08002460#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
niklase@google.com470e71d2011-07-07 08:21:25 +00002461
kwiberg55b97fe2016-01-28 05:22:45 -08002462int Channel::SetLocalSSRC(unsigned int ssrc) {
2463 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2464 "Channel::SetLocalSSRC()");
2465 if (channel_state_.Get().sending) {
2466 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2467 "SetLocalSSRC() already sending");
2468 return -1;
2469 }
2470 _rtpRtcpModule->SetSSRC(ssrc);
2471 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002472}
2473
kwiberg55b97fe2016-01-28 05:22:45 -08002474int Channel::GetLocalSSRC(unsigned int& ssrc) {
2475 ssrc = _rtpRtcpModule->SSRC();
2476 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002477}
2478
kwiberg55b97fe2016-01-28 05:22:45 -08002479int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2480 ssrc = rtp_receiver_->SSRC();
2481 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002482}
2483
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002484int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002485 _includeAudioLevelIndication = enable;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002486 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
niklase@google.com470e71d2011-07-07 08:21:25 +00002487}
andrew@webrtc.orgf3930e92013-09-18 22:37:32 +00002488
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002489int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2490 unsigned char id) {
kwiberg55b97fe2016-01-28 05:22:45 -08002491 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2492 if (enable &&
2493 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2494 id)) {
wu@webrtc.org93fd25c2014-04-24 20:33:08 +00002495 return -1;
2496 }
2497 return 0;
2498}
2499
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002500int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2501 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2502}
2503
2504int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2505 rtp_header_parser_->DeregisterRtpHeaderExtension(
2506 kRtpExtensionAbsoluteSendTime);
kwiberg55b97fe2016-01-28 05:22:45 -08002507 if (enable &&
2508 !rtp_header_parser_->RegisterRtpHeaderExtension(
2509 kRtpExtensionAbsoluteSendTime, id)) {
solenberg@webrtc.orgb1f50102014-03-24 10:38:25 +00002510 return -1;
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00002511 }
2512 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002513}
2514
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002515void Channel::EnableSendTransportSequenceNumber(int id) {
2516 int ret =
2517 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2518 RTC_DCHECK_EQ(0, ret);
2519}
2520
stefan3313ec92016-01-21 06:32:43 -08002521void Channel::EnableReceiveTransportSequenceNumber(int id) {
2522 rtp_header_parser_->DeregisterRtpHeaderExtension(
2523 kRtpExtensionTransportSequenceNumber);
2524 bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
2525 kRtpExtensionTransportSequenceNumber, id);
2526 RTC_DCHECK(ret);
2527}
2528
// Wires this channel's send path into the congestion-control machinery:
// transport feedback, sequence-number allocation and pacing. Must not be
// called while already attached to a router (see DCHECK). Undone by
// ResetCongestionControlObjects().
void Channel::RegisterSenderCongestionControlObjects(
    RtpPacketSender* rtp_packet_sender,
    TransportFeedbackObserver* transport_feedback_observer,
    PacketRouter* packet_router) {
  RTC_DCHECK(rtp_packet_sender);
  RTC_DCHECK(transport_feedback_observer);
  // Attaching twice is a programming error; packet_router_ must be unset.
  RTC_DCHECK(packet_router && !packet_router_);
  feedback_observer_proxy_->SetTransportFeedbackObserver(
      transport_feedback_observer);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
  rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
  // Keep a history of up to 600 sent packets — presumably needed by the
  // pacer/retransmission path; confirm against RtpRtcp docs.
  _rtpRtcpModule->SetStorePacketsStatus(true, 600);
  packet_router->AddRtpModule(_rtpRtcpModule.get());
  packet_router_ = packet_router;
}
2544
stefanbba9dec2016-02-01 04:39:55 -08002545void Channel::RegisterReceiverCongestionControlObjects(
2546 PacketRouter* packet_router) {
2547 RTC_DCHECK(packet_router && !packet_router_);
Peter Boström3dd5d1d2016-02-25 16:56:48 +01002548 packet_router->AddRtpModule(_rtpRtcpModule.get());
stefanbba9dec2016-02-01 04:39:55 -08002549 packet_router_ = packet_router;
2550}
2551
// Detaches the channel from the congestion-control objects installed by
// RegisterSender/ReceiverCongestionControlObjects(). Must only be called
// while attached (see DCHECK); all proxies are cleared unconditionally.
void Channel::ResetCongestionControlObjects() {
  RTC_DCHECK(packet_router_);
  // Stop keeping a send-packet history.
  _rtpRtcpModule->SetStorePacketsStatus(false, 600);
  feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
  seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
  // Remove the module from the router before forgetting the router pointer.
  packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
  packet_router_ = nullptr;
  rtp_packet_sender_proxy_->SetPacketSender(nullptr);
}
2561
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00002562void Channel::SetRTCPStatus(bool enable) {
2563 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2564 "Channel::SetRTCPStatus()");
pbosda903ea2015-10-02 02:36:56 -07002565 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
niklase@google.com470e71d2011-07-07 08:21:25 +00002566}
2567
kwiberg55b97fe2016-01-28 05:22:45 -08002568int Channel::GetRTCPStatus(bool& enabled) {
pbosda903ea2015-10-02 02:36:56 -07002569 RtcpMode method = _rtpRtcpModule->RTCP();
2570 enabled = (method != RtcpMode::kOff);
kwiberg55b97fe2016-01-28 05:22:45 -08002571 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002572}
2573
kwiberg55b97fe2016-01-28 05:22:45 -08002574int Channel::SetRTCP_CNAME(const char cName[256]) {
2575 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2576 "Channel::SetRTCP_CNAME()");
2577 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2578 _engineStatisticsPtr->SetLastError(
2579 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2580 "SetRTCP_CNAME() failed to set RTCP CNAME");
2581 return -1;
2582 }
2583 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002584}
2585
kwiberg55b97fe2016-01-28 05:22:45 -08002586int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2587 if (cName == NULL) {
2588 _engineStatisticsPtr->SetLastError(
2589 VE_INVALID_ARGUMENT, kTraceError,
2590 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2591 return -1;
2592 }
2593 char cname[RTCP_CNAME_SIZE];
2594 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2595 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2596 _engineStatisticsPtr->SetLastError(
2597 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2598 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2599 return -1;
2600 }
2601 strcpy(cName, cname);
2602 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002603}
2604
// Collects remote RTCP information: NTP time and RTP timestamp from the
// last received Sender Report, the locally tracked RTCP playout timestamp,
// and — when |jitter|/|fractionLost| are non-NULL — values from a received
// receiver report block (preferring the block matching the remote SSRC,
// falling back to the first block). Returns 0 on success, -1 when the
// required RTCP data has not been received.
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
                               unsigned int& NTPLow,
                               unsigned int& timestamp,
                               unsigned int& playoutTimestamp,
                               unsigned int* jitter,
                               unsigned short* fractionLost) {
  // --- Information from sender info in received Sender Reports

  RTCPSenderInfo senderInfo;
  if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "GetRemoteRTCPData() failed to retrieve sender info for remote "
        "side");
    return -1;
  }

  // We only utilize 12 out of 20 bytes in the sender info (ignores packet
  // and octet count)
  NTPHigh = senderInfo.NTPseconds;
  NTPLow = senderInfo.NTPfraction;
  timestamp = senderInfo.RTPtimeStamp;

  // --- Locally derived information

  // This value is updated on each incoming RTCP packet (0 when no packet
  // has been received)
  playoutTimestamp = playout_timestamp_rtcp_;

  if (NULL != jitter || NULL != fractionLost) {
    // Get all RTCP receiver report blocks that have been received on this
    // channel. If we receive RTP packets from a remote source we know the
    // remote SSRC and use the report block from him.
    // Otherwise use the first report block.
    std::vector<RTCPReportBlock> remote_stats;
    if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
        remote_stats.empty()) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "GetRemoteRTCPData() failed to measure statistics due"
                   " to lack of received RTP and/or RTCP packets");
      return -1;
    }

    // Look for the report block whose reporter matches the remote SSRC.
    uint32_t remoteSSRC = rtp_receiver_->SSRC();
    std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
    for (; it != remote_stats.end(); ++it) {
      if (it->remoteSSRC == remoteSSRC)
        break;
    }

    if (it == remote_stats.end()) {
      // If we have not received any RTCP packets from this SSRC it probably
      // means that we have not received any RTP packets.
      // Use the first received report block instead.
      it = remote_stats.begin();
      remoteSSRC = it->remoteSSRC;
    }

    if (jitter) {
      *jitter = it->jitter;
    }

    if (fractionLost) {
      *fractionLost = it->fractionLost;
    }
  }
  return 0;
}
2673
// Schedules an application-defined (APP) RTCP packet for transmission.
// Preconditions enforced below: the channel must be sending, |data| must be
// non-NULL, |dataLengthInBytes| must be a multiple of 4, and RTCP must be
// enabled. Returns 0 on success, -1 otherwise.
int Channel::SendApplicationDefinedRTCPPacket(
    unsigned char subType,
    unsigned int name,
    const char* data,
    unsigned short dataLengthInBytes) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendApplicationDefinedRTCPPacket()");
  if (!channel_state_.Get().sending) {
    _engineStatisticsPtr->SetLastError(
        VE_NOT_SENDING, kTraceError,
        "SendApplicationDefinedRTCPPacket() not sending");
    return -1;
  }
  if (NULL == data) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid data value");
    return -1;
  }
  // APP payload data must be a whole number of 32-bit words.
  if (dataLengthInBytes % 4 != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "SendApplicationDefinedRTCPPacket() invalid length value");
    return -1;
  }
  RtcpMode status = _rtpRtcpModule->RTCP();
  if (status == RtcpMode::kOff) {
    _engineStatisticsPtr->SetLastError(
        VE_RTCP_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() RTCP is disabled");
    return -1;
  }

  // Create and schedule the RTCP APP packet for transmission
  if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
          subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_SEND_ERROR, kTraceError,
        "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
    return -1;
  }
  return 0;
}
2717
kwiberg55b97fe2016-01-28 05:22:45 -08002718int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
2719 unsigned int& maxJitterMs,
2720 unsigned int& discardedPackets) {
2721 // The jitter statistics is updated for each received RTP packet and is
2722 // based on received packets.
2723 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
2724 // If RTCP is off, there is no timed thread in the RTCP module regularly
2725 // generating new stats, trigger the update manually here instead.
2726 StreamStatistician* statistician =
2727 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
2728 if (statistician) {
2729 // Don't use returned statistics, use data from proxy instead so that
2730 // max jitter can be fetched atomically.
2731 RtcpStatistics s;
2732 statistician->GetStatistics(&s, true);
niklase@google.com470e71d2011-07-07 08:21:25 +00002733 }
kwiberg55b97fe2016-01-28 05:22:45 -08002734 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002735
kwiberg55b97fe2016-01-28 05:22:45 -08002736 ChannelStatistics stats = statistics_proxy_->GetStats();
2737 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
2738 if (playoutFrequency > 0) {
2739 // Scale RTP statistics given the current playout frequency
2740 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
2741 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
2742 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002743
kwiberg55b97fe2016-01-28 05:22:45 -08002744 discardedPackets = _numberOfDiscardedPackets;
niklase@google.com470e71d2011-07-07 08:21:25 +00002745
kwiberg55b97fe2016-01-28 05:22:45 -08002746 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002747}
2748
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002749int Channel::GetRemoteRTCPReportBlocks(
2750 std::vector<ReportBlock>* report_blocks) {
2751 if (report_blocks == NULL) {
kwiberg55b97fe2016-01-28 05:22:45 -08002752 _engineStatisticsPtr->SetLastError(
2753 VE_INVALID_ARGUMENT, kTraceError,
2754 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002755 return -1;
2756 }
2757
2758 // Get the report blocks from the latest received RTCP Sender or Receiver
2759 // Report. Each element in the vector contains the sender's SSRC and a
2760 // report block according to RFC 3550.
2761 std::vector<RTCPReportBlock> rtcp_report_blocks;
2762 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
henrika@webrtc.org8a2fc882012-08-22 08:53:55 +00002763 return -1;
2764 }
2765
2766 if (rtcp_report_blocks.empty())
2767 return 0;
2768
2769 std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
2770 for (; it != rtcp_report_blocks.end(); ++it) {
2771 ReportBlock report_block;
2772 report_block.sender_SSRC = it->remoteSSRC;
2773 report_block.source_SSRC = it->sourceSSRC;
2774 report_block.fraction_lost = it->fractionLost;
2775 report_block.cumulative_num_packets_lost = it->cumulativeLost;
2776 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
2777 report_block.interarrival_jitter = it->jitter;
2778 report_block.last_SR_timestamp = it->lastSR;
2779 report_block.delay_since_last_SR = it->delaySinceLastSR;
2780 report_blocks->push_back(report_block);
2781 }
2782 return 0;
2783}
2784
int Channel::GetRTPStatistics(CallStatistics& stats) {
  // Fills |stats| with call statistics for this channel: loss/jitter figures
  // from the receive-side statistician, round-trip time, send/receive data
  // counters, and the capture-start NTP timestamp. Always returns 0; a
  // failing sub-query only leaves the corresponding fields at their
  // defaults.

  // --- RtcpStatistics

  // The jitter statistics is updated for each received RTP packet and is
  // based on received packets.
  RtcpStatistics statistics;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
  // |statistician| may be null (e.g. nothing received yet); then the
  // zero-initialized |statistics| values are reported.
  if (statistician) {
    statistician->GetStatistics(&statistics,
                                _rtpRtcpModule->RTCP() == RtcpMode::kOff);
  }

  stats.fractionLost = statistics.fraction_lost;
  stats.cumulativeLost = statistics.cumulative_lost;
  stats.extendedMax = statistics.extended_max_sequence_number;
  stats.jitterSamples = statistics.jitter;

  // --- RTT
  stats.rttMs = GetRTT(true);

  // --- Data counters

  size_t bytesSent(0);
  uint32_t packetsSent(0);
  size_t bytesReceived(0);
  uint32_t packetsReceived(0);

  if (statistician) {
    statistician->GetDataCounters(&bytesReceived, &packetsReceived);
  }

  // Send-side counter failure is logged but deliberately not fatal: the
  // receive-side values gathered above are still returned.
  if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
                 " output will not be complete");
  }

  stats.bytesSent = bytesSent;
  stats.packetsSent = packetsSent;
  stats.bytesReceived = bytesReceived;
  stats.packetsReceived = packetsReceived;

  // --- Timestamps
  {
    // |capture_start_ntp_time_ms_| is shared state; read under its lock.
    rtc::CritScope lock(&ts_stats_lock_);
    stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
  }
  return 0;
}
2835
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002836int Channel::SetREDStatus(bool enable, int redPayloadtype) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002837 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002838 "Channel::SetREDStatus()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002839
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002840 if (enable) {
2841 if (redPayloadtype < 0 || redPayloadtype > 127) {
2842 _engineStatisticsPtr->SetLastError(
2843 VE_PLTYPE_ERROR, kTraceError,
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002844 "SetREDStatus() invalid RED payload type");
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00002845 return -1;
2846 }
2847
2848 if (SetRedPayloadType(redPayloadtype) < 0) {
2849 _engineStatisticsPtr->SetLastError(
2850 VE_CODEC_ERROR, kTraceError,
2851 "SetSecondarySendCodec() Failed to register RED ACM");
2852 return -1;
2853 }
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002854 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002855
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002856 if (audio_coding_->SetREDStatus(enable) != 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002857 _engineStatisticsPtr->SetLastError(
2858 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
minyue@webrtc.orgaa5ea1c2014-05-23 15:16:51 +00002859 "SetREDStatus() failed to set RED state in the ACM");
turaj@webrtc.org42259e72012-12-11 02:15:12 +00002860 return -1;
2861 }
2862 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002863}
2864
kwiberg55b97fe2016-01-28 05:22:45 -08002865int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
2866 enabled = audio_coding_->REDStatus();
2867 if (enabled) {
2868 int8_t payloadType = 0;
2869 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
2870 _engineStatisticsPtr->SetLastError(
2871 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2872 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
2873 "module");
2874 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00002875 }
kwiberg55b97fe2016-01-28 05:22:45 -08002876 redPayloadtype = payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +00002877 return 0;
kwiberg55b97fe2016-01-28 05:22:45 -08002878 }
2879 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00002880}
2881
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002882int Channel::SetCodecFECStatus(bool enable) {
2883 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2884 "Channel::SetCodecFECStatus()");
2885
2886 if (audio_coding_->SetCodecFEC(enable) != 0) {
2887 _engineStatisticsPtr->SetLastError(
2888 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
2889 "SetCodecFECStatus() failed to set FEC state");
2890 return -1;
2891 }
2892 return 0;
2893}
2894
2895bool Channel::GetCodecFECStatus() {
2896 bool enabled = audio_coding_->CodecFEC();
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00002897 return enabled;
2898}
2899
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002900void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
2901 // None of these functions can fail.
Stefan Holmerb86d4e42015-12-07 10:26:18 +01002902 // If pacing is enabled we always store packets.
2903 if (!pacing_enabled_)
2904 _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
stefan@webrtc.org7bb8f022013-09-06 13:40:11 +00002905 rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
2906 rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002907 if (enable)
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002908 audio_coding_->EnableNack(maxNumberOfPackets);
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002909 else
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00002910 audio_coding_->DisableNack();
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002911}
2912
pwestin@webrtc.orgd30859e2013-06-06 21:09:01 +00002913// Called when we are missing one or more packets.
2914int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
pwestin@webrtc.orgdb249952013-06-05 15:33:20 +00002915 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
2916}
2917
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
  // Copies the shared capture frame into this channel's private frame for
  // later processing in PrepareEncodeAndSend()/EncodeAndSend().
  // Always returns 0.
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Demultiplex()");
  _audioFrame.CopyFrom(audioFrame);
  // Tag the frame with this channel's id; done after CopyFrom() so the
  // source frame's id cannot overwrite it.
  _audioFrame.id_ = _channelId;
  return 0;
}
2925
void Channel::Demultiplex(const int16_t* audio_data,
                          int sample_rate,
                          size_t number_of_frames,
                          size_t number_of_channels) {
  // Converts raw capture audio into this channel's private frame, clamping
  // the target rate/channel count to at most what the send codec uses.
  CodecInst codec;
  GetSendCodec(codec);

  // Never upsample or upmix the capture signal here. This should be done at the
  // end of the send chain.
  _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
  _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
  RemixAndResample(audio_data, number_of_frames, number_of_channels,
                   sample_rate, &input_resampler_, &_audioFrame);
}
2940
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
  // Pre-encode processing of the channel's capture frame: optional file
  // mixing, muting, external-media callback, and audio-level (RMS)
  // measurement. Returns 0 on success, 0xFFFFFFFF on an empty frame.
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PrepareEncodeAndSend()");

  if (_audioFrame.samples_per_channel_ == 0) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PrepareEncodeAndSend() invalid audio frame");
    return 0xFFFFFFFF;
  }

  if (channel_state_.Get().input_file_playing) {
    MixOrReplaceAudioWithFile(mixingFrequency);
  }

  bool is_muted = InputMute();  // Cache locally as InputMute() takes a lock.
  // Mute() ramps between the previous and current mute state to avoid
  // clicks.
  AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

  if (channel_state_.Get().input_external_media) {
    // The callback pointer is guarded by |_callbackCritSect|; re-check it
    // under the lock before invoking.
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (_audioFrame.num_channels_ == 2);
    if (_inputExternalMediaCallbackPtr) {
      _inputExternalMediaCallbackPtr->Process(
          _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
          _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
          isStereo);
    }
  }

  if (_includeAudioLevelIndication) {
    size_t length =
        _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
    // Only take the cheap muted path when the whole frame was muted (both
    // the previous and current state); a transition frame still carries
    // ramped audio that must be measured.
    if (is_muted && previous_frame_muted_) {
      rms_level_.ProcessMuted(length);
    } else {
      rms_level_.Process(_audioFrame.data_, length);
    }
  }
  previous_frame_muted_ = is_muted;

  return 0;
}
2982
kwiberg55b97fe2016-01-28 05:22:45 -08002983uint32_t Channel::EncodeAndSend() {
2984 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2985 "Channel::EncodeAndSend()");
niklase@google.com470e71d2011-07-07 08:21:25 +00002986
kwiberg55b97fe2016-01-28 05:22:45 -08002987 assert(_audioFrame.num_channels_ <= 2);
2988 if (_audioFrame.samples_per_channel_ == 0) {
2989 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2990 "Channel::EncodeAndSend() invalid audio frame");
2991 return 0xFFFFFFFF;
2992 }
niklase@google.com470e71d2011-07-07 08:21:25 +00002993
kwiberg55b97fe2016-01-28 05:22:45 -08002994 _audioFrame.id_ = _channelId;
niklase@google.com470e71d2011-07-07 08:21:25 +00002995
kwiberg55b97fe2016-01-28 05:22:45 -08002996 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
niklase@google.com470e71d2011-07-07 08:21:25 +00002997
kwiberg55b97fe2016-01-28 05:22:45 -08002998 // The ACM resamples internally.
2999 _audioFrame.timestamp_ = _timeStamp;
3000 // This call will trigger AudioPacketizationCallback::SendData if encoding
3001 // is done and payload is ready for packetization and transmission.
3002 // Otherwise, it will return without invoking the callback.
3003 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3004 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3005 "Channel::EncodeAndSend() ACM encoding failed");
3006 return 0xFFFFFFFF;
3007 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003008
kwiberg55b97fe2016-01-28 05:22:45 -08003009 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3010 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003011}
3012
Minyue2013aec2015-05-13 14:14:42 +02003013void Channel::DisassociateSendChannel(int channel_id) {
tommi31fc21f2016-01-21 10:37:37 -08003014 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003015 Channel* channel = associate_send_channel_.channel();
3016 if (channel && channel->ChannelId() == channel_id) {
3017 // If this channel is associated with a send channel of the specified
3018 // Channel ID, disassociate with it.
3019 ChannelOwner ref(NULL);
3020 associate_send_channel_ = ref;
3021 }
3022}
3023
kwiberg55b97fe2016-01-28 05:22:45 -08003024int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3025 VoEMediaProcess& processObject) {
3026 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3027 "Channel::RegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003028
kwiberg55b97fe2016-01-28 05:22:45 -08003029 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003030
kwiberg55b97fe2016-01-28 05:22:45 -08003031 if (kPlaybackPerChannel == type) {
3032 if (_outputExternalMediaCallbackPtr) {
3033 _engineStatisticsPtr->SetLastError(
3034 VE_INVALID_OPERATION, kTraceError,
3035 "Channel::RegisterExternalMediaProcessing() "
3036 "output external media already enabled");
3037 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003038 }
kwiberg55b97fe2016-01-28 05:22:45 -08003039 _outputExternalMediaCallbackPtr = &processObject;
3040 _outputExternalMedia = true;
3041 } else if (kRecordingPerChannel == type) {
3042 if (_inputExternalMediaCallbackPtr) {
3043 _engineStatisticsPtr->SetLastError(
3044 VE_INVALID_OPERATION, kTraceError,
3045 "Channel::RegisterExternalMediaProcessing() "
3046 "output external media already enabled");
3047 return -1;
niklase@google.com470e71d2011-07-07 08:21:25 +00003048 }
kwiberg55b97fe2016-01-28 05:22:45 -08003049 _inputExternalMediaCallbackPtr = &processObject;
3050 channel_state_.SetInputExternalMedia(true);
3051 }
3052 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003053}
3054
kwiberg55b97fe2016-01-28 05:22:45 -08003055int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3056 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3057 "Channel::DeRegisterExternalMediaProcessing()");
niklase@google.com470e71d2011-07-07 08:21:25 +00003058
kwiberg55b97fe2016-01-28 05:22:45 -08003059 rtc::CritScope cs(&_callbackCritSect);
niklase@google.com470e71d2011-07-07 08:21:25 +00003060
kwiberg55b97fe2016-01-28 05:22:45 -08003061 if (kPlaybackPerChannel == type) {
3062 if (!_outputExternalMediaCallbackPtr) {
3063 _engineStatisticsPtr->SetLastError(
3064 VE_INVALID_OPERATION, kTraceWarning,
3065 "Channel::DeRegisterExternalMediaProcessing() "
3066 "output external media already disabled");
3067 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003068 }
kwiberg55b97fe2016-01-28 05:22:45 -08003069 _outputExternalMedia = false;
3070 _outputExternalMediaCallbackPtr = NULL;
3071 } else if (kRecordingPerChannel == type) {
3072 if (!_inputExternalMediaCallbackPtr) {
3073 _engineStatisticsPtr->SetLastError(
3074 VE_INVALID_OPERATION, kTraceWarning,
3075 "Channel::DeRegisterExternalMediaProcessing() "
3076 "input external media already disabled");
3077 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003078 }
kwiberg55b97fe2016-01-28 05:22:45 -08003079 channel_state_.SetInputExternalMedia(false);
3080 _inputExternalMediaCallbackPtr = NULL;
3081 }
niklase@google.com470e71d2011-07-07 08:21:25 +00003082
kwiberg55b97fe2016-01-28 05:22:45 -08003083 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003084}
3085
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003086int Channel::SetExternalMixing(bool enabled) {
kwiberg55b97fe2016-01-28 05:22:45 -08003087 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3088 "Channel::SetExternalMixing(enabled=%d)", enabled);
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003089
kwiberg55b97fe2016-01-28 05:22:45 -08003090 if (channel_state_.Get().playing) {
3091 _engineStatisticsPtr->SetLastError(
3092 VE_INVALID_OPERATION, kTraceError,
3093 "Channel::SetExternalMixing() "
3094 "external mixing cannot be changed while playing.");
3095 return -1;
3096 }
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003097
kwiberg55b97fe2016-01-28 05:22:45 -08003098 _externalMixing = enabled;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003099
kwiberg55b97fe2016-01-28 05:22:45 -08003100 return 0;
roosa@google.com1b60ceb2012-12-12 23:00:29 +00003101}
3102
kwiberg55b97fe2016-01-28 05:22:45 -08003103int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3104 return audio_coding_->GetNetworkStatistics(&stats);
niklase@google.com470e71d2011-07-07 08:21:25 +00003105}
3106
wu@webrtc.org24301a62013-12-13 19:17:43 +00003107void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3108 audio_coding_->GetDecodingCallStatistics(stats);
3109}
3110
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003111bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3112 int* playout_buffer_delay_ms) const {
tommi31fc21f2016-01-21 10:37:37 -08003113 rtc::CritScope lock(&video_sync_lock_);
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003114 if (_average_jitter_buffer_delay_us == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003115 return false;
3116 }
kwiberg55b97fe2016-01-28 05:22:45 -08003117 *jitter_buffer_delay_ms =
3118 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003119 *playout_buffer_delay_ms = playout_delay_ms_;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003120 return true;
niklase@google.com470e71d2011-07-07 08:21:25 +00003121}
3122
solenberg358057b2015-11-27 10:46:42 -08003123uint32_t Channel::GetDelayEstimate() const {
3124 int jitter_buffer_delay_ms = 0;
3125 int playout_buffer_delay_ms = 0;
3126 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3127 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3128}
3129
deadbeef74375882015-08-13 12:09:10 -07003130int Channel::LeastRequiredDelayMs() const {
3131 return audio_coding_->LeastRequiredDelayMs();
3132}
3133
kwiberg55b97fe2016-01-28 05:22:45 -08003134int Channel::SetMinimumPlayoutDelay(int delayMs) {
3135 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3136 "Channel::SetMinimumPlayoutDelay()");
3137 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3138 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3139 _engineStatisticsPtr->SetLastError(
3140 VE_INVALID_ARGUMENT, kTraceError,
3141 "SetMinimumPlayoutDelay() invalid min delay");
3142 return -1;
3143 }
3144 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3145 _engineStatisticsPtr->SetLastError(
3146 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3147 "SetMinimumPlayoutDelay() failed to set min playout delay");
3148 return -1;
3149 }
3150 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003151}
3152
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003153int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
deadbeef74375882015-08-13 12:09:10 -07003154 uint32_t playout_timestamp_rtp = 0;
3155 {
tommi31fc21f2016-01-21 10:37:37 -08003156 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003157 playout_timestamp_rtp = playout_timestamp_rtp_;
3158 }
kwiberg55b97fe2016-01-28 05:22:45 -08003159 if (playout_timestamp_rtp == 0) {
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003160 _engineStatisticsPtr->SetLastError(
3161 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3162 "GetPlayoutTimestamp() failed to retrieve timestamp");
3163 return -1;
3164 }
deadbeef74375882015-08-13 12:09:10 -07003165 timestamp = playout_timestamp_rtp;
pwestin@webrtc.org1de01352013-04-11 20:23:35 +00003166 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003167}
3168
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003169int Channel::SetInitTimestamp(unsigned int timestamp) {
3170 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
niklase@google.com470e71d2011-07-07 08:21:25 +00003171 "Channel::SetInitTimestamp()");
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003172 if (channel_state_.Get().sending) {
3173 _engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
3174 "SetInitTimestamp() already sending");
3175 return -1;
3176 }
3177 _rtpRtcpModule->SetStartTimestamp(timestamp);
3178 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003179}
3180
pbos@webrtc.orgd16e8392014-12-19 13:49:55 +00003181int Channel::SetInitSequenceNumber(short sequenceNumber) {
3182 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3183 "Channel::SetInitSequenceNumber()");
3184 if (channel_state_.Get().sending) {
3185 _engineStatisticsPtr->SetLastError(
3186 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3187 return -1;
3188 }
3189 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3190 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003191}
3192
kwiberg55b97fe2016-01-28 05:22:45 -08003193int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3194 RtpReceiver** rtp_receiver) const {
3195 *rtpRtcpModule = _rtpRtcpModule.get();
3196 *rtp_receiver = rtp_receiver_.get();
3197 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +00003198}
3199
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
// a shared helper.
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
  // Pulls 10 ms of file audio from |_inputFilePlayerPtr| and either mixes
  // it into, or substitutes it for, the current capture frame depending on
  // |_mixFileWithMicrophone|. Returns 0 on success (including end of file),
  // -1 on failure.
  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
  size_t fileSamples(0);

  {
    // The file player is only accessed under |_fileCritSect|; the scope is
    // kept as small as possible so the frame mixing below runs unlocked.
    rtc::CritScope cs(&_fileCritSect);

    if (_inputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() fileplayer"
                   " doesnt exist");
      return -1;
    }

    if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
                                                  mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file mixing "
                   "failed");
      return -1;
    }
    if (fileSamples == 0) {
      // End of file: leave the capture frame untouched and report success.
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixOrReplaceAudioWithFile() file is ended");
      return 0;
    }
  }

  assert(_audioFrame.samples_per_channel_ == fileSamples);

  if (_mixFileWithMicrophone) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
               1, fileSamples);
  } else {
    // Replace ACM audio with file.
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    _audioFrame.UpdateFrame(
        _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
        AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
  }
  return 0;
}
3247
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
  // Mixes 10 ms of audio from |_outputFilePlayerPtr| into |audioFrame|
  // (playout direction). Returns 0 on success, -1 on failure or on a
  // sample-count mismatch between the file and the frame.
  assert(mixingFrequency <= 48000);

  std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
  size_t fileSamples(0);

  {
    // The file player is only accessed under |_fileCritSect|.
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFilePlayerPtr == NULL) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }

    // We should get the frequency we ask for.
    if (_outputFilePlayerPtr->Get10msAudioFromFile(
            fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::MixAudioWithFile() file mixing failed");
      return -1;
    }
  }

  if (audioFrame.samples_per_channel_ == fileSamples) {
    // Currently file stream is always mono.
    // TODO(xians): Change the code when FilePlayer supports real stereo.
    MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
               fileSamples);
  } else {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
                 ") != "
                 "fileSamples(%" PRIuS ")",
                 audioFrame.samples_per_channel_, fileSamples);
    return -1;
  }

  return 0;
}
3288
deadbeef74375882015-08-13 12:09:10 -07003289void Channel::UpdatePlayoutTimestamp(bool rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003290 jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();
deadbeef74375882015-08-13 12:09:10 -07003291
henrik.lundin96bd5022016-04-06 04:13:56 -07003292 if (!jitter_buffer_playout_timestamp_) {
3293 // This can happen if this channel has not received any RTP packets. In
3294 // this case, NetEq is not capable of computing a playout timestamp.
deadbeef74375882015-08-13 12:09:10 -07003295 return;
3296 }
3297
3298 uint16_t delay_ms = 0;
3299 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
kwiberg55b97fe2016-01-28 05:22:45 -08003300 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003301 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3302 " delay from the ADM");
3303 _engineStatisticsPtr->SetLastError(
3304 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3305 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3306 return;
3307 }
3308
henrik.lundin96bd5022016-04-06 04:13:56 -07003309 RTC_DCHECK(jitter_buffer_playout_timestamp_);
3310 uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;
deadbeef74375882015-08-13 12:09:10 -07003311
3312 // Remove the playout delay.
henrik.lundin96bd5022016-04-06 04:13:56 -07003313 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
deadbeef74375882015-08-13 12:09:10 -07003314
kwiberg55b97fe2016-01-28 05:22:45 -08003315 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
deadbeef74375882015-08-13 12:09:10 -07003316 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
henrik.lundin96bd5022016-04-06 04:13:56 -07003317 playout_timestamp);
deadbeef74375882015-08-13 12:09:10 -07003318
3319 {
tommi31fc21f2016-01-21 10:37:37 -08003320 rtc::CritScope lock(&video_sync_lock_);
deadbeef74375882015-08-13 12:09:10 -07003321 if (rtcp) {
henrik.lundin96bd5022016-04-06 04:13:56 -07003322 playout_timestamp_rtcp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003323 } else {
henrik.lundin96bd5022016-04-06 04:13:56 -07003324 playout_timestamp_rtp_ = playout_timestamp;
deadbeef74375882015-08-13 12:09:10 -07003325 }
3326 playout_delay_ms_ = delay_ms;
3327 }
3328}
3329
// Called for incoming RTP packets after successful RTP header parsing.
void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
                                uint16_t sequence_number) {
  // Updates the exponentially filtered jitter-buffer delay estimate and the
  // per-packet receive delay used by GetDelayEstimate().
  // NOTE(review): the "%lu" below is paired with a uint32_t argument;
  // this relies on platform-specific varargs behavior - confirm.
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
               rtp_timestamp, sequence_number);

  // Get frequency of last received payload
  int rtp_receive_frequency = GetPlayoutFrequency();

  // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
  // every incoming packet. May be empty if no valid playout timestamp is
  // available.
  // If |rtp_timestamp| is newer than |jitter_buffer_playout_timestamp_|, the
  // resulting difference is positive and will be used. When the inverse is
  // true (can happen when a network glitch causes a packet to arrive late,
  // and during long comfort noise periods with clock drift), or when
  // |jitter_buffer_playout_timestamp_| has no value, the difference is not
  // changed from the initial 0.
  uint32_t timestamp_diff_ms = 0;
  if (jitter_buffer_playout_timestamp_ &&
      IsNewerTimestamp(rtp_timestamp, *jitter_buffer_playout_timestamp_)) {
    timestamp_diff_ms = (rtp_timestamp - *jitter_buffer_playout_timestamp_) /
                        (rtp_receive_frequency / 1000);
    if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
      // Diff is too large; set it to zero instead.
      timestamp_diff_ms = 0;
    }
  }

  // Inter-arrival spacing in ms, derived from the RTP timestamp delta of
  // consecutive packets (unsigned arithmetic handles wraparound).
  uint16_t packet_delay_ms =
      (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);

  _previousTimestamp = rtp_timestamp;

  // No usable jitter-buffer delay sample this time; nothing to filter.
  if (timestamp_diff_ms == 0)
    return;

  {
    rtc::CritScope lock(&video_sync_lock_);

    // Only accept plausible per-packet delays (10-60 ms) as the new
    // |_recPacketDelayMs|.
    if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
      _recPacketDelayMs = packet_delay_ms;
    }

    if (_average_jitter_buffer_delay_us == 0) {
      // First sample: seed the filter directly.
      _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
      return;
    }

    // Filter average delay value using exponential filter (alpha is
    // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
    // risk of rounding error) and compensate for it in GetDelayEstimate()
    // later.
    _average_jitter_buffer_delay_us =
        (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
        8;
  }
}
3389
void Channel::RegisterReceiveCodecsToRTPModule() {
  // Registers every codec the ACM supports as a receivable payload with the
  // RTP receiver. Individual registration failures are only logged; the
  // loop continues with the remaining codecs.
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterReceiveCodecsToRTPModule()");

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    // (a negative codec rate is normalized to 0 for registration).
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
             codec.plname, codec.pltype, codec.plfreq, codec.channels,
             (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::RegisterReceiveCodecsToRTPModule() unable"
                   " to register %s (%d/%d/%" PRIuS
                   "/%d) to RTP/RTCP "
                   "receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::RegisterReceiveCodecsToRTPModule() %s "
                   "(%d/%d/%" PRIuS
                   "/%d) has been added to the RTP/RTCP "
                   "receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }
  }
}
3421
turaj@webrtc.org8c8ad852013-01-31 18:20:17 +00003422// Assuming this method is called with valid payload type.
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003423int Channel::SetRedPayloadType(int red_payload_type) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003424 CodecInst codec;
3425 bool found_red = false;
3426
3427 // Get default RED settings from the ACM database
3428 const int num_codecs = AudioCodingModule::NumberOfCodecs();
3429 for (int idx = 0; idx < num_codecs; idx++) {
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003430 audio_coding_->Codec(idx, &codec);
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003431 if (!STR_CASE_CMP(codec.plname, "RED")) {
3432 found_red = true;
3433 break;
3434 }
3435 }
3436
3437 if (!found_red) {
3438 _engineStatisticsPtr->SetLastError(
3439 VE_CODEC_ERROR, kTraceError,
3440 "SetRedPayloadType() RED is not supported");
3441 return -1;
3442 }
3443
turaj@webrtc.org9d532fd2013-01-31 18:34:19 +00003444 codec.pltype = red_payload_type;
andrew@webrtc.orgeb524d92013-09-23 23:02:24 +00003445 if (audio_coding_->RegisterSendCodec(codec) < 0) {
turaj@webrtc.org42259e72012-12-11 02:15:12 +00003446 _engineStatisticsPtr->SetLastError(
3447 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3448 "SetRedPayloadType() RED registration in ACM module failed");
3449 return -1;
3450 }
3451
3452 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
3453 _engineStatisticsPtr->SetLastError(
3454 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3455 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
3456 return -1;
3457 }
3458 return 0;
3459}
3460
kwiberg55b97fe2016-01-28 05:22:45 -08003461int Channel::SetSendRtpHeaderExtension(bool enable,
3462 RTPExtensionType type,
wu@webrtc.orgebdb0e32014-03-06 23:49:08 +00003463 unsigned char id) {
3464 int error = 0;
3465 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
3466 if (enable) {
3467 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
3468 }
3469 return error;
3470}
minyue@webrtc.orgc1a40a72014-05-28 09:52:06 +00003471
wu@webrtc.org94454b72014-06-05 20:34:08 +00003472int32_t Channel::GetPlayoutFrequency() {
3473 int32_t playout_frequency = audio_coding_->PlayoutFrequency();
3474 CodecInst current_recive_codec;
3475 if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
3476 if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
3477 // Even though the actual sampling rate for G.722 audio is
3478 // 16,000 Hz, the RTP clock rate for the G722 payload format is
3479 // 8,000 Hz because that value was erroneously assigned in
3480 // RFC 1890 and must remain unchanged for backward compatibility.
3481 playout_frequency = 8000;
3482 } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
3483 // We are resampling Opus internally to 32,000 Hz until all our
3484 // DSP routines can operate at 48,000 Hz, but the RTP clock
3485 // rate for the Opus payload format is standardized to 48,000 Hz,
3486 // because that is the maximum supported decoding sampling rate.
3487 playout_frequency = 48000;
3488 }
3489 }
3490 return playout_frequency;
3491}
3492
Minyue2013aec2015-05-13 14:14:42 +02003493int64_t Channel::GetRTT(bool allow_associate_channel) const {
pbosda903ea2015-10-02 02:36:56 -07003494 RtcpMode method = _rtpRtcpModule->RTCP();
3495 if (method == RtcpMode::kOff) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003496 return 0;
3497 }
3498 std::vector<RTCPReportBlock> report_blocks;
3499 _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
Minyue2013aec2015-05-13 14:14:42 +02003500
3501 int64_t rtt = 0;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003502 if (report_blocks.empty()) {
Minyue2013aec2015-05-13 14:14:42 +02003503 if (allow_associate_channel) {
tommi31fc21f2016-01-21 10:37:37 -08003504 rtc::CritScope lock(&assoc_send_channel_lock_);
Minyue2013aec2015-05-13 14:14:42 +02003505 Channel* channel = associate_send_channel_.channel();
3506 // Tries to get RTT from an associated channel. This is important for
3507 // receive-only channels.
3508 if (channel) {
3509 // To prevent infinite recursion and deadlock, calling GetRTT of
3510 // associate channel should always use "false" for argument:
3511 // |allow_associate_channel|.
3512 rtt = channel->GetRTT(false);
3513 }
3514 }
3515 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003516 }
3517
3518 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3519 std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
3520 for (; it != report_blocks.end(); ++it) {
3521 if (it->remoteSSRC == remoteSSRC)
3522 break;
3523 }
3524 if (it == report_blocks.end()) {
3525 // We have not received packets with SSRC matching the report blocks.
3526 // To calculate RTT we try with the SSRC of the first report block.
3527 // This is very important for send-only channels where we don't know
3528 // the SSRC of the other end.
3529 remoteSSRC = report_blocks[0].remoteSSRC;
3530 }
Minyue2013aec2015-05-13 14:14:42 +02003531
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003532 int64_t avg_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003533 int64_t max_rtt = 0;
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003534 int64_t min_rtt = 0;
kwiberg55b97fe2016-01-28 05:22:45 -08003535 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
3536 0) {
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003537 return 0;
3538 }
pkasting@chromium.org16825b12015-01-12 21:51:21 +00003539 return rtt;
minyue@webrtc.org2b58a442014-09-11 07:51:53 +00003540}
3541
pbos@webrtc.orgd900e8b2013-07-03 15:12:26 +00003542} // namespace voe
3543} // namespace webrtc